Rust · 838759 bytes Raw Blame History
1 //! AST → IR lowering.
2 //!
3 //! Walks the typed AST and produces SSA IR. Handles variable allocation,
4 //! expression evaluation, assignments, and runtime calls for I/O.
5
6 use super::builder::FuncBuilder;
7 use super::inst::*;
8 use super::types::*;
9 use crate::ast::decl::{Decl, TypeSpec};
10 use crate::ast::expr::{BinaryOp, Expr, UnaryOp};
11 use crate::ast::stmt::*;
12 use crate::ast::unit::*;
13 use crate::sema::symtab::SymbolTable;
14
15 use crate::ast::decl::ArraySpec;
16 use std::collections::{HashMap, HashSet};
17 use std::io::Write;
18
/// Maximum array rank supported (the Fortran standard allows up to 15).
const MAX_RANK: usize = 15;
21
/// Loop context for EXIT/CYCLE targeting.
///
/// One entry is pushed per active loop construct (`LowerCtx::push_loop`);
/// `LowerCtx::find_loop` searches the stack innermost-first when a
/// construct name is supplied.
struct LoopScope {
    /// Construct name (`name: do ... end do name`), if the loop is named.
    name: Option<String>,
    header: BlockId, // CYCLE target: branch here to begin the next iteration
    exit: BlockId,   // EXIT target: branch here to leave the loop
}
28
/// Character variable kind: how string storage is managed.
///
/// `addr` in the variant docs below refers to the owning
/// `LocalInfo::addr` slot for the variable carrying this kind.
#[derive(Clone, PartialEq)]
pub(crate) enum CharKind {
    /// Not a character variable.
    None,
    /// Fixed-length character(N): addr points to N-byte stack buffer.
    Fixed(i64),
    /// Fixed-length character whose length is only known at runtime.
    /// For locals, `addr` is a stack slot holding the heap buffer
    /// pointer. For by-ref dummies, `addr` is the usual slot holding
    /// the caller's pointer. `len_addr` stores the runtime length.
    FixedRuntime { len_addr: ValueId },
    /// Deferred-length character(:), allocatable: addr points to 32-byte StringDescriptor.
    Deferred,
    /// Assumed-length character(*) dummy parameter. The runtime
    /// length is held in a hidden i64 parameter appended after the
    /// normal positional args. `len_addr` is the alloca holding
    /// the hidden param's value so reads can load it at runtime.
    AssumedLen { len_addr: ValueId },
}
49
/// Which hidden-result calling convention a function's result variable
/// requires. Computed by `function_hidden_result_abi` from the result
/// declaration's type and attributes.
#[derive(Clone, Copy, PartialEq, Eq)]
enum HiddenResultAbi {
    /// Ordinary result returned by value; no hidden parameter.
    None,
    /// Allocatable array result: the caller passes a hidden pointer to
    /// an array descriptor as the first argument (sret-style).
    ArrayDescriptor,
    /// Deferred-length allocatable/pointer character scalar result:
    /// the caller passes a hidden pointer to a 32-byte StringDescriptor.
    StringDescriptor,
}
56
/// Info about a local variable.
#[derive(Clone)]
struct LocalInfo {
    /// Address of the variable's storage slot (an alloca for plain
    /// locals; for `by_ref`/`is_pointer` entries the slot holds a
    /// pointer that reads/writes indirect through).
    addr: ValueId,
    /// IR element type of the variable (element type for arrays).
    ty: IrType,
    /// For arrays: (lower_bound, extent) per dimension. Empty for scalars.
    dims: Vec<(i64, i64)>,
    /// Is this an allocatable variable?
    allocatable: bool,
    /// Does this local carry runtime array metadata through a descriptor even
    /// though it is not allocatable (for example an assumed-shape dummy)?
    descriptor_arg: bool,
    /// Is this a pass-by-reference parameter? If true, `addr` holds a pointer
    /// to the caller's storage. Reads/writes go through the pointer.
    by_ref: bool,
    /// Character variable kind (fixed-length, deferred, or not character).
    char_kind: CharKind,
    /// Derived type name (for component access resolution). Empty for non-derived.
    derived_type: Option<String>,
    /// For PARAMETER-attributed locals whose initializer const-folds:
    /// the compile-time value to inline at every use. When `Some`,
    /// `Expr::Name` lookups should materialize this constant
    /// directly via `b.const_i32`/`b.const_i64`/etc., instead of
    /// loading through `addr`. Audit MAJOR-4: this lets parameters
    /// avoid wasting a `.data` slot per scope.
    inline_const: Option<ConstScalar>,
    /// Fortran `POINTER` attribute on a scalar local. When true,
    /// `addr` is an `alloca ptr<ty>` — a pointer slot that holds
    /// the address of the associated target (or null when
    /// unassociated). Reads/writes go through the slot just like
    /// `by_ref`, but `by_ref` is reserved for dummy arguments that
    /// cannot carry a Fortran `POINTER` attribute's semantics
    /// (reassociation via `=>`, dereference on plain assignment).
    is_pointer: bool,
    /// Per-dimension runtime upper bound (i64 value id) for arrays
    /// whose bounds are not compile-time constants — most commonly
    /// explicit-shape dummies like `xs(n)` where `n` is a dummy
    /// argument. When `Some`, bounds checks and cumulative-stride
    /// computation consult this value instead of `dims[i].1`.
    /// Empty vec (or an all-`None` vec) means the compile-time
    /// `dims` is authoritative. Parallel to `dims`.
    runtime_dim_upper: Vec<Option<ValueId>>,
}
100
/// Lowering context — tracks locals, loop scopes, and symbol table.
struct LowerCtx<'a> {
    /// Every name currently resolvable in this function body, mapped
    /// to its storage/metadata record.
    locals: HashMap<String, LocalInfo>,
    /// Stack of enclosing loop constructs, innermost last (for
    /// EXIT/CYCLE targeting).
    loops: Vec<LoopScope>,
    /// Semantic-analysis symbol table for the whole compilation.
    st: &'a SymbolTable,
    /// Module-scoped globals visible by (lowercase module name,
    /// lowercase variable name). Populated by the lower_file
    /// pre-pass over `ProgramUnit::Module` units so any subsequent
    /// function that USE-imports the module can resolve the name
    /// to a `GlobalAddr`. Keying by (module, var) is what lets
    /// install_globals_as_locals filter by the current function's
    /// USE statements, honor ONLY lists, and apply renames.
    globals: &'a HashMap<(String, String), ModuleGlobalInfo>,
    /// Layouts of derived types, for component offset/size queries.
    type_layouts: &'a crate::sema::type_layout::TypeLayoutRegistry,
    /// Names that a `use mod, only: ...` statement explicitly
    /// excluded. install_globals_as_locals populates this from the
    /// difference between a module's exported globals and the
    /// only-list. Audit MAJOR-1: a reference to a name in this
    /// set must produce a compile error rather than silently
    /// lowering to const_int 0.
    filtered_names: HashSet<String>,
    /// For functions: address of the result variable (for RETURN).
    result_addr: Option<ValueId>,
    /// For functions: the return type.
    result_type: Option<IrType>,
    /// True when this function uses the sret (hidden-output-param) convention
    /// because it returns an allocatable array. Stmt::Return emits `ret void`
    /// instead of loading result_addr. Audit6 BLOCKING-1.
    is_alloc_return: bool,
    /// Names of functions in the compilation unit that return allocatable
    /// arrays (sret convention). Used at call sites to detect when to
    /// pass a temp descriptor as the hidden first arg. Audit6 BLOCKING-1.
    alloc_return_funcs: &'a HashSet<String>,
    /// Per-subroutine optional-parameter bitmap: maps lowercase callee name
    /// to a Vec<bool> (one entry per positional parameter, true = OPTIONAL).
    /// Pre-populated by `collect_optional_params` so call sites can pass
    /// null pointers for absent optional arguments (PRESENT support).
    optional_params: &'a HashMap<String, Vec<bool>>,
    /// Per-subroutine/function descriptor-parameter bitmap: maps lowercase
    /// callee name to a Vec<bool> (one entry per positional parameter,
    /// true = lower this dummy through an ArrayDescriptor).
    descriptor_params: &'a HashMap<String, Vec<bool>>,
    /// Lowercase same-module subprogram name → Module::functions index.
    /// Used so same-compilation-unit calls lower to FuncRef::Internal instead
    /// of pretending to be external references.
    internal_funcs: &'a HashMap<String, u32>,
    /// Lowercase names of functions declared ELEMENTAL in this compilation unit.
    elemental_funcs: &'a HashSet<String>,
    /// Per-callee bitmap of which params are character(len=*).
    /// Call sites append the string length as a hidden i64 arg for
    /// each flagged position.
    char_len_star_params: &'a HashMap<String, Vec<bool>>,
    /// Per-callee list of host-associated variable names the callee
    /// needs threaded in as hidden trailing pointer args. See the
    /// closure-passing ABI documented on `lower_file::contained_host_refs`.
    contained_host_refs: &'a HashMap<String, Vec<String>>,
    /// Map from Fortran statement label (u64) to the IR basic block that
    /// begins at that label. Pre-populated by `collect_label_blocks` before
    /// lowering so that GOTO can branch forward as well as backward.
    label_blocks: HashMap<u64, BlockId>,
}
162
163 impl<'a> LowerCtx<'a> {
164 fn new(
165 st: &'a SymbolTable,
166 globals: &'a HashMap<(String, String), ModuleGlobalInfo>,
167 type_layouts: &'a crate::sema::type_layout::TypeLayoutRegistry,
168 alloc_return_funcs: &'a HashSet<String>,
169 optional_params: &'a HashMap<String, Vec<bool>>,
170 descriptor_params: &'a HashMap<String, Vec<bool>>,
171 internal_funcs: &'a HashMap<String, u32>,
172 elemental_funcs: &'a HashSet<String>,
173 char_len_star_params: &'a HashMap<String, Vec<bool>>,
174 contained_host_refs: &'a HashMap<String, Vec<String>>,
175 ) -> Self {
176 Self {
177 locals: HashMap::new(),
178 loops: Vec::new(),
179 st,
180 globals,
181 type_layouts,
182 filtered_names: HashSet::new(),
183 result_addr: None,
184 result_type: None,
185 is_alloc_return: false,
186 alloc_return_funcs,
187 optional_params,
188 descriptor_params,
189 internal_funcs,
190 elemental_funcs,
191 char_len_star_params,
192 contained_host_refs,
193 label_blocks: HashMap::new(),
194 }
195 }
196
197 fn insert_scalar(&mut self, name: String, addr: ValueId, ty: IrType) {
198 self.locals.insert(
199 name,
200 LocalInfo {
201 addr,
202 ty,
203 dims: vec![],
204 allocatable: false,
205 descriptor_arg: false,
206 by_ref: false,
207 char_kind: CharKind::None,
208 derived_type: None,
209 inline_const: None,
210 is_pointer: false,
211 runtime_dim_upper: vec![],
212 },
213 );
214 }
215
216 fn insert_param_by_ref(&mut self, name: String, addr: ValueId, ty: IrType) {
217 self.locals.insert(
218 name,
219 LocalInfo {
220 addr,
221 ty,
222 dims: vec![],
223 allocatable: false,
224 descriptor_arg: false,
225 by_ref: true,
226 char_kind: CharKind::None,
227 derived_type: None,
228 inline_const: None,
229 is_pointer: false,
230 runtime_dim_upper: vec![],
231 },
232 );
233 }
234
235 fn push_loop(&mut self, name: Option<String>, header: BlockId, exit: BlockId) {
236 self.loops.push(LoopScope { name, header, exit });
237 }
238
239 fn pop_loop(&mut self) {
240 self.loops.pop();
241 }
242
243 /// Find loop by construct name (or innermost if None).
244 fn find_loop(&self, name: &Option<String>) -> Option<&LoopScope> {
245 if let Some(n) = name {
246 self.loops.iter().rev().find(|l| {
247 l.name
248 .as_deref()
249 .map(|s| s.eq_ignore_ascii_case(n))
250 .unwrap_or(false)
251 })
252 } else {
253 self.loops.last()
254 }
255 }
256 }
257
258 /// Lower a file of program units to an IR module.
259 ///
260 /// Two passes:
261 /// 1. Walk every `ProgramUnit::Module`, register its globals into
262 /// `module.globals` (with const-evaluated initializers where
263 /// possible) and into a `globals` resolution map keyed by
264 /// lowercase variable name. This map is what `Expr::Name`
265 /// lowering consults when a name isn't a local.
266 /// 2. Walk every unit again to lower its functions; module units
267 /// are skipped on this pass since their globals are already
268 /// installed.
269 pub fn lower_file(
270 units: &[SpannedUnit],
271 st: &SymbolTable,
272 type_layouts: &crate::sema::type_layout::TypeLayoutRegistry,
273 external_globals: HashMap<(String, String), ModuleGlobalInfo>,
274 external_descriptor_params: HashMap<String, Vec<bool>>,
275 external_char_len_star: HashMap<String, Vec<bool>>,
276 ) -> (Module, HashMap<(String, String), ModuleGlobalInfo>) {
277 let mut module = Module::new("main".into());
278 let mut globals: HashMap<(String, String), ModuleGlobalInfo> = external_globals;
279
280 // Pass 1: collect module-level variables. Submodule decls are
281 // installed under their parent module's name so the submodule's
282 // contained procedures can resolve them through the same global
283 // lookup the parent uses.
284 for unit in units {
285 match &unit.node {
286 ProgramUnit::Module { name, decls, .. } => {
287 collect_module_globals(&mut module, &mut globals, name, decls, st, type_layouts);
288 }
289 ProgramUnit::Submodule { parent, decls, .. } => {
290 collect_module_globals(&mut module, &mut globals, parent, decls, st, type_layouts);
291 }
292 _ => {}
293 }
294 }
295
296 // Pass 1.1: propagate transitively-visible globals through USE chains.
297 // When module A USEs module B, all of B's public globals should
298 // also be accessible as (A, var_name) in the globals map. Without
299 // this, `program p; use A` can't see B's variables even though A
300 // re-exports them via USE association.
301 for unit in units {
302 if let ProgramUnit::Module { name, uses, .. } = &unit.node {
303 let mod_key = name.to_lowercase();
304 // Collect USE'd module names.
305 let used_modules: Vec<String> = uses
306 .iter()
307 .filter_map(|u| {
308 if let Decl::UseStmt { module, .. } = &u.node {
309 Some(module.to_lowercase())
310 } else {
311 None
312 }
313 })
314 .collect();
315 // For each USE'd module, copy its globals into this module's namespace.
316 let mut to_add = Vec::new();
317 for used_mod in &used_modules {
318 for ((mk, var), info) in globals.iter() {
319 if mk == used_mod {
320 let new_key = (mod_key.clone(), var.clone());
321 if !globals.contains_key(&new_key) {
322 to_add.push((new_key, info.clone()));
323 }
324 }
325 }
326 }
327 for (key, info) in to_add {
328 globals.insert(key, info);
329 }
330 }
331 }
332
333 // Pass 1.5: walk every program unit (and its `contains` chain)
334 // and collect the names of functions whose result variable is
335 // declared `allocatable`. Audit6 BLOCKING-1: these need a hidden
336 // first parameter at lowering time so the caller can pass a
337 // descriptor address into which the function fills its result.
338 let mut alloc_return_funcs: HashSet<String> = HashSet::new();
339 for unit in units {
340 collect_alloc_return_funcs(&unit.node, &mut alloc_return_funcs);
341 }
342
343 // Pass 1.6: collect COMMON block variable types from all program
344 // units and emit one global per (block, variable) pair. F77 §5.5:
345 // all scopes that reference the same COMMON block must share the
346 // same backing memory. Each variable gets its own global so the IR
347 // type system sees the right element type; full contiguity (needed
348 // for EQUIVALENCE across COMMON boundaries) is deferred.
349 // Audit6 BLOCKING-2.
350 let mut emitted_common: HashSet<String> = HashSet::new();
351 for unit in units {
352 collect_and_emit_common_globals(&unit.node, &mut module, &mut emitted_common);
353 }
354
355 // Pass 1.7: collect optional-parameter bitmaps for every subroutine/function.
356 // Maps lowercase callee name → Vec<bool> (per-position, true = OPTIONAL).
357 // Used at call sites to pass null pointers for absent optional arguments
358 // so PRESENT() works correctly inside the callee.
359 let mut optional_params: HashMap<String, Vec<bool>> = HashMap::new();
360 for unit in units {
361 collect_optional_params(&unit.node, &mut optional_params);
362 }
363
364 let mut descriptor_params: HashMap<String, Vec<bool>> = external_descriptor_params;
365 for unit in units {
366 collect_descriptor_params(&unit.node, &mut descriptor_params);
367 }
368
369 let mut char_len_star_params: HashMap<String, Vec<bool>> = external_char_len_star;
370 for unit in units {
371 collect_char_len_star_params(&unit.node, &mut char_len_star_params);
372 }
373
374 let mut elemental_funcs: HashSet<String> = HashSet::new();
375 for unit in units {
376 collect_elemental_funcs(&unit.node, &mut elemental_funcs);
377 }
378
379 let mut internal_funcs: HashMap<String, u32> = HashMap::new();
380 let mut next_internal_idx: u32 = 0;
381 for unit in units {
382 collect_internal_func_names(&unit.node, &mut internal_funcs, &mut next_internal_idx);
383 }
384
385 // Host association: for every contained procedure, compute the set
386 // of host-local variables it references. Each such variable becomes
387 // a hidden trailing pointer parameter in the callee signature; call
388 // sites inside the host (or sibling contained procs) append the
389 // address of the matching variable from their own locals. This is
390 // the standard closure-passing ABI for Fortran contained
391 // subprograms (F2018 §19.5). Keyed by lowercase callee name.
392 let mut contained_host_refs: HashMap<String, Vec<String>> = HashMap::new();
393 for unit in units {
394 walk_contained_host_refs(&unit.node, &mut contained_host_refs);
395 }
396
397 // Pass 2: lower each unit. Modules already had their globals
398 // installed in pass 1; lower_unit's Module arm is a no-op.
399 // Top-level units have no host, so an empty host_uses slice.
400 let no_host: Vec<crate::ast::decl::SpannedDecl> = Vec::new();
401 let no_host_param_consts: HashMap<String, ConstScalar> = HashMap::new();
402 for unit in units {
403 lower_unit(
404 &mut module,
405 unit,
406 st,
407 &globals,
408 type_layouts,
409 &no_host,
410 &no_host_param_consts,
411 &no_host,
412 None,
413 None,
414 &alloc_return_funcs,
415 &optional_params,
416 &descriptor_params,
417 &internal_funcs,
418 &elemental_funcs,
419 &char_len_star_params,
420 &contained_host_refs,
421 false,
422 );
423 }
424 (module, globals)
425 }
426
427 /// Recursively walk `unit` and, for every contained subprogram, record
428 /// the ordered list of immediate-host-local variable names it reads or
429 /// writes. Subprograms with no host-local references get an empty
430 /// entry (still inserted, so call sites can cheaply check membership).
431 fn walk_contained_host_refs(unit: &ProgramUnit, out: &mut HashMap<String, Vec<String>>) {
432 // Nested CONTAINS: a contained proc may reference not just its
433 // immediate host's locals but also its host's host's locals and
434 // so on. Thread an accumulated ancestor-decls chain so every
435 // nested contained subprogram can find every name it depends on.
436 walk_contained_host_refs_inner(unit, &[], out);
437 }
438
/// Worker for `walk_contained_host_refs`: `ancestor_decls` is the chain
/// of declaration lists of every enclosing host, outermost first. For
/// each contained subprogram of `unit`, records (keyed by lowercase
/// name) the sorted list of ancestor-scope variable names it needs —
/// both directly referenced and forwarded on behalf of its own nested
/// contained procs.
fn walk_contained_host_refs_inner<'a>(
    unit: &'a ProgramUnit,
    ancestor_decls: &[&'a [crate::ast::decl::SpannedDecl]],
    out: &mut HashMap<String, Vec<String>>,
) {
    let (my_decls, contains): (&[crate::ast::decl::SpannedDecl], &[SpannedUnit]) = match unit {
        ProgramUnit::Program {
            decls, contains, ..
        } => (decls, contains),
        ProgramUnit::Subroutine {
            decls, contains, ..
        } => (decls, contains),
        ProgramUnit::Function {
            decls, contains, ..
        } => (decls, contains),
        ProgramUnit::Module { contains, .. } | ProgramUnit::Submodule { contains, .. } => {
            // Module / submodule procedures access module globals
            // directly (not via host association closures). Recurse
            // into each so any nested contains still gets analyzed
            // against its own host.
            for sub in contains {
                walk_contained_host_refs_inner(&sub.node, ancestor_decls, out);
            }
            return;
        }
        _ => return,
    };
    // Extend the ancestor chain with THIS unit's decls before
    // recursing into any nested contains.
    let mut next_ancestors: Vec<&[crate::ast::decl::SpannedDecl]> = ancestor_decls.to_vec();
    next_ancestors.push(my_decls);

    // First pass: recurse so every nested sub's own refs are in
    // `out`. We then fold each proc's contained procs' refs into
    // its own list, because transitive forwarding requires the
    // intermediate level to carry the outer-scope vars as hidden
    // params even when it doesn't reference them directly.
    for sub in contains {
        walk_contained_host_refs_inner(&sub.node, &next_ancestors, out);
    }
    for sub in contains {
        if let ProgramUnit::Subroutine {
            name,
            contains: sub_contains,
            ..
        }
        | ProgramUnit::Function {
            name,
            contains: sub_contains,
            ..
        } = &sub.node
        {
            let mut refs_set: std::collections::HashSet<String> = std::collections::HashSet::new();
            // Direct references in this sub's body, against each
            // layer of the ancestor chain (so host-of-host names
            // get collected).
            for anc in &next_ancestors {
                collect_host_references(&sub.node, anc, &mut refs_set);
            }
            // Transitive: every name that a nested contained proc
            // needs from an ANCESTOR scope must also be forwarded
            // through this sub. We filter the nested proc's refs
            // to the union of the ancestor names so we don't accidentally
            // pull in sub-local names — those resolve against this
            // sub's own decls when the nested proc is lowered.
            let mut ancestor_names: std::collections::HashSet<String> =
                std::collections::HashSet::new();
            for anc in &next_ancestors {
                for decl in *anc {
                    if let Decl::TypeDecl { entities, .. } = &decl.node {
                        for e in entities {
                            ancestor_names.insert(e.name.to_lowercase());
                        }
                    }
                }
            }
            for nested in sub_contains {
                if let ProgramUnit::Subroutine {
                    name: nested_name, ..
                }
                | ProgramUnit::Function {
                    name: nested_name, ..
                } = &nested.node
                {
                    // Nested refs are already in `out` thanks to the
                    // recursion pass above.
                    if let Some(nested_refs) = out.get(&nested_name.to_lowercase()) {
                        for r in nested_refs {
                            if ancestor_names.contains(r) {
                                refs_set.insert(r.clone());
                            }
                        }
                    }
                }
            }
            // Sort for a deterministic hidden-parameter order at every
            // call site and in the callee signature.
            let mut refs: Vec<String> = refs_set.into_iter().collect();
            refs.sort();
            out.insert(name.to_lowercase(), refs);
        }
    }
}
538
539 fn collect_internal_func_names(
540 unit: &ProgramUnit,
541 out: &mut HashMap<String, u32>,
542 next_idx: &mut u32,
543 ) {
544 match unit {
545 ProgramUnit::Program { name, contains, .. } => {
546 let fname = name.clone().unwrap_or_else(|| "main".into());
547 let body_name = format!("__prog_{}", fname).to_lowercase();
548 out.insert(body_name, *next_idx);
549 *next_idx += 1;
550 for sub in contains {
551 collect_internal_func_names(&sub.node, out, next_idx);
552 }
553 }
554 ProgramUnit::Subroutine {
555 name,
556 bind,
557 contains,
558 ..
559 }
560 | ProgramUnit::Function {
561 name,
562 bind,
563 contains,
564 ..
565 } => {
566 let idx = *next_idx;
567 *next_idx += 1;
568 out.insert(name.to_lowercase(), idx);
569 if let Some(bind) = bind {
570 if let Some(bind_name) = bind.name.as_deref() {
571 out.entry(
572 bind_name
573 .trim_matches('\'')
574 .trim_matches('"')
575 .to_lowercase(),
576 )
577 .or_insert(idx);
578 }
579 }
580 for sub in contains {
581 collect_internal_func_names(&sub.node, out, next_idx);
582 }
583 }
584 ProgramUnit::Module { contains, .. } | ProgramUnit::Submodule { contains, .. } => {
585 for sub in contains {
586 collect_internal_func_names(&sub.node, out, next_idx);
587 }
588 }
589 _ => {}
590 }
591 }
592
593 fn function_hidden_result_abi(
594 function_name: &str,
595 result: &Option<String>,
596 decls: &[crate::ast::decl::SpannedDecl],
597 ) -> HiddenResultAbi {
598 use crate::ast::decl::Attribute;
599 let result_key = result
600 .as_deref()
601 .unwrap_or(function_name)
602 .to_ascii_lowercase();
603 for decl in decls {
604 let Decl::TypeDecl {
605 type_spec,
606 attrs,
607 entities,
608 ..
609 } = &decl.node
610 else {
611 continue;
612 };
613 let attr_dims: Option<&Vec<ArraySpec>> = attrs.iter().find_map(|a| {
614 if let Attribute::Dimension(specs) = a {
615 Some(specs)
616 } else {
617 None
618 }
619 });
620 for entity in entities {
621 if entity.name.to_ascii_lowercase() != result_key {
622 continue;
623 }
624 let has_dims = entity.array_spec.as_ref().or(attr_dims).is_some();
625 let is_hidden_string = matches!(
626 type_spec,
627 TypeSpec::Character(Some(sel))
628 if matches!(sel.len, Some(crate::ast::decl::LenSpec::Colon))
629 ) && !has_dims
630 && attrs
631 .iter()
632 .any(|a| matches!(a, Attribute::Allocatable | Attribute::Pointer));
633 if is_hidden_string {
634 return HiddenResultAbi::StringDescriptor;
635 }
636 if attrs.iter().any(|a| matches!(a, Attribute::Allocatable)) {
637 return HiddenResultAbi::ArrayDescriptor;
638 }
639 return HiddenResultAbi::None;
640 }
641 }
642 HiddenResultAbi::None
643 }
644
645 /// Walk a program unit and any nested `contains` to collect the
646 /// names of functions whose result variable is lowered through the
647 /// 384-byte array descriptor hidden-result ABI. Deferred-length
648 /// scalar character results use the 32-byte string descriptor ABI
649 /// and are intentionally excluded here.
650 ///
651 /// Audit6 BLOCKING-1: a function `function f() result(r); integer,
652 /// allocatable :: r(:)` cannot be returned by value through the
653 /// usual scalar result alloca — the descriptor is 384 bytes and
654 /// the type system needs to know about it at every call site.
655 /// We model this with a hidden first `ptr<[i8 x 384]>` parameter
656 /// that the caller passes in, and the function writes its result
657 /// into that descriptor.
658 fn collect_alloc_return_funcs(unit: &ProgramUnit, out: &mut HashSet<String>) {
659 match unit {
660 ProgramUnit::Function {
661 name,
662 decls,
663 contains,
664 result,
665 ..
666 } => {
667 if function_hidden_result_abi(name, result, decls) == HiddenResultAbi::ArrayDescriptor {
668 out.insert(name.to_lowercase());
669 }
670 for sub in contains {
671 collect_alloc_return_funcs(&sub.node, out);
672 }
673 }
674 ProgramUnit::Program { contains, .. }
675 | ProgramUnit::Subroutine { contains, .. }
676 | ProgramUnit::Module { contains, .. }
677 | ProgramUnit::Submodule { contains, .. } => {
678 for sub in contains {
679 collect_alloc_return_funcs(&sub.node, out);
680 }
681 }
682 _ => {}
683 }
684 }
685
686 /// Scan a program unit and its CONTAINS chain and record, for each
687 /// subroutine/function, which of its positional dummy arguments carry
688 /// the OPTIONAL attribute.
689 ///
690 /// Result: `out` maps lowercase subroutine/function name →
691 /// `Vec<bool>` (index = parameter position, value = is_optional).
692 /// Used at call sites to pass null pointers for absent optional args,
693 /// enabling PRESENT() intrinsic queries inside the callee.
694 /// Collect which positional parameters are `character(len=*)` — assumed
695 /// length. These need hidden-length i64 parameters appended to the
696 /// function signature and the call site.
697 fn collect_char_len_star_params(unit: &ProgramUnit, out: &mut HashMap<String, Vec<bool>>) {
698 use crate::ast::unit::DummyArg;
699 let record = |name: &str,
700 args: &[DummyArg],
701 decls: &[crate::ast::decl::SpannedDecl],
702 out: &mut HashMap<String, Vec<bool>>| {
703 let param_names: Vec<String> = args
704 .iter()
705 .filter_map(|a| {
706 if let DummyArg::Name(n) = a {
707 Some(n.to_lowercase())
708 } else {
709 None
710 }
711 })
712 .collect();
713 if param_names.is_empty() {
714 return;
715 }
716 let flags: Vec<bool> = param_names
717 .iter()
718 .map(|pname| {
719 for d in decls {
720 if let crate::ast::decl::Decl::TypeDecl {
721 type_spec,
722 entities,
723 ..
724 } = &d.node
725 {
726 if entities.iter().any(|e| e.name.to_lowercase() == *pname) {
727 if let TypeSpec::Character(Some(sel)) = type_spec {
728 if matches!(&sel.len, Some(crate::ast::decl::LenSpec::Star)) {
729 return true;
730 }
731 }
732 }
733 }
734 }
735 false
736 })
737 .collect();
738 if flags.iter().any(|f| *f) {
739 out.insert(name.to_lowercase(), flags);
740 }
741 };
742 match unit {
743 ProgramUnit::Subroutine {
744 name,
745 args,
746 decls,
747 contains,
748 ..
749 } => {
750 record(name, args, decls, out);
751 for sub in contains {
752 collect_char_len_star_params(&sub.node, out);
753 }
754 }
755 ProgramUnit::Function {
756 name,
757 args,
758 decls,
759 contains,
760 ..
761 } => {
762 record(name, args, decls, out);
763 for sub in contains {
764 collect_char_len_star_params(&sub.node, out);
765 }
766 }
767 ProgramUnit::Program { contains, .. }
768 | ProgramUnit::Module { contains, .. }
769 | ProgramUnit::Submodule { contains, .. } => {
770 for sub in contains {
771 collect_char_len_star_params(&sub.node, out);
772 }
773 }
774 _ => {}
775 }
776 }
777
778 fn collect_optional_params(unit: &ProgramUnit, out: &mut HashMap<String, Vec<bool>>) {
779 use crate::ast::decl::Attribute;
780 use crate::ast::unit::DummyArg;
781 let record = |name: &str,
782 args: &[DummyArg],
783 decls: &[crate::ast::decl::SpannedDecl],
784 out: &mut HashMap<String, Vec<bool>>| {
785 let param_names: Vec<String> = args
786 .iter()
787 .filter_map(|a| {
788 if let DummyArg::Name(n) = a {
789 Some(n.to_lowercase())
790 } else {
791 None
792 }
793 })
794 .collect();
795 if param_names.is_empty() {
796 return;
797 }
798 let optional_flags: Vec<bool> = param_names
799 .iter()
800 .map(|pname| {
801 for d in decls {
802 if let crate::ast::decl::Decl::TypeDecl {
803 attrs, entities, ..
804 } = &d.node
805 {
806 let is_optional = attrs.iter().any(|a| matches!(a, Attribute::Optional));
807 if is_optional && entities.iter().any(|e| e.name.to_lowercase() == *pname) {
808 return true;
809 }
810 }
811 }
812 false
813 })
814 .collect();
815 out.insert(name.to_lowercase(), optional_flags);
816 };
817 match unit {
818 ProgramUnit::Subroutine {
819 name,
820 args,
821 decls,
822 contains,
823 ..
824 } => {
825 record(name, args, decls, out);
826 for sub in contains {
827 collect_optional_params(&sub.node, out);
828 }
829 }
830 ProgramUnit::Function {
831 name,
832 args,
833 decls,
834 contains,
835 ..
836 } => {
837 record(name, args, decls, out);
838 for sub in contains {
839 collect_optional_params(&sub.node, out);
840 }
841 }
842 ProgramUnit::Program { contains, .. }
843 | ProgramUnit::Module { contains, .. }
844 | ProgramUnit::Submodule { contains, .. } => {
845 for sub in contains {
846 collect_optional_params(&sub.node, out);
847 }
848 }
849 _ => {}
850 }
851 }
852
853 fn arg_uses_descriptor_from_decls(arg_name: &str, decls: &[crate::ast::decl::SpannedDecl]) -> bool {
854 let key = arg_name.to_lowercase();
855 for decl in decls {
856 if let Decl::TypeDecl {
857 attrs, entities, ..
858 } = &decl.node
859 {
860 let attr_dims: Option<&Vec<ArraySpec>> = attrs.iter().find_map(|a| {
861 if let crate::ast::decl::Attribute::Dimension(specs) = a {
862 Some(specs)
863 } else {
864 None
865 }
866 });
867 for entity in entities {
868 if entity.name.to_lowercase() != key {
869 continue;
870 }
871 let Some(specs) = entity.array_spec.as_ref().or(attr_dims) else {
872 return false;
873 };
874 return specs.iter().any(|spec| {
875 matches!(
876 spec,
877 ArraySpec::AssumedShape { .. }
878 | ArraySpec::AssumedSize { .. }
879 | ArraySpec::Deferred
880 | ArraySpec::AssumedRank
881 )
882 });
883 }
884 }
885 }
886 false
887 }
888
889 fn arg_uses_string_descriptor_from_decls(
890 arg_name: &str,
891 decls: &[crate::ast::decl::SpannedDecl],
892 ) -> bool {
893 let key = arg_name.to_lowercase();
894 for decl in decls {
895 if let Decl::TypeDecl {
896 type_spec,
897 attrs,
898 entities,
899 ..
900 } = &decl.node
901 {
902 let attr_dims: Option<&Vec<ArraySpec>> = attrs.iter().find_map(|a| {
903 if let crate::ast::decl::Attribute::Dimension(specs) = a {
904 Some(specs)
905 } else {
906 None
907 }
908 });
909 for entity in entities {
910 if entity.name.to_lowercase() != key {
911 continue;
912 }
913 let uses_descriptor = matches!(
914 type_spec,
915 TypeSpec::Character(Some(sel))
916 if matches!(sel.len, Some(crate::ast::decl::LenSpec::Colon))
917 ) && attrs.iter().any(|a| {
918 matches!(
919 a,
920 crate::ast::decl::Attribute::Allocatable
921 | crate::ast::decl::Attribute::Pointer
922 )
923 }) && entity.array_spec.as_ref().or(attr_dims).is_none();
924 return uses_descriptor;
925 }
926 }
927 }
928 false
929 }
930
931 /// Record which positional dummy arguments are lowered through an
932 /// ArrayDescriptor rather than a raw element pointer.
933 fn collect_descriptor_params(unit: &ProgramUnit, out: &mut HashMap<String, Vec<bool>>) {
934 use crate::ast::unit::DummyArg;
935 let record = |name: &str,
936 args: &[DummyArg],
937 decls: &[crate::ast::decl::SpannedDecl],
938 out: &mut HashMap<String, Vec<bool>>| {
939 let param_names: Vec<String> = args
940 .iter()
941 .filter_map(|a| {
942 if let DummyArg::Name(n) = a {
943 Some(n.to_lowercase())
944 } else {
945 None
946 }
947 })
948 .collect();
949 if param_names.is_empty() {
950 return;
951 }
952 let flags: Vec<bool> = param_names
953 .iter()
954 .map(|pname| arg_uses_descriptor_from_decls(pname, decls))
955 .collect();
956 out.insert(name.to_lowercase(), flags);
957 };
958 match unit {
959 ProgramUnit::Subroutine {
960 name,
961 args,
962 decls,
963 contains,
964 ..
965 } => {
966 record(name, args, decls, out);
967 for sub in contains {
968 collect_descriptor_params(&sub.node, out);
969 }
970 }
971 ProgramUnit::Function {
972 name,
973 args,
974 decls,
975 contains,
976 ..
977 } => {
978 record(name, args, decls, out);
979 for sub in contains {
980 collect_descriptor_params(&sub.node, out);
981 }
982 }
983 ProgramUnit::Program { contains, .. }
984 | ProgramUnit::Module { contains, .. }
985 | ProgramUnit::Submodule { contains, .. } => {
986 for sub in contains {
987 collect_descriptor_params(&sub.node, out);
988 }
989 }
990 _ => {}
991 }
992 }
993
994 /// Collect lowercase names of functions declared ELEMENTAL. Whole-array
995 /// lowering uses this side table to recognize elemental calls before symbol
996 /// resolution has become IR call refs.
997 fn collect_elemental_funcs(unit: &ProgramUnit, out: &mut HashSet<String>) {
998 use crate::ast::unit::Prefix;
999 match unit {
1000 ProgramUnit::Function {
1001 name,
1002 prefix,
1003 contains,
1004 ..
1005 } => {
1006 if prefix.iter().any(|p| matches!(p, Prefix::Elemental)) {
1007 out.insert(name.to_lowercase());
1008 }
1009 for sub in contains {
1010 collect_elemental_funcs(&sub.node, out);
1011 }
1012 }
1013 ProgramUnit::Program { contains, .. }
1014 | ProgramUnit::Subroutine { contains, .. }
1015 | ProgramUnit::Module { contains, .. }
1016 | ProgramUnit::Submodule { contains, .. } => {
1017 for sub in contains {
1018 collect_elemental_funcs(&sub.node, out);
1019 }
1020 }
1021 _ => {}
1022 }
1023 }
1024
/// Link-time symbol for one *slot position* of a COMMON block:
/// `afs_common_<block_name>_<slot_index>` (callers pass `block`
/// already lowercased; the blank COMMON uses the synthetic name
/// `__blank__`).
///
/// Keying on the slot position — not the local variable name — is what
/// gives F77 §5.5 shared-memory semantics: `common /blk/ a, b` in the
/// host and `common /blk/ x, y` in a contained routine must resolve to
/// the same two globals, not four separate ones. Each scope's local
/// name for a slot is irrelevant; only its position matters. The
/// slot's element type comes from whichever scope the module walker
/// visits first; scopes that agree on positional types will get
/// correct reads/writes, and scopes that disagree are an F77
/// undefined-behavior region we leave unhandled for now.
/// Audit6 BLOCKING-2.
fn common_slot_symbol(block: &str, slot_idx: usize) -> String {
    format!("afs_common_{block}_{slot_idx}")
}
1044
1045 fn collect_and_emit_common_globals(
1046 unit: &ProgramUnit,
1047 module: &mut Module,
1048 emitted: &mut HashSet<String>,
1049 ) {
1050 use crate::ast::decl::Decl;
1051 let emit_for_decls = |decls: &[crate::ast::decl::SpannedDecl],
1052 module: &mut Module,
1053 emitted: &mut HashSet<String>| {
1054 for decl in decls {
1055 if let Decl::CommonBlock { name, vars } = &decl.node {
1056 let block_name = name.as_deref().unwrap_or("__blank__").to_lowercase();
1057 for (slot_idx, var) in vars.iter().enumerate() {
1058 let symbol = common_slot_symbol(&block_name, slot_idx);
1059 if emitted.contains(&symbol) {
1060 continue;
1061 }
1062 emitted.insert(symbol.clone());
1063 let elem_ty = arg_type_from_decls(&var.to_lowercase(), decls, None);
1064 module.add_global(Global {
1065 name: symbol,
1066 ty: elem_ty,
1067 initializer: Some(GlobalInit::Zero),
1068 });
1069 }
1070 }
1071 }
1072 };
1073 match unit {
1074 ProgramUnit::Program {
1075 decls, contains, ..
1076 } => {
1077 emit_for_decls(decls, module, emitted);
1078 for sub in contains {
1079 collect_and_emit_common_globals(&sub.node, module, emitted);
1080 }
1081 }
1082 ProgramUnit::Subroutine {
1083 decls, contains, ..
1084 } => {
1085 emit_for_decls(decls, module, emitted);
1086 for sub in contains {
1087 collect_and_emit_common_globals(&sub.node, module, emitted);
1088 }
1089 }
1090 ProgramUnit::Function {
1091 decls, contains, ..
1092 } => {
1093 emit_for_decls(decls, module, emitted);
1094 for sub in contains {
1095 collect_and_emit_common_globals(&sub.node, module, emitted);
1096 }
1097 }
1098 _ => {}
1099 }
1100 }
1101
1102 /// Install COMMON block variables as global_addr locals before `alloc_decls`
1103 /// runs. Because `alloc_decls` skips names already in `locals`, the COMMON
1104 /// variables are not re-alloca'd with private storage. Each variable is
1105 /// installed as a direct (non-by_ref) local whose addr is a GlobalAddr
1106 /// pointing to the shared COMMON global. Audit6 BLOCKING-2.
1107 fn install_common_locals(
1108 b: &mut FuncBuilder,
1109 locals: &mut HashMap<String, LocalInfo>,
1110 decls: &[crate::ast::decl::SpannedDecl],
1111 ) {
1112 use crate::ast::decl::Decl;
1113 for decl in decls {
1114 if let Decl::CommonBlock { name, vars } = &decl.node {
1115 let block_name = name.as_deref().unwrap_or("__blank__").to_lowercase();
1116 for (slot_idx, var) in vars.iter().enumerate() {
1117 let key = var.to_lowercase();
1118 if locals.contains_key(&key) {
1119 continue;
1120 }
1121 let symbol = common_slot_symbol(&block_name, slot_idx);
1122 let elem_ty = arg_type_from_decls(&key, decls, None);
1123 let addr = b.global_addr(&symbol, elem_ty.clone());
1124 locals.insert(
1125 key,
1126 LocalInfo {
1127 addr,
1128 ty: elem_ty,
1129 dims: vec![],
1130 allocatable: false,
1131 descriptor_arg: false,
1132 by_ref: false,
1133 char_kind: CharKind::None,
1134 derived_type: None,
1135 inline_const: None,
1136 is_pointer: false,
1137 runtime_dim_upper: vec![],
1138 },
1139 );
1140 }
1141 }
1142 }
1143 }
1144
/// Install EQUIVALENCE group members as aliased locals before `alloc_decls`.
///
/// F77 §5.4: each member of an equivalence group must share the same
/// backing storage. We allocate one `[i8 x total]` backing store and
/// install each variable with a GEP into it at its byte offset. The
/// GEP element type matches the variable's declared type so that
/// subsequent loads and stores are correctly typed (the verifier
/// allows `store T, Ptr<T>` unconditionally). Audit6 BLOCKING-3.
///
/// Supported members:
/// * `Expr::Name` — scalar variable at offset 0 within itself.
/// * `Expr::FunctionCall { callee: name, args: [Element(idx)] }` —
///   array element `name(idx)`, at byte offset `(idx−1)*elem_size`
///   relative to the start of the array. Array must already be in
///   scope via a static (non-allocatable) TypeDecl so we can compute
///   the size at compile time.
///
/// The "anchor" of the group is the member with the smallest byte
/// offset after mapping each member's internal offset to a shared
/// coordinate space. All other members are GEP'd at their relative
/// distance from the anchor. The backing store is sized to cover the
/// maximum extent across all members.
fn install_equivalence_locals(
    b: &mut FuncBuilder,
    locals: &mut HashMap<String, LocalInfo>,
    decls: &[crate::ast::decl::SpannedDecl],
) {
    use crate::ast::decl::Decl;
    use crate::ast::expr::Expr;
    use crate::ast::expr::SectionSubscript;

    for decl in decls {
        if let Decl::EquivalenceStmt { groups } = &decl.node {
            for group in groups {
                // Resolve each member to (var_name, elem_ty, within_var_byte_offset).
                // within_var_byte_offset: for `name` → 0; for `name(i)` → (i-1)*elem_size.
                let mut members: Vec<(String, IrType, i64)> = Vec::new();
                for expr in group {
                    match &expr.node {
                        Expr::Name { name } => {
                            let key = name.to_lowercase();
                            let ty = arg_type_from_decls(&key, decls, None);
                            members.push((key, ty, 0));
                        }
                        Expr::FunctionCall { callee, args } => {
                            if let Expr::Name { name } = &callee.node {
                                let key = name.to_lowercase();
                                let ty = arg_type_from_decls(&key, decls, None);
                                // Only the first subscript participates; a
                                // multi-dimensional element is not linearized
                                // across trailing dimensions here.
                                let idx = if let Some(sub) = args.first() {
                                    if let SectionSubscript::Element(e) = &sub.value {
                                        // NOTE(review): a subscript that fails
                                        // compile-time evaluation silently falls
                                        // back to index 1 — confirm this
                                        // degradation is intended rather than
                                        // an error.
                                        eval_const_int(e).unwrap_or(1)
                                    } else {
                                        1
                                    }
                                } else {
                                    1
                                };
                                let byte_off = (idx.max(1) - 1) * ir_scalar_byte_size(&ty);
                                members.push((key, ty, byte_off));
                            }
                        }
                        _ => {} // skip complex expressions
                    }
                }
                if members.is_empty() {
                    continue;
                }

                // Find the smallest within_var offset — this becomes the "origin".
                let min_off = members.iter().map(|(_, _, o)| *o).min().unwrap_or(0);

                // Compute total backing store size (bytes).
                let total = members
                    .iter()
                    .map(|(_, ty, o)| (o - min_off) + ir_scalar_byte_size(ty))
                    .max()
                    .unwrap_or(8);

                // Allocate the byte-array backing store.
                let backing_ty = IrType::Array(Box::new(IrType::Int(IntWidth::I8)), total as u64);
                let backing = b.alloca(backing_ty);

                for (var_name, elem_ty, within_off) in &members {
                    // A name already bound (e.g. via COMMON) keeps its
                    // existing storage; we do not re-alias it.
                    if locals.contains_key(var_name) {
                        continue;
                    }
                    let rel = within_off - min_off; // byte offset into backing
                    // GEP with element type = elem_ty so the result is Ptr<elem_ty>.
                    // For I32 at rel=0: gep(backing, [0], I32) → Ptr<I32> (backing itself).
                    // For I32 at rel=4: gep(backing, [1], I32) → Ptr<I32> (backing + 4).
                    // NOTE(review): `rel / elem_size` truncates — it is exact only
                    // when `rel` is a multiple of `elem_size`. A mixed-size group
                    // whose offsets don't align to this member's element size
                    // would land short of the true byte offset; confirm whether
                    // such groups can reach this point.
                    let elem_size = ir_scalar_byte_size(elem_ty);
                    let gep_idx = if elem_size > 0 { rel / elem_size } else { 0 };
                    let idx_val = b.const_i64(gep_idx);
                    let addr = b.gep(backing, vec![idx_val], elem_ty.clone());
                    locals.insert(
                        var_name.clone(),
                        LocalInfo {
                            addr,
                            ty: elem_ty.clone(),
                            dims: vec![],
                            allocatable: false,
                            descriptor_arg: false,
                            by_ref: false,
                            char_kind: CharKind::None,
                            derived_type: None,
                            inline_const: None,
                            is_pointer: false,
                            runtime_dim_upper: vec![],
                        },
                    );
                }
            }
        }
    }
}
1259 }
1260
/// Information about a module-level global, tracked in
/// `lower_file`'s globals map so `install_globals_as_locals` can
/// reconstruct a `LocalInfo` for it inside each function that
/// USE-imports the module.
#[derive(Clone)]
pub struct ModuleGlobalInfo {
    /// Mangled link-time symbol, `afs_mod_<module>_<variable>` (lowercase).
    pub symbol: String,
    /// IR value type of the variable; for arrays this is the element type.
    pub ty: IrType,
    /// Compile-time per-dimension info for statically shaped arrays
    /// (second component is the extent — the product of extents gives the
    /// element count; the first is presumably the lower bound — TODO confirm).
    /// Empty for scalars and for descriptor-backed arrays.
    pub dims: Vec<(i64, i64)>,
    /// True when the storage is a runtime descriptor rather than inline
    /// data. Note: descriptor-backed POINTER arrays also set this so the
    /// descriptor helpers treat the global as the descriptor body.
    pub allocatable: bool,
    /// Declared with the POINTER attribute.
    pub is_pointer: bool,
    /// Deferred-length `character(:)` allocatable backed by a
    /// StringDescriptor global.
    pub deferred_char: bool,
    /// Derived-type name when the variable is of a user-defined TYPE.
    pub derived_type: Option<String>,
    /// Character storage classification (see `CharKind`).
    pub(crate) char_kind: CharKind,
    /// External modules (from .amod files) — skip emitting Global
    /// data entries since the storage lives in the other .o file.
    pub external: bool,
}
1279
1280 fn pointer_slot_type(value_ty: &IrType) -> IrType {
1281 if value_ty.is_ptr() {
1282 value_ty.clone()
1283 } else {
1284 IrType::Ptr(Box::new(value_ty.clone()))
1285 }
1286 }
1287
// NOTE(review): the paragraph below documents `collect_module_globals`
// (defined further down in this file), not the item that follows. As
// `///` doc comments these lines were wrongly attached to the rustdoc
// of `collect_host_references`; demoted to plain comments so they no
// longer document the wrong function.
//
// Walk a module's declarations and emit a global per variable.
// Handles scalars (with literal initializers) and fixed-size
// arrays (with array-constructor initializers). Resolves the
// initializer at compile time where possible; otherwise falls
// through to zero-init.
//
// Array variables with non-literal initializers or dynamic dims
// are currently rejected by falling through to scalar emission —
// that's a known gap tracked for follow-up.
1297 /// Collect names that a contained subprogram references but doesn't
1298 /// declare locally (host-associated names).
1299 fn collect_host_references(
1300 sub: &ProgramUnit,
1301 host_decls: &[crate::ast::decl::SpannedDecl],
1302 refs: &mut std::collections::HashSet<String>,
1303 ) {
1304 // Collect host-declared names.
1305 let mut host_names = std::collections::HashSet::new();
1306 for d in host_decls {
1307 if let Decl::TypeDecl { entities, .. } = &d.node {
1308 for e in entities {
1309 host_names.insert(e.name.to_lowercase());
1310 }
1311 }
1312 }
1313 // Collect names declared locally in the subprogram.
1314 // Also seed with the function name and any RESULT(r) clause name —
1315 // those are implicit local names that don't appear in the
1316 // declaration list but must shadow same-named host variables when
1317 // deciding which references close over the host scope.
1318 let (sub_decls, sub_body, sub_args, sub_implicit_names): (
1319 &[crate::ast::decl::SpannedDecl],
1320 &[crate::ast::stmt::SpannedStmt],
1321 Vec<String>,
1322 Vec<String>,
1323 ) = match sub {
1324 ProgramUnit::Subroutine {
1325 decls,
1326 body,
1327 args,
1328 name,
1329 ..
1330 } => {
1331 let arg_names: Vec<String> = args
1332 .iter()
1333 .filter_map(|a| {
1334 if let DummyArg::Name(n) = a {
1335 Some(n.to_lowercase())
1336 } else {
1337 None
1338 }
1339 })
1340 .collect();
1341 (decls, body, arg_names, vec![name.to_lowercase()])
1342 }
1343 ProgramUnit::Function {
1344 decls,
1345 body,
1346 args,
1347 name,
1348 result,
1349 ..
1350 } => {
1351 let arg_names: Vec<String> = args
1352 .iter()
1353 .filter_map(|a| {
1354 if let DummyArg::Name(n) = a {
1355 Some(n.to_lowercase())
1356 } else {
1357 None
1358 }
1359 })
1360 .collect();
1361 let mut implicits = vec![name.to_lowercase()];
1362 if let Some(r) = result {
1363 implicits.push(r.to_lowercase());
1364 }
1365 (decls, body, arg_names, implicits)
1366 }
1367 _ => return,
1368 };
1369 let mut sub_locals = std::collections::HashSet::new();
1370 for n in sub_args {
1371 sub_locals.insert(n);
1372 }
1373 for n in sub_implicit_names {
1374 sub_locals.insert(n);
1375 }
1376 for d in sub_decls {
1377 if let Decl::TypeDecl { entities, .. } = &d.node {
1378 for e in entities {
1379 sub_locals.insert(e.name.to_lowercase());
1380 }
1381 }
1382 }
1383 // Walk the body looking for Name refs in host but not sub.
1384 for stmt in sub_body {
1385 collect_host_refs_stmt(stmt, &host_names, &sub_locals, refs);
1386 }
1387 }
1388
1389 fn collect_host_refs_stmt(
1390 stmt: &crate::ast::stmt::SpannedStmt,
1391 host_names: &std::collections::HashSet<String>,
1392 sub_locals: &std::collections::HashSet<String>,
1393 refs: &mut std::collections::HashSet<String>,
1394 ) {
1395 use crate::ast::stmt::Stmt;
1396 match &stmt.node {
1397 Stmt::Assignment { target, value } => {
1398 collect_host_refs_expr(target, host_names, sub_locals, refs);
1399 collect_host_refs_expr(value, host_names, sub_locals, refs);
1400 }
1401 Stmt::Print { items, .. } => {
1402 for e in items {
1403 collect_host_refs_expr(e, host_names, sub_locals, refs);
1404 }
1405 }
1406 Stmt::Write { items, .. } | Stmt::Read { items, .. } => {
1407 for e in items {
1408 collect_host_refs_expr(e, host_names, sub_locals, refs);
1409 }
1410 }
1411 Stmt::IfConstruct {
1412 condition,
1413 then_body,
1414 else_ifs,
1415 else_body,
1416 ..
1417 } => {
1418 collect_host_refs_expr(condition, host_names, sub_locals, refs);
1419 for s in then_body {
1420 collect_host_refs_stmt(s, host_names, sub_locals, refs);
1421 }
1422 for (c, b) in else_ifs {
1423 collect_host_refs_expr(c, host_names, sub_locals, refs);
1424 for s in b {
1425 collect_host_refs_stmt(s, host_names, sub_locals, refs);
1426 }
1427 }
1428 if let Some(b) = else_body {
1429 for s in b {
1430 collect_host_refs_stmt(s, host_names, sub_locals, refs);
1431 }
1432 }
1433 }
1434 Stmt::DoLoop { body, .. }
1435 | Stmt::DoWhile { body, .. }
1436 | Stmt::DoConcurrent { body, .. }
1437 | Stmt::Block { body, .. } => {
1438 for s in body {
1439 collect_host_refs_stmt(s, host_names, sub_locals, refs);
1440 }
1441 }
1442 Stmt::Call { args, .. } => {
1443 for arg in args {
1444 if let crate::ast::expr::SectionSubscript::Element(e) = &arg.value {
1445 collect_host_refs_expr(e, host_names, sub_locals, refs);
1446 }
1447 }
1448 }
1449 _ => {}
1450 }
1451 }
1452
1453 fn collect_host_refs_expr(
1454 expr: &crate::ast::expr::SpannedExpr,
1455 host_names: &std::collections::HashSet<String>,
1456 sub_locals: &std::collections::HashSet<String>,
1457 refs: &mut std::collections::HashSet<String>,
1458 ) {
1459 use crate::ast::expr::Expr;
1460 match &expr.node {
1461 Expr::Name { name } => {
1462 let key = name.to_lowercase();
1463 if host_names.contains(&key) && !sub_locals.contains(&key) {
1464 refs.insert(key);
1465 }
1466 }
1467 Expr::BinaryOp { left, right, .. } => {
1468 collect_host_refs_expr(left, host_names, sub_locals, refs);
1469 collect_host_refs_expr(right, host_names, sub_locals, refs);
1470 }
1471 Expr::UnaryOp { operand, .. } => {
1472 collect_host_refs_expr(operand, host_names, sub_locals, refs);
1473 }
1474 Expr::FunctionCall { callee, args } => {
1475 // Fortran parses `arr(i)` as a function call: the "callee"
1476 // is the array name. When the callee resolves to a host
1477 // variable (not a function name), it's a host-associated
1478 // array access. Always walk the callee name so both array
1479 // subscripts and real function calls pull host refs from
1480 // their subexpression tree.
1481 collect_host_refs_expr(callee, host_names, sub_locals, refs);
1482 for arg in args {
1483 if let crate::ast::expr::SectionSubscript::Element(e) = &arg.value {
1484 collect_host_refs_expr(e, host_names, sub_locals, refs);
1485 }
1486 }
1487 }
1488 Expr::ComponentAccess { base, .. } => {
1489 collect_host_refs_expr(base, host_names, sub_locals, refs);
1490 }
1491 Expr::ParenExpr { inner } => {
1492 collect_host_refs_expr(inner, host_names, sub_locals, refs);
1493 }
1494 _ => {}
1495 }
1496 }
1497
/// Lookup marker for `collect_implicit_locals` — identifies which
/// kind of enclosing program unit owns the body being walked so the
/// helper can locate the matching `Scope` (and thus the right
/// `ImplicitRules`) in the pre-resolved symbol table. Name matching
/// against scopes is case-insensitive.
#[derive(Clone, Copy)]
enum UnitScope<'a> {
    /// Main PROGRAM unit, identified by its name.
    Program(&'a str),
    /// SUBROUTINE subprogram, identified by its name.
    Subroutine(&'a str),
    /// FUNCTION subprogram, identified by its name.
    Function(&'a str),
}
1508
/// Materialise locals for any bare `Name` reference in `body` that
/// isn't explicitly declared, doesn't come from a USE/host import,
/// and isn't an intrinsic or sibling procedure. This is the Fortran
/// default-typing rule (F2018 §8.7): under IMPLICIT NONE we stay
/// silent (validate.rs reports an error); otherwise the name's first
/// letter selects a type, we emit an alloca, and register it as a
/// scalar local so subsequent assignments and reads land on storage
/// instead of silently dropping into `const_int 0`.
fn collect_implicit_locals(
    b: &mut FuncBuilder,
    ctx: &mut LowerCtx,
    body: &[crate::ast::stmt::SpannedStmt],
    unit: UnitScope,
) {
    use crate::sema::symtab::{ImplicitType, ScopeKind};
    // Locate the sema scope for this program unit; identifier matching
    // is case-insensitive per Fortran rules.
    let scope = ctx.st.all_scopes().iter().find(|s| match (&s.kind, unit) {
        (ScopeKind::Program(n), UnitScope::Program(name)) => n.eq_ignore_ascii_case(name),
        (ScopeKind::Subroutine(n), UnitScope::Subroutine(name)) => n.eq_ignore_ascii_case(name),
        (ScopeKind::Function(n), UnitScope::Function(name)) => n.eq_ignore_ascii_case(name),
        _ => false,
    });
    let Some(scope) = scope else {
        return;
    };
    // IMPLICIT NONE: nothing to materialise; validate.rs diagnoses
    // undeclared names.
    if scope.implicit_rules.none_type {
        return;
    }

    // Every bare-name reference in the body. Duplicates are fine — the
    // `locals` check below makes the loop idempotent per name.
    let mut candidates: Vec<String> = Vec::new();
    for stmt in body {
        collect_name_refs_stmt(stmt, &mut candidates);
    }

    for name in candidates {
        let key = name.to_lowercase();
        // Already has storage (declared, dummy arg, or materialised by
        // an earlier iteration of this loop).
        if ctx.locals.contains_key(&key) {
            continue;
        }
        // Intrinsic names are calls, not variables.
        if crate::sema::validate::is_intrinsic_name(&key) {
            continue;
        }
        // Sibling internal procedures are calls, not variables.
        if ctx.internal_funcs.contains_key(&key) {
            continue;
        }
        // Module variables imported via USE already have globals.
        if ctx.globals.keys().any(|(_m, v)| v == &key) {
            continue;
        }
        // Names that resolve to symbols already known to the sema
        // layer (e.g. module procedures, use-associated symbols whose
        // globals we don't synthesise, interface generics) are
        // skipped so we don't shadow them with a bogus implicit alloca.
        if ctx.st.find_symbol_any_scope(&key).is_some() {
            continue;
        }

        // First-letter rule: the scope's IMPLICIT rules pick a type;
        // None means this letter has no implicit type.
        let itype = match scope.implicit_rules.type_for(&key) {
            Some(t) => t,
            None => continue,
        };
        let ir_ty = match itype {
            ImplicitType::Integer => IrType::Int(IntWidth::I32),
            ImplicitType::Real => IrType::Float(FloatWidth::F32),
            ImplicitType::DoublePrecision => IrType::Float(FloatWidth::F64),
            ImplicitType::Complex => {
                // Default complex is single precision — stored as [f32 x 2].
                IrType::Array(Box::new(IrType::Float(FloatWidth::F32)), 2)
            }
            ImplicitType::Logical => IrType::Bool,
            ImplicitType::Character => {
                // Default implicit character is CHARACTER(1). Emit a
                // fixed-1-byte buffer with a CharKind::Fixed(1) kind so
                // the lowering treats it as a character scalar.
                let slot = b.alloca(IrType::Array(Box::new(IrType::Int(IntWidth::I8)), 1));
                ctx.locals.insert(
                    key.clone(),
                    LocalInfo {
                        addr: slot,
                        ty: IrType::Int(IntWidth::I8),
                        dims: vec![],
                        allocatable: false,
                        descriptor_arg: false,
                        by_ref: false,
                        char_kind: CharKind::Fixed(1),
                        derived_type: None,
                        inline_const: None,
                        is_pointer: false,
                        runtime_dim_upper: vec![],
                    },
                );
                // Character case registers its local directly; skip the
                // generic scalar insertion below.
                continue;
            }
        };
        let slot = b.alloca(ir_ty.clone());
        ctx.insert_scalar(key, slot, ir_ty);
    }
}
1605
1606 fn collect_name_refs_stmt(stmt: &crate::ast::stmt::SpannedStmt, out: &mut Vec<String>) {
1607 use crate::ast::stmt::Stmt;
1608 match &stmt.node {
1609 Stmt::Assignment { target, value } | Stmt::PointerAssignment { target, value } => {
1610 collect_name_refs_expr(target, out);
1611 collect_name_refs_expr(value, out);
1612 }
1613 Stmt::IfConstruct {
1614 condition,
1615 then_body,
1616 else_ifs,
1617 else_body,
1618 ..
1619 } => {
1620 collect_name_refs_expr(condition, out);
1621 for s in then_body {
1622 collect_name_refs_stmt(s, out);
1623 }
1624 for (c, b) in else_ifs {
1625 collect_name_refs_expr(c, out);
1626 for s in b {
1627 collect_name_refs_stmt(s, out);
1628 }
1629 }
1630 if let Some(b) = else_body {
1631 for s in b {
1632 collect_name_refs_stmt(s, out);
1633 }
1634 }
1635 }
1636 Stmt::IfStmt { condition, action } => {
1637 collect_name_refs_expr(condition, out);
1638 collect_name_refs_stmt(action, out);
1639 }
1640 Stmt::DoLoop {
1641 var,
1642 start,
1643 end,
1644 step,
1645 body,
1646 ..
1647 } => {
1648 if let Some(v) = var {
1649 out.push(v.clone());
1650 }
1651 if let Some(e) = start {
1652 collect_name_refs_expr(e, out);
1653 }
1654 if let Some(e) = end {
1655 collect_name_refs_expr(e, out);
1656 }
1657 if let Some(s) = step {
1658 collect_name_refs_expr(s, out);
1659 }
1660 for s in body {
1661 collect_name_refs_stmt(s, out);
1662 }
1663 }
1664 Stmt::DoWhile {
1665 condition, body, ..
1666 } => {
1667 collect_name_refs_expr(condition, out);
1668 for s in body {
1669 collect_name_refs_stmt(s, out);
1670 }
1671 }
1672 Stmt::DoConcurrent { body, .. }
1673 | Stmt::Block { body, .. }
1674 | Stmt::Associate { body, .. } => {
1675 for s in body {
1676 collect_name_refs_stmt(s, out);
1677 }
1678 }
1679 Stmt::Print { items, .. } => {
1680 for e in items {
1681 collect_name_refs_expr(e, out);
1682 }
1683 }
1684 Stmt::Write { items, .. } | Stmt::Read { items, .. } => {
1685 for e in items {
1686 collect_name_refs_expr(e, out);
1687 }
1688 }
1689 Stmt::Call { args, .. } => {
1690 for arg in args {
1691 if let crate::ast::expr::SectionSubscript::Element(e) = &arg.value {
1692 collect_name_refs_expr(e, out);
1693 }
1694 }
1695 }
1696 Stmt::Return { value: Some(e) } => collect_name_refs_expr(e, out),
1697 Stmt::Labeled { stmt, .. } => collect_name_refs_stmt(stmt, out),
1698 Stmt::SelectCase {
1699 selector, cases, ..
1700 } => {
1701 collect_name_refs_expr(selector, out);
1702 for case in cases {
1703 for s in &case.body {
1704 collect_name_refs_stmt(s, out);
1705 }
1706 }
1707 }
1708 Stmt::Allocate { items, .. } | Stmt::Deallocate { items, .. } | Stmt::Nullify { items } => {
1709 for e in items {
1710 collect_name_refs_expr(e, out);
1711 }
1712 }
1713 _ => {}
1714 }
1715 }
1716
1717 fn collect_name_refs_expr(expr: &crate::ast::expr::SpannedExpr, out: &mut Vec<String>) {
1718 match &expr.node {
1719 Expr::Name { name } => out.push(name.clone()),
1720 Expr::BinaryOp { left, right, .. } => {
1721 collect_name_refs_expr(left, out);
1722 collect_name_refs_expr(right, out);
1723 }
1724 Expr::UnaryOp { operand, .. } => collect_name_refs_expr(operand, out),
1725 Expr::ParenExpr { inner } => collect_name_refs_expr(inner, out),
1726 Expr::FunctionCall { callee, args } => {
1727 // `arr(i)` and `f(i)` alias at this AST level. Walk both —
1728 // the filter in collect_implicit_locals rejects intrinsics
1729 // and internal procs so real function calls don't become
1730 // implicit locals.
1731 collect_name_refs_expr(callee, out);
1732 for arg in args {
1733 if let crate::ast::expr::SectionSubscript::Element(e) = &arg.value {
1734 collect_name_refs_expr(e, out);
1735 }
1736 if let crate::ast::expr::SectionSubscript::Range { start, end, stride } = &arg.value
1737 {
1738 if let Some(e) = start {
1739 collect_name_refs_expr(e, out);
1740 }
1741 if let Some(e) = end {
1742 collect_name_refs_expr(e, out);
1743 }
1744 if let Some(e) = stride {
1745 collect_name_refs_expr(e, out);
1746 }
1747 }
1748 }
1749 }
1750 Expr::ComponentAccess { base, .. } => collect_name_refs_expr(base, out),
1751 Expr::ArrayConstructor { values, .. } => {
1752 for v in values {
1753 collect_name_refs_acvalue(v, out);
1754 }
1755 }
1756 Expr::ComplexLiteral { real, imag } => {
1757 collect_name_refs_expr(real, out);
1758 collect_name_refs_expr(imag, out);
1759 }
1760 _ => {}
1761 }
1762 }
1763
1764 fn collect_name_refs_acvalue(v: &crate::ast::expr::AcValue, out: &mut Vec<String>) {
1765 match v {
1766 crate::ast::expr::AcValue::Expr(e) => collect_name_refs_expr(e, out),
1767 crate::ast::expr::AcValue::ImpliedDo(inner) => {
1768 for iv in &inner.values {
1769 collect_name_refs_acvalue(iv, out);
1770 }
1771 collect_name_refs_expr(&inner.start, out);
1772 collect_name_refs_expr(&inner.end, out);
1773 if let Some(s) = &inner.step {
1774 collect_name_refs_expr(s, out);
1775 }
1776 }
1777 }
1778 }
1779
1780 fn collect_module_globals(
1781 module: &mut Module,
1782 globals: &mut HashMap<(String, String), ModuleGlobalInfo>,
1783 mod_name: &str,
1784 decls: &[crate::ast::decl::SpannedDecl],
1785 st: &SymbolTable,
1786 type_layouts: &crate::sema::type_layout::TypeLayoutRegistry,
1787 ) {
1788 use crate::ast::decl::Attribute;
1789 // Module-level parameter table built incrementally so a later
1790 // parameter declaration can reference earlier ones.
1791 let param_consts = collect_decl_param_consts(decls);
1792 for decl in decls {
1793 if let Decl::TypeDecl {
1794 type_spec,
1795 attrs,
1796 entities,
1797 } = &decl.node
1798 {
1799 let ir_ty = lower_type_spec_st(type_spec, Some(st));
1800 let derived_type_name = match type_spec {
1801 TypeSpec::Type(name) => Some(name.clone()),
1802 _ => None,
1803 };
1804 let attr_dims: Option<&Vec<ArraySpec>> = attrs.iter().find_map(|a| {
1805 if let Attribute::Dimension(specs) = a {
1806 Some(specs)
1807 } else {
1808 None
1809 }
1810 });
1811 let is_allocatable = attrs.iter().any(|a| matches!(a, Attribute::Allocatable));
1812 let is_pointer = attrs.iter().any(|a| matches!(a, Attribute::Pointer));
1813 let global_char_kind = match type_spec {
1814 TypeSpec::Character(Some(sel)) => match &sel.len {
1815 Some(crate::ast::decl::LenSpec::Expr(e)) => {
1816 eval_const_int_in_scope_or_any_scope(e, &HashMap::new(), st)
1817 .map(CharKind::Fixed)
1818 .unwrap_or(CharKind::None)
1819 }
1820 Some(crate::ast::decl::LenSpec::Colon) => CharKind::Deferred,
1821 Some(crate::ast::decl::LenSpec::Star) => CharKind::None,
1822 None => CharKind::Fixed(1),
1823 },
1824 TypeSpec::Character(None) => CharKind::Fixed(1),
1825 _ => CharKind::None,
1826 };
1827 for entity in entities {
1828 let symbol = format!(
1829 "afs_mod_{}_{}",
1830 mod_name.to_lowercase(),
1831 entity.name.to_lowercase()
1832 );
1833
1834 let array_spec = entity.array_spec.as_ref().or(attr_dims);
1835
1836 // Module-level allocatable and pointer arrays both
1837 // live in a 384-byte descriptor slot. For pointers
1838 // we also mark allocatable=true so the existing
1839 // descriptor helpers treat the global storage as the
1840 // descriptor body rather than an extra layer of
1841 // indirection.
1842 if (is_allocatable || is_pointer) && array_spec.is_some() {
1843 let storage_ty = if let Some(type_name) = &derived_type_name {
1844 derived_storage_ir_type(type_name, type_layouts)
1845 .unwrap_or_else(|| ir_ty.clone())
1846 } else {
1847 ir_ty.clone()
1848 };
1849 let desc_ty = IrType::Array(Box::new(IrType::Int(IntWidth::I8)), 384);
1850 module.add_global(Global {
1851 name: symbol.clone(),
1852 ty: desc_ty,
1853 initializer: Some(GlobalInit::Zero),
1854 });
1855 globals.insert(
1856 (mod_name.to_lowercase(), entity.name.to_lowercase()),
1857 ModuleGlobalInfo {
1858 symbol,
1859 ty: storage_ty,
1860 dims: vec![],
1861 allocatable: true,
1862 is_pointer,
1863 deferred_char: false,
1864 derived_type: derived_type_name.clone(),
1865 char_kind: global_char_kind.clone(),
1866 external: false,
1867 },
1868 );
1869 continue;
1870 }
1871
1872 // Deferred-length allocatable character: 32-byte
1873 // StringDescriptor global.
1874 let is_deferred_char = matches!(type_spec,
1875 TypeSpec::Character(Some(sel)) if matches!(&sel.len, Some(crate::ast::decl::LenSpec::Colon))
1876 );
1877 if is_allocatable && is_deferred_char && array_spec.is_none() {
1878 let desc_ty = IrType::Array(Box::new(IrType::Int(IntWidth::I8)), 32);
1879 module.add_global(Global {
1880 name: symbol.clone(),
1881 ty: desc_ty,
1882 initializer: Some(GlobalInit::Zero),
1883 });
1884 globals.insert(
1885 (mod_name.to_lowercase(), entity.name.to_lowercase()),
1886 ModuleGlobalInfo {
1887 symbol,
1888 ty: ir_ty.clone(),
1889 dims: vec![],
1890 allocatable: false,
1891 is_pointer,
1892 deferred_char: true,
1893 derived_type: None,
1894 char_kind: CharKind::Deferred,
1895 external: false,
1896 },
1897 );
1898 continue;
1899 }
1900
1901 if let Some(specs) = array_spec {
1902 // Array module variable. Compute dims and
1903 // build an array-typed global with a matching
1904 // IntArray/FloatArray initializer when the
1905 // entity.init is an array constructor of
1906 // literal values.
1907 let dims = extract_array_dims(specs, &param_consts);
1908 let total: i64 = dims.iter().map(|(_, e)| *e).product();
1909 if total <= 0 {
1910 continue; // assumed/deferred shape — skip
1911 }
1912 if let Some(type_name) = &derived_type_name {
1913 if let Some(layout) = type_layouts.get(type_name) {
1914 let elem_ty = IrType::Array(
1915 Box::new(IrType::Int(IntWidth::I8)),
1916 layout.size as u64,
1917 );
1918 let global_ty = IrType::Array(Box::new(elem_ty.clone()), total as u64);
1919 module.add_global(Global {
1920 name: symbol.clone(),
1921 ty: global_ty,
1922 initializer: Some(GlobalInit::Zero),
1923 });
1924 globals.insert(
1925 (mod_name.to_lowercase(), entity.name.to_lowercase()),
1926 ModuleGlobalInfo {
1927 symbol,
1928 ty: elem_ty,
1929 dims,
1930 allocatable: false,
1931 is_pointer,
1932 deferred_char: false,
1933 derived_type: Some(type_name.clone()),
1934 char_kind: CharKind::None,
1935 external: false,
1936 },
1937 );
1938 continue;
1939 }
1940 }
1941 let global_ty = IrType::Array(Box::new(ir_ty.clone()), total as u64);
1942
1943 // Audit MAJOR-3: detect over-long initializer
1944 // BEFORE eval_const_array_init returns None.
1945 // Per F2018 §7.4.4, the initializer's shape
1946 // must conform with the variable's declared
1947 // shape; over-long is a hard error.
1948 if let Some(init_e) = &entity.init {
1949 if let Some(scalars) =
1950 collect_const_array_scalars(init_e, &ir_ty, &param_consts)
1951 {
1952 if (scalars.len() as i64) > total {
1953 eprintln!(
1954 "armfortas: error: {}:{}: initializer for '{}' has \
1955 {} elements but its declared shape requires \
1956 {} (audit MAJOR-3 — initializer shape \
1957 mismatch)",
1958 init_e.span.start.line,
1959 init_e.span.start.col,
1960 entity.name,
1961 scalars.len(),
1962 total,
1963 );
1964 let _ = std::io::stderr().flush();
1965 std::process::exit(1);
1966 }
1967 }
1968 }
1969
1970 let init = entity
1971 .init
1972 .as_ref()
1973 .and_then(|e| eval_const_array_init(e, &ir_ty, total, &param_consts));
1974 module.add_global(Global {
1975 name: symbol.clone(),
1976 ty: global_ty,
1977 initializer: Some(init.unwrap_or(GlobalInit::Zero)),
1978 });
1979 globals.insert(
1980 (mod_name.to_lowercase(), entity.name.to_lowercase()),
1981 ModuleGlobalInfo {
1982 symbol,
1983 ty: ir_ty.clone(),
1984 dims,
1985 allocatable: false,
1986 is_pointer,
1987 deferred_char: false,
1988 derived_type: None,
1989 char_kind: global_char_kind.clone(),
1990 external: false,
1991 },
1992 );
1993 } else {
1994 // Scalar module variable.
1995 if is_pointer {
1996 let slot_ty = pointer_slot_type(&ir_ty);
1997 module.add_global(Global {
1998 name: symbol.clone(),
1999 ty: slot_ty,
2000 initializer: Some(GlobalInit::Zero),
2001 });
2002 globals.insert(
2003 (mod_name.to_lowercase(), entity.name.to_lowercase()),
2004 ModuleGlobalInfo {
2005 symbol,
2006 ty: ir_ty.clone(),
2007 dims: vec![],
2008 allocatable: false,
2009 is_pointer: true,
2010 deferred_char: false,
2011 derived_type: derived_type_name.clone(),
2012 char_kind: global_char_kind.clone(),
2013 external: false,
2014 },
2015 );
2016 continue;
2017 }
2018 if let Some(type_name) = &derived_type_name {
2019 if let Some(layout) = type_layouts.get(type_name) {
2020 let scalar_ty = IrType::Array(
2021 Box::new(IrType::Int(IntWidth::I8)),
2022 layout.size as u64,
2023 );
2024 module.add_global(Global {
2025 name: symbol.clone(),
2026 ty: scalar_ty.clone(),
2027 initializer: Some(GlobalInit::Zero),
2028 });
2029 globals.insert(
2030 (mod_name.to_lowercase(), entity.name.to_lowercase()),
2031 ModuleGlobalInfo {
2032 symbol,
2033 ty: scalar_ty,
2034 dims: vec![],
2035 allocatable: false,
2036 is_pointer,
2037 deferred_char: false,
2038 derived_type: Some(type_name.clone()),
2039 char_kind: CharKind::None,
2040 external: false,
2041 },
2042 );
2043 continue;
2044 }
2045 }
2046 let init = entity
2047 .init
2048 .as_ref()
2049 .and_then(|e| eval_const_global_init(e, &param_consts, Some(&ir_ty)));
2050 module.add_global(Global {
2051 name: symbol.clone(),
2052 ty: ir_ty.clone(),
2053 initializer: Some(init.unwrap_or(GlobalInit::Zero)),
2054 });
2055 globals.insert(
2056 (mod_name.to_lowercase(), entity.name.to_lowercase()),
2057 ModuleGlobalInfo {
2058 symbol,
2059 ty: ir_ty.clone(),
2060 dims: vec![],
2061 allocatable: false,
2062 is_pointer,
2063 deferred_char: false,
2064 derived_type: None,
2065 char_kind: global_char_kind.clone(),
2066 external: false,
2067 },
2068 );
2069 }
2070 }
2071 }
2072 }
2073 }
2074
2075 /// Try to evaluate an array constructor as a `GlobalInit`.
2076 /// Handles three forms:
2077 /// 1. `[v0, v1, v2]` literal-element constructor
2078 /// 2. `[(expr, i = lo, hi[, step])]` implied-do iterator
2079 /// 3. `reshape(constructor, shape)` reshape of (1) or (2)
2080 ///
2081 /// Each path produces a flat list of `i128` (for integer types)
2082 /// or `f64` (for float types) of length `total`. Shorter lists
2083 /// are zero-padded; longer lists return `None` (a future Maj-3
2084 /// fix will add a proper diagnostic for shape-mismatch errors).
2085 ///
2086 /// Audit MAJOR-2.
2087 fn eval_const_array_init(
2088 expr: &crate::ast::expr::SpannedExpr,
2089 elem_ty: &IrType,
2090 total: i64,
2091 param_consts: &HashMap<String, ConstScalar>,
2092 ) -> Option<GlobalInit> {
2093 let scalars = collect_const_array_scalars(expr, elem_ty, param_consts)?;
2094 if (scalars.len() as i64) > total {
2095 // Shape mismatch — too many elements. Return None so the
2096 // caller falls back to zero-init. A proper diagnostic is
2097 // tracked under audit MAJOR-3.
2098 return None;
2099 }
2100
2101 let is_float = matches!(elem_ty, IrType::Float(_));
2102 if is_float {
2103 let mut out: Vec<f64> = scalars.iter().map(|s| s.to_float()).collect();
2104 while (out.len() as i64) < total {
2105 out.push(0.0);
2106 }
2107 Some(GlobalInit::FloatArray(out))
2108 } else {
2109 let mut out: Vec<i128> = scalars
2110 .iter()
2111 .map(|s| match s {
2112 ConstScalar::Int(i) => *i,
2113 ConstScalar::Float(f) => *f as i128,
2114 })
2115 .collect();
2116 while (out.len() as i64) < total {
2117 out.push(0);
2118 }
2119 Some(GlobalInit::IntArray(out))
2120 }
2121 }
2122
2123 /// Recursively collect the scalar elements of a constructor
2124 /// expression into a flat Vec. Used by eval_const_array_init to
2125 /// support nested implied-do, reshape, and parameter references
2126 /// uniformly.
2127 ///
2128 /// reshape(source, shape) just produces source's elements in
2129 /// declared order — Fortran's reshape is column-major and
2130 /// reorders dimensions, but for the FLAT linearization the
2131 /// element ordering is identical to source's. We don't yet
2132 /// honor non-trivial shape arguments (only reshape passes that
2133 /// match the source length get folded).
2134 fn collect_const_array_scalars(
2135 expr: &crate::ast::expr::SpannedExpr,
2136 elem_ty: &IrType,
2137 param_consts: &HashMap<String, ConstScalar>,
2138 ) -> Option<Vec<ConstScalar>> {
2139 match &expr.node {
2140 Expr::ArrayConstructor { values, .. } => {
2141 let mut out: Vec<ConstScalar> = Vec::new();
2142 for v in values {
2143 collect_ac_value(v, elem_ty, param_consts, &mut out)?;
2144 }
2145 Some(out)
2146 }
2147 // reshape(source, shape) — pass through source elements.
2148 Expr::FunctionCall { callee, args } => {
2149 if let Expr::Name { name } = &callee.node {
2150 if name.eq_ignore_ascii_case("reshape") && !args.is_empty() {
2151 if let crate::ast::expr::SectionSubscript::Element(src) = &args[0].value {
2152 return collect_const_array_scalars(src, elem_ty, param_consts);
2153 }
2154 }
2155 }
2156 None
2157 }
2158 _ => None,
2159 }
2160 }
2161
2162 /// Collect a single AcValue (which may be a literal element or
2163 /// an implied-do iterator) into a flat scalar list.
2164 fn collect_ac_value(
2165 v: &crate::ast::expr::AcValue,
2166 elem_ty: &IrType,
2167 param_consts: &HashMap<String, ConstScalar>,
2168 out: &mut Vec<ConstScalar>,
2169 ) -> Option<()> {
2170 use crate::ast::expr::AcValue;
2171 match v {
2172 AcValue::Expr(e) => {
2173 let raw = eval_const_scalar(e, param_consts)?;
2174 // Coerce int → float when the destination is float.
2175 let coerced = if matches!(elem_ty, IrType::Float(_)) {
2176 ConstScalar::Float(raw.to_float())
2177 } else {
2178 raw
2179 };
2180 out.push(coerced);
2181 Some(())
2182 }
2183 AcValue::ImpliedDo(ido) => {
2184 let (values, var, start, end, step) =
2185 (&ido.values, &ido.var, &ido.start, &ido.end, &ido.step);
2186 let start_v = eval_const_scalar(start, param_consts)?;
2187 let end_v = eval_const_scalar(end, param_consts)?;
2188 let step_v = match step {
2189 Some(e) => eval_const_scalar(e, param_consts)?,
2190 None => ConstScalar::Int(1),
2191 };
2192 let (ConstScalar::Int(s), ConstScalar::Int(e), ConstScalar::Int(stp)) =
2193 (start_v, end_v, step_v)
2194 else {
2195 return None;
2196 };
2197 if stp == 0 {
2198 return None;
2199 }
2200
2201 // Walk the range, evaluating the inner values for each
2202 // iteration with `var` bound in a temporary param_consts
2203 // overlay.
2204 let mut local_consts = param_consts.clone();
2205 let var_key = var.to_lowercase();
2206 let mut i = s;
2207 // Cap iterations to avoid runaway folding for runtime
2208 // bounds disguised as constants.
2209 let mut steps_remaining: i64 = 1_000_000;
2210 let going_down = stp < 0;
2211 loop {
2212 if steps_remaining == 0 {
2213 return None;
2214 }
2215 steps_remaining -= 1;
2216 let in_range = if going_down { i >= e } else { i <= e };
2217 if !in_range {
2218 break;
2219 }
2220 local_consts.insert(var_key.clone(), ConstScalar::Int(i));
2221 for inner in values {
2222 collect_ac_value(inner, elem_ty, &local_consts, out)?;
2223 }
2224 i = i.wrapping_add(stp);
2225 }
2226 Some(())
2227 }
2228 }
2229 }
2230
2231 fn lower_unit(
2232 module: &mut Module,
2233 unit: &SpannedUnit,
2234 st: &SymbolTable,
2235 globals: &HashMap<(String, String), ModuleGlobalInfo>,
2236 type_layouts: &crate::sema::type_layout::TypeLayoutRegistry,
2237 // Audit CRITICAL-4: USE imports from the host program unit
2238 // (and its hosts, transitively). Per F2018 §16.2, names
2239 // imported into a host are visible in its contained
2240 // subprograms via host association. Each lower_unit call
2241 // accumulates its own uses on top of host_uses and passes
2242 // the combined list down to any nested subprogram. The
2243 // top-level call from lower_file passes an empty slice.
2244 host_uses: &[crate::ast::decl::SpannedDecl],
2245 host_param_consts: &HashMap<String, ConstScalar>,
2246 // `host_decls`: decls of the immediate enclosing program unit.
2247 // Used by contained subprograms to resolve element type, dims,
2248 // and character-kind for each host-associated variable the
2249 // closure-passing ABI threads in as a hidden pointer param.
2250 host_decls: &[crate::ast::decl::SpannedDecl],
2251 host_link_name: Option<&str>,
2252 host_module: Option<&str>,
2253 alloc_return_funcs: &HashSet<String>,
2254 optional_params: &HashMap<String, Vec<bool>>,
2255 descriptor_params: &HashMap<String, Vec<bool>>,
2256 internal_funcs: &HashMap<String, u32>,
2257 elemental_funcs: &HashSet<String>,
2258 char_len_star_params: &HashMap<String, Vec<bool>>,
2259 // `contained_host_refs`: per-callee ordered list of host-local
2260 // names it reads or writes. Drives both callee signature
2261 // (hidden trailing pointer params) and call-site arg list.
2262 contained_host_refs: &HashMap<String, Vec<String>>,
2263 internal_only: bool,
2264 ) {
2265 match &unit.node {
2266 ProgramUnit::Program {
2267 name,
2268 decls,
2269 body,
2270 contains,
2271 uses,
2272 ..
2273 } => {
2274 let fname = name.clone().unwrap_or_else(|| "main".into());
2275 let visible_param_consts =
2276 collect_decl_param_consts_with_host(decls, host_param_consts);
2277 let body_fname = format!("__prog_{}", fname);
2278 let mut func = Function::new(body_fname.clone(), vec![], IrType::Void);
2279 let mut ctx = LowerCtx::new(
2280 st,
2281 globals,
2282 type_layouts,
2283 alloc_return_funcs,
2284 optional_params,
2285 descriptor_params,
2286 internal_funcs,
2287 elemental_funcs,
2288 char_len_star_params,
2289 contained_host_refs,
2290 );
2291 let mut pending_globals: Vec<PendingGlobal> = Vec::new();
2292
2293 let combined_uses: Vec<crate::ast::decl::SpannedDecl> =
2294 host_uses.iter().chain(uses.iter()).cloned().collect();
2295
2296 {
2297 let mut b = FuncBuilder::new(&mut func);
2298 install_common_locals(&mut b, &mut ctx.locals, decls);
2299 install_equivalence_locals(&mut b, &mut ctx.locals, decls);
2300 alloc_decls(
2301 &mut b,
2302 &mut ctx.locals,
2303 decls,
2304 &visible_param_consts,
2305 type_layouts,
2306 &mut pending_globals,
2307 &fname,
2308 st,
2309 );
2310 install_host_param_consts(&mut b, &mut ctx.locals, host_param_consts);
2311 install_globals_as_locals(
2312 &mut b,
2313 &mut ctx.locals,
2314 globals,
2315 &combined_uses,
2316 host_module,
2317 ctx.st,
2318 );
2319 ctx.filtered_names = compute_filtered_names(globals, &combined_uses);
2320 check_no_filtered_refs(body, &ctx.filtered_names);
2321 collect_implicit_locals(&mut b, &mut ctx, body, UnitScope::Program(&fname));
2322 init_decls(&mut b, &ctx.locals, decls, st);
2323 collect_label_blocks(&mut b, body, &mut ctx.label_blocks);
2324 lower_stmts(&mut b, &mut ctx, body);
2325 if b.func().block(b.current_block()).terminator.is_none() {
2326 insert_implicit_dealloc(
2327 &mut b,
2328 &ctx.locals,
2329 &ctx.locals,
2330 type_layouts,
2331 ctx.st,
2332 ctx.internal_funcs,
2333 Some(ctx.contained_host_refs),
2334 None,
2335 );
2336 }
2337 ensure_termination(&mut b, None);
2338 }
2339
2340 module.add_function(func);
2341 for pg in pending_globals {
2342 module.add_global(pg.global);
2343 }
2344
2345 // Lower CONTAINS subprograms. Their host_decls chain is
2346 // this unit's decls PLUS whatever we inherited from our
2347 // own host (via `host_decls`). That way a nested contained
2348 // proc can resolve names from any ancestor scope when
2349 // build_host_ref_params looks up types.
2350 let mut child_host_decls: Vec<crate::ast::decl::SpannedDecl> = decls.to_vec();
2351 child_host_decls.extend(host_decls.iter().cloned());
2352 for sub in contains {
2353 lower_unit(
2354 module,
2355 sub,
2356 st,
2357 globals,
2358 type_layouts,
2359 &combined_uses,
2360 &visible_param_consts,
2361 &child_host_decls,
2362 Some(body_fname.as_str()),
2363 host_module,
2364 alloc_return_funcs,
2365 optional_params,
2366 descriptor_params,
2367 internal_funcs,
2368 elemental_funcs,
2369 char_len_star_params,
2370 contained_host_refs,
2371 true,
2372 );
2373 }
2374 }
2375 ProgramUnit::Subroutine {
2376 name,
2377 decls,
2378 body,
2379 args,
2380 bind,
2381 uses,
2382 contains,
2383 prefix,
2384 ..
2385 } => {
2386 let func_name = lowered_procedure_symbol_name(
2387 name,
2388 bind.as_ref(),
2389 host_link_name,
2390 host_module,
2391 internal_only,
2392 internal_funcs,
2393 );
2394 let visible_param_consts =
2395 collect_decl_param_consts_with_host(decls, host_param_consts);
2396 let mut params: Vec<Param> = args
2397 .iter()
2398 .enumerate()
2399 .filter_map(|(i, arg)| {
2400 if let DummyArg::Name(n) = arg {
2401 let elem_ty = arg_type_from_decls(n, decls, Some(st));
2402 let fortran_noalias = arg_is_fortran_noalias(n, decls);
2403 let uses_descriptor = arg_uses_descriptor_from_decls(n, decls);
2404 let uses_string_descriptor =
2405 arg_uses_string_descriptor_from_decls(n, decls);
2406 if arg_has_value_attr(n, decls) {
2407 // VALUE: pass by value (raw type, not pointer).
2408 Some(Param {
2409 name: n.clone(),
2410 ty: elem_ty,
2411 id: ValueId(i as u32),
2412 fortran_noalias: false,
2413 })
2414 } else {
2415 Some(Param {
2416 name: n.clone(),
2417 ty: if uses_descriptor {
2418 IrType::Ptr(Box::new(IrType::Array(
2419 Box::new(IrType::Int(IntWidth::I8)),
2420 384,
2421 )))
2422 } else if uses_string_descriptor {
2423 IrType::Ptr(Box::new(IrType::Array(
2424 Box::new(IrType::Int(IntWidth::I8)),
2425 32,
2426 )))
2427 } else {
2428 IrType::Ptr(Box::new(elem_ty))
2429 },
2430 id: ValueId(i as u32),
2431 fortran_noalias,
2432 })
2433 }
2434 } else {
2435 None
2436 }
2437 })
2438 .collect();
2439 // Append hidden-length i64 params for character(len=*) dummies.
2440 // Per the standard Fortran ABI, these trail the normal params.
2441 let mut hidden_len_params: Vec<(String, ValueId)> = Vec::new();
2442 let own_cls = char_len_star_params.get(&name.to_lowercase());
2443 if let Some(flags) = own_cls {
2444 let normal_count = params.len();
2445 for (i, (flag, arg)) in flags.iter().zip(args.iter()).enumerate() {
2446 if *flag {
2447 if let DummyArg::Name(n) = arg {
2448 let hid_id = ValueId((normal_count + hidden_len_params.len()) as u32);
2449 params.push(Param {
2450 name: format!("__len_{}", n.to_lowercase()),
2451 ty: IrType::Int(IntWidth::I64),
2452 id: hid_id,
2453 fortran_noalias: false,
2454 });
2455 hidden_len_params.push((n.to_lowercase(), hid_id));
2456 }
2457 }
2458 let _ = i;
2459 }
2460 }
2461
2462 // Host-association closure params. Trailing pointer params,
2463 // one per host-local variable this contained proc reads or
2464 // writes. Order matches contained_host_refs[name].
2465 let host_ref_infos = build_host_ref_params(
2466 name,
2467 host_decls,
2468 host_param_consts,
2469 contained_host_refs,
2470 params.len() as u32,
2471 st,
2472 &mut params,
2473 );
2474
2475 let mut func = Function::new(func_name.clone(), params, IrType::Void);
2476 use crate::ast::unit::Prefix;
2477 func.is_pure = prefix.iter().any(|p| matches!(p, Prefix::Pure));
2478 func.is_elemental = prefix.iter().any(|p| matches!(p, Prefix::Elemental));
2479 func.internal_only = internal_only;
2480 let mut ctx = LowerCtx::new(
2481 st,
2482 globals,
2483 type_layouts,
2484 alloc_return_funcs,
2485 optional_params,
2486 descriptor_params,
2487 internal_funcs,
2488 elemental_funcs,
2489 char_len_star_params,
2490 contained_host_refs,
2491 );
2492 let mut pending_globals: Vec<PendingGlobal> = Vec::new();
2493 let combined_uses: Vec<crate::ast::decl::SpannedDecl> =
2494 host_uses.iter().chain(uses.iter()).cloned().collect();
2495
2496 // Collect param info: (name, param_id, elem_type, is_value).
2497 // Skip hidden params: __len_* (character-length) and __host_*
2498 // (host-association closure pointers) — they are installed
2499 // by separate paths below.
2500 let param_info: Vec<(String, ValueId, IrType, bool)> = func
2501 .params
2502 .iter()
2503 .filter(|p| !p.name.starts_with("__len_") && !p.name.starts_with("__host_"))
2504 .map(|p| {
2505 let pname = p.name.to_lowercase();
2506 let elem_ty = arg_type_from_decls(&pname, decls, Some(st));
2507 let is_value = arg_has_value_attr(&pname, decls);
2508 (pname, p.id, elem_ty, is_value)
2509 })
2510 .collect();
2511
2512 {
2513 let mut b = FuncBuilder::new(&mut func);
2514
2515 // Set up hidden-length locals for assumed-len char dummies.
2516 let mut hidden_len_addrs: HashMap<String, ValueId> = HashMap::new();
2517 for (hname, hid) in &hidden_len_params {
2518 let slot = b.alloca(IrType::Int(IntWidth::I64));
2519 b.store(*hid, slot);
2520 hidden_len_addrs.insert(hname.clone(), slot);
2521 }
2522
2523 for (pname, pid, elem_ty, is_value) in &param_info {
2524 if *is_value {
2525 let slot = b.alloca(elem_ty.clone());
2526 b.store(*pid, slot);
2527 ctx.insert_scalar(pname.clone(), slot, elem_ty.clone());
2528 } else {
2529 let uses_descriptor = arg_uses_descriptor_from_decls(pname, decls);
2530 let uses_string_descriptor =
2531 arg_uses_string_descriptor_from_decls(pname, decls);
2532 let slot = if uses_descriptor {
2533 b.alloca(IrType::Ptr(Box::new(IrType::Array(
2534 Box::new(IrType::Int(IntWidth::I8)),
2535 384,
2536 ))))
2537 } else if uses_string_descriptor {
2538 b.alloca(IrType::Ptr(Box::new(IrType::Array(
2539 Box::new(IrType::Int(IntWidth::I8)),
2540 32,
2541 ))))
2542 } else {
2543 b.alloca(IrType::Ptr(Box::new(elem_ty.clone())))
2544 };
2545 b.store(*pid, slot);
2546 // Check if this is a derived type parameter.
2547 let dt_name = arg_derived_type_name(pname, decls);
2548 let ck = if let Some(&len_slot) = hidden_len_addrs.get(pname) {
2549 CharKind::AssumedLen { len_addr: len_slot }
2550 } else {
2551 arg_char_kind_from_decls(pname, decls, st)
2552 };
2553 let info = LocalInfo {
2554 addr: slot,
2555 ty: elem_ty.clone(),
2556 dims: arg_dims_from_decls(pname, decls, &visible_param_consts),
2557 allocatable: false,
2558 descriptor_arg: uses_descriptor,
2559 by_ref: true,
2560 char_kind: ck,
2561 derived_type: dt_name,
2562 inline_const: None,
2563 is_pointer: decl_is_pointer(pname, decls),
2564 runtime_dim_upper: vec![],
2565 };
2566 ctx.locals.insert(pname.clone(), info);
2567 }
2568 }
2569
2570 for (pname, _, _, is_value) in &param_info {
2571 if *is_value || hidden_len_addrs.contains_key(pname) {
2572 continue;
2573 }
2574 let Some(len_expr) = arg_runtime_char_len_expr_from_decls(pname, decls, st)
2575 else {
2576 continue;
2577 };
2578 let len_raw = lower_expr(&mut b, &ctx.locals, &len_expr, ctx.st);
2579 let len_addr = b.alloca(IrType::Int(IntWidth::I64));
2580 let len_val = clamp_nonnegative_i64(&mut b, len_raw);
2581 b.store(len_val, len_addr);
2582 if let Some(info) = ctx.locals.get_mut(pname) {
2583 info.char_kind = CharKind::FixedRuntime { len_addr };
2584 }
2585 }
2586 // Explicit-shape dummies whose upper bound is itself
2587 // a (non-const) dummy argument — e.g. `xs(n)` — need
2588 // the bound evaluated at runtime on function entry.
2589 // arg_dims_from_decls falls back to (1, 1) when the
2590 // bound isn't const-foldable, which would produce
2591 // spurious bounds-check failures. Walk every by_ref
2592 // dummy, lower its bound expressions now (all other
2593 // dummies are already in ctx.locals), and stash the
2594 // i64 result into runtime_dim_upper.
2595 install_runtime_dim_bounds(
2596 &mut b,
2597 &mut ctx.locals,
2598 decls,
2599 &visible_param_consts,
2600 ctx.st,
2601 );
2602
2603 install_common_locals(&mut b, &mut ctx.locals, decls);
2604 install_equivalence_locals(&mut b, &mut ctx.locals, decls);
2605 // Install host-association by_ref locals before alloc_decls
2606 // so any same-named callee local (shouldn't occur per F
2607 // scoping rules) is short-circuited, and so init_decls has
2608 // them available for initialization expressions that
2609 // reference host vars.
2610 install_host_ref_locals(&mut b, &mut ctx.locals, &host_ref_infos);
2611 alloc_decls(
2612 &mut b,
2613 &mut ctx.locals,
2614 decls,
2615 &visible_param_consts,
2616 type_layouts,
2617 &mut pending_globals,
2618 &func_name,
2619 st,
2620 );
2621 install_host_param_consts(&mut b, &mut ctx.locals, host_param_consts);
2622 install_globals_as_locals(
2623 &mut b,
2624 &mut ctx.locals,
2625 globals,
2626 &combined_uses,
2627 host_module,
2628 ctx.st,
2629 );
2630 ctx.filtered_names = compute_filtered_names(globals, &combined_uses);
2631 check_no_filtered_refs(body, &ctx.filtered_names);
2632 collect_implicit_locals(&mut b, &mut ctx, body, UnitScope::Subroutine(name));
2633 init_decls(&mut b, &ctx.locals, decls, st);
2634 // Pre-create blocks for all statement labels so GOTO can branch forward.
2635 collect_label_blocks(&mut b, body, &mut ctx.label_blocks);
2636 lower_stmts(&mut b, &mut ctx, body);
2637 if b.func().block(b.current_block()).terminator.is_none() {
2638 insert_implicit_dealloc(
2639 &mut b,
2640 &ctx.locals,
2641 &ctx.locals,
2642 type_layouts,
2643 ctx.st,
2644 ctx.internal_funcs,
2645 Some(ctx.contained_host_refs),
2646 None,
2647 );
2648 }
2649 ensure_termination(&mut b, None);
2650 }
2651
2652 module.add_function(func);
2653 for pg in pending_globals {
2654 module.add_global(pg.global);
2655 }
2656
2657 // Lower nested CONTAINS subprograms (this was a latent
2658 // bug — the previous code only walked Program::contains).
2659 // Each nested sub inherits this subroutine's combined
2660 // host_uses + own uses, and its host_decls chain is our
2661 // `decls` followed by whatever host_decls we inherited —
2662 // so a two-level-nested contained proc can look up host
2663 // variables that live two scopes above it.
2664 let mut child_host_decls: Vec<crate::ast::decl::SpannedDecl> = decls.to_vec();
2665 child_host_decls.extend(host_decls.iter().cloned());
2666 for sub in contains {
2667 lower_unit(
2668 module,
2669 sub,
2670 st,
2671 globals,
2672 type_layouts,
2673 &combined_uses,
2674 &visible_param_consts,
2675 &child_host_decls,
2676 Some(func_name.as_str()),
2677 host_module,
2678 alloc_return_funcs,
2679 optional_params,
2680 descriptor_params,
2681 internal_funcs,
2682 elemental_funcs,
2683 char_len_star_params,
2684 contained_host_refs,
2685 true,
2686 );
2687 }
2688 }
2689 ProgramUnit::Function {
2690 name,
2691 decls,
2692 body,
2693 args,
2694 result,
2695 return_type,
2696 bind,
2697 uses,
2698 contains,
2699 prefix,
2700 ..
2701 } => {
2702 let func_name = lowered_procedure_symbol_name(
2703 name,
2704 bind.as_ref(),
2705 host_link_name,
2706 host_module,
2707 internal_only,
2708 internal_funcs,
2709 );
2710 let visible_param_consts =
2711 collect_decl_param_consts_with_host(decls, host_param_consts);
2712
2713 // Hidden-result ABI: allocatable arrays use a 384-byte array
2714 // descriptor, while deferred-length scalar character results use
2715 // a 32-byte string descriptor. In both cases the caller provides
2716 // the descriptor storage as param 0 and the callee returns void.
2717 let hidden_result_abi = function_hidden_result_abi(name, result, decls);
2718 let uses_hidden_result = hidden_result_abi != HiddenResultAbi::None;
2719
2720 let (func_params, ir_ret_ty) = if uses_hidden_result {
2721 let desc_size = match hidden_result_abi {
2722 HiddenResultAbi::ArrayDescriptor => 384,
2723 HiddenResultAbi::StringDescriptor => 32,
2724 HiddenResultAbi::None => 0,
2725 };
2726 let desc_ptr_ty = IrType::Ptr(Box::new(IrType::Array(
2727 Box::new(IrType::Int(IntWidth::I8)),
2728 desc_size,
2729 )));
2730 let sret = Param {
2731 name: "_sret".into(),
2732 ty: desc_ptr_ty,
2733 id: ValueId(0),
2734 fortran_noalias: false,
2735 };
2736 // Real args shifted by 1 so _sret is param 0.
2737 let real: Vec<Param> = args
2738 .iter()
2739 .enumerate()
2740 .filter_map(|(i, arg)| {
2741 if let DummyArg::Name(n) = arg {
2742 let elem_ty = arg_type_from_decls(n, decls, Some(st));
2743 let fortran_noalias = arg_is_fortran_noalias(n, decls);
2744 let uses_descriptor = arg_uses_descriptor_from_decls(n, decls);
2745 let uses_string_descriptor =
2746 arg_uses_string_descriptor_from_decls(n, decls);
2747 if arg_has_value_attr(n, decls) {
2748 Some(Param {
2749 name: n.clone(),
2750 ty: elem_ty,
2751 id: ValueId(i as u32 + 1),
2752 fortran_noalias: false,
2753 })
2754 } else {
2755 Some(Param {
2756 name: n.clone(),
2757 ty: if uses_descriptor {
2758 IrType::Ptr(Box::new(IrType::Array(
2759 Box::new(IrType::Int(IntWidth::I8)),
2760 384,
2761 )))
2762 } else if uses_string_descriptor {
2763 IrType::Ptr(Box::new(IrType::Array(
2764 Box::new(IrType::Int(IntWidth::I8)),
2765 32,
2766 )))
2767 } else {
2768 IrType::Ptr(Box::new(elem_ty))
2769 },
2770 id: ValueId(i as u32 + 1),
2771 fortran_noalias,
2772 })
2773 }
2774 } else {
2775 None
2776 }
2777 })
2778 .collect();
2779 let mut params = vec![sret];
2780 params.extend(real);
2781 (params, IrType::Void)
2782 } else {
2783 let ret_ty = return_type
2784 .as_ref()
2785 .map(|ts| lower_type_spec_st(ts, Some(st)))
2786 .unwrap_or_else(|| {
2787 let result_name = result.as_deref().unwrap_or(name.as_str());
2788 arg_type_from_decls(result_name, decls, Some(st))
2789 });
2790 let params: Vec<Param> = args
2791 .iter()
2792 .enumerate()
2793 .filter_map(|(i, arg)| {
2794 if let DummyArg::Name(n) = arg {
2795 let elem_ty = arg_type_from_decls(n, decls, Some(st));
2796 let fortran_noalias = arg_is_fortran_noalias(n, decls);
2797 let uses_descriptor = arg_uses_descriptor_from_decls(n, decls);
2798 let uses_string_descriptor =
2799 arg_uses_string_descriptor_from_decls(n, decls);
2800 if arg_has_value_attr(n, decls) {
2801 Some(Param {
2802 name: n.clone(),
2803 ty: elem_ty,
2804 id: ValueId(i as u32),
2805 fortran_noalias: false,
2806 })
2807 } else {
2808 Some(Param {
2809 name: n.clone(),
2810 ty: if uses_descriptor {
2811 IrType::Ptr(Box::new(IrType::Array(
2812 Box::new(IrType::Int(IntWidth::I8)),
2813 384,
2814 )))
2815 } else if uses_string_descriptor {
2816 IrType::Ptr(Box::new(IrType::Array(
2817 Box::new(IrType::Int(IntWidth::I8)),
2818 32,
2819 )))
2820 } else {
2821 IrType::Ptr(Box::new(elem_ty))
2822 },
2823 id: ValueId(i as u32),
2824 fortran_noalias,
2825 })
2826 }
2827 } else {
2828 None
2829 }
2830 })
2831 .collect();
2832 (params, ret_ty)
2833 };
2834
2835 // Host-association closure params for contained functions.
2836 // Trailing pointer params, one per host-local variable the
2837 // body reads or writes. See `build_host_ref_params`.
2838 let mut func_params = func_params;
2839 let mut hidden_len_params: Vec<(String, ValueId)> = Vec::new();
2840 let own_cls = char_len_star_params.get(&name.to_lowercase());
2841 if let Some(flags) = own_cls {
2842 let normal_count = func_params.len();
2843 for (flag, arg) in flags.iter().zip(args.iter()) {
2844 if *flag {
2845 if let DummyArg::Name(n) = arg {
2846 let hid_id = ValueId((normal_count + hidden_len_params.len()) as u32);
2847 func_params.push(Param {
2848 name: format!("__len_{}", n.to_lowercase()),
2849 ty: IrType::Int(IntWidth::I64),
2850 id: hid_id,
2851 fortran_noalias: false,
2852 });
2853 hidden_len_params.push((n.to_lowercase(), hid_id));
2854 }
2855 }
2856 }
2857 }
2858 let host_ref_infos = build_host_ref_params(
2859 name,
2860 host_decls,
2861 host_param_consts,
2862 contained_host_refs,
2863 func_params.len() as u32,
2864 st,
2865 &mut func_params,
2866 );
2867
2868 let mut func = Function::new(func_name.clone(), func_params, ir_ret_ty.clone());
2869 // Propagate PURE/ELEMENTAL from AST prefix.
2870 use crate::ast::unit::Prefix;
2871 func.is_pure = prefix.iter().any(|p| matches!(p, Prefix::Pure));
2872 func.is_elemental = prefix.iter().any(|p| matches!(p, Prefix::Elemental));
2873 func.internal_only = internal_only;
2874 let mut ctx = LowerCtx::new(
2875 st,
2876 globals,
2877 type_layouts,
2878 alloc_return_funcs,
2879 optional_params,
2880 descriptor_params,
2881 internal_funcs,
2882 elemental_funcs,
2883 char_len_star_params,
2884 contained_host_refs,
2885 );
2886 ctx.is_alloc_return = uses_hidden_result;
2887 let mut pending_globals: Vec<PendingGlobal> = Vec::new();
2888 let combined_uses: Vec<crate::ast::decl::SpannedDecl> =
2889 host_uses.iter().chain(uses.iter()).cloned().collect();
2890
2891 // Build param_info skipping the sret param (not a Fortran
2892 // variable) and __host_* closure-passing pointers (installed
2893 // via install_host_ref_locals below).
2894 let param_info: Vec<(String, ValueId, IrType, bool)> = func
2895 .params
2896 .iter()
2897 .filter(|p| {
2898 p.name != "_sret"
2899 && !p.name.starts_with("__len_")
2900 && !p.name.starts_with("__host_")
2901 })
2902 .map(|p| {
2903 let pname = p.name.to_lowercase();
2904 let elem_ty = arg_type_from_decls(&pname, decls, Some(st));
2905 let is_value = arg_has_value_attr(&pname, decls);
2906 (pname, p.id, elem_ty, is_value)
2907 })
2908 .collect();
2909
2910 {
2911 let mut b = FuncBuilder::new(&mut func);
2912
2913 let mut hidden_len_addrs: HashMap<String, ValueId> = HashMap::new();
2914 for (hname, hid) in &hidden_len_params {
2915 let slot = b.alloca(IrType::Int(IntWidth::I64));
2916 b.store(*hid, slot);
2917 hidden_len_addrs.insert(hname.clone(), slot);
2918 }
2919
2920 for (pname, pid, elem_ty, is_value) in &param_info {
2921 if *is_value {
2922 let slot = b.alloca(elem_ty.clone());
2923 b.store(*pid, slot);
2924 ctx.insert_scalar(pname.clone(), slot, elem_ty.clone());
2925 } else {
2926 let uses_descriptor = arg_uses_descriptor_from_decls(pname, decls);
2927 let uses_string_descriptor =
2928 arg_uses_string_descriptor_from_decls(pname, decls);
2929 let slot = if uses_descriptor {
2930 b.alloca(IrType::Ptr(Box::new(IrType::Array(
2931 Box::new(IrType::Int(IntWidth::I8)),
2932 384,
2933 ))))
2934 } else if uses_string_descriptor {
2935 b.alloca(IrType::Ptr(Box::new(IrType::Array(
2936 Box::new(IrType::Int(IntWidth::I8)),
2937 32,
2938 ))))
2939 } else {
2940 b.alloca(IrType::Ptr(Box::new(elem_ty.clone())))
2941 };
2942 b.store(*pid, slot);
2943 let dt_name = arg_derived_type_name(pname, decls);
2944 let ck = if let Some(&len_slot) = hidden_len_addrs.get(pname) {
2945 CharKind::AssumedLen { len_addr: len_slot }
2946 } else {
2947 arg_char_kind_from_decls(pname, decls, st)
2948 };
2949 ctx.locals.insert(
2950 pname.clone(),
2951 LocalInfo {
2952 addr: slot,
2953 ty: elem_ty.clone(),
2954 dims: arg_dims_from_decls(pname, decls, &visible_param_consts),
2955 allocatable: false,
2956 descriptor_arg: uses_descriptor,
2957 by_ref: true,
2958 char_kind: ck,
2959 derived_type: dt_name,
2960 inline_const: None,
2961 is_pointer: decl_is_pointer(pname, decls),
2962 runtime_dim_upper: vec![],
2963 },
2964 );
2965 }
2966 }
2967
2968 for (pname, _, _, is_value) in &param_info {
2969 if *is_value || hidden_len_addrs.contains_key(pname) {
2970 continue;
2971 }
2972 let Some(len_expr) = arg_runtime_char_len_expr_from_decls(pname, decls, st)
2973 else {
2974 continue;
2975 };
2976 let len_raw = lower_expr(&mut b, &ctx.locals, &len_expr, ctx.st);
2977 let len_addr = b.alloca(IrType::Int(IntWidth::I64));
2978 let len_val = clamp_nonnegative_i64(&mut b, len_raw);
2979 b.store(len_val, len_addr);
2980 if let Some(info) = ctx.locals.get_mut(pname) {
2981 info.char_kind = CharKind::FixedRuntime { len_addr };
2982 }
2983 }
2984 install_runtime_dim_bounds(
2985 &mut b,
2986 &mut ctx.locals,
2987 decls,
2988 &visible_param_consts,
2989 ctx.st,
2990 );
2991
2992 let result_name = result.as_deref().unwrap_or(name.as_str()).to_lowercase();
2993
2994 let result_is_pointer = decl_is_pointer(&result_name, decls);
2995
2996 if hidden_result_abi == HiddenResultAbi::ArrayDescriptor {
2997 // The hidden first param is the caller-provided array descriptor.
2998 let elem_ty = arg_type_from_decls(&result_name, decls, Some(st));
2999 ctx.locals.insert(
3000 result_name.clone(),
3001 LocalInfo {
3002 addr: ValueId(0),
3003 ty: elem_ty,
3004 dims: vec![],
3005 allocatable: true,
3006 descriptor_arg: false,
3007 by_ref: false,
3008 char_kind: CharKind::None,
3009 derived_type: None,
3010 inline_const: None,
3011 is_pointer: false,
3012 runtime_dim_upper: vec![],
3013 },
3014 );
3015 } else if hidden_result_abi == HiddenResultAbi::StringDescriptor {
3016 // Deferred-length scalar character result: the hidden first
3017 // param is a caller-provided StringDescriptor.
3018 ctx.locals.insert(
3019 result_name.clone(),
3020 LocalInfo {
3021 addr: ValueId(0),
3022 ty: IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
3023 dims: vec![],
3024 allocatable: true,
3025 descriptor_arg: false,
3026 by_ref: false,
3027 char_kind: CharKind::Deferred,
3028 derived_type: None,
3029 inline_const: None,
3030 is_pointer: result_is_pointer,
3031 runtime_dim_upper: vec![],
3032 },
3033 );
3034 } else if result_is_pointer {
3035 let result_addr = b.alloca(ir_ret_ty.clone());
3036 let zero_byte = b.const_i32(0);
3037 let eight = b.const_i64(8);
3038 b.call(
3039 FuncRef::External("memset".into()),
3040 vec![result_addr, zero_byte, eight],
3041 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
3042 );
3043 ctx.locals.insert(
3044 result_name.clone(),
3045 LocalInfo {
3046 addr: result_addr,
3047 ty: match &ir_ret_ty {
3048 IrType::Ptr(elem) => (**elem).clone(),
3049 other => other.clone(),
3050 },
3051 dims: vec![],
3052 allocatable: false,
3053 descriptor_arg: false,
3054 by_ref: false,
3055 char_kind: CharKind::None,
3056 derived_type: derived_type_name_for_result_var(
3057 return_type,
3058 &result_name,
3059 decls,
3060 ),
3061 inline_const: None,
3062 is_pointer: true,
3063 runtime_dim_upper: vec![],
3064 },
3065 );
3066 ctx.result_addr = Some(result_addr);
3067 ctx.result_type = Some(ir_ret_ty.clone());
3068 } else if let Some(dt_name) =
3069 derived_type_name_for_result_var(return_type, &result_name, decls)
3070 {
3071 // Derived-type FUNCTION result: allocate a struct-shaped
3072 // buffer ([i8 x layout.size]) and register the result
3073 // variable with `derived_type = Some(name)` so component
3074 // access (e.g. `vec_add%x = ...`) lands on the buffer.
3075 // Without this, the generic `b.alloca(ir_ret_ty)` path
3076 // allocates a `ptr<ptr<i8>>` slot, ComponentAccess can't
3077 // resolve the type name, and every assignment to the
3078 // result variable is silently dropped. derived_type_name_
3079 // for_result_var accepts both header-level (`type(t)
3080 // function f`) and body-level (`function f result(r);
3081 // type(t) :: r`) declarations.
3082 let layout = type_layouts.get(&dt_name);
3083 let size = layout.map(|l| l.size as u64).unwrap_or(8);
3084 let buf_ty = IrType::Array(Box::new(IrType::Int(IntWidth::I8)), size);
3085 let result_addr = b.alloca(buf_ty);
3086 ctx.locals.insert(
3087 result_name.clone(),
3088 LocalInfo {
3089 addr: result_addr,
3090 ty: IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
3091 dims: vec![],
3092 allocatable: false,
3093 descriptor_arg: false,
3094 by_ref: false,
3095 char_kind: CharKind::None,
3096 derived_type: Some(dt_name),
3097 inline_const: None,
3098 is_pointer: false,
3099 runtime_dim_upper: vec![],
3100 },
3101 );
3102 ctx.result_addr = Some(result_addr);
3103 ctx.result_type = Some(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
3104 } else {
3105 let result_addr = b.alloca(ir_ret_ty.clone());
3106 ctx.insert_scalar(result_name.clone(), result_addr, ir_ret_ty.clone());
3107 ctx.result_addr = Some(result_addr);
3108 ctx.result_type = Some(ir_ret_ty.clone());
3109 }
3110
3111 install_common_locals(&mut b, &mut ctx.locals, decls);
3112 install_equivalence_locals(&mut b, &mut ctx.locals, decls);
3113 install_host_ref_locals(&mut b, &mut ctx.locals, &host_ref_infos);
3114 alloc_decls(
3115 &mut b,
3116 &mut ctx.locals,
3117 decls,
3118 &visible_param_consts,
3119 type_layouts,
3120 &mut pending_globals,
3121 &func_name,
3122 st,
3123 );
3124 install_host_param_consts(&mut b, &mut ctx.locals, host_param_consts);
3125 install_globals_as_locals(
3126 &mut b,
3127 &mut ctx.locals,
3128 globals,
3129 &combined_uses,
3130 host_module,
3131 ctx.st,
3132 );
3133 ctx.filtered_names = compute_filtered_names(globals, &combined_uses);
3134 check_no_filtered_refs(body, &ctx.filtered_names);
3135 collect_implicit_locals(&mut b, &mut ctx, body, UnitScope::Function(name));
3136 init_decls(&mut b, &ctx.locals, decls, st);
3137 collect_label_blocks(&mut b, body, &mut ctx.label_blocks);
3138 lower_stmts(&mut b, &mut ctx, body);
3139
3140 if b.func().block(b.current_block()).terminator.is_none() {
3141 let skip = if uses_hidden_result {
3142 Some(ValueId(0))
3143 } else {
3144 None
3145 };
3146 insert_implicit_dealloc(
3147 &mut b,
3148 &ctx.locals,
3149 &ctx.locals,
3150 type_layouts,
3151 ctx.st,
3152 ctx.internal_funcs,
3153 Some(ctx.contained_host_refs),
3154 skip,
3155 );
3156 if uses_hidden_result {
3157 b.ret(None);
3158 } else if derived_type_name_for_result_var(return_type, &result_name, decls)
3159 .is_some()
3160 {
3161 // Derived-type result: return the buffer
3162 // address as a Ptr(i8) (the declared return
3163 // type). A zero-offset GEP through `i8`
3164 // reshapes Ptr(Array(i8, N)) into Ptr(i8).
3165 let result_addr = ctx
3166 .result_addr
3167 .expect("derived-return function has result_addr");
3168 let zero = b.const_i64(0);
3169 let byte_ptr = b.gep(result_addr, vec![zero], IrType::Int(IntWidth::I8));
3170 b.ret(Some(byte_ptr));
3171 } else {
3172 let result_addr =
3173 ctx.result_addr.expect("non-sret function has result_addr");
3174 let rv = b.load(result_addr);
3175 b.ret(Some(rv));
3176 }
3177 }
3178 }
3179
3180 module.add_function(func);
3181 for pg in pending_globals {
3182 module.add_global(pg.global);
3183 }
3184
3185 // Lower nested CONTAINS subprograms with the accumulated
3186 // host_decls chain (our decls + inherited).
3187 let mut child_host_decls: Vec<crate::ast::decl::SpannedDecl> = decls.to_vec();
3188 child_host_decls.extend(host_decls.iter().cloned());
3189 for sub in contains {
3190 lower_unit(
3191 module,
3192 sub,
3193 st,
3194 globals,
3195 type_layouts,
3196 &combined_uses,
3197 &visible_param_consts,
3198 &child_host_decls,
3199 Some(func_name.as_str()),
3200 host_module,
3201 alloc_return_funcs,
3202 optional_params,
3203 descriptor_params,
3204 internal_funcs,
3205 elemental_funcs,
3206 char_len_star_params,
3207 contained_host_refs,
3208 true,
3209 );
3210 }
3211 }
3212 ProgramUnit::Module {
3213 decls,
3214 uses,
3215 contains,
3216 ..
3217 } => {
3218 // Module globals are installed in pass 1 (collect_module_globals).
3219 // The module body has no executable statements, but its CONTAINS
3220 // subprograms (module procedures) must be lowered as top-level
3221 // functions so they are emitted into the object file.
3222 let visible_param_consts =
3223 collect_decl_param_consts_with_host(decls, host_param_consts);
3224 let combined_uses: Vec<crate::ast::decl::SpannedDecl> =
3225 host_uses.iter().chain(uses.iter()).cloned().collect();
3226 let module_name = match &unit.node {
3227 ProgramUnit::Module { name, .. } => Some(name.as_str()),
3228 _ => None,
3229 };
3230 // Module procedures don't have host-local closure association;
3231 // they resolve module-level names through globals. Pass an
3232 // empty host_decls slice.
3233 let no_host_decls: Vec<crate::ast::decl::SpannedDecl> = Vec::new();
3234 for sub in contains {
3235 lower_unit(
3236 module,
3237 sub,
3238 st,
3239 globals,
3240 type_layouts,
3241 &combined_uses,
3242 &visible_param_consts,
3243 &no_host_decls,
3244 None,
3245 module_name,
3246 alloc_return_funcs,
3247 optional_params,
3248 descriptor_params,
3249 internal_funcs,
3250 elemental_funcs,
3251 char_len_star_params,
3252 contained_host_refs,
3253 false,
3254 );
3255 }
3256 }
3257 ProgramUnit::Submodule {
3258 parent,
3259 decls,
3260 uses,
3261 contains,
3262 ..
3263 } => {
3264 // F2018 §11.2.3: a submodule provides implementations for the
3265 // separate-module procedures declared in its parent module's
3266 // interface block. The parent module already installed its
3267 // globals in pass 1; the submodule's own decls (if any) act
3268 // like additional private module-scope state. We treat the
3269 // submodule's CONTAINS subprograms exactly like the parent
3270 // module's contains — emit them as top-level functions whose
3271 // host scope is the parent module — so the linker sees the
3272 // implementations the program later calls into.
3273 let visible_param_consts =
3274 collect_decl_param_consts_with_host(decls, host_param_consts);
3275 let combined_uses: Vec<crate::ast::decl::SpannedDecl> =
3276 host_uses.iter().chain(uses.iter()).cloned().collect();
3277 let no_host_decls: Vec<crate::ast::decl::SpannedDecl> = Vec::new();
3278 for sub in contains {
3279 lower_unit(
3280 module,
3281 sub,
3282 st,
3283 globals,
3284 type_layouts,
3285 &combined_uses,
3286 &visible_param_consts,
3287 &no_host_decls,
3288 None,
3289 Some(parent.as_str()),
3290 alloc_return_funcs,
3291 optional_params,
3292 descriptor_params,
3293 internal_funcs,
3294 elemental_funcs,
3295 char_len_star_params,
3296 contained_host_refs,
3297 false,
3298 );
3299 }
3300 }
3301 _ => {}
3302 }
3303 }
3304
3305 /// Emit IR instructions that materialize a folded constant
3306 /// scalar at the given target type. Used by Maj4 parameter
3307 /// inlining: when an `Expr::Name` references a parameter whose
3308 /// initializer const-folds, we emit `b.const_i32(value)` (or
3309 /// the appropriate width) directly instead of going through a
3310 /// global address + load.
3311 fn materialize_const_scalar(b: &mut FuncBuilder, c: ConstScalar, target: &IrType) -> ValueId {
3312 match (c, target) {
3313 (ConstScalar::Int(i), IrType::Int(IntWidth::I128)) => b.const_i128(i),
3314 (ConstScalar::Int(i), IrType::Int(IntWidth::I64)) => b.const_i64(i as i64),
3315 (ConstScalar::Int(i), IrType::Int(_)) => b.const_i32(i as i32),
3316 (ConstScalar::Int(i), IrType::Bool) => b.const_bool(i != 0),
3317 (ConstScalar::Int(i), IrType::Float(FloatWidth::F64)) => b.const_f64(i as f64),
3318 (ConstScalar::Int(i), IrType::Float(FloatWidth::F32)) => b.const_f32(i as f32),
3319 (ConstScalar::Float(f), IrType::Float(FloatWidth::F64)) => b.const_f64(f),
3320 (ConstScalar::Float(f), IrType::Float(FloatWidth::F32)) => b.const_f32(f as f32),
3321 (ConstScalar::Float(f), IrType::Int(IntWidth::I128)) => b.const_i128(f as i128),
3322 (ConstScalar::Float(f), IrType::Int(IntWidth::I64)) => b.const_i64(f as i64),
3323 (ConstScalar::Float(f), IrType::Int(_)) => b.const_i32(f as i32),
3324 // Fallback — emit a zero of the target's class.
3325 _ => b.const_i32(0),
3326 }
3327 }
3328
3329 /// Sign-extend an i64 const value at the target IR type's width.
3330 /// `integer(kind=1) :: x = 256` parses to 256, which doesn't fit
3331 /// in i8; the user almost certainly meant the truncation
3332 /// (`256 mod 256 = 0`). Clamp by masking to the low N bits and
3333 /// re-sign-extending. Out-of-range floats and aggregates are
3334 /// passed through unchanged. Audit CRITICAL-2.
3335 fn clamp_const_to_type(v: ConstScalar, target: &IrType) -> ConstScalar {
3336 match (v, target) {
3337 (ConstScalar::Int(i), IrType::Int(IntWidth::I8)) => ConstScalar::Int((i as i8) as i128),
3338 (ConstScalar::Int(i), IrType::Int(IntWidth::I16)) => ConstScalar::Int((i as i16) as i128),
3339 (ConstScalar::Int(i), IrType::Int(IntWidth::I32)) => ConstScalar::Int((i as i32) as i128),
3340 (ConstScalar::Int(i), IrType::Int(IntWidth::I64)) => ConstScalar::Int((i as i64) as i128),
3341 (ConstScalar::Int(i), IrType::Bool) => ConstScalar::Int(if i != 0 { 1 } else { 0 }),
3342 // Int → Float (e.g. `real :: x = 1`).
3343 (ConstScalar::Int(i), IrType::Float(_)) => ConstScalar::Float(i as f64),
3344 _ => v,
3345 }
3346 }
3347
3348 /// Try to evaluate a scalar initializer expression at compile time
3349 /// to a `GlobalInit`. Used by SAVE-promotion in `alloc_decls`.
3350 ///
3351 /// Handles literals, unary minus, parenthesization, and binary
3352 /// arithmetic (`+`, `-`, `*`, `/`, `**`) on any combination of
3353 /// integer and real operands, plus references to named PARAMETERs
3354 /// declared earlier in the same scope (looked up via `param_consts`).
3355 /// Mixed int/real promotes to real per Fortran's usual arithmetic
3356 /// rules. Anything that can't be folded (function calls, derived
3357 /// types, strings, names that aren't compile-time parameters)
3358 /// returns `None`. The caller then falls back to alloca + runtime
3359 /// store, which DOES break SAVE semantics — every new non-foldable
3360 /// case is a silent off-spec wrong-result, so the folder should
3361 /// cover as much as possible.
3362 fn eval_const_global_init(
3363 e: &crate::ast::expr::SpannedExpr,
3364 param_consts: &HashMap<String, ConstScalar>,
3365 target: Option<&IrType>,
3366 ) -> Option<GlobalInit> {
3367 eval_const_scalar(e, param_consts).map(|raw| {
3368 let clamped = match target {
3369 Some(t) => clamp_const_to_type(raw, t),
3370 None => raw,
3371 };
3372 match clamped {
3373 ConstScalar::Int(i) => GlobalInit::Int(i),
3374 ConstScalar::Float(f) => GlobalInit::Float(f),
3375 }
3376 })
3377 }
3378
/// Internal const-folding result for initializer expressions.
/// Int is used for integer kinds AND logical (0/1). Float is
/// used for real/double precision.
#[derive(Debug, Clone, Copy)]
enum ConstScalar {
    /// Integer (or logical 0/1) constant; held at i128 width so
    /// every supported integer kind fits without loss.
    Int(i128),
    /// Real constant; held as f64 (covers both single and double
    /// precision values).
    Float(f64),
}
3387
3388 impl ConstScalar {
3389 fn to_float(self) -> f64 {
3390 match self {
3391 ConstScalar::Int(i) => i as f64,
3392 ConstScalar::Float(f) => f,
3393 }
3394 }
3395 }
3396
3397 fn eval_const_scalar(
3398 e: &crate::ast::expr::SpannedExpr,
3399 param_consts: &HashMap<String, ConstScalar>,
3400 ) -> Option<ConstScalar> {
3401 use crate::ast::expr::{BinaryOp, UnaryOp};
3402 match &e.node {
3403 Expr::IntegerLiteral { text, .. } => text.parse::<i128>().ok().map(ConstScalar::Int),
3404 Expr::RealLiteral { text, .. } => text
3405 .replace('d', "e")
3406 .replace('D', "E")
3407 .parse::<f64>()
3408 .ok()
3409 .map(ConstScalar::Float),
3410 Expr::LogicalLiteral { value, .. } => Some(ConstScalar::Int(if *value { 1 } else { 0 })),
3411 // Audit CRITICAL-1: a name reference resolves only if it's
3412 // a compile-time parameter declared earlier in the same
3413 // scope. Anything else (regular local, dummy arg, module
3414 // global) is not a compile-time constant and the folder
3415 // gives up — the caller falls back to runtime evaluation.
3416 Expr::Name { name } => param_consts.get(&name.to_lowercase()).copied(),
3417 Expr::UnaryOp { op, operand } => {
3418 let v = eval_const_scalar(operand, param_consts)?;
3419 match op {
3420 UnaryOp::Minus => Some(match v {
3421 ConstScalar::Int(i) => ConstScalar::Int(-i),
3422 ConstScalar::Float(f) => ConstScalar::Float(-f),
3423 }),
3424 UnaryOp::Plus => Some(v),
3425 _ => None,
3426 }
3427 }
3428 Expr::BinaryOp { op, left, right } => {
3429 let lv = eval_const_scalar(left, param_consts)?;
3430 let rv = eval_const_scalar(right, param_consts)?;
3431 // Promote to float when either operand is float.
3432 let any_float =
3433 matches!(lv, ConstScalar::Float(_)) || matches!(rv, ConstScalar::Float(_));
3434 if any_float {
3435 let l = lv.to_float();
3436 let r = rv.to_float();
3437 match op {
3438 BinaryOp::Add => Some(ConstScalar::Float(l + r)),
3439 BinaryOp::Sub => Some(ConstScalar::Float(l - r)),
3440 BinaryOp::Mul => Some(ConstScalar::Float(l * r)),
3441 // Audit Min-5: fold all IEEE 754 cases. Float
3442 // division by zero now folds to ±Inf or NaN
3443 // (matching `f64::powf`, which already folds
3444 // negative-base fractional powers to NaN).
3445 // Consistent with gfortran's `parameter ::
3446 // x = 1.0/0.0 → +inf` behavior.
3447 BinaryOp::Div => Some(ConstScalar::Float(l / r)),
3448 BinaryOp::Pow => Some(ConstScalar::Float(l.powf(r))),
3449 _ => None,
3450 }
3451 } else {
3452 let (ConstScalar::Int(l), ConstScalar::Int(r)) = (lv, rv) else {
3453 return None;
3454 };
3455 match op {
3456 BinaryOp::Add => Some(ConstScalar::Int(l.wrapping_add(r))),
3457 BinaryOp::Sub => Some(ConstScalar::Int(l.wrapping_sub(r))),
3458 BinaryOp::Mul => Some(ConstScalar::Int(l.wrapping_mul(r))),
3459 BinaryOp::Div => {
3460 if r == 0 {
3461 None
3462 } else {
3463 Some(ConstScalar::Int(l / r))
3464 }
3465 }
3466 BinaryOp::Pow => {
3467 // Integer power with non-negative exponent.
3468 if r < 0 || r > i32::MAX as i128 {
3469 return None;
3470 }
3471 let mut acc: i128 = 1;
3472 for _ in 0..r {
3473 acc = acc.wrapping_mul(l);
3474 }
3475 Some(ConstScalar::Int(acc))
3476 }
3477 _ => None,
3478 }
3479 }
3480 }
3481 Expr::ParenExpr { inner } => eval_const_scalar(inner, param_consts),
3482 Expr::FunctionCall { callee, args } => {
3483 if let Expr::Name { name } = &callee.node {
3484 let key = name.to_lowercase();
3485 let first_arg = args.first().and_then(|a| {
3486 if let crate::ast::expr::SectionSubscript::Element(e) = &a.value {
3487 eval_const_scalar(e, param_consts)
3488 } else {
3489 None
3490 }
3491 });
3492 match key.as_str() {
3493 "selected_int_kind" => {
3494 if let Some(ConstScalar::Int(r)) = first_arg {
3495 let r = r as i64;
3496 let kind = if r <= 2 {
3497 1
3498 } else if r <= 4 {
3499 2
3500 } else if r <= 9 {
3501 4
3502 } else if r <= 18 {
3503 8
3504 } else if r <= 38 {
3505 16
3506 } else {
3507 -1
3508 };
3509 Some(ConstScalar::Int(kind as i128))
3510 } else {
3511 None
3512 }
3513 }
3514 "selected_real_kind" => {
3515 if let Some(ConstScalar::Int(p)) = first_arg {
3516 let p = p as i64;
3517 let kind = if p <= 6 {
3518 4
3519 } else if p <= 15 {
3520 8
3521 } else {
3522 -1
3523 };
3524 Some(ConstScalar::Int(kind as i128))
3525 } else {
3526 None
3527 }
3528 }
3529 "kind" => {
3530 // kind(expr): return the kind of the argument.
3531 // For compile-time purposes, infer from the literal type.
3532 if let Some(arg_expr) = args.first() {
3533 if let crate::ast::expr::SectionSubscript::Element(e) = &arg_expr.value
3534 {
3535 match &e.node {
3536 Expr::RealLiteral { text, .. } => {
3537 if text.contains('d') || text.contains('D') {
3538 Some(ConstScalar::Int(8))
3539 } else {
3540 Some(ConstScalar::Int(4))
3541 }
3542 }
3543 Expr::IntegerLiteral { .. } => Some(ConstScalar::Int(4)),
3544 _ => None,
3545 }
3546 } else {
3547 None
3548 }
3549 } else {
3550 None
3551 }
3552 }
3553 _ => None,
3554 }
3555 } else {
3556 None
3557 }
3558 }
3559 _ => None,
3560 }
3561 }
3562
3563 fn collect_decl_param_consts(
3564 decls: &[crate::ast::decl::SpannedDecl],
3565 ) -> HashMap<String, ConstScalar> {
3566 collect_decl_param_consts_with_host(decls, &HashMap::new())
3567 }
3568
3569 fn collect_decl_param_consts_with_host(
3570 decls: &[crate::ast::decl::SpannedDecl],
3571 host_param_consts: &HashMap<String, ConstScalar>,
3572 ) -> HashMap<String, ConstScalar> {
3573 let mut param_consts: HashMap<String, ConstScalar> = host_param_consts.clone();
3574 for decl in decls {
3575 match &decl.node {
3576 Decl::TypeDecl {
3577 attrs, entities, ..
3578 } => {
3579 let is_param = attrs
3580 .iter()
3581 .any(|a| matches!(a, crate::ast::decl::Attribute::Parameter));
3582 if !is_param {
3583 continue;
3584 }
3585 for entity in entities {
3586 if let Some(init) = &entity.init {
3587 if let Some(val) = eval_const_scalar(init, &param_consts) {
3588 param_consts.insert(entity.name.to_lowercase(), val);
3589 }
3590 }
3591 }
3592 }
3593 Decl::ParameterStmt { pairs } => {
3594 for (name, expr) in pairs {
3595 if let Some(val) = eval_const_scalar(expr, &param_consts) {
3596 param_consts.insert(name.to_lowercase(), val);
3597 }
3598 }
3599 }
3600 _ => {}
3601 }
3602 }
3603 param_consts
3604 }
3605
3606 fn const_scalar_ir_type(value: ConstScalar) -> IrType {
3607 match value {
3608 ConstScalar::Int(v) => {
3609 if i32::try_from(v).is_ok() {
3610 IrType::Int(IntWidth::I32)
3611 } else {
3612 IrType::Int(IntWidth::I64)
3613 }
3614 }
3615 ConstScalar::Float(_) => IrType::Float(FloatWidth::F64),
3616 }
3617 }
3618
3619 fn install_host_param_consts(
3620 b: &mut FuncBuilder,
3621 locals: &mut HashMap<String, LocalInfo>,
3622 host_param_consts: &HashMap<String, ConstScalar>,
3623 ) {
3624 for (name, value) in host_param_consts {
3625 if locals.contains_key(name) {
3626 continue;
3627 }
3628 let ty = const_scalar_ir_type(*value);
3629 let addr = b.alloca(ty.clone());
3630 locals.insert(
3631 name.clone(),
3632 LocalInfo {
3633 addr,
3634 ty,
3635 dims: vec![],
3636 allocatable: false,
3637 descriptor_arg: false,
3638 by_ref: false,
3639 char_kind: CharKind::None,
3640 derived_type: None,
3641 inline_const: Some(*value),
3642 is_pointer: false,
3643 runtime_dim_upper: vec![],
3644 },
3645 );
3646 }
3647 }
3648
/// A pending global variable produced by the lowerer for a SAVE'd
/// scalar local. Flushed into the IR Module after the containing
/// function finishes lowering.
struct PendingGlobal {
    // The synthesized IR global, appended via `module.add_global`
    // once the enclosing function's lowering completes.
    global: Global,
}
3655
3656 /// Synthesize a unique global symbol name for a SAVE'd local.
3657 /// Audit Min-2: previously used `__save_` but the leading double
3658 /// underscore is reserved for implementation symbols by Mach-O
3659 /// (and by POSIX). Switched to `afs_save_` which makes the
3660 /// provenance obvious and avoids the reserved-prefix footgun.
fn save_global_name(func_name: &str, local_name: &str) -> String {
    // Both components are lowercased so the symbol is stable
    // regardless of the source spelling (Fortran is
    // case-insensitive).
    let func = func_name.to_lowercase();
    let local = local_name.to_lowercase();
    format!("afs_save_{func}_{local}")
}
3668
3669 /// Collect the set of ValueIds whose defining instruction is a
3670 /// `GlobalAddr`. Used by `init_decls` to skip re-initializing
3671 /// SAVE-promoted locals on every function call. One pre-pass over
3672 /// the function beats the O(N²) per-local scan the original
3673 /// implementation did (Audit Maj-3).
3674 fn collect_global_addr_values(b: &FuncBuilder) -> HashSet<ValueId> {
3675 let mut set = HashSet::new();
3676 for block in &b.func().blocks {
3677 for inst in &block.insts {
3678 if matches!(inst.kind, InstKind::GlobalAddr(_)) {
3679 set.insert(inst.id);
3680 }
3681 }
3682 }
3683 set
3684 }
3685
// NOTE(review): the paragraph below describes `install_globals_as_locals`
// (invoked earlier in lowering; defined elsewhere in this file) and was
// stranded here. Demoted from `///` to `//` so it no longer merges into
// `check_no_filtered_refs`'s doc comment.
// Install module-level globals as `LocalInfo` entries in the
// function-local map so `Expr::Name` lookups can resolve them
// uniformly with stack locals. Must run *after* `alloc_decls` so
// that any same-named local declared in this subprogram shadows
// the global per Fortran scoping rules.
3691 /// Walk a body of statements and check every Expr::Name against
3692 /// the function's filtered names set. If any match, emit a hard
3693 /// compile-time error mentioning the filtered name. This is the
3694 /// pre-lowering hook for audit MAJOR-1: USE ONLY hides a name
3695 /// must not silently lower to const_int 0.
3696 fn check_no_filtered_refs(body: &[crate::ast::stmt::SpannedStmt], filtered: &HashSet<String>) {
3697 if filtered.is_empty() {
3698 return;
3699 }
3700 for stmt in body {
3701 check_filtered_in_stmt(stmt, filtered);
3702 }
3703 }
3704
3705 /// Walk every Stmt variant and recurse into substatements + every
3706 /// expression-bearing field. Audit5 MAJOR-2: the original walker
3707 /// only covered Assignment/Print/Write/Read/If/Do/Call/Block, so
3708 /// filtered USE ONLY refs slipped through WHERE constructs, FORALL,
3709 /// SELECT CASE, SELECT TYPE, ASSOCIATE, ALLOCATE/DEALLOCATE
3710 /// argument exprs, IO control specifiers, and ALL of the executable
3711 /// transfer-of-control statements that carry expressions
3712 /// (STOP code, RETURN value, ARITHMETIC IF, COMPUTED GOTO).
fn check_filtered_in_stmt(stmt: &crate::ast::stmt::SpannedStmt, filtered: &HashSet<String>) {
    use crate::ast::stmt::Stmt;
    // Every arm either recurses into nested statement bodies via
    // check_no_filtered_refs, or walks each expression-bearing
    // field via check_filtered_in_expr (which exits the process on
    // the first filtered name it finds).
    match &stmt.node {
        // ---- Assignment ----
        Stmt::Assignment { target, value } | Stmt::PointerAssignment { target, value } => {
            check_filtered_in_expr(target, filtered);
            check_filtered_in_expr(value, filtered);
        }

        // ---- IF ----
        Stmt::IfConstruct {
            condition,
            then_body,
            else_ifs,
            else_body,
            ..
        } => {
            check_filtered_in_expr(condition, filtered);
            check_no_filtered_refs(then_body, filtered);
            for (cond, body) in else_ifs {
                check_filtered_in_expr(cond, filtered);
                check_no_filtered_refs(body, filtered);
            }
            if let Some(eb) = else_body {
                check_no_filtered_refs(eb, filtered);
            }
        }
        Stmt::IfStmt { condition, action } => {
            check_filtered_in_expr(condition, filtered);
            check_filtered_in_stmt(action, filtered);
        }

        // ---- DO loops ----
        // start/end/step are all optional (bare `DO` has none).
        Stmt::DoLoop {
            start,
            end,
            step,
            body,
            ..
        } => {
            if let Some(e) = start {
                check_filtered_in_expr(e, filtered);
            }
            if let Some(e) = end {
                check_filtered_in_expr(e, filtered);
            }
            if let Some(e) = step {
                check_filtered_in_expr(e, filtered);
            }
            check_no_filtered_refs(body, filtered);
        }
        Stmt::DoWhile {
            condition, body, ..
        } => {
            check_filtered_in_expr(condition, filtered);
            check_no_filtered_refs(body, filtered);
        }
        Stmt::DoConcurrent {
            controls,
            mask,
            body,
            ..
        } => {
            for c in controls {
                check_filtered_in_expr(&c.start, filtered);
                check_filtered_in_expr(&c.end, filtered);
                if let Some(s) = &c.step {
                    check_filtered_in_expr(s, filtered);
                }
            }
            if let Some(m) = mask {
                check_filtered_in_expr(m, filtered);
            }
            check_no_filtered_refs(body, filtered);
        }

        // ---- SELECT ----
        Stmt::SelectCase {
            selector, cases, ..
        } => {
            check_filtered_in_expr(selector, filtered);
            for case in cases {
                // Each CASE may list values and/or ranges with
                // optional low/high bounds.
                for sel in &case.selectors {
                    use crate::ast::stmt::CaseSelector;
                    match sel {
                        CaseSelector::Value(e) => check_filtered_in_expr(e, filtered),
                        CaseSelector::Range { low, high } => {
                            if let Some(e) = low {
                                check_filtered_in_expr(e, filtered);
                            }
                            if let Some(e) = high {
                                check_filtered_in_expr(e, filtered);
                            }
                        }
                        CaseSelector::Default => {}
                    }
                }
                check_no_filtered_refs(&case.body, filtered);
            }
        }
        Stmt::SelectType {
            selector, guards, ..
        } => {
            check_filtered_in_expr(selector, filtered);
            // Type guards carry only a body — the guard's type spec
            // holds no expressions to walk.
            for guard in guards {
                use crate::ast::stmt::TypeGuard;
                let body = match guard {
                    TypeGuard::TypeIs { body, .. }
                    | TypeGuard::ClassIs { body, .. }
                    | TypeGuard::ClassDefault { body } => body,
                };
                check_no_filtered_refs(body, filtered);
            }
        }

        // ---- WHERE / FORALL ----
        Stmt::WhereConstruct {
            mask,
            body,
            elsewhere,
            ..
        } => {
            check_filtered_in_expr(mask, filtered);
            check_no_filtered_refs(body, filtered);
            // ELSEWHERE clauses carry an optional mask of their own.
            for (mcond, ebody) in elsewhere {
                if let Some(m) = mcond {
                    check_filtered_in_expr(m, filtered);
                }
                check_no_filtered_refs(ebody, filtered);
            }
        }
        Stmt::WhereStmt { mask, stmt } => {
            check_filtered_in_expr(mask, filtered);
            check_filtered_in_stmt(stmt, filtered);
        }
        Stmt::ForallConstruct {
            specs, mask, body, ..
        } => {
            for s in specs {
                check_filtered_in_expr(&s.start, filtered);
                check_filtered_in_expr(&s.end, filtered);
                if let Some(st) = &s.step {
                    check_filtered_in_expr(st, filtered);
                }
            }
            if let Some(m) = mask {
                check_filtered_in_expr(m, filtered);
            }
            check_no_filtered_refs(body, filtered);
        }
        Stmt::ForallStmt { specs, mask, stmt } => {
            for s in specs {
                check_filtered_in_expr(&s.start, filtered);
                check_filtered_in_expr(&s.end, filtered);
                if let Some(st) = &s.step {
                    check_filtered_in_expr(st, filtered);
                }
            }
            if let Some(m) = mask {
                check_filtered_in_expr(m, filtered);
            }
            check_filtered_in_stmt(stmt, filtered);
        }

        // ---- BLOCK / ASSOCIATE ----
        Stmt::Block { body, .. } => check_no_filtered_refs(body, filtered),
        Stmt::Associate { assocs, body, .. } => {
            // The association *names* are new local bindings; only
            // their selector expressions can reference module names.
            for (_, e) in assocs {
                check_filtered_in_expr(e, filtered);
            }
            check_no_filtered_refs(body, filtered);
        }

        // ---- Branch / transfer ----
        Stmt::Stop { code, .. } | Stmt::ErrorStop { code, .. } => {
            if let Some(e) = code {
                check_filtered_in_expr(e, filtered);
            }
        }
        Stmt::Return { value } => {
            if let Some(e) = value {
                check_filtered_in_expr(e, filtered);
            }
        }
        Stmt::ComputedGoto { selector, .. } => {
            check_filtered_in_expr(selector, filtered);
        }
        Stmt::ArithmeticIf { expr, .. } => {
            check_filtered_in_expr(expr, filtered);
        }
        // These carry only labels/construct names, no expressions.
        Stmt::Exit { .. } | Stmt::Cycle { .. } | Stmt::Goto { .. } | Stmt::Continue { .. } => {}
        // Unwrap a labeled statement and walk the inner one.
        Stmt::Labeled { stmt: inner, .. } => {
            check_no_filtered_refs(std::slice::from_ref(inner.as_ref()), filtered);
        }

        // ---- I/O ----
        Stmt::Print { format, items } => {
            check_filtered_in_expr(format, filtered);
            for item in items {
                check_filtered_in_expr(item, filtered);
            }
        }
        Stmt::Write { controls, items } | Stmt::Read { controls, items } => {
            // Control specifiers (UNIT=, FMT=, IOSTAT=, ...) all
            // carry a value expression.
            for c in controls {
                check_filtered_in_expr(&c.value, filtered);
            }
            for item in items {
                check_filtered_in_expr(item, filtered);
            }
        }
        Stmt::Open { specs }
        | Stmt::Close { specs }
        | Stmt::Rewind { specs }
        | Stmt::Backspace { specs }
        | Stmt::Endfile { specs }
        | Stmt::Flush { specs }
        | Stmt::Wait { specs } => {
            for c in specs {
                check_filtered_in_expr(&c.value, filtered);
            }
        }
        Stmt::Inquire { specs, items } => {
            for c in specs {
                check_filtered_in_expr(&c.value, filtered);
            }
            for item in items {
                check_filtered_in_expr(item, filtered);
            }
        }

        // ---- Memory ----
        Stmt::Allocate { items, opts, .. } | Stmt::Deallocate { items, opts } => {
            for item in items {
                check_filtered_in_expr(item, filtered);
            }
            for c in opts {
                check_filtered_in_expr(&c.value, filtered);
            }
        }
        Stmt::Nullify { items } => {
            for item in items {
                check_filtered_in_expr(item, filtered);
            }
        }

        // ---- Other executable ----
        Stmt::Call { callee, args } => {
            check_filtered_in_expr(callee, filtered);
            // Actual arguments are subscript-shaped (element or
            // section), so go through the subscript walker.
            for a in args {
                check_filtered_in_subscript(&a.value, filtered);
            }
        }
        Stmt::Namelist { .. } => {}
        Stmt::Declaration(_) => {
            // Initializers in inline declarations could reference
            // module names, but Decl init exprs go through a
            // separate const-fold path that already errors on
            // unknown names. Conservative: skip here.
        }
    }
}
3974
3975 fn check_filtered_in_expr(expr: &crate::ast::expr::SpannedExpr, filtered: &HashSet<String>) {
3976 match &expr.node {
3977 Expr::Name { name } => {
3978 let key = name.to_lowercase();
3979 if filtered.contains(&key) {
3980 eprintln!(
3981 "armfortas: error: {}:{}: '{}' is not accessible in this scope — \
3982 it was filtered out by a USE ONLY clause (audit MAJOR-1)",
3983 expr.span.start.line, expr.span.start.col, name,
3984 );
3985 let _ = std::io::stderr().flush();
3986 std::process::exit(1);
3987 }
3988 }
3989 Expr::ComponentAccess { base, .. } => {
3990 check_filtered_in_expr(base, filtered);
3991 }
3992 Expr::BinaryOp { left, right, .. } => {
3993 check_filtered_in_expr(left, filtered);
3994 check_filtered_in_expr(right, filtered);
3995 }
3996 Expr::UnaryOp { operand, .. } => check_filtered_in_expr(operand, filtered),
3997 Expr::ParenExpr { inner } => check_filtered_in_expr(inner, filtered),
3998 Expr::FunctionCall { callee, args } => {
3999 check_filtered_in_expr(callee, filtered);
4000 for a in args {
4001 check_filtered_in_subscript(&a.value, filtered);
4002 }
4003 }
4004 Expr::ArrayConstructor { values, .. } => {
4005 for v in values {
4006 check_filtered_in_acvalue(v, filtered);
4007 }
4008 }
4009 Expr::ComplexLiteral { real, imag } => {
4010 check_filtered_in_expr(real, filtered);
4011 check_filtered_in_expr(imag, filtered);
4012 }
4013 // Pure literals: nothing to walk.
4014 Expr::IntegerLiteral { .. }
4015 | Expr::RealLiteral { .. }
4016 | Expr::StringLiteral { .. }
4017 | Expr::LogicalLiteral { .. }
4018 | Expr::BozLiteral { .. } => {}
4019 }
4020 }
4021
4022 fn check_filtered_in_subscript(
4023 sub: &crate::ast::expr::SectionSubscript,
4024 filtered: &HashSet<String>,
4025 ) {
4026 use crate::ast::expr::SectionSubscript;
4027 match sub {
4028 SectionSubscript::Element(e) => check_filtered_in_expr(e, filtered),
4029 SectionSubscript::Range { start, end, stride } => {
4030 if let Some(e) = start {
4031 check_filtered_in_expr(e, filtered);
4032 }
4033 if let Some(e) = end {
4034 check_filtered_in_expr(e, filtered);
4035 }
4036 if let Some(e) = stride {
4037 check_filtered_in_expr(e, filtered);
4038 }
4039 }
4040 }
4041 }
4042
4043 fn check_filtered_in_acvalue(v: &crate::ast::expr::AcValue, filtered: &HashSet<String>) {
4044 use crate::ast::expr::AcValue;
4045 match v {
4046 AcValue::Expr(e) => check_filtered_in_expr(e, filtered),
4047 AcValue::ImpliedDo(ido) => {
4048 for inner in &ido.values {
4049 check_filtered_in_acvalue(inner, filtered);
4050 }
4051 check_filtered_in_expr(&ido.start, filtered);
4052 check_filtered_in_expr(&ido.end, filtered);
4053 if let Some(s) = &ido.step {
4054 check_filtered_in_expr(s, filtered);
4055 }
4056 }
4057 }
4058 }
4059
4060 /// Walk the function's USE statements and collect every name
4061 /// from a USE-only-imported module that the only-list filtered
4062 /// out. Audit MAJOR-1: those names must NOT silently fall
4063 /// through to const_int 0; the lowerer treats them as undefined
4064 /// at the reference site.
4065 fn compute_filtered_names(
4066 globals: &HashMap<(String, String), ModuleGlobalInfo>,
4067 uses: &[crate::ast::decl::SpannedDecl],
4068 ) -> HashSet<String> {
4069 use crate::ast::decl::OnlyItem;
4070 let mut filtered: HashSet<String> = HashSet::new();
4071 for decl in uses {
4072 let Decl::UseStmt {
4073 module,
4074 only: Some(only_list),
4075 ..
4076 } = &decl.node
4077 else {
4078 continue;
4079 };
4080 let mod_key = module.to_lowercase();
4081 // The set of names this module exports (limited to what
4082 // collect_module_globals registered — module functions and
4083 // derived types are tracked elsewhere and remain visible).
4084 let mut exports: HashSet<String> = HashSet::new();
4085 for (mk, var) in globals.keys() {
4086 if *mk == mod_key {
4087 exports.insert(var.clone());
4088 }
4089 }
4090 // The set of (lowercase) names the only-list explicitly
4091 // imports. A rename's `remote` is what's pulled from the
4092 // module; a Name is itself.
4093 let mut imported: HashSet<String> = HashSet::new();
4094 for item in only_list {
4095 match item {
4096 OnlyItem::Name(n) => {
4097 imported.insert(n.to_lowercase());
4098 }
4099 OnlyItem::Rename(rn) => {
4100 imported.insert(rn.remote.to_lowercase());
4101 }
4102 }
4103 }
4104 // Anything in exports but not imported is now filtered.
4105 for e in &exports {
4106 if !imported.contains(e) {
4107 filtered.insert(e.clone());
4108 }
4109 }
4110 }
4111 filtered
4112 }
4113
4114 /// Install a module-level global as a `LocalInfo` entry under the
4115 /// given local key. Shared helper so all install paths build a
4116 /// consistent LocalInfo shape.
4117 fn install_one_global(
4118 b: &mut FuncBuilder,
4119 locals: &mut HashMap<String, LocalInfo>,
4120 local_key: String,
4121 info: &ModuleGlobalInfo,
4122 ) {
4123 if locals.contains_key(&local_key) {
4124 return;
4125 }
4126 let addr_ty = if info.allocatable {
4127 IrType::Array(Box::new(IrType::Int(IntWidth::I8)), 384)
4128 } else if info.deferred_char {
4129 IrType::Array(Box::new(IrType::Int(IntWidth::I8)), 32)
4130 } else if info.is_pointer {
4131 pointer_slot_type(&info.ty)
4132 } else {
4133 info.ty.clone()
4134 };
4135 let addr = b.global_addr(&info.symbol, addr_ty);
4136 locals.insert(
4137 local_key,
4138 LocalInfo {
4139 addr,
4140 ty: info.ty.clone(),
4141 dims: info.dims.clone(),
4142 allocatable: info.allocatable,
4143 descriptor_arg: false,
4144 by_ref: false,
4145 char_kind: info.char_kind.clone(),
4146 derived_type: info.derived_type.clone(),
4147 inline_const: None,
4148 is_pointer: info.is_pointer,
4149 runtime_dim_upper: vec![],
4150 },
4151 );
4152 }
4153
/// Install module globals imported by this function's USE
/// statements as `LocalInfo` entries. Honors:
/// * USE ONLY filtering — only names in the only-list are installed
/// * Renames — both forms, `use m, only: y => x` and
///   `use m, x => y` (non-only rename)
/// * Cross-module collision detection — if two modules bring in
///   the same local key through their use list, the emitted IR
///   would resolve ambiguously; we skip the second one and note
///   the collision in an eprintln (sema doesn't yet diagnose).
///
/// Globals of the host module itself (host association) are always
/// installed, without needing a USE. Parameter constants from
/// symbol-table-only modules (never emitted as IR globals) are
/// installed as `inline_const` locals in the fallback path at the
/// bottom.
///
/// Audit C2/C3/C4: previously this function installed every
/// global regardless of any USE statement, ignored ONLY filtering,
/// silently dropped USE renames, and let two same-named variables
/// from different modules silently overwrite each other.
fn install_globals_as_locals(
    b: &mut FuncBuilder,
    locals: &mut HashMap<String, LocalInfo>,
    globals: &HashMap<(String, String), ModuleGlobalInfo>,
    uses: &[crate::ast::decl::SpannedDecl],
    host_module: Option<&str>,
    st: &SymbolTable,
) {
    use crate::ast::decl::OnlyItem;

    // Sorted per-use iteration so the emitted global_addr
    // instructions land in deterministic order. Audit B-3 holds
    // across this path too.
    //
    // The two-pass pattern:
    //   1. Enumerate the (use statement, key-in-local-scope, module_key)
    //      triples this function imports.
    //   2. Sort by local-scope key.
    //   3. Install in order, checking for collision before inserting.
    //
    // `pending` entries are (local_key, (module_key, remote_name)).
    let mut pending: Vec<(String, (String, String))> = Vec::new();

    // Host association: a procedure inside a module sees all of that
    // module's globals under their own names, no USE required.
    if let Some(module_name) = host_module {
        let mod_key = module_name.to_lowercase();
        for (mk, var) in globals.keys() {
            if *mk == mod_key {
                pending.push((var.clone(), (mod_key.clone(), var.clone())));
            }
        }
    }

    for decl in uses {
        let Decl::UseStmt {
            module,
            nature: _,
            renames,
            only,
        } = &decl.node
        else {
            continue;
        };
        let mod_key = module.to_lowercase();
        if let Some(only_list) = only {
            for item in only_list {
                match item {
                    OnlyItem::Name(n) => {
                        let n_lc = n.to_lowercase();
                        pending.push((n_lc.clone(), (mod_key.clone(), n_lc)));
                    }
                    OnlyItem::Rename(rn) => {
                        // `use m, only: local => remote` — install the
                        // module's `remote` under the `local` key.
                        pending.push((
                            rn.local.to_lowercase(),
                            (mod_key.clone(), rn.remote.to_lowercase()),
                        ));
                    }
                }
            }
        } else {
            // No ONLY list: import every name from the module,
            // minus any rename targets (which are substituted).
            let rename_targets: std::collections::HashSet<String> =
                renames.iter().map(|r| r.remote.to_lowercase()).collect();
            for (mk, var) in globals.keys() {
                if *mk != mod_key {
                    continue;
                }
                if rename_targets.contains(var) {
                    continue;
                }
                // Skip PRIVATE symbols — only PUBLIC symbols are accessible
                // via USE without ONLY.
                if let Some(mod_scope_id) = st.find_module_scope(&mod_key) {
                    if let Some(sym) = st.scope(mod_scope_id).symbols.get(var) {
                        if matches!(sym.attrs.access, crate::sema::symtab::Access::Private) {
                            continue;
                        }
                    }
                }
                pending.push((var.clone(), (mod_key.clone(), var.clone())));
            }
            // Also scan the SymbolTable module scope for symbols not
            // in the globals map (e.g., PARAMETERs, which are inlined
            // and don't generate globals).
            if let Some(mod_scope_id) = st.find_module_scope(&mod_key) {
                for (sym_key, sym) in &st.scope(mod_scope_id).symbols {
                    if matches!(sym.attrs.access, crate::sema::symtab::Access::Private) {
                        continue;
                    }
                    if rename_targets.contains(sym_key) {
                        continue;
                    }
                    let pair = (mod_key.clone(), sym_key.clone());
                    // Dedup against entries already queued by the
                    // globals-map scan above.
                    if !globals.contains_key(&pair) && !pending.iter().any(|(k, _)| k == sym_key) {
                        pending.push((sym_key.clone(), pair));
                    }
                }
            }
            // Non-only renames: `use m, local => remote`.
            for rn in renames {
                pending.push((
                    rn.local.to_lowercase(),
                    (mod_key.clone(), rn.remote.to_lowercase()),
                ));
            }
        }
    }

    // Deterministic install order (audit B-3).
    pending.sort_by(|a, b| a.0.cmp(&b.0));

    // Which module supplied each installed local key; lets a second
    // module reusing the same key be flagged (warn + first wins).
    let mut installed_from: HashMap<String, String> = HashMap::new();
    for (local_key, (mod_key, var_key)) in pending {
        if let Some(info) = globals.get(&(mod_key.clone(), var_key.clone())) {
            // Collision check: two modules exporting the same local key.
            if let Some(prev_mod) = installed_from.get(&local_key) {
                if *prev_mod != mod_key {
                    eprintln!(
                        "warning: ambiguous USE import '{}' from both '{}' and '{}'; \
                         keeping the first",
                        local_key, prev_mod, mod_key,
                    );
                    continue;
                }
            }
            installed_from.insert(local_key.clone(), mod_key);
            install_one_global(b, locals, local_key, info);
        } else {
            // Not an IR global — check if it's an intrinsic module parameter constant
            // (iso_c_binding, iso_fortran_env). These are registered in the symbol
            // table but never emitted as IR globals; install them as inline_const locals.
            if locals.contains_key(&local_key) {
                continue;
            }
            if let Some(mod_scope_id) = st.find_module_scope(&mod_key) {
                if let Some(sym) = st.scope(mod_scope_id).symbols.get(&var_key) {
                    if sym.attrs.parameter {
                        if let Some(cv) = sym.const_value {
                            // Check if this is a character constant (e.g. c_null_char).
                            let is_char = matches!(
                                sym.type_info,
                                Some(crate::sema::symtab::TypeInfo::Character { .. })
                            );
                            if is_char {
                                // Emit a 1-byte global string constant with the ASCII value.
                                let byte = [cv as u8];
                                let ptr = b.const_string(&byte);
                                let ty = IrType::Ptr(Box::new(IrType::Int(IntWidth::I8)));
                                locals.insert(
                                    local_key.clone(),
                                    LocalInfo {
                                        addr: ptr,
                                        ty,
                                        dims: vec![],
                                        allocatable: false,
                                        descriptor_arg: false,
                                        by_ref: false,
                                        char_kind: CharKind::Fixed(1),
                                        derived_type: None,
                                        inline_const: None,
                                        is_pointer: false,
                                        runtime_dim_upper: vec![],
                                    },
                                );
                            } else {
                                let ty = IrType::Int(IntWidth::I32);
                                // Create a dummy alloca (never loaded from; inline_const
                                // short-circuits at every use site via materialize_const_scalar).
                                let addr = b.alloca(ty.clone());
                                locals.insert(
                                    local_key.clone(),
                                    LocalInfo {
                                        addr,
                                        ty,
                                        dims: vec![],
                                        allocatable: false,
                                        descriptor_arg: false,
                                        by_ref: false,
                                        char_kind: CharKind::None,
                                        derived_type: None,
                                        inline_const: Some(ConstScalar::Int(cv as i128)),
                                        is_pointer: false,
                                        runtime_dim_upper: vec![],
                                    },
                                );
                            }
                            installed_from.insert(local_key, mod_key);
                        }
                    }
                }
            }
            // If still not found (e.g., USE references a name that doesn't exist),
            // skip silently — sema should have diagnosed it.
        }
    }
}
4360
4361 /// Allocate local variables from declarations. Handles both scalars and arrays.
4362 fn alloc_decls(
4363 b: &mut FuncBuilder,
4364 locals: &mut HashMap<String, LocalInfo>,
4365 decls: &[crate::ast::decl::SpannedDecl],
4366 visible_param_consts: &HashMap<String, ConstScalar>,
4367 type_layouts: &crate::sema::type_layout::TypeLayoutRegistry,
4368 pending_globals: &mut Vec<PendingGlobal>,
4369 func_name: &str,
4370 st: &SymbolTable,
4371 ) {
4372 use crate::ast::decl::Attribute;
4373
4374 // Pre-scan standalone PARAMETER statements so a TypeDecl entity
4375 // whose value comes from a separate `parameter (name = expr)`
4376 // statement still triggers SAVE-promotion at alloc time. Without
4377 // this pre-scan, the standalone form would silently fall back to
4378 // the alloca + per-call store path.
4379 let mut parameter_inits: HashMap<String, &crate::ast::expr::SpannedExpr> = HashMap::new();
4380 for d in decls {
4381 if let Decl::ParameterStmt { pairs } = &d.node {
4382 for (name, expr) in pairs {
4383 parameter_inits.insert(name.to_lowercase(), expr);
4384 }
4385 }
4386 }
4387
4388 // Audit CRITICAL-1: build the per-scope parameter constants
4389 // table so SAVE-promotion's eval_const_global_init can resolve
4390 // `Expr::Name` references against compile-time-known parameters
4391 // declared earlier in the same scope. Without this, an init
4392 // like `integer :: x = k * 2` (k a parameter) silently falls
4393 // back to alloca + per-call store and breaks SAVE semantics.
4394 //
4395 // Parameters can reference earlier parameters (`tau = 2 * pi`),
4396 // so we walk decls in order and build the map incrementally.
4397 let param_consts = collect_decl_param_consts_with_host(decls, visible_param_consts);
4398
4399 for decl in decls {
4400 if let Decl::TypeDecl {
4401 type_spec,
4402 attrs,
4403 entities,
4404 } = &decl.node
4405 {
4406 let elem_ty = lower_type_spec_st(type_spec, Some(st));
4407
4408 let attr_dims: Option<&Vec<ArraySpec>> = attrs.iter().find_map(|a| {
4409 if let Attribute::Dimension(specs) = a {
4410 Some(specs)
4411 } else {
4412 None
4413 }
4414 });
4415 let is_allocatable = attrs.iter().any(|a| matches!(a, Attribute::Allocatable));
4416 let is_pointer_attr = attrs.iter().any(|a| matches!(a, Attribute::Pointer));
4417
4418 for entity in entities {
4419 let key = entity.name.to_lowercase();
4420 if locals.contains_key(&key) {
4421 continue;
4422 }
4423
4424 // Use entity-level array spec, or fall back to attribute-level DIMENSION.
4425 let array_spec = entity.array_spec.as_ref().or(attr_dims);
4426
4427 // Check for character type.
4428 let char_len = match type_spec {
4429 TypeSpec::Character(Some(sel)) => {
4430 match &sel.len {
4431 Some(crate::ast::decl::LenSpec::Expr(e)) => {
4432 eval_const_int_in_scope_or_any_scope(e, &param_consts, st)
4433 }
4434 Some(crate::ast::decl::LenSpec::Star) => None, // assumed
4435 Some(crate::ast::decl::LenSpec::Colon) => None, // deferred
4436 None => Some(1), // default len=1
4437 }
4438 }
4439 TypeSpec::Character(None) => Some(1),
4440 _ => None,
4441 };
4442 let runtime_char_len_expr = match type_spec {
4443 TypeSpec::Character(Some(sel)) => match &sel.len {
4444 Some(crate::ast::decl::LenSpec::Expr(e))
4445 if eval_const_int_in_scope_or_any_scope(e, &param_consts, st)
4446 .is_none() =>
4447 {
4448 Some(e)
4449 }
4450 _ => None,
4451 },
4452 _ => None,
4453 };
4454 let is_deferred_char = matches!(type_spec,
4455 TypeSpec::Character(Some(sel)) if matches!(&sel.len, Some(crate::ast::decl::LenSpec::Colon))
4456 );
4457
4458 if is_pointer_attr && array_spec.is_some() {
4459 // Pointer to array. Reuses the 384-byte array
4460 // descriptor layout that allocatables use: the
4461 // pointer slot carries base_addr, elem_size,
4462 // rank, flags, and per-dim bounds so that
4463 // downstream subscript / SIZE / whole-array
4464 // operations pick it up through the existing
4465 // descriptor path. `=>` fills the slot from a
4466 // materialised descriptor of the target (see
4467 // Stmt::PointerAssignment). Unassociated state
4468 // is encoded by flags=0, same as an unallocated
4469 // allocatable.
4470 //
4471 // We set `allocatable = true` so that
4472 // `local_uses_array_descriptor` and
4473 // `array_descriptor_addr` treat the slot as a
4474 // descriptor-at-info.addr (no extra indirection).
4475 // `is_pointer = true` is separately used by
4476 // scope-exit deallocation to suppress the
4477 // afs_deallocate_array call — a pointer does
4478 // not own its target.
4479 let desc_ty = IrType::Array(Box::new(IrType::Int(IntWidth::I8)), 384);
4480 let addr = b.alloca(desc_ty);
4481 let zero_byte = b.const_i32(0);
4482 let size384 = b.const_i64(384);
4483 b.call(
4484 FuncRef::External("memset".into()),
4485 vec![addr, zero_byte, size384],
4486 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4487 );
4488 // dims is left empty for a deferred-shape pointer;
4489 // the descriptor carries the runtime rank and
4490 // bounds after `=>` binds it to a target.
4491 let pointer_elem_ty = if matches!(type_spec, TypeSpec::Character(_)) {
4492 match char_len {
4493 Some(len) => fixed_char_storage_ir_type(len),
4494 None => elem_ty.clone(),
4495 }
4496 } else if let TypeSpec::Type(ref type_name) = type_spec {
4497 derived_storage_ir_type(type_name, type_layouts)
4498 .unwrap_or_else(|| elem_ty.clone())
4499 } else {
4500 elem_ty.clone()
4501 };
4502 let pointer_char_kind = if matches!(type_spec, TypeSpec::Character(_)) {
4503 match char_len {
4504 Some(len) => CharKind::Fixed(len),
4505 None => CharKind::None,
4506 }
4507 } else {
4508 CharKind::None
4509 };
4510 locals.insert(
4511 key,
4512 LocalInfo {
4513 addr,
4514 ty: pointer_elem_ty,
4515 dims: vec![],
4516 allocatable: true,
4517 descriptor_arg: false,
4518 by_ref: false,
4519 char_kind: pointer_char_kind,
4520 derived_type: match type_spec {
4521 TypeSpec::Type(type_name) => Some(type_name.clone()),
4522 _ => None,
4523 },
4524 inline_const: None,
4525 is_pointer: true,
4526 runtime_dim_upper: vec![],
4527 },
4528 );
4529 continue;
4530 }
4531 if is_pointer_attr && matches!(type_spec, TypeSpec::Type(_)) && array_spec.is_none()
4532 {
4533 // Pointer to derived type. Slot holds an 8-byte
4534 // pointer to the target struct; ComponentAccess
4535 // loads the slot and uses that address as the
4536 // struct base. derived_type is stored so that
4537 // component lookup can find the type layout.
4538 if let TypeSpec::Type(ref type_name) = type_spec {
4539 let addr = b.alloca(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
4540 let zero_byte = b.const_i32(0);
4541 let eight = b.const_i64(8);
4542 b.call(
4543 FuncRef::External("memset".into()),
4544 vec![addr, zero_byte, eight],
4545 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4546 );
4547 locals.insert(
4548 key,
4549 LocalInfo {
4550 addr,
4551 ty: IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4552 dims: vec![],
4553 allocatable: false,
4554 descriptor_arg: false,
4555 by_ref: false,
4556 char_kind: CharKind::None,
4557 derived_type: Some(type_name.clone()),
4558 inline_const: None,
4559 is_pointer: true,
4560 runtime_dim_upper: vec![],
4561 },
4562 );
4563 continue;
4564 }
4565 }
4566 if is_deferred_char && is_allocatable && array_spec.is_none() {
4567 // Deferred-length allocatable scalar character:
4568 // 32-byte StringDescriptor. Deferred-length
4569 // allocatable arrays fall through to the general
4570 // array-descriptor path below.
4571 let desc_ty = IrType::Array(Box::new(IrType::Int(IntWidth::I8)), 32);
4572 let addr = b.alloca(desc_ty);
4573 let zero = b.const_i32(0);
4574 let size32 = b.const_i64(32);
4575 b.call(
4576 FuncRef::External("memset".into()),
4577 vec![addr, zero, size32],
4578 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4579 );
4580 locals.insert(
4581 key,
4582 LocalInfo {
4583 addr,
4584 ty: IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4585 dims: vec![],
4586 allocatable: true,
4587 descriptor_arg: false,
4588 by_ref: false,
4589 char_kind: CharKind::Deferred,
4590 derived_type: None,
4591 inline_const: None,
4592 is_pointer: false,
4593 runtime_dim_upper: vec![],
4594 },
4595 );
4596 continue;
4597 } else if let Some(len) = char_len {
4598 if let Some(specs) = array_spec.filter(|_| !is_allocatable) {
4599 let dims = extract_array_dims(specs, &param_consts);
4600 let total_size: i64 = dims.iter().map(|(_, size)| *size).product();
4601 if len == 1 {
4602 // `character(len=1)` arrays are byte arrays, not
4603 // pointer tables. Keeping them contiguous matches
4604 // descriptor-backed `character(kind=c_char)` locals
4605 // and lets element stores land in `ptr<i8>` slots.
4606 const STACK_THRESHOLD: i64 = 64 * 1024;
4607 if total_size >= STACK_THRESHOLD {
4608 let desc_ty =
4609 IrType::Array(Box::new(IrType::Int(IntWidth::I8)), 384);
4610 let addr = b.alloca(desc_ty);
4611 let zero = b.const_i32(0);
4612 let size384 = b.const_i64(384);
4613 b.call(
4614 FuncRef::External("memset".into()),
4615 vec![addr, zero, size384],
4616 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4617 );
4618 let es = b.const_i64(1);
4619 let n = b.const_i64(total_size);
4620 b.call(
4621 FuncRef::External("afs_allocate_1d".into()),
4622 vec![addr, es, n],
4623 IrType::Void,
4624 );
4625 let space = b.const_i32(b' ' as i32);
4626 let base = b.load_typed(
4627 addr,
4628 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4629 );
4630 let bytes = b.const_i64(total_size);
4631 b.call(
4632 FuncRef::External("memset".into()),
4633 vec![base, space, bytes],
4634 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4635 );
4636 locals.insert(
4637 key,
4638 LocalInfo {
4639 addr,
4640 ty: IrType::Int(IntWidth::I8),
4641 dims,
4642 allocatable: true,
4643 descriptor_arg: false,
4644 by_ref: false,
4645 char_kind: CharKind::Fixed(1),
4646 derived_type: None,
4647 inline_const: None,
4648 is_pointer: false,
4649 runtime_dim_upper: vec![],
4650 },
4651 );
4652 } else {
4653 let arr_ty = IrType::Array(
4654 Box::new(IrType::Int(IntWidth::I8)),
4655 total_size as u64,
4656 );
4657 let addr = b.alloca(arr_ty);
4658 let space = b.const_i32(b' ' as i32);
4659 let bytes = b.const_i64(total_size);
4660 b.call(
4661 FuncRef::External("memset".into()),
4662 vec![addr, space, bytes],
4663 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4664 );
4665 locals.insert(
4666 key,
4667 LocalInfo {
4668 addr,
4669 ty: IrType::Int(IntWidth::I8),
4670 dims,
4671 allocatable: false,
4672 descriptor_arg: false,
4673 by_ref: false,
4674 char_kind: CharKind::Fixed(1),
4675 derived_type: None,
4676 inline_const: None,
4677 is_pointer: false,
4678 runtime_dim_upper: vec![],
4679 },
4680 );
4681 }
4682 continue;
4683 }
4684 let table_ty = IrType::Array(
4685 Box::new(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8)))),
4686 total_size as u64,
4687 );
4688 let addr = b.alloca(table_ty);
4689 let buf_ty = IrType::Array(
4690 Box::new(IrType::Int(IntWidth::I8)),
4691 (total_size * (len + 1)) as u64,
4692 );
4693 let buf = b.alloca(buf_ty);
4694 let zero = b.const_i32(0);
4695 let total_bytes = b.const_i64(total_size * (len + 1));
4696 b.call(
4697 FuncRef::External("memset".into()),
4698 vec![buf, zero, total_bytes],
4699 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4700 );
4701 let space = b.const_i32(b' ' as i32);
4702 let char_bytes = b.const_i64(len);
4703 for idx in 0..total_size {
4704 let slot_idx = b.const_i64(idx);
4705 let slot_ptr = b.gep(
4706 addr,
4707 vec![slot_idx],
4708 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4709 );
4710 let byte_off = b.const_i64(idx * (len + 1));
4711 let elem_ptr = b.gep(buf, vec![byte_off], IrType::Int(IntWidth::I8));
4712 b.call(
4713 FuncRef::External("memset".into()),
4714 vec![elem_ptr, space, char_bytes],
4715 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4716 );
4717 b.store(elem_ptr, slot_ptr);
4718 }
4719 locals.insert(
4720 key,
4721 LocalInfo {
4722 addr,
4723 ty: IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4724 dims,
4725 allocatable: false,
4726 descriptor_arg: false,
4727 by_ref: false,
4728 char_kind: CharKind::Fixed(len),
4729 derived_type: None,
4730 inline_const: None,
4731 is_pointer: false,
4732 runtime_dim_upper: vec![],
4733 },
4734 );
4735 continue;
4736 }
4737 if !is_allocatable {
4738 // Fixed-length character(N): alloca N+1 bytes so call-boundary
4739 // lowering can rely on a stable trailing NUL while the Fortran
4740 // value still occupies the first N bytes.
4741 let buf_ty =
4742 IrType::Array(Box::new(IrType::Int(IntWidth::I8)), (len + 1) as u64);
4743 let addr = b.alloca(buf_ty);
4744 let zero = b.const_i32(0);
4745 let total = b.const_i64(len + 1);
4746 b.call(
4747 FuncRef::External("memset".into()),
4748 vec![addr, zero, total],
4749 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4750 );
4751 // Initialize with spaces.
4752 let space = b.const_i32(b' ' as i32);
4753 let len_val = b.const_i64(len);
4754 b.call(
4755 FuncRef::External("memset".into()),
4756 vec![addr, space, len_val],
4757 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4758 );
4759 locals.insert(
4760 key,
4761 LocalInfo {
4762 addr,
4763 ty: IrType::Int(IntWidth::I8),
4764 dims: vec![],
4765 allocatable: false,
4766 descriptor_arg: false,
4767 by_ref: false,
4768 char_kind: CharKind::Fixed(len),
4769 derived_type: None,
4770 inline_const: None,
4771 is_pointer: false,
4772 runtime_dim_upper: vec![],
4773 },
4774 );
4775 continue; // skip normal path
4776 }
4777 } else if let Some(len_expr) = runtime_char_len_expr {
4778 if !is_allocatable && array_spec.is_none() {
4779 // Automatic fixed-length character whose size depends on a
4780 // runtime expression such as LEN(input). Materialize a
4781 // heap buffer now and remember the runtime length for
4782 // substring and assignment lowering.
4783 let raw_len = lower_expr(b, locals, len_expr, st);
4784 let len_val = clamp_nonnegative_i64(b, raw_len);
4785 let len_addr = b.alloca(IrType::Int(IntWidth::I64));
4786 b.store(len_val, len_addr);
4787
4788 let one = b.const_i64(1);
4789 let total = b.iadd(len_val, one);
4790 let ptr = b.runtime_call(
4791 RuntimeFunc::Allocate,
4792 vec![total],
4793 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4794 );
4795 let ptr_slot = b.alloca(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
4796 b.store(ptr, ptr_slot);
4797
4798 let zero = b.const_i32(0);
4799 b.call(
4800 FuncRef::External("memset".into()),
4801 vec![ptr, zero, total],
4802 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4803 );
4804 let space = b.const_i32(b' ' as i32);
4805 b.call(
4806 FuncRef::External("memset".into()),
4807 vec![ptr, space, len_val],
4808 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4809 );
4810
4811 locals.insert(
4812 key,
4813 LocalInfo {
4814 addr: ptr_slot,
4815 ty: IrType::Int(IntWidth::I8),
4816 dims: vec![],
4817 allocatable: false,
4818 descriptor_arg: false,
4819 by_ref: false,
4820 char_kind: CharKind::FixedRuntime { len_addr },
4821 derived_type: None,
4822 inline_const: None,
4823 is_pointer: false,
4824 runtime_dim_upper: vec![],
4825 },
4826 );
4827 continue;
4828 }
4829 }
4830
4831 if is_allocatable {
4832 // Allocatable variable: alloca a descriptor (384 bytes), zero-initialized.
4833 let desc_ty = IrType::Array(Box::new(IrType::Int(IntWidth::I8)), 384);
4834 let addr = b.alloca(desc_ty);
4835 // Zero-initialize the descriptor so flags=0 (not allocated).
4836 let zero = b.const_i32(0);
4837 let size = b.const_i64(384);
4838 b.call(
4839 FuncRef::External("memset".into()),
4840 vec![addr, zero, size],
4841 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4842 );
4843 let alloc_elem_ty = if matches!(type_spec, TypeSpec::Character(_)) {
4844 match char_len {
4845 Some(len) => fixed_char_storage_ir_type(len),
4846 None => elem_ty.clone(),
4847 }
4848 } else if let TypeSpec::Type(ref type_name) = type_spec {
4849 derived_storage_ir_type(type_name, type_layouts)
4850 .unwrap_or_else(|| elem_ty.clone())
4851 } else {
4852 elem_ty.clone()
4853 };
4854 let char_kind = match char_len {
4855 Some(len) if array_spec.is_some() => CharKind::Fixed(len),
4856 _ => CharKind::None,
4857 };
4858 locals.insert(
4859 key,
4860 LocalInfo {
4861 addr,
4862 ty: alloc_elem_ty,
4863 dims: vec![],
4864 allocatable: true,
4865 descriptor_arg: false,
4866 by_ref: false,
4867 char_kind,
4868 derived_type: match type_spec {
4869 TypeSpec::Type(type_name) => Some(type_name.clone()),
4870 _ => None,
4871 },
4872 inline_const: None,
4873 is_pointer: false,
4874 runtime_dim_upper: vec![],
4875 },
4876 );
4877 } else if let Some(specs) = array_spec {
4878 // Fixed-size array variable.
4879 let dims = extract_array_dims(specs, &param_consts);
4880 let total_size: i64 = dims.iter().map(|(_, size)| *size).product();
4881 let (array_elem_ty, array_derived_type) =
4882 if let TypeSpec::Type(ref type_name) = type_spec {
4883 if let Some(layout) = type_layouts.get(type_name) {
4884 (
4885 IrType::Array(
4886 Box::new(IrType::Int(IntWidth::I8)),
4887 layout.size as u64,
4888 ),
4889 Some(type_name.clone()),
4890 )
4891 } else {
4892 (elem_ty.clone(), None)
4893 }
4894 } else {
4895 (elem_ty.clone(), None)
4896 };
4897 let elem_bytes = ir_scalar_byte_size(&array_elem_ty);
4898 let total_bytes = total_size * elem_bytes;
4899 const STACK_THRESHOLD: i64 = 64 * 1024; // 64KB
4900
4901 if total_bytes >= STACK_THRESHOLD {
4902 // Large array: use descriptor + heap allocation (prevents stack overflow).
4903 let desc_ty = IrType::Array(Box::new(IrType::Int(IntWidth::I8)), 384);
4904 let addr = b.alloca(desc_ty);
4905 let zero = b.const_i32(0);
4906 let size384 = b.const_i64(384);
4907 b.call(
4908 FuncRef::External("memset".into()),
4909 vec![addr, zero, size384],
4910 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4911 );
4912 // Auto-allocate with the declared shape.
4913 let es = b.const_i64(elem_bytes);
4914 let n = b.const_i64(total_size);
4915 b.call(
4916 FuncRef::External("afs_allocate_1d".into()),
4917 vec![addr, es, n],
4918 IrType::Void,
4919 );
4920 // Mark as allocatable so scope-exit dealloc fires.
4921 locals.insert(
4922 key,
4923 LocalInfo {
4924 addr,
4925 ty: array_elem_ty.clone(),
4926 dims,
4927 allocatable: true,
4928 descriptor_arg: false,
4929 by_ref: false,
4930 char_kind: CharKind::None,
4931 derived_type: array_derived_type.clone(),
4932 inline_const: None,
4933 is_pointer: false,
4934 runtime_dim_upper: vec![],
4935 },
4936 );
4937 } else {
4938 // Small array: stack allocation.
4939 let arr_ty =
4940 IrType::Array(Box::new(array_elem_ty.clone()), total_size as u64);
4941 let addr = b.alloca(arr_ty);
4942 locals.insert(
4943 key,
4944 LocalInfo {
4945 addr,
4946 ty: array_elem_ty.clone(),
4947 dims,
4948 allocatable: false,
4949 descriptor_arg: false,
4950 by_ref: false,
4951 char_kind: CharKind::None,
4952 derived_type: array_derived_type,
4953 inline_const: None,
4954 is_pointer: false,
4955 runtime_dim_upper: vec![],
4956 },
4957 );
4958 }
4959 } else if let TypeSpec::Type(ref type_name) = type_spec {
4960 // Derived type variable: allocate struct-sized byte array.
4961 if let Some(layout) = type_layouts.get(type_name) {
4962 let struct_ty =
4963 IrType::Array(Box::new(IrType::Int(IntWidth::I8)), layout.size as u64);
4964 let addr = b.alloca(struct_ty);
4965 // Store the derived type name in the ty field for component access lookup.
4966 // Use Ptr<i8> as a marker — the type_layouts registry is used for field resolution.
4967 locals.insert(
4968 key,
4969 LocalInfo {
4970 addr,
4971 ty: IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
4972 dims: vec![],
4973 allocatable: false,
4974 descriptor_arg: false,
4975 by_ref: false,
4976 char_kind: CharKind::None,
4977 derived_type: Some(type_name.clone()),
4978 inline_const: None,
4979 is_pointer: false,
4980 runtime_dim_upper: vec![],
4981 },
4982 );
4983 } else {
4984 // Unknown derived type — fall back to 8-byte alloca.
4985 let addr = b.alloca(IrType::Int(IntWidth::I64));
4986 locals.insert(
4987 key,
4988 LocalInfo {
4989 addr,
4990 ty: elem_ty.clone(),
4991 dims: vec![],
4992 allocatable: false,
4993 descriptor_arg: false,
4994 by_ref: false,
4995 char_kind: CharKind::None,
4996 derived_type: None,
4997 inline_const: None,
4998 is_pointer: false,
4999 runtime_dim_upper: vec![],
5000 },
5001 );
5002 }
5003 } else if is_pointer_attr && array_spec.is_none() {
5004 // Scalar Fortran POINTER: allocate a pointer slot
5005 // (`alloca ptr<elem_ty>`) that holds the address
5006 // of whatever the pointer is currently associated
5007 // with. `=>` stores into this slot; plain `=`
5008 // dereferences it; reads load twice. The slot
5009 // starts null so that ASSOCIATED() returns
5010 // false before the first `=>`.
5011 let addr = b.alloca(IrType::Ptr(Box::new(elem_ty.clone())));
5012 // Memset the slot to zero so unassociated pointers
5013 // compare null. Eight bytes matches the ARM64
5014 // pointer width.
5015 let zero_byte = b.const_i32(0);
5016 let eight = b.const_i64(8);
5017 b.call(
5018 FuncRef::External("memset".into()),
5019 vec![addr, zero_byte, eight],
5020 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
5021 );
5022 locals.insert(
5023 key,
5024 LocalInfo {
5025 addr,
5026 ty: elem_ty.clone(),
5027 dims: vec![],
5028 allocatable: false,
5029 descriptor_arg: false,
5030 by_ref: false,
5031 char_kind: CharKind::None,
5032 derived_type: None,
5033 inline_const: None,
5034 is_pointer: true,
5035 runtime_dim_upper: vec![],
5036 },
5037 );
5038 } else {
5039 // Scalar variable. Three sub-cases:
5040 // (a) PARAMETER-attributed and folds → inline
5041 // at every use site. No alloca, no global,
5042 // no .data slot. Audit MAJOR-4.
5043 // (b) Has a const-evaluable init but isn't a
5044 // parameter → SAVE-promote to a module
5045 // global (F2018 §8.5.16 implicit SAVE).
5046 // (c) Plain alloca, no init.
5047 let init_expr: Option<&crate::ast::expr::SpannedExpr> = entity
5048 .init
5049 .as_ref()
5050 .or_else(|| parameter_inits.get(&key).copied());
5051 let is_parameter = attrs.iter().any(|a| matches!(a, Attribute::Parameter))
5052 || parameter_inits.contains_key(&key);
5053
5054 if is_parameter {
5055 // Audit MAJOR-4: pure compile-time parameter.
5056 // Try to fold; if we can, store the value in
5057 // inline_const and skip the global+alloca.
5058 // Use a one-byte sentinel alloca for `addr`
5059 // so other code paths that touch info.addr
5060 // still work, but never load through it.
5061 let folded = init_expr
5062 .and_then(|e| eval_const_scalar(e, &param_consts))
5063 .map(|raw| clamp_const_to_type(raw, &elem_ty));
5064 if let Some(value) = folded {
5065 // Sentinel alloca — never read.
5066 let addr = b.alloca(elem_ty.clone());
5067 locals.insert(
5068 key,
5069 LocalInfo {
5070 addr,
5071 ty: elem_ty.clone(),
5072 dims: vec![],
5073 allocatable: false,
5074 descriptor_arg: false,
5075 by_ref: false,
5076 char_kind: CharKind::None,
5077 derived_type: None,
5078 inline_const: Some(value),
5079 is_pointer: false,
5080 runtime_dim_upper: vec![],
5081 },
5082 );
5083 continue;
5084 }
5085 // Fall through to the SAVE path if the
5086 // parameter init can't be folded — at least
5087 // semantics are preserved.
5088 }
5089
5090 if let Some(init) = init_expr
5091 .and_then(|e| eval_const_global_init(e, &param_consts, Some(&elem_ty)))
5092 {
5093 let global_name = save_global_name(func_name, &key);
5094 pending_globals.push(PendingGlobal {
5095 global: Global {
5096 name: global_name.clone(),
5097 ty: elem_ty.clone(),
5098 initializer: Some(init),
5099 },
5100 });
5101 let addr = b.global_addr(&global_name, elem_ty.clone());
5102 locals.insert(
5103 key,
5104 LocalInfo {
5105 addr,
5106 ty: elem_ty.clone(),
5107 dims: vec![],
5108 allocatable: false,
5109 descriptor_arg: false,
5110 by_ref: false,
5111 char_kind: CharKind::None,
5112 derived_type: None,
5113 inline_const: None,
5114 is_pointer: false,
5115 runtime_dim_upper: vec![],
5116 },
5117 );
5118 } else {
5119 let addr = b.alloca(elem_ty.clone());
5120 locals.insert(
5121 key,
5122 LocalInfo {
5123 addr,
5124 ty: elem_ty.clone(),
5125 dims: vec![],
5126 allocatable: false,
5127 descriptor_arg: false,
5128 by_ref: false,
5129 char_kind: CharKind::None,
5130 derived_type: None,
5131 inline_const: None,
5132 is_pointer: false,
5133 runtime_dim_upper: vec![],
5134 },
5135 );
5136 }
5137 }
5138 }
5139 }
5140 }
5141 }
5142
/// Lower initializer expressions for declared variables.
///
/// Handles two AST shapes:
/// 1. `Decl::TypeDecl` entities with `entity.init` set. This
///    covers BOTH `integer :: x = 42` and
///    `integer, parameter :: pi = 3.14` — the parameter
///    attribute doesn't change the lowering, only sema's
///    classification of the symbol.
/// 2. Standalone `Decl::ParameterStmt { pairs }`, where each
///    pair refers to an already-allocated local declared
///    elsewhere in the same decl list.
///
/// Most scalar locals with const-evaluable initializers are
/// SAVE-promoted to module globals back in `alloc_decls`; for
/// those, `is_global_addr` returns true and this pass leaves the
/// initialization to the .data section. The remaining cases this
/// pass handles are non-const initializers (rare).
///
/// Must run *after* `alloc_decls` so that all locals exist. Only
/// stores into scalar slots — array, character, derived-type, and
/// allocatable initializers have their own paths in alloc_decls.
///
/// Parameters: `b` is the in-progress function builder (stores are
/// emitted at the current insertion point), `locals` is the slot map
/// built by `alloc_decls`, `decls` is the declaration list of the
/// scoping unit, and `st` is the symbol table used when lowering
/// initializer expressions.
fn init_decls(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    decls: &[crate::ast::decl::SpannedDecl],
    st: &SymbolTable,
) {
    // Pre-collect the set of GlobalAddr-defining ValueIds so the
    // inner skip check is O(1). Audit Maj-3.
    let global_addr_ids = collect_global_addr_values(b);
    for decl in decls {
        match &decl.node {
            Decl::TypeDecl { entities, .. } => {
                for entity in entities {
                    let Some(init_expr) = &entity.init else {
                        continue;
                    };
                    let key = entity.name.to_lowercase();
                    let Some(info) = locals.get(&key) else {
                        continue;
                    };
                    // Dummy arguments (by_ref locals) cannot have
                    // initializers per the Fortran standard — they
                    // bind to caller storage. If sema lets one
                    // through it would be a bug; the debug_assert
                    // catches it in development without crashing
                    // release builds. Audit Min-4.
                    debug_assert!(
                        !info.by_ref,
                        "init_decls: dummy argument {:?} should not have an initializer",
                        key,
                    );
                    if info.by_ref {
                        continue;
                    }

                    // Array entity with an array constructor init:
                    // store each literal element into the slot.
                    // Only stack/non-allocatable arrays are handled
                    // here; allocatable arrays would need their
                    // descriptor allocated first.
                    if !info.dims.is_empty()
                        && !info.allocatable
                        && matches!(info.char_kind, CharKind::None)
                        && info.derived_type.is_none()
                    {
                        if let Expr::ArrayConstructor { values, .. } = &init_expr.node {
                            store_ac_values_into(b, locals, info.addr, &info.ty, values, st);
                        }
                        // Non-constructor array inits are silently
                        // skipped here (handled, if at all, elsewhere).
                        continue;
                    }

                    // Fixed-length character initializer: copy the
                    // literal bytes into the stack buffer with
                    // space-padding to the declared length. Previously
                    // the character arm was unconditionally skipped,
                    // leaving every `character(len=N) :: s = 'hello'`
                    // zero-initialized and silently blank at runtime
                    // (audit31 Finding 3).
                    if let CharKind::Fixed(len) = info.char_kind {
                        let (src_ptr, src_len) = lower_string_expr(b, locals, init_expr, st);
                        let dest_len = b.const_i64(len);
                        b.call(
                            FuncRef::External("afs_assign_char_fixed".into()),
                            vec![info.addr, dest_len, src_ptr, src_len],
                            IrType::Void,
                        );
                        continue;
                    }
                    // Same as the Fixed arm, but the destination length
                    // is only known at runtime (loaded via len_addr).
                    if let CharKind::FixedRuntime { len_addr } = info.char_kind {
                        let (src_ptr, src_len) = lower_string_expr(b, locals, init_expr, st);
                        let (dest_ptr, dest_len) =
                            fixed_runtime_char_ptr_and_len(b, info, len_addr);
                        b.call(
                            FuncRef::External("afs_assign_char_fixed".into()),
                            vec![dest_ptr, dest_len, src_ptr, src_len],
                            IrType::Void,
                        );
                        continue;
                    }
                    // Other non-plain-scalar shapes are handled
                    // elsewhere (allocatables, derived types) or not
                    // at all (deferred-length character, which gets
                    // its store through afs_assign_char_deferred at
                    // the declaration's assignment lowering).
                    if !info.dims.is_empty()
                        || info.allocatable
                        || !matches!(info.char_kind, CharKind::None)
                        || info.derived_type.is_some()
                    {
                        continue;
                    }
                    // SAVE-promoted locals are backed by a module
                    // global already initialized at link time. Don't
                    // re-store on every call — that would defeat
                    // the SAVE semantics (audit MAJOR-1).
                    if global_addr_ids.contains(&info.addr) {
                        continue;
                    }
                    // Audit5 MAJOR-3: PARAMETER scalars folded by
                    // alloc_decls have inline_const set and a
                    // sentinel alloca that is never loaded — every
                    // use materializes the constant directly. The
                    // store here would be dead in the IR forever
                    // at -O0 (mem2reg cleans it up at -O1+, but
                    // we shouldn't generate dead code in the first
                    // place).
                    if info.inline_const.is_some() {
                        continue;
                    }
                    // Complex scalar init: ComplexLiteral lowers to an
                    // address of a [f32/f64 x 2] buffer. Copying a
                    // pointer into the slot (whose pointee is the
                    // 2-element array) would fail IR verification — do
                    // a byte memcpy of the inline buffer instead.
                    if is_complex_ty(&info.ty) && !info.is_pointer {
                        let src = lower_expr(b, locals, init_expr, st);
                        let bytes = complex_byte_size(&info.ty);
                        let sz = b.const_i64(bytes);
                        b.call(
                            FuncRef::External("memcpy".into()),
                            vec![info.addr, src, sz],
                            IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
                        );
                        continue;
                    }
                    // Plain scalar: evaluate the (possibly non-const)
                    // initializer now and store it with type coercion.
                    let val = lower_expr(b, locals, init_expr, st);
                    let coerced = coerce_to_type(b, val, &info.ty);
                    b.store(coerced, info.addr);
                }
            }
            Decl::ParameterStmt { pairs } => {
                for (name, expr) in pairs {
                    let key = name.to_lowercase();
                    let Some(info) = locals.get(&key) else {
                        continue;
                    };
                    // Only plain, non-dummy scalars are initialized here.
                    if !info.dims.is_empty()
                        || info.allocatable
                        || info.by_ref
                        || !matches!(info.char_kind, CharKind::None)
                        || info.derived_type.is_some()
                    {
                        continue;
                    }
                    // SAVE-promoted locals are backed by a module
                    // global; the initial value is already baked
                    // into .data at link time, so skip the runtime
                    // store. Audit MAJOR-1 interaction.
                    if global_addr_ids.contains(&info.addr) {
                        continue;
                    }
                    // Audit5 MAJOR-3: same dead-store skip as the
                    // TypeDecl arm above. Standalone PARAMETER
                    // statements also produce inline_const-tagged
                    // locals when alloc_decls successfully folds
                    // the value.
                    if info.inline_const.is_some() {
                        continue;
                    }
                    let val = lower_expr(b, locals, expr, st);
                    let coerced = coerce_to_type(b, val, &info.ty);
                    b.store(coerced, info.addr);
                }
            }
            // Audit MEDIUM-3: DATA statements. Each set pairs
            // target objects with values. For the simple form
            // `data x /42/, y /3.14/`, walk objects + values
            // pairwise and emit a store per scalar Name target.
            // Implied-do object lists and value-side repetition
            // (`r*v`) are not yet supported — they fall through
            // silently and are tracked as future work.
            Decl::DataStmt { sets } => {
                for set in sets {
                    // NOTE(review): zip() already stops at the shorter of
                    // the two lists, so the min()+take(n) pair is a
                    // redundant (but harmless) belt-and-braces guard.
                    let n = set.objects.len().min(set.values.len());
                    for (target, value) in set.objects.iter().zip(set.values.iter()).take(n) {
                        let Expr::Name { name } = &target.node else {
                            continue;
                        };
                        let key = name.to_lowercase();
                        let Some(info) = locals.get(&key) else {
                            continue;
                        };
                        if !info.dims.is_empty()
                            || info.allocatable
                            || info.by_ref
                            || !matches!(info.char_kind, CharKind::None)
                            || info.derived_type.is_some()
                        {
                            continue;
                        }
                        // Don't shadow a SAVE-promoted global —
                        // its initial value is in .data already.
                        if global_addr_ids.contains(&info.addr) {
                            continue;
                        }
                        let val = lower_expr(b, locals, value, st);
                        let coerced = coerce_to_type(b, val, &info.ty);
                        b.store(coerced, info.addr);
                    }
                }
            }
            _ => {}
        }
    }
}
5369
5370 /// Coerce a scalar value to a target type for initializer storage.
5371 ///
5372 /// Covers every Fortran scalar coercion that can show up at an
5373 /// initializer-store site:
5374 /// * Int → Int width change (sign-extend or truncate). Audit
5375 /// Min-3: Fortran integers are always signed, so the int_extend
5376 /// `signed` flag is hardcoded to `true`.
5377 /// * Int ↔ Float (round to nearest for Float→Int).
5378 /// * F32 ↔ F64 (extend / truncate).
5379 /// * Bool ↔ Int (round-trip via int_extend; Fortran logicals
5380 /// occupy a full kind so this is rare but legal).
5381 ///
5382 /// Anything that doesn't match one of those cases falls into the
5383 /// `_ => val` arm and a `debug_assert!` fires — silently passing
5384 /// the wrong-typed value would let a future caller wire mismatched
5385 /// types into a Store, which the verifier (after MAJOR-4) would
5386 /// then catch much later. Better to fail loudly at the source.
5387 fn coerce_to_type(b: &mut FuncBuilder, val: ValueId, target: &IrType) -> ValueId {
5388 let src = match b.func().value_type(val) {
5389 Some(t) => t,
5390 None => return val,
5391 };
5392 if src == *target {
5393 return val;
5394 }
5395 match (&src, target) {
5396 // Int → Float
5397 (IrType::Int(_), IrType::Float(fw)) => b.int_to_float(val, *fw),
5398 // Float → Int
5399 (IrType::Float(_), IrType::Int(iw)) => b.float_to_int(val, *iw),
5400 // F32 ↔ F64
5401 (IrType::Float(FloatWidth::F32), IrType::Float(FloatWidth::F64)) => {
5402 b.float_extend(val, FloatWidth::F64)
5403 }
5404 (IrType::Float(FloatWidth::F64), IrType::Float(FloatWidth::F32)) => {
5405 b.float_trunc(val, FloatWidth::F32)
5406 }
5407 // Int width change. Audit Min-3: Fortran integers are signed.
5408 (IrType::Int(src_w), IrType::Int(dst_w)) => {
5409 if dst_w.bits() > src_w.bits() {
5410 b.int_extend(val, *dst_w, true)
5411 } else if dst_w.bits() < src_w.bits() {
5412 b.int_trunc(val, *dst_w)
5413 } else {
5414 val
5415 }
5416 }
5417 // Bool ↔ Int via int_extend. Bool is i1 in our model.
5418 (IrType::Bool, IrType::Int(iw)) => b.int_extend(val, *iw, false),
5419 // Int → Bool: compare against zero to produce a true Bool
5420 // rather than truncating to i8 (which the verifier would
5421 // then reject on any .and./.or. operand). Common path:
5422 // LOGICAL fields in derived types load as i8 and need to
5423 // reach Bool before a logical op (audit31 Finding 13).
5424 (IrType::Int(_), IrType::Bool) => {
5425 let zero = match &src {
5426 IrType::Int(IntWidth::I64) => b.const_i64(0),
5427 IrType::Int(IntWidth::I16) => b.const_i32(0),
5428 IrType::Int(IntWidth::I8) => b.const_i32(0),
5429 _ => b.const_i32(0),
5430 };
5431 // Widen to i32 first if the source is narrower so
5432 // icmp gets matching operand widths.
5433 let widened = match &src {
5434 IrType::Int(IntWidth::I8) | IrType::Int(IntWidth::I16) => {
5435 b.int_extend(val, IntWidth::I32, false)
5436 }
5437 _ => val,
5438 };
5439 b.icmp(CmpOp::Ne, widened, zero)
5440 }
5441 // Ptr<Array<T, N>> → Ptr<T>: pointer to array used as pointer to element.
5442 // Common for character arrays (Ptr<[i8 x 20]> → Ptr<i8>).
5443 (IrType::Ptr(_), IrType::Ptr(_)) => {
5444 // Pointers are all the same size on ARM64 — pass through.
5445 val
5446 }
5447 // Int → Ptr: value used in pointer context (e.g., byte as char*).
5448 (IrType::Int(_), IrType::Ptr(_)) => b.int_to_ptr(val, IrType::Int(IntWidth::I8)),
5449 // Ptr → Int: pointer used in integer context.
5450 (IrType::Ptr(_), IrType::Int(IntWidth::I64)) => b.ptr_to_int(val),
5451 (IrType::Ptr(_), IrType::Int(iw)) => {
5452 let i64_val = b.ptr_to_int(val);
5453 b.int_trunc(i64_val, *iw)
5454 }
5455 _ => {
5456 eprintln!(
5457 "coerce_to_type: unhandled coercion {:?}{:?}",
5458 src, target
5459 );
5460 val
5461 }
5462 }
5463 }
5464
5465 /// Extract compile-time array dimensions from array spec.
5466 /// Returns (lower_bound, extent) pairs. Runtime expressions default to (1, 1).
5467 fn extract_array_dims(
5468 specs: &[ArraySpec],
5469 param_consts: &HashMap<String, ConstScalar>,
5470 ) -> Vec<(i64, i64)> {
5471 specs
5472 .iter()
5473 .map(|spec| {
5474 match spec {
5475 ArraySpec::Explicit { lower, upper } => {
5476 let lo = lower
5477 .as_ref()
5478 .and_then(|e| eval_const_int_in_scope(e, param_consts))
5479 .unwrap_or(1);
5480 let hi = eval_const_int_in_scope(upper, param_consts).unwrap_or(1);
5481 (lo, hi - lo + 1)
5482 }
5483 ArraySpec::AssumedShape { .. } => (1, 0), // size unknown at compile time
5484 ArraySpec::Deferred => (1, 0),
5485 ArraySpec::AssumedSize { .. } => (1, 0),
5486 ArraySpec::AssumedRank => (1, 0),
5487 }
5488 })
5489 .collect()
5490 }
5491
5492 /// Try to evaluate a constant integer expression at compile time.
5493 fn eval_const_int(expr: &crate::ast::expr::SpannedExpr) -> Option<i64> {
5494 match &expr.node {
5495 Expr::IntegerLiteral { text, .. } => text.parse().ok(),
5496 Expr::UnaryOp {
5497 op: UnaryOp::Minus,
5498 operand,
5499 } => eval_const_int(operand).map(|v| -v),
5500 _ => None,
5501 }
5502 }
5503
5504 fn eval_const_int_in_scope(
5505 expr: &crate::ast::expr::SpannedExpr,
5506 param_consts: &HashMap<String, ConstScalar>,
5507 ) -> Option<i64> {
5508 match eval_const_scalar(expr, param_consts)? {
5509 ConstScalar::Int(v) => i64::try_from(v).ok(),
5510 ConstScalar::Float(_) => None,
5511 }
5512 }
5513
5514 fn eval_const_scalar_with_any_scope(
5515 expr: &crate::ast::expr::SpannedExpr,
5516 param_consts: &HashMap<String, ConstScalar>,
5517 st: &SymbolTable,
5518 ) -> Option<ConstScalar> {
5519 if let Some(value) = eval_const_scalar(expr, param_consts) {
5520 return Some(value);
5521 }
5522 match &expr.node {
5523 Expr::Name { name } => {
5524 let key = name.to_ascii_lowercase();
5525 param_consts.get(&key).copied().or_else(|| {
5526 st.find_symbol_any_scope(&key)
5527 .and_then(|sym| sym.const_value.map(|v| ConstScalar::Int(v as i128)))
5528 })
5529 }
5530 Expr::ParenExpr { inner } => eval_const_scalar_with_any_scope(inner, param_consts, st),
5531 Expr::UnaryOp {
5532 op: UnaryOp::Minus,
5533 operand,
5534 } => {
5535 let value = eval_const_scalar_with_any_scope(operand, param_consts, st)?;
5536 Some(match value {
5537 ConstScalar::Int(i) => ConstScalar::Int(-i),
5538 ConstScalar::Float(f) => ConstScalar::Float(-f),
5539 })
5540 }
5541 Expr::BinaryOp { op, left, right } => {
5542 let lv = eval_const_scalar_with_any_scope(left, param_consts, st)?;
5543 let rv = eval_const_scalar_with_any_scope(right, param_consts, st)?;
5544 let promote_float =
5545 matches!(lv, ConstScalar::Float(_)) || matches!(rv, ConstScalar::Float(_));
5546 if promote_float {
5547 let l = lv.to_float();
5548 let r = rv.to_float();
5549 match op {
5550 BinaryOp::Add => Some(ConstScalar::Float(l + r)),
5551 BinaryOp::Sub => Some(ConstScalar::Float(l - r)),
5552 BinaryOp::Mul => Some(ConstScalar::Float(l * r)),
5553 BinaryOp::Div => {
5554 if r == 0.0 {
5555 None
5556 } else {
5557 Some(ConstScalar::Float(l / r))
5558 }
5559 }
5560 BinaryOp::Pow => Some(ConstScalar::Float(l.powf(r))),
5561 _ => None,
5562 }
5563 } else {
5564 let (ConstScalar::Int(l), ConstScalar::Int(r)) = (lv, rv) else {
5565 return None;
5566 };
5567 match op {
5568 BinaryOp::Add => Some(ConstScalar::Int(l.wrapping_add(r))),
5569 BinaryOp::Sub => Some(ConstScalar::Int(l.wrapping_sub(r))),
5570 BinaryOp::Mul => Some(ConstScalar::Int(l.wrapping_mul(r))),
5571 BinaryOp::Div => {
5572 if r == 0 {
5573 None
5574 } else {
5575 Some(ConstScalar::Int(l / r))
5576 }
5577 }
5578 BinaryOp::Pow => {
5579 if r < 0 {
5580 None
5581 } else {
5582 let mut acc: i128 = 1;
5583 for _ in 0..(r as usize) {
5584 acc = acc.wrapping_mul(l);
5585 }
5586 Some(ConstScalar::Int(acc))
5587 }
5588 }
5589 _ => None,
5590 }
5591 }
5592 }
5593 _ => None,
5594 }
5595 }
5596
5597 fn eval_const_int_in_scope_or_any_scope(
5598 expr: &crate::ast::expr::SpannedExpr,
5599 param_consts: &HashMap<String, ConstScalar>,
5600 st: &SymbolTable,
5601 ) -> Option<i64> {
5602 match eval_const_scalar_with_any_scope(expr, param_consts, st)? {
5603 ConstScalar::Int(v) => i64::try_from(v).ok(),
5604 ConstScalar::Float(_) => None,
5605 }
5606 }
5607
5608 /// Resolve the raw data pointer and declared length for a character argument expression.
5609 /// Returns `None` if the argument is not a recognized fixed-length character.
5610 /// Build (ptr, len) for a substring `base_ptr(start:end)` per F2018 7.4.4.2.
5611 /// `start` defaults to 1, `end` defaults to the base string's length.
5612 /// Negative resulting lengths are clamped to 0 to match the standard's
5613 /// zero-length substring semantics when `start > end`.
5614 fn lower_substring(
5615 b: &mut FuncBuilder,
5616 locals: &HashMap<String, LocalInfo>,
5617 st: &SymbolTable,
5618 base_ptr: ValueId,
5619 base_len: ValueId,
5620 start: Option<&crate::ast::expr::SpannedExpr>,
5621 end: Option<&crate::ast::expr::SpannedExpr>,
5622 ) -> (ValueId, ValueId) {
5623 let widen = |b: &mut FuncBuilder, e: &crate::ast::expr::SpannedExpr| -> ValueId {
5624 let v = lower_expr(b, locals, e, st);
5625 match b.func().value_type(v) {
5626 Some(IrType::Int(IntWidth::I64)) => v,
5627 _ => b.int_extend(v, IntWidth::I64, true),
5628 }
5629 };
5630 let start_val = match start {
5631 Some(se) => widen(b, se),
5632 None => b.const_i64(1),
5633 };
5634 let end_val = match end {
5635 Some(ee) => widen(b, ee),
5636 None => base_len,
5637 };
5638 let one = b.const_i64(1);
5639 let off = b.isub(start_val, one);
5640 let sub_ptr = b.gep(base_ptr, vec![off], IrType::Int(IntWidth::I8));
5641 let span = b.isub(end_val, start_val);
5642 let raw_len = b.iadd(span, one);
5643 let zero = b.const_i64(0);
5644 let is_pos = b.icmp(CmpOp::Ge, raw_len, zero);
5645 let sub_len = b.select(is_pos, raw_len, zero);
5646 (sub_ptr, sub_len)
5647 }
5648
5649 fn widen_to_i64(b: &mut FuncBuilder, value: ValueId) -> ValueId {
5650 match b.func().value_type(value) {
5651 Some(IrType::Int(IntWidth::I64)) => value,
5652 _ => b.int_extend(value, IntWidth::I64, true),
5653 }
5654 }
5655
5656 fn clamp_nonnegative_i64(b: &mut FuncBuilder, value: ValueId) -> ValueId {
5657 let widened = widen_to_i64(b, value);
5658 let zero = b.const_i64(0);
5659 let is_nonnegative = b.icmp(CmpOp::Ge, widened, zero);
5660 b.select(is_nonnegative, widened, zero)
5661 }
5662
5663 fn fixed_runtime_char_ptr_and_len(
5664 b: &mut FuncBuilder,
5665 info: &LocalInfo,
5666 len_addr: ValueId,
5667 ) -> (ValueId, ValueId) {
5668 let ptr = if info.by_ref {
5669 let outer = b.load(info.addr);
5670 b.load_typed(outer, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))))
5671 } else {
5672 b.load_typed(info.addr, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))))
5673 };
5674 let len = b.load(len_addr);
5675 (ptr, len)
5676 }
5677
5678 fn descriptor_elem_size(b: &mut FuncBuilder, desc: ValueId) -> ValueId {
5679 let off = b.const_i64(8);
5680 let ptr = b.gep(desc, vec![off], IrType::Int(IntWidth::I8));
5681 b.load_typed(ptr, IrType::Int(IntWidth::I64))
5682 }
5683
5684 fn descriptor_backed_runtime_char_array(info: &LocalInfo) -> bool {
5685 local_uses_array_descriptor(info)
5686 && matches!(
5687 info.ty,
5688 IrType::Ptr(ref inner) if matches!(inner.as_ref(), IrType::Int(IntWidth::I8))
5689 )
5690 && (!info.dims.is_empty()
5691 || info.descriptor_arg
5692 || (info.allocatable
5693 && info.char_kind == CharKind::None
5694 && local_fixed_char_allocatable_scalar_len(info).is_none()))
5695 }
5696
/// Resolve (element pointer, element byte length) for indexing into a
/// character array, e.g. `arr(i)` where `arr` holds character data.
///
/// Returns `None` when `args` is empty or when `info` is not
/// recognizably a character array (no char kind and not a
/// descriptor-backed runtime character array). The element length
/// comes from the char kind when known, otherwise from the array
/// descriptor's element-size field.
fn char_array_element_ptr_and_len(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    info: &LocalInfo,
    args: &[crate::ast::expr::Argument],
    st: &SymbolTable,
) -> Option<(ValueId, ValueId)> {
    if args.is_empty() {
        return None;
    }
    if info.char_kind == CharKind::None && !descriptor_backed_runtime_char_array(info) {
        return None;
    }
    // Zero-based flat element index across all dimensions.
    let idx64 = compute_flat_elem_offset(b, locals, info, args, st);
    // Per-element length in bytes, by storage kind.
    let elem_len = match info.char_kind {
        CharKind::Fixed(n) => b.const_i64(n),
        CharKind::FixedRuntime { len_addr } | CharKind::AssumedLen { len_addr } => b.load(len_addr),
        CharKind::Deferred if local_uses_array_descriptor(info) => {
            let desc = array_descriptor_addr(b, info);
            descriptor_elem_size(b, desc)
        }
        CharKind::Deferred => return None,
        CharKind::None if descriptor_backed_runtime_char_array(info) => {
            let desc = array_descriptor_addr(b, info);
            descriptor_elem_size(b, desc)
        }
        CharKind::None => return None,
    };
    // Non-descriptor, non-by-ref case: `addr` is an array of i8*
    // slots — index the slot, then load the element's data pointer.
    if !local_uses_array_descriptor(info) && !info.by_ref {
        let slot_ptr = b.gep(
            info.addr,
            vec![idx64],
            IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
        );
        let elem_ptr = b.load_typed(slot_ptr, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
        return Some((elem_ptr, elem_len));
    }
    // Descriptor-backed or by-ref case: get the contiguous data base
    // pointer, then offset by (flat index × element length) bytes.
    let base = if local_uses_array_descriptor(info) {
        let desc = array_descriptor_addr(b, info);
        b.load_typed(desc, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))))
    } else {
        array_data_ptr_for_call(b, info)
    };
    let byte_offset = b.imul(idx64, elem_len);
    let elem_ptr = b.gep(base, vec![byte_offset], IrType::Int(IntWidth::I8));
    Some((elem_ptr, elem_len))
}
5744
5745 fn char_addr_and_len(
5746 b: &mut FuncBuilder,
5747 arg_spanned: &crate::ast::expr::SpannedExpr,
5748 locals: &HashMap<String, LocalInfo>,
5749 ) -> Option<(ValueId, i64)> {
5750 use crate::ast::expr::Expr;
5751 match &arg_spanned.node {
5752 Expr::Name { name } => {
5753 let info = locals.get(&name.to_lowercase())?;
5754 match &info.char_kind {
5755 CharKind::Fixed(n) => {
5756 if !info.dims.is_empty() {
5757 return None;
5758 }
5759 let ptr = if info.by_ref {
5760 let outer = b.load(info.addr);
5761 b.load_typed(outer, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))))
5762 } else {
5763 let zero = b.const_i64(0);
5764 b.gep(info.addr, vec![zero], IrType::Int(IntWidth::I8))
5765 };
5766 Some((ptr, *n))
5767 }
5768 CharKind::FixedRuntime { .. }
5769 | CharKind::AssumedLen { .. }
5770 | CharKind::Deferred
5771 | CharKind::None => None,
5772 }
5773 }
5774 Expr::StringLiteral { value, .. } => {
5775 let ptr = b.const_string(value.as_bytes());
5776 Some((ptr, value.len() as i64))
5777 }
5778 _ => None,
5779 }
5780 }
5781
/// Resolve the data pointer and *runtime* length for a character
/// argument expression.
///
/// Handles scalar character names of every `CharKind`, a few
/// descriptor/by-ref fallbacks under `CharKind::None`, and string
/// literals. Returns `None` for anything else; callers then fall
/// back to general expression lowering.
fn char_addr_and_runtime_len(
    b: &mut FuncBuilder,
    arg_spanned: &crate::ast::expr::SpannedExpr,
    locals: &HashMap<String, LocalInfo>,
) -> Option<(ValueId, ValueId)> {
    use crate::ast::expr::Expr;
    match &arg_spanned.node {
        Expr::Name { name } => {
            let info = locals.get(&name.to_lowercase())?;
            match &info.char_kind {
                CharKind::Fixed(n) => {
                    // Arrays are handled by the element-indexing path,
                    // not here.
                    if !info.dims.is_empty() {
                        return None;
                    }
                    let ptr = if info.by_ref {
                        // Dummy: deref the slot to the caller's pointer.
                        let outer = b.load(info.addr);
                        b.load_typed(outer, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))))
                    } else {
                        // Local: decay the stack buffer to i8*.
                        let zero = b.const_i64(0);
                        b.gep(info.addr, vec![zero], IrType::Int(IntWidth::I8))
                    };
                    let len = b.const_i64(*n);
                    Some((ptr, len))
                }
                CharKind::FixedRuntime { len_addr } => {
                    Some(fixed_runtime_char_ptr_and_len(b, info, *len_addr))
                }
                CharKind::Deferred => {
                    // StringDescriptor layout: data pointer at offset 0,
                    // i64 length at offset 8.
                    let desc = string_descriptor_addr(b, info);
                    let ptr = b.load_typed(desc, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
                    let eight = b.const_i64(8);
                    let len_ptr = b.gep(desc, vec![eight], IrType::Int(IntWidth::I8));
                    let len = b.load_typed(len_ptr, IrType::Int(IntWidth::I64));
                    Some((ptr, len))
                }
                CharKind::AssumedLen { len_addr } => {
                    // by_ref assumed-length dummy: double-deref for
                    // the data pointer, load the hidden-length param.
                    let outer = b.load(info.addr);
                    let ptr = b.load_typed(outer, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
                    let len = b.load(*len_addr);
                    Some((ptr, len))
                }
                CharKind::None => {
                    if let Some(len) = local_fixed_char_allocatable_scalar_len(info) {
                        // Allocatable scalar with a known fixed char
                        // length: data base comes from the descriptor.
                        let desc = array_descriptor_addr(b, info);
                        let base = b.load_typed(desc, IrType::Ptr(Box::new(info.ty.clone())));
                        let zero = b.const_i64(0);
                        let ptr = b.gep(base, vec![zero], IrType::Int(IntWidth::I8));
                        Some((ptr, b.const_i64(len)))
                    } else if info.by_ref
                        && matches!(
                            info.ty,
                            IrType::Ptr(ref inner) if matches!(inner.as_ref(), IrType::Int(IntWidth::I8))
                        )
                    {
                        // by-ref i8* with no char kind: treat as a
                        // NUL-terminated C string and measure its
                        // length at runtime.
                        let outer = b.load(info.addr);
                        let ptr =
                            b.load_typed(outer, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
                        let len = b.call(
                            FuncRef::External("afs_c_strlen".into()),
                            vec![ptr],
                            IrType::Int(IntWidth::I64),
                        );
                        Some((ptr, len))
                    } else {
                        None
                    }
                }
            }
        }
        Expr::StringLiteral { value, .. } => {
            let ptr = b.const_string(value.as_bytes());
            let len = b.const_i64(value.len() as i64);
            Some((ptr, len))
        }
        _ => None,
    }
}
5861
5862 fn local_char_runtime_len(b: &mut FuncBuilder, info: &LocalInfo) -> Option<ValueId> {
5863 match &info.char_kind {
5864 CharKind::Fixed(n) => Some(b.const_i64(*n)),
5865 CharKind::FixedRuntime { len_addr } | CharKind::AssumedLen { len_addr } => {
5866 Some(b.load(*len_addr))
5867 }
5868 CharKind::Deferred if local_uses_array_descriptor(info) => {
5869 let desc = array_descriptor_addr(b, info);
5870 Some(descriptor_elem_size(b, desc))
5871 }
5872 CharKind::Deferred => {
5873 let desc = string_descriptor_addr(b, info);
5874 let eight = b.const_i64(8);
5875 let len_ptr = b.gep(desc, vec![eight], IrType::Int(IntWidth::I8));
5876 Some(b.load_typed(len_ptr, IrType::Int(IntWidth::I64)))
5877 }
5878 CharKind::None => None,
5879 }
5880 }
5881
5882 fn actual_char_arg_runtime_len(
5883 b: &mut FuncBuilder,
5884 locals: &HashMap<String, LocalInfo>,
5885 expr: &crate::ast::expr::SpannedExpr,
5886 st: &SymbolTable,
5887 type_layouts: Option<&crate::sema::type_layout::TypeLayoutRegistry>,
5888 ) -> Option<ValueId> {
5889 if let Some((_ptr, len)) = char_addr_and_runtime_len(b, expr, locals) {
5890 return Some(len);
5891 }
5892
5893 match &expr.node {
5894 Expr::Name { name } => locals
5895 .get(&name.to_lowercase())
5896 .and_then(|info| local_char_runtime_len(b, info)),
5897 Expr::FunctionCall { callee, args } => {
5898 if let Expr::Name { name } = &callee.node {
5899 if let Some(info) = locals.get(&name.to_lowercase()) {
5900 if let Some((_ptr, len)) =
5901 char_array_element_ptr_and_len(b, locals, info, args, st)
5902 {
5903 return Some(len);
5904 }
5905 }
5906 }
5907 expr_is_character_expr(b, locals, expr, st, type_layouts).then(|| {
5908 let (_ptr, len) = lower_string_expr_with_layouts(b, locals, expr, st, type_layouts);
5909 len
5910 })
5911 }
5912 _ => expr_is_character_expr(b, locals, expr, st, type_layouts).then(|| {
5913 let (_ptr, len) = lower_string_expr_with_layouts(b, locals, expr, st, type_layouts);
5914 len
5915 }),
5916 }
5917 }
5918
5919 /// Lower character intrinsic functions (LEN, LEN_TRIM, ICHAR, CHAR, INDEX, SCAN, VERIFY,
5920 /// ADJUSTL, ADJUSTR, TRIM). These need access to `locals` (for CharKind info) and the
5921 /// original un-lowered argument expressions, so they cannot go through `lower_intrinsic`.
5922 /// Returns Some(ValueId) if recognized, None otherwise.
5923 fn lower_char_intrinsic(
5924 b: &mut FuncBuilder,
5925 name: &str,
5926 args: &[crate::ast::expr::Argument],
5927 locals: &HashMap<String, LocalInfo>,
5928 st: &SymbolTable,
5929 type_layouts: Option<&crate::sema::type_layout::TypeLayoutRegistry>,
5930 ) -> Option<ValueId> {
5931 use crate::ast::expr::SectionSubscript;
5932
5933 // Extract the SpannedExpr from argument i.
5934 let arg_spanned = |i: usize| -> Option<&crate::ast::expr::SpannedExpr> {
5935 args.get(i).and_then(|a| {
5936 if let SectionSubscript::Element(e) = &a.value {
5937 Some(e)
5938 } else {
5939 None
5940 }
5941 })
5942 };
5943
5944 match name {
5945 "len" => {
5946 let (_, len) =
5947 lower_string_expr_with_layouts(b, locals, arg_spanned(0)?, st, type_layouts);
5948 Some(len)
5949 }
5950 "len_trim" => {
5951 let (ptr, len_val) =
5952 lower_string_expr_with_layouts(b, locals, arg_spanned(0)?, st, type_layouts);
5953 Some(b.call(
5954 FuncRef::External("afs_len_trim".into()),
5955 vec![ptr, len_val],
5956 IrType::Int(IntWidth::I64),
5957 ))
5958 }
5959 "ichar" | "iachar" => {
5960 let (ptr, _) =
5961 lower_string_expr_with_layouts(b, locals, arg_spanned(0)?, st, type_layouts);
5962 let byte = b.load_typed(ptr, IrType::Int(IntWidth::I8));
5963 Some(b.call(
5964 FuncRef::External("afs_ichar".into()),
5965 vec![byte],
5966 IrType::Int(IntWidth::I32),
5967 ))
5968 }
5969 "char" | "achar" => {
5970 let int_arg = args.first().and_then(|a| {
5971 if let SectionSubscript::Element(e) = &a.value {
5972 Some(lower_expr(b, locals, e, st))
5973 } else {
5974 None
5975 }
5976 })?;
5977 let i32_arg = match b.func().value_type(int_arg) {
5978 Some(IrType::Int(IntWidth::I64)) => b.int_trunc(int_arg, IntWidth::I32),
5979 _ => int_arg,
5980 };
5981 let byte_val = b.call(
5982 FuncRef::External("afs_char".into()),
5983 vec![i32_arg],
5984 IrType::Int(IntWidth::I8),
5985 );
5986 // Allocate a 1-byte buffer and store through a byte-level GEP to avoid
5987 // the Ptr<[i8 x 1]> vs Ptr<i8> store-type mismatch.
5988 let buf = b.alloca(IrType::Array(Box::new(IrType::Int(IntWidth::I8)), 1));
5989 let zero = b.const_i64(0);
5990 let byte_ptr = b.gep(buf, vec![zero], IrType::Int(IntWidth::I8));
5991 b.store(byte_val, byte_ptr);
5992 Some(buf)
5993 }
5994 "index" => {
5995 let (hay_ptr, hay_len_val) =
5996 lower_string_expr_with_layouts(b, locals, arg_spanned(0)?, st, type_layouts);
5997 let (needle_ptr, needle_len_val) =
5998 lower_string_expr_with_layouts(b, locals, arg_spanned(1)?, st, type_layouts);
5999 let back_val = arg_spanned(2)
6000 .map(|e| lower_expr(b, locals, e, st))
6001 .unwrap_or_else(|| b.const_i32(0));
6002 Some(b.call(
6003 FuncRef::External("afs_index".into()),
6004 vec![hay_ptr, hay_len_val, needle_ptr, needle_len_val, back_val],
6005 IrType::Int(IntWidth::I64),
6006 ))
6007 }
6008 "scan" => {
6009 let (src_ptr, src_len_val) =
6010 lower_string_expr_with_layouts(b, locals, arg_spanned(0)?, st, type_layouts);
6011 let (set_ptr, set_len_val) =
6012 lower_string_expr_with_layouts(b, locals, arg_spanned(1)?, st, type_layouts);
6013 let back_val = arg_spanned(2)
6014 .map(|e| lower_expr(b, locals, e, st))
6015 .unwrap_or_else(|| b.const_i32(0));
6016 Some(b.call(
6017 FuncRef::External("afs_scan".into()),
6018 vec![src_ptr, src_len_val, set_ptr, set_len_val, back_val],
6019 IrType::Int(IntWidth::I64),
6020 ))
6021 }
6022 "verify" => {
6023 let (src_ptr, src_len_val) =
6024 lower_string_expr_with_layouts(b, locals, arg_spanned(0)?, st, type_layouts);
6025 let (set_ptr, set_len_val) =
6026 lower_string_expr_with_layouts(b, locals, arg_spanned(1)?, st, type_layouts);
6027 let back_val = arg_spanned(2)
6028 .map(|e| lower_expr(b, locals, e, st))
6029 .unwrap_or_else(|| b.const_i32(0));
6030 Some(b.call(
6031 FuncRef::External("afs_verify".into()),
6032 vec![src_ptr, src_len_val, set_ptr, set_len_val, back_val],
6033 IrType::Int(IntWidth::I64),
6034 ))
6035 }
6036 "lge" | "lgt" | "lle" | "llt" => {
6037 let (lhs_ptr, lhs_len) =
6038 lower_string_expr_with_layouts(b, locals, arg_spanned(0)?, st, type_layouts);
6039 let (rhs_ptr, rhs_len) =
6040 lower_string_expr_with_layouts(b, locals, arg_spanned(1)?, st, type_layouts);
6041 let raw = b.call(
6042 FuncRef::External(format!("afs_{}", name)),
6043 vec![lhs_ptr, lhs_len, rhs_ptr, rhs_len],
6044 IrType::Int(IntWidth::I32),
6045 );
6046 let zero = b.const_i32(0);
6047 Some(b.icmp(CmpOp::Ne, raw, zero))
6048 }
6049 "adjustl" => {
6050 let (src_ptr, len_val) =
6051 lower_string_expr_with_layouts(b, locals, arg_spanned(0)?, st, type_layouts);
6052 let one = b.const_i64(1);
6053 let alloc_len = b.iadd(len_val, one);
6054 let buf = b.runtime_call(
6055 RuntimeFunc::Allocate,
6056 vec![alloc_len],
6057 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
6058 );
6059 let zero = b.const_i32(0);
6060 b.call(
6061 FuncRef::External("memset".into()),
6062 vec![buf, zero, alloc_len],
6063 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
6064 );
6065 b.call(
6066 FuncRef::External("afs_adjustl".into()),
6067 vec![buf, src_ptr, len_val],
6068 IrType::Void,
6069 );
6070 Some(buf)
6071 }
6072 "adjustr" => {
6073 let (src_ptr, len_val) =
6074 lower_string_expr_with_layouts(b, locals, arg_spanned(0)?, st, type_layouts);
6075 let one = b.const_i64(1);
6076 let alloc_len = b.iadd(len_val, one);
6077 let buf = b.runtime_call(
6078 RuntimeFunc::Allocate,
6079 vec![alloc_len],
6080 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
6081 );
6082 let zero = b.const_i32(0);
6083 b.call(
6084 FuncRef::External("memset".into()),
6085 vec![buf, zero, alloc_len],
6086 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
6087 );
6088 b.call(
6089 FuncRef::External("afs_adjustr".into()),
6090 vec![buf, src_ptr, len_val],
6091 IrType::Void,
6092 );
6093 Some(buf)
6094 }
6095 "trim" => {
6096 // TRIM(s): returns character with trailing blanks removed.
6097 // Allocate buffer of declared length, memcpy source, return buffer pointer.
6098 // The actual printed length is discovered by len_trim at the call site.
6099 let (src_ptr, len_val) =
6100 lower_string_expr_with_layouts(b, locals, arg_spanned(0)?, st, type_layouts);
6101 let one = b.const_i64(1);
6102 let alloc_len = b.iadd(len_val, one);
6103 let buf = b.runtime_call(
6104 RuntimeFunc::Allocate,
6105 vec![alloc_len],
6106 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
6107 );
6108 let zero = b.const_i32(0);
6109 b.call(
6110 FuncRef::External("memset".into()),
6111 vec![buf, zero, alloc_len],
6112 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
6113 );
6114 b.call(
6115 FuncRef::External("memcpy".into()),
6116 vec![buf, src_ptr, len_val],
6117 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
6118 );
6119 Some(buf)
6120 }
6121 "repeat" => {
6122 let (src_ptr, src_len) =
6123 lower_string_expr_with_layouts(b, locals, arg_spanned(0)?, st, type_layouts);
6124 let raw_copies = lower_expr(b, locals, arg_spanned(1)?, st);
6125 let copies = widen_to_i64(b, raw_copies);
6126 let copies = clamp_nonnegative_i64(b, copies);
6127 let total_len = b.imul(src_len, copies);
6128 let one = b.const_i64(1);
6129 let alloc_len = b.iadd(total_len, one);
6130 let buf = b.runtime_call(
6131 RuntimeFunc::Allocate,
6132 vec![alloc_len],
6133 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
6134 );
6135 let zero = b.const_i32(0);
6136 b.call(
6137 FuncRef::External("memset".into()),
6138 vec![buf, zero, alloc_len],
6139 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
6140 );
6141 b.call(
6142 FuncRef::External("afs_repeat".into()),
6143 vec![src_ptr, src_len, copies, buf],
6144 IrType::Void,
6145 );
6146 Some(buf)
6147 }
6148 "compiler_version" => {
6149 let version = b"armfortas 0.1.0";
6150 Some(b.const_string(version))
6151 }
6152 "compiler_options" => {
6153 // Return a placeholder — the actual options string would
6154 // need to be threaded from the driver. Empty for now.
6155 let opts = b"";
6156 Some(b.const_string(opts))
6157 }
6158 _ => None,
6159 }
6160 }
6161
6162 fn lower_any_intrinsic_ast(
6163 b: &mut FuncBuilder,
6164 name: &str,
6165 args: &[crate::ast::expr::Argument],
6166 locals: &HashMap<String, LocalInfo>,
6167 st: &SymbolTable,
6168 type_layouts: Option<&crate::sema::type_layout::TypeLayoutRegistry>,
6169 internal_funcs: Option<&HashMap<String, u32>>,
6170 contained_host_refs: Option<&HashMap<String, Vec<String>>>,
6171 descriptor_params: Option<&HashMap<String, Vec<bool>>>,
6172 ) -> Option<ValueId> {
6173 use crate::ast::expr::{AcValue, BinaryOp, Expr, SectionSubscript};
6174
6175 if name != "any" {
6176 return None;
6177 }
6178
6179 let arg0 = args.first().and_then(|arg| {
6180 if let SectionSubscript::Element(expr) = &arg.value {
6181 Some(expr)
6182 } else {
6183 None
6184 }
6185 })?;
6186
6187 let lower_bool_expr = |b: &mut FuncBuilder, expr: &crate::ast::expr::SpannedExpr| {
6188 let raw = lower_expr_full(
6189 b,
6190 locals,
6191 expr,
6192 st,
6193 type_layouts,
6194 internal_funcs,
6195 contained_host_refs,
6196 descriptor_params,
6197 );
6198 coerce_to_type(b, raw, &IrType::Bool)
6199 };
6200
6201 match &arg0.node {
6202 Expr::ArrayConstructor { values, .. } => {
6203 let mut acc = b.const_bool(false);
6204 for value in values {
6205 let AcValue::Expr(expr) = value else {
6206 return None;
6207 };
6208 let pred = lower_bool_expr(b, expr);
6209 acc = b.or(acc, pred);
6210 }
6211 Some(acc)
6212 }
6213 Expr::BinaryOp { op, left, right } if matches!(op, BinaryOp::Eq | BinaryOp::Ne) => {
6214 match (&left.node, &right.node) {
6215 (Expr::ArrayConstructor { values, .. }, _) => {
6216 let mut acc = b.const_bool(false);
6217 for value in values {
6218 let AcValue::Expr(rhs) = value else {
6219 return None;
6220 };
6221 let cmp = crate::ast::Spanned::new(
6222 Expr::BinaryOp {
6223 op: op.clone(),
6224 left: Box::new(right.as_ref().clone()),
6225 right: Box::new(rhs.clone()),
6226 },
6227 right.span,
6228 );
6229 let pred = lower_bool_expr(b, &cmp);
6230 acc = b.or(acc, pred);
6231 }
6232 Some(acc)
6233 }
6234 (_, Expr::ArrayConstructor { values, .. }) => {
6235 let mut acc = b.const_bool(false);
6236 for value in values {
6237 let AcValue::Expr(rhs) = value else {
6238 return None;
6239 };
6240 let cmp = crate::ast::Spanned::new(
6241 Expr::BinaryOp {
6242 op: op.clone(),
6243 left: Box::new(left.as_ref().clone()),
6244 right: Box::new(rhs.clone()),
6245 },
6246 left.span,
6247 );
6248 let pred = lower_bool_expr(b, &cmp);
6249 acc = b.or(acc, pred);
6250 }
6251 Some(acc)
6252 }
6253 _ => None,
6254 }
6255 }
6256 _ => None,
6257 }
6258 }
6259
6260 /// Lower a Fortran intrinsic function call to IR instructions.
6261 /// Returns Some(ValueId) if recognized, None for external functions.
6262 fn lower_intrinsic(b: &mut FuncBuilder, name: &str, args: &[ValueId]) -> Option<ValueId> {
6263 match name {
6264 "merge" => {
6265 if args.len() >= 3 {
6266 let mut ty = b
6267 .func()
6268 .value_type(args[0])
6269 .unwrap_or(IrType::Int(IntWidth::I32));
6270 if ty.is_float() {
6271 if matches!(
6272 b.func().value_type(args[1]),
6273 Some(IrType::Float(FloatWidth::F64))
6274 ) {
6275 ty = IrType::Float(FloatWidth::F64);
6276 }
6277 } else if ty.is_int() {
6278 let width = [args[0], args[1]]
6279 .iter()
6280 .filter_map(|arg| b.func().value_type(*arg).and_then(|ty| ty.int_width()))
6281 .max_by_key(|width| width.bits())
6282 .unwrap_or(IntWidth::I32);
6283 ty = IrType::Int(width);
6284 }
6285 let true_val = coerce_to_type(b, args[0], &ty);
6286 let false_val = coerce_to_type(b, args[1], &ty);
6287 let mask = coerce_to_type(b, args[2], &IrType::Bool);
6288 Some(b.select(mask, true_val, false_val))
6289 } else {
6290 None
6291 }
6292 }
6293 "transfer" => {
6294 if args.len() >= 2 {
6295 let mold_ty = b
6296 .func()
6297 .value_type(args[1])
6298 .unwrap_or(IrType::Int(IntWidth::I32));
6299 Some(coerce_to_type(b, args[0], &mold_ty))
6300 } else {
6301 None
6302 }
6303 }
6304 "mod" => {
6305 // MOD(a, p) = a - INT(a/p) * p (sign of dividend)
6306 // C-style remainder matches this.
6307 if args.len() >= 2 {
6308 let (lhs, rhs) = unify_int_widths(b, args[0], args[1]);
6309 Some(b.imod(lhs, rhs))
6310 } else {
6311 None
6312 }
6313 }
6314 "modulo" => {
6315 // MODULO(a, p) = a - FLOOR(a/p) * p (sign of divisor, result in [0, |p|))
6316 // For integers: if result has opposite sign to p, add p.
6317 if args.len() >= 2 {
6318 let (lhs, rhs) = unify_int_widths(b, args[0], args[1]);
6319 let ty = b
6320 .func()
6321 .value_type(lhs)
6322 .unwrap_or(IrType::Int(IntWidth::I32));
6323 if ty.is_float() {
6324 // Float modulo: use fmod then adjust.
6325 let rem = b.call(FuncRef::External("fmod".into()), vec![lhs, rhs], ty.clone());
6326 let sum = b.fadd(rem, rhs);
6327 let rem2 = b.call(FuncRef::External("fmod".into()), vec![sum, rhs], ty);
6328 Some(rem2)
6329 } else {
6330 // Integer modulo: rem = a % p; if (rem != 0 && (rem ^ p) < 0) rem += p
6331 let rem = b.imod(lhs, rhs);
6332 let zero = match &ty {
6333 IrType::Int(IntWidth::I64) => b.const_i64(0),
6334 _ => b.const_i32(0),
6335 };
6336 let rem_ne_zero = b.icmp(CmpOp::Ne, rem, zero);
6337 let rem_xor_p = b.bit_xor(rem, rhs);
6338 let sign_differs = b.icmp(CmpOp::Lt, rem_xor_p, zero);
6339 let needs_adjust = b.and(rem_ne_zero, sign_differs);
6340 let adjusted = b.iadd(rem, rhs);
6341 Some(b.select(needs_adjust, adjusted, rem))
6342 }
6343 } else {
6344 None
6345 }
6346 }
6347 "abs" | "iabs" | "dabs" => {
6348 if let Some(arg) = args.first() {
6349 let ty = b
6350 .func()
6351 .value_type(*arg)
6352 .unwrap_or(IrType::Int(IntWidth::I32));
6353 match &ty {
6354 IrType::Int(w) => {
6355 let zero = match w {
6356 IntWidth::I64 => b.const_i64(0),
6357 _ => b.const_i32(0),
6358 };
6359 let is_pos = b.icmp(CmpOp::Ge, *arg, zero);
6360 let neg = b.ineg(*arg);
6361 Some(b.select(is_pos, *arg, neg))
6362 }
6363 IrType::Float(_) => Some(b.fabs(*arg)),
6364 _ => None,
6365 }
6366 } else {
6367 None
6368 }
6369 }
6370 "int" | "idint" | "ifix" => {
6371 if let Some(arg) = args.first() {
6372 let ty = b
6373 .func()
6374 .value_type(*arg)
6375 .unwrap_or(IrType::Int(IntWidth::I32));
6376 if ty.is_float() {
6377 Some(b.float_to_int(*arg, IntWidth::I32))
6378 } else {
6379 Some(*arg)
6380 }
6381 } else {
6382 None
6383 }
6384 }
6385 "nint" | "idnint" => {
6386 // NINT: round to nearest integer (not truncate).
6387 // Round via libm round(), then convert to int.
6388 if let Some(arg) = args.first() {
6389 let ty = b
6390 .func()
6391 .value_type(*arg)
6392 .unwrap_or(IrType::Float(FloatWidth::F64));
6393 if ty.is_float() {
6394 let func = if matches!(ty, IrType::Float(FloatWidth::F32)) {
6395 "roundf"
6396 } else {
6397 "round"
6398 };
6399 let rounded = b.call(FuncRef::External(func.into()), vec![*arg], ty.clone());
6400 Some(b.float_to_int(rounded, IntWidth::I32))
6401 } else {
6402 Some(*arg)
6403 }
6404 } else {
6405 None
6406 }
6407 }
6408 "anint" | "dnint" => {
6409 // ANINT: round to nearest whole number, return as real.
6410 if let Some(arg) = args.first() {
6411 let ty = b
6412 .func()
6413 .value_type(*arg)
6414 .unwrap_or(IrType::Float(FloatWidth::F64));
6415 let func = if matches!(ty, IrType::Float(FloatWidth::F32)) {
6416 "roundf"
6417 } else {
6418 "round"
6419 };
6420 Some(b.call(FuncRef::External(func.into()), vec![*arg], ty))
6421 } else {
6422 None
6423 }
6424 }
6425 "real" | "float" | "sngl" => {
6426 if let Some(arg) = args.first() {
6427 let ty = b
6428 .func()
6429 .value_type(*arg)
6430 .unwrap_or(IrType::Int(IntWidth::I32));
6431 if ty.is_int() {
6432 Some(b.int_to_float(*arg, FloatWidth::F32))
6433 } else if is_complex_ty(&ty) {
6434 // real(z) extracts the real component of a complex number.
6435 // Complex values live as ptr<[f32/f64 x 2]>; load element 0.
6436 let fw = complex_float_width(&ty);
6437 let zero = b.const_i64(0);
6438 let re_ptr = b.gep(*arg, vec![zero], IrType::Int(IntWidth::I8));
6439 Some(b.load_typed(re_ptr, IrType::Float(fw)))
6440 } else {
6441 Some(*arg)
6442 }
6443 } else {
6444 None
6445 }
6446 }
6447 "aimag" | "dimag" => {
6448 // aimag(z) extracts the imaginary component of a complex number.
6449 // Complex values live as ptr<[f32/f64 x 2]>; load element 1 at
6450 // byte offset 4 (f32) or 8 (f64).
6451 if let Some(arg) = args.first() {
6452 let ty = b
6453 .func()
6454 .value_type(*arg)
6455 .unwrap_or(IrType::Int(IntWidth::I32));
6456 if is_complex_ty(&ty) {
6457 let fw = complex_float_width(&ty);
6458 let offset = b.const_i64(if fw == FloatWidth::F64 { 8 } else { 4 });
6459 let im_ptr = b.gep(*arg, vec![offset], IrType::Int(IntWidth::I8));
6460 Some(b.load_typed(im_ptr, IrType::Float(fw)))
6461 } else {
6462 None
6463 }
6464 } else {
6465 None
6466 }
6467 }
6468 "dble" | "dfloat" => {
6469 if let Some(arg) = args.first() {
6470 let ty = b
6471 .func()
6472 .value_type(*arg)
6473 .unwrap_or(IrType::Int(IntWidth::I32));
6474 if ty.is_int() {
6475 Some(b.int_to_float(*arg, FloatWidth::F64))
6476 } else if matches!(ty, IrType::Float(FloatWidth::F32)) {
6477 Some(b.float_extend(*arg, FloatWidth::F64))
6478 } else {
6479 Some(*arg)
6480 }
6481 } else {
6482 None
6483 }
6484 }
6485 "max" | "max0" | "amax1" | "dmax1" => {
6486 if args.len() >= 2 {
6487 let mut ty = b
6488 .func()
6489 .value_type(args[0])
6490 .unwrap_or(IrType::Int(IntWidth::I32));
6491 if ty.is_float() {
6492 if args.iter().any(|arg| {
6493 matches!(
6494 b.func().value_type(*arg),
6495 Some(IrType::Float(FloatWidth::F64))
6496 )
6497 }) {
6498 ty = IrType::Float(FloatWidth::F64);
6499 }
6500 } else if ty.is_int() {
6501 let width = args
6502 .iter()
6503 .filter_map(|arg| b.func().value_type(*arg).and_then(|ty| ty.int_width()))
6504 .max_by_key(|width| width.bits())
6505 .unwrap_or(IntWidth::I32);
6506 ty = IrType::Int(width);
6507 }
6508 let coerced: Vec<ValueId> = args
6509 .iter()
6510 .map(|arg| coerce_to_type(b, *arg, &ty))
6511 .collect();
6512 let cmp = if ty.is_float() {
6513 b.fcmp(CmpOp::Ge, coerced[0], coerced[1])
6514 } else {
6515 b.icmp(CmpOp::Ge, coerced[0], coerced[1])
6516 };
6517 let mut result = b.select(cmp, coerced[0], coerced[1]);
6518 // Variadic: max(a, b, c, ...) chains.
6519 for arg in &coerced[2..] {
6520 let cmp = if ty.is_float() {
6521 b.fcmp(CmpOp::Ge, result, *arg)
6522 } else {
6523 b.icmp(CmpOp::Ge, result, *arg)
6524 };
6525 result = b.select(cmp, result, *arg);
6526 }
6527 Some(result)
6528 } else {
6529 None
6530 }
6531 }
6532 "min" | "min0" | "amin1" | "dmin1" => {
6533 if args.len() >= 2 {
6534 let mut ty = b
6535 .func()
6536 .value_type(args[0])
6537 .unwrap_or(IrType::Int(IntWidth::I32));
6538 if ty.is_float() {
6539 if args.iter().any(|arg| {
6540 matches!(
6541 b.func().value_type(*arg),
6542 Some(IrType::Float(FloatWidth::F64))
6543 )
6544 }) {
6545 ty = IrType::Float(FloatWidth::F64);
6546 }
6547 } else if ty.is_int() {
6548 let width = args
6549 .iter()
6550 .filter_map(|arg| b.func().value_type(*arg).and_then(|ty| ty.int_width()))
6551 .max_by_key(|width| width.bits())
6552 .unwrap_or(IntWidth::I32);
6553 ty = IrType::Int(width);
6554 }
6555 let coerced: Vec<ValueId> = args
6556 .iter()
6557 .map(|arg| coerce_to_type(b, *arg, &ty))
6558 .collect();
6559 let cmp = if ty.is_float() {
6560 b.fcmp(CmpOp::Le, coerced[0], coerced[1])
6561 } else {
6562 b.icmp(CmpOp::Le, coerced[0], coerced[1])
6563 };
6564 let mut result = b.select(cmp, coerced[0], coerced[1]);
6565 for arg in &coerced[2..] {
6566 let cmp = if ty.is_float() {
6567 b.fcmp(CmpOp::Le, result, *arg)
6568 } else {
6569 b.icmp(CmpOp::Le, result, *arg)
6570 };
6571 result = b.select(cmp, result, *arg);
6572 }
6573 Some(result)
6574 } else {
6575 None
6576 }
6577 }
6578 "sign" | "dsign" | "isign" => {
6579 // sign(a, b) = abs(a) * sign_of(b) = b >= 0 ? abs(a) : -abs(a)
6580 if args.len() >= 2 {
6581 let ty = b
6582 .func()
6583 .value_type(args[0])
6584 .unwrap_or(IrType::Int(IntWidth::I32));
6585 let abs_a = if ty.is_float() {
6586 b.fabs(args[0])
6587 } else {
6588 let zero = match &ty {
6589 IrType::Int(IntWidth::I64) => b.const_i64(0),
6590 _ => b.const_i32(0),
6591 };
6592 let is_pos = b.icmp(CmpOp::Ge, args[0], zero);
6593 let neg = b.ineg(args[0]);
6594 b.select(is_pos, args[0], neg)
6595 };
6596 let neg_abs = if ty.is_float() {
6597 b.fneg(abs_a)
6598 } else {
6599 b.ineg(abs_a)
6600 };
6601 let zero = match &ty {
6602 IrType::Float(FloatWidth::F32) => b.const_f32(0.0),
6603 IrType::Float(_) => b.const_f64(0.0),
6604 IrType::Int(IntWidth::I64) => b.const_i64(0),
6605 _ => b.const_i32(0),
6606 };
6607 let b_pos = if ty.is_float() {
6608 b.fcmp(CmpOp::Ge, args[1], zero)
6609 } else {
6610 b.icmp(CmpOp::Ge, args[1], zero)
6611 };
6612 Some(b.select(b_pos, abs_a, neg_abs))
6613 } else {
6614 None
6615 }
6616 }
6617 "sqrt" | "dsqrt" => args.first().map(|a| b.fsqrt(*a)),
6618 // ---- Bit manipulation (inline) ----
6619 // Mixed-kind bit ops (e.g. iand(c_long, c_int)) must unify
6620 // widths to the wider operand before the IR-level bit_and,
6621 // or the verifier rejects "operand width mismatch". F2018
6622 // §16.9.104 doesn't require same kinds; gfortran silently
6623 // promotes. Audit31 Finding 14.
6624 "iand" => {
6625 if args.len() >= 2 {
6626 let (l, r) = unify_int_widths(b, args[0], args[1]);
6627 Some(b.bit_and(l, r))
6628 } else {
6629 None
6630 }
6631 }
6632 "ior" => {
6633 if args.len() >= 2 {
6634 let (l, r) = unify_int_widths(b, args[0], args[1]);
6635 Some(b.bit_or(l, r))
6636 } else {
6637 None
6638 }
6639 }
6640 "ieor" => {
6641 if args.len() >= 2 {
6642 let (l, r) = unify_int_widths(b, args[0], args[1]);
6643 Some(b.bit_xor(l, r))
6644 } else {
6645 None
6646 }
6647 }
6648 "not" => args.first().map(|a| b.bit_not(*a)),
6649 "leadz" => args.first().map(|a| b.clz(*a)),
6650 "trailz" => args.first().map(|a| b.ctz(*a)),
6651 "popcount" | "popcnt" => {
6652 // Use __builtin_popcountll via runtime call since ARM64 NEON popcount
6653 // requires a complex instruction sequence.
6654 args.first().map(|a| {
6655 let widened = b.int_extend(*a, IntWidth::I64, false);
6656 b.call(
6657 FuncRef::External("afs_popcount".into()),
6658 vec![widened],
6659 IrType::Int(IntWidth::I32),
6660 )
6661 })
6662 }
6663 "ishft" => {
6664 // ishft(a, shift): positive shift = left, negative = right.
6665 // For now, only handle positive (left shift). Full impl needs Select.
6666 if args.len() >= 2 {
6667 let zero = b.const_i32(0);
6668 let is_left = b.icmp(CmpOp::Ge, args[1], zero);
6669 let neg_shift = b.ineg(args[1]);
6670 let left = b.shl(args[0], args[1]);
6671 let right = b.lshr(args[0], neg_shift);
6672 Some(b.select(is_left, left, right))
6673 } else {
6674 None
6675 }
6676 }
6677 "btest" => {
6678 // btest(a, pos) = (a >> pos) & 1 /= 0
6679 if args.len() >= 2 {
6680 let shifted = b.lshr(args[0], args[1]);
6681 let one = b.const_i32(1);
6682 let masked = b.bit_and(shifted, one);
6683 let zero = b.const_i32(0);
6684 Some(b.icmp(CmpOp::Ne, masked, zero))
6685 } else {
6686 None
6687 }
6688 }
6689 "ibset" => {
6690 // ibset(a, pos) = a | (1 << pos)
6691 if args.len() >= 2 {
6692 let one = b.const_i32(1);
6693 let mask = b.shl(one, args[1]);
6694 Some(b.bit_or(args[0], mask))
6695 } else {
6696 None
6697 }
6698 }
6699 "ibclr" => {
6700 // ibclr(a, pos) = a & ~(1 << pos)
6701 if args.len() >= 2 {
6702 let one = b.const_i32(1);
6703 let mask = b.shl(one, args[1]);
6704 let inv = b.bit_not(mask);
6705 Some(b.bit_and(args[0], inv))
6706 } else {
6707 None
6708 }
6709 }
6710 "ibits" => {
6711 // ibits(i, pos, len) = (i >> pos) & ((1 << len) - 1)
6712 if args.len() >= 3 {
6713 let shifted = b.lshr(args[0], args[1]);
6714 let one = b.const_i32(1);
6715 let mask_hi = b.shl(one, args[2]);
6716 let one2 = b.const_i32(1);
6717 let mask = b.isub(mask_hi, one2);
6718 Some(b.bit_and(shifted, mask))
6719 } else {
6720 None
6721 }
6722 }
6723 // ---- Math intrinsics → libm calls ----
6724 // Dispatch to sinf/sin based on argument type for F32/F64 correctness.
6725 "sin" | "dsin" | "cos" | "dcos" | "tan" | "dtan" | "asin" | "dasin" | "acos" | "dacos"
6726 | "atan" | "datan" | "sinh" | "dsinh" | "cosh" | "dcosh" | "tanh" | "dtanh" | "exp"
6727 | "dexp" | "log" | "dlog" | "alog" | "log10" | "dlog10" | "alog10" | "erf" | "derf"
6728 | "erfc" | "derfc" | "ceiling" | "floor" => {
6729 if let Some(arg) = args.first() {
6730 let ty = b
6731 .func()
6732 .value_type(*arg)
6733 .unwrap_or(IrType::Float(FloatWidth::F64));
6734 let is_f32 = matches!(ty, IrType::Float(FloatWidth::F32));
6735 let base_name = match name {
6736 "dsin" | "sin" => "sin",
6737 "dcos" | "cos" => "cos",
6738 "dtan" | "tan" => "tan",
6739 "dasin" | "asin" => "asin",
6740 "dacos" | "acos" => "acos",
6741 "datan" | "atan" => "atan",
6742 "dsinh" | "sinh" => "sinh",
6743 "dcosh" | "cosh" => "cosh",
6744 "dtanh" | "tanh" => "tanh",
6745 "dexp" | "exp" => "exp",
6746 "dlog" | "log" | "alog" => "log",
6747 "dlog10" | "log10" | "alog10" => "log10",
6748 "derf" | "erf" => "erf",
6749 "derfc" | "erfc" => "erfc",
6750 "ceiling" => "ceil",
6751 "floor" => "floor",
6752 _ => name,
6753 };
6754 let func_name = if is_f32 {
6755 format!("{}f", base_name)
6756 } else {
6757 base_name.to_string()
6758 };
6759 let ret_ty = if is_f32 {
6760 IrType::Float(FloatWidth::F32)
6761 } else {
6762 IrType::Float(FloatWidth::F64)
6763 };
6764 Some(b.call(FuncRef::External(func_name), vec![*arg], ret_ty))
6765 } else {
6766 None
6767 }
6768 }
6769 "atan2" | "datan2" => {
6770 if args.len() >= 2 {
6771 let ty = b
6772 .func()
6773 .value_type(args[0])
6774 .unwrap_or(IrType::Float(FloatWidth::F64));
6775 let is_f32 = matches!(ty, IrType::Float(FloatWidth::F32));
6776 let func = if is_f32 { "atan2f" } else { "atan2" };
6777 let ret_ty = if is_f32 {
6778 IrType::Float(FloatWidth::F32)
6779 } else {
6780 IrType::Float(FloatWidth::F64)
6781 };
6782 Some(b.call(
6783 FuncRef::External(func.into()),
6784 vec![args[0], args[1]],
6785 ret_ty,
6786 ))
6787 } else {
6788 None
6789 }
6790 }
6791 "gamma" | "dgamma" => args.first().map(|a| {
6792 let ty = b
6793 .func()
6794 .value_type(*a)
6795 .unwrap_or(IrType::Float(FloatWidth::F64));
6796 let is_f32 = matches!(ty, IrType::Float(FloatWidth::F32));
6797 let func = if is_f32 { "tgammaf" } else { "tgamma" };
6798 let ret_ty = if is_f32 {
6799 IrType::Float(FloatWidth::F32)
6800 } else {
6801 IrType::Float(FloatWidth::F64)
6802 };
6803 b.call(FuncRef::External(func.into()), vec![*a], ret_ty)
6804 }),
6805 "log_gamma" => args.first().map(|a| {
6806 let ty = b
6807 .func()
6808 .value_type(*a)
6809 .unwrap_or(IrType::Float(FloatWidth::F64));
6810 let is_f32 = matches!(ty, IrType::Float(FloatWidth::F32));
6811 let func = if is_f32 { "lgammaf" } else { "lgamma" };
6812 let ret_ty = if is_f32 {
6813 IrType::Float(FloatWidth::F32)
6814 } else {
6815 IrType::Float(FloatWidth::F64)
6816 };
6817 b.call(FuncRef::External(func.into()), vec![*a], ret_ty)
6818 }),
6819 "bessel_j0" => args.first().map(|a| {
6820 b.call(
6821 FuncRef::External("j0".into()),
6822 vec![*a],
6823 IrType::Float(FloatWidth::F64),
6824 )
6825 }),
6826 "bessel_j1" => args.first().map(|a| {
6827 b.call(
6828 FuncRef::External("j1".into()),
6829 vec![*a],
6830 IrType::Float(FloatWidth::F64),
6831 )
6832 }),
6833 "bessel_y0" => args.first().map(|a| {
6834 b.call(
6835 FuncRef::External("y0".into()),
6836 vec![*a],
6837 IrType::Float(FloatWidth::F64),
6838 )
6839 }),
6840 "bessel_y1" => args.first().map(|a| {
6841 b.call(
6842 FuncRef::External("y1".into()),
6843 vec![*a],
6844 IrType::Float(FloatWidth::F64),
6845 )
6846 }),
6847 "hypot" => {
6848 if args.len() >= 2 {
6849 let ty = b
6850 .func()
6851 .value_type(args[0])
6852 .unwrap_or(IrType::Float(FloatWidth::F64));
6853 let is_f32 = matches!(ty, IrType::Float(FloatWidth::F32));
6854 let func = if is_f32 { "hypotf" } else { "hypot" };
6855 let ret_ty = if is_f32 {
6856 IrType::Float(FloatWidth::F32)
6857 } else {
6858 IrType::Float(FloatWidth::F64)
6859 };
6860 Some(b.call(
6861 FuncRef::External(func.into()),
6862 vec![args[0], args[1]],
6863 ret_ty,
6864 ))
6865 } else {
6866 None
6867 }
6868 }
6869 "ishftc" => {
6870 // ishftc(a, shift, size): circular shift of the rightmost `size` bits.
6871 if args.len() >= 2 {
6872 let ty = b
6873 .func()
6874 .value_type(args[0])
6875 .unwrap_or(IrType::Int(IntWidth::I32));
6876 let default_size = match &ty {
6877 IrType::Int(IntWidth::I64) => 64,
6878 IrType::Int(IntWidth::I16) => 16,
6879 IrType::Int(IntWidth::I8) => 8,
6880 _ => 32,
6881 };
6882 let size = if args.len() >= 3 {
6883 args[2]
6884 } else {
6885 b.const_i32(default_size)
6886 };
6887 let shift = args[1];
6888 // left = (a << shift) | (a >> (size - shift)), masked to size bits.
6889 let left = b.shl(args[0], shift);
6890 let diff = b.isub(size, shift);
6891 let right = b.lshr(args[0], diff);
6892 let combined = b.bit_or(left, right);
6893 // Mask to `size` bits: combined & ((1 << size) - 1).
6894 let one = b.const_i32(1);
6895 let shifted_one = b.shl(one, size);
6896 let one2 = b.const_i32(1);
6897 let mask = b.isub(shifted_one, one2);
6898 Some(b.bit_and(combined, mask))
6899 } else {
6900 None
6901 }
6902 }
6903
6904 // ---- Numeric inquiry intrinsics (compile-time constants) ----
6905 // These depend on the argument's type, which we determine from the first arg.
6906 "huge" => {
6907 if let Some(arg) = args.first() {
6908 let ty = b
6909 .func()
6910 .value_type(*arg)
6911 .unwrap_or(IrType::Int(IntWidth::I32));
6912 match &ty {
6913 IrType::Int(IntWidth::I8) => Some(b.const_i32(i8::MAX as i64 as i32)),
6914 IrType::Int(IntWidth::I16) => Some(b.const_i32(i16::MAX as i64 as i32)),
6915 IrType::Int(IntWidth::I32) => Some(b.const_i32(i32::MAX)),
6916 IrType::Int(IntWidth::I64) => Some(b.const_i64(i64::MAX)),
6917 IrType::Float(FloatWidth::F32) => Some(b.const_f32(f32::MAX)),
6918 IrType::Float(FloatWidth::F64) => Some(b.const_f64(f64::MAX)),
6919 _ => None,
6920 }
6921 } else {
6922 None
6923 }
6924 }
6925 "tiny" => {
6926 if let Some(arg) = args.first() {
6927 let ty = b
6928 .func()
6929 .value_type(*arg)
6930 .unwrap_or(IrType::Float(FloatWidth::F32));
6931 match &ty {
6932 IrType::Float(FloatWidth::F32) => Some(b.const_f32(f32::MIN_POSITIVE)),
6933 IrType::Float(FloatWidth::F64) => Some(b.const_f64(f64::MIN_POSITIVE)),
6934 _ => None,
6935 }
6936 } else {
6937 None
6938 }
6939 }
6940 "epsilon" => {
6941 if let Some(arg) = args.first() {
6942 let ty = b
6943 .func()
6944 .value_type(*arg)
6945 .unwrap_or(IrType::Float(FloatWidth::F32));
6946 match &ty {
6947 IrType::Float(FloatWidth::F32) => Some(b.const_f32(f32::EPSILON)),
6948 IrType::Float(FloatWidth::F64) => Some(b.const_f64(f64::EPSILON)),
6949 _ => None,
6950 }
6951 } else {
6952 None
6953 }
6954 }
6955 "precision" => {
6956 if let Some(arg) = args.first() {
6957 let ty = b
6958 .func()
6959 .value_type(*arg)
6960 .unwrap_or(IrType::Float(FloatWidth::F32));
6961 let prec = match &ty {
6962 IrType::Float(FloatWidth::F32) => 6, // ~7.2 decimal digits → 6
6963 IrType::Float(FloatWidth::F64) => 15, // ~15.9 decimal digits → 15
6964 _ => 0,
6965 };
6966 Some(b.const_i32(prec))
6967 } else {
6968 None
6969 }
6970 }
6971 "range" => {
6972 if let Some(arg) = args.first() {
6973 let ty = b
6974 .func()
6975 .value_type(*arg)
6976 .unwrap_or(IrType::Int(IntWidth::I32));
6977 let range = match &ty {
6978 IrType::Int(IntWidth::I8) => 2,
6979 IrType::Int(IntWidth::I16) => 4,
6980 IrType::Int(IntWidth::I32) => 9,
6981 IrType::Int(IntWidth::I64) => 18,
6982 IrType::Int(IntWidth::I128) => 38,
6983 IrType::Float(FloatWidth::F32) => 37,
6984 IrType::Float(FloatWidth::F64) => 307,
6985 _ => 0,
6986 };
6987 Some(b.const_i32(range))
6988 } else {
6989 None
6990 }
6991 }
6992 "digits" => {
6993 if let Some(arg) = args.first() {
6994 let ty = b
6995 .func()
6996 .value_type(*arg)
6997 .unwrap_or(IrType::Int(IntWidth::I32));
6998 let digits = match &ty {
6999 IrType::Int(IntWidth::I8) => 7,
7000 IrType::Int(IntWidth::I16) => 15,
7001 IrType::Int(IntWidth::I32) => 31,
7002 IrType::Int(IntWidth::I64) => 63,
7003 IrType::Int(IntWidth::I128) => 127,
7004 IrType::Float(FloatWidth::F32) => 24, // significand bits
7005 IrType::Float(FloatWidth::F64) => 53,
7006 _ => 0,
7007 };
7008 Some(b.const_i32(digits))
7009 } else {
7010 None
7011 }
7012 }
7013 "radix" => {
7014 // Always 2 for binary machines.
7015 Some(b.const_i32(2))
7016 }
7017 "bit_size" => {
7018 if let Some(arg) = args.first() {
7019 let ty = b
7020 .func()
7021 .value_type(*arg)
7022 .unwrap_or(IrType::Int(IntWidth::I32));
7023 let bits = match &ty {
7024 IrType::Int(IntWidth::I8) => 8,
7025 IrType::Int(IntWidth::I16) => 16,
7026 IrType::Int(IntWidth::I32) => 32,
7027 IrType::Int(IntWidth::I64) => 64,
7028 IrType::Int(IntWidth::I128) => 128,
7029 _ => 0,
7030 };
7031 Some(b.const_i32(bits))
7032 } else {
7033 None
7034 }
7035 }
7036 "kind" => {
7037 if let Some(arg) = args.first() {
7038 let ty = b
7039 .func()
7040 .value_type(*arg)
7041 .unwrap_or(IrType::Int(IntWidth::I32));
7042 let kind = match &ty {
7043 IrType::Int(IntWidth::I8) => 1,
7044 IrType::Int(IntWidth::I16) => 2,
7045 IrType::Int(IntWidth::I32) => 4,
7046 IrType::Int(IntWidth::I64) => 8,
7047 IrType::Int(IntWidth::I128) => 16,
7048 IrType::Float(FloatWidth::F32) => 4,
7049 IrType::Float(FloatWidth::F64) => 8,
7050 IrType::Bool => 4,
7051 _ => 4,
7052 };
7053 Some(b.const_i32(kind))
7054 } else {
7055 None
7056 }
7057 }
7058 // ---- System inquiry functions ----
7059 "command_argument_count" => Some(b.call(
7060 FuncRef::External("afs_command_argument_count".into()),
7061 vec![],
7062 IrType::Int(IntWidth::I32),
7063 )),
7064
7065 // ---- iso_c_binding functions ----
7066 "c_loc" => None,
7067 "c_sizeof" => {
7068 // c_sizeof(x) — return byte size of x's C representation.
7069 if let Some(arg) = args.first() {
7070 let ty = b
7071 .func()
7072 .value_type(*arg)
7073 .unwrap_or(IrType::Int(IntWidth::I32));
7074 let size: i64 = match &ty {
7075 IrType::Int(IntWidth::I8) | IrType::Bool => 1,
7076 IrType::Int(IntWidth::I16) => 2,
7077 IrType::Int(IntWidth::I32) | IrType::Float(FloatWidth::F32) => 4,
7078 IrType::Int(IntWidth::I64) | IrType::Float(FloatWidth::F64) => 8,
7079 IrType::Int(IntWidth::I128) => 16,
7080 IrType::Ptr(_) => 8, // pointers are 8 bytes on ARM64
7081 // Arrays use element size * count, but we don't have shape info here.
7082 // For now, return element size. Proper impl needs descriptor access.
7083 IrType::Array(elem, count) => {
7084 let elem_size = ir_scalar_byte_size(elem.as_ref());
7085 elem_size * (*count as i64)
7086 }
7087 _ => 8, // default to pointer size for unknown types
7088 };
7089 Some(b.const_i64(size))
7090 } else {
7091 None
7092 }
7093 }
7094 "c_associated" => {
7095 // c_associated(p) → p /= null
7096 // c_associated(p, q) → p == q
7097 if args.len() >= 2 {
7098 Some(b.icmp(CmpOp::Eq, args[0], args[1]))
7099 } else if let Some(p) = args.first() {
7100 // Use type-matched zero to avoid register width mismatch.
7101 let ty = b
7102 .func()
7103 .value_type(*p)
7104 .unwrap_or(IrType::Int(IntWidth::I64));
7105 let null = match &ty {
7106 IrType::Int(IntWidth::I32) => b.const_i32(0),
7107 _ => b.const_i64(0),
7108 };
7109 Some(b.icmp(CmpOp::Ne, *p, null))
7110 } else {
7111 None
7112 }
7113 }
7114
7115 // ---- Kind selection intrinsics ----
7116 "selected_int_kind" => {
7117 // selected_int_kind(r): smallest integer kind whose range covers [-10^r, 10^r].
7118 if let Some(arg) = args.first() {
7119 if let Some(r) = extract_const_int_from_value(b, *arg) {
7120 let kind: i32 = if r <= 2 {
7121 1
7122 }
7123 // i8: ±127
7124 else if r <= 4 {
7125 2
7126 }
7127 // i16: ±32767
7128 else if r <= 9 {
7129 4
7130 }
7131 // i32: ±2.1e9
7132 else if r <= 18 {
7133 8
7134 }
7135 // i64: ±9.2e18
7136 else if r <= 38 {
7137 16
7138 }
7139 // i128: ±1.7e38
7140 else {
7141 -1
7142 }; // no kind available
7143 Some(b.const_i32(kind))
7144 } else {
7145 Some(b.const_i32(4)) // non-constant: default to 4
7146 }
7147 } else {
7148 None
7149 }
7150 }
7151 "selected_real_kind" => {
7152 // selected_real_kind(p[, r]): smallest real kind with ≥p decimal digits.
7153 if let Some(arg) = args.first() {
7154 if let Some(p) = extract_const_int_from_value(b, *arg) {
7155 let kind: i32 = if p <= 6 {
7156 4
7157 }
7158 // f32: ~7 digits
7159 else if p <= 15 {
7160 8
7161 }
7162 // f64: ~15 digits
7163 else {
7164 -1
7165 }; // no kind available
7166 Some(b.const_i32(kind))
7167 } else {
7168 Some(b.const_i32(8)) // non-constant: default to 8
7169 }
7170 } else {
7171 None
7172 }
7173 }
7174
7175 // ---- IEEE arithmetic intrinsics ----
7176 "ieee_is_nan" => {
7177 // IEEE_IS_NAN(x) → x != x (NaN is the only value that is not equal to itself)
7178 args.first().map(|arg| b.fcmp(CmpOp::Ne, *arg, *arg))
7179 }
7180 "ieee_is_finite" => {
7181 // IEEE_IS_FINITE(x) → (x - x) == 0.0
7182 // For finite values, x-x is 0.0. For inf, x-x is NaN. For NaN, x-x is NaN.
7183 if let Some(arg) = args.first() {
7184 let diff = b.fsub(*arg, *arg);
7185 let ty = b
7186 .func()
7187 .value_type(*arg)
7188 .unwrap_or(IrType::Float(FloatWidth::F64));
7189 let zero = match &ty {
7190 IrType::Float(FloatWidth::F32) => b.const_f32(0.0),
7191 _ => b.const_f64(0.0),
7192 };
7193 Some(b.fcmp(CmpOp::Eq, diff, zero))
7194 } else {
7195 None
7196 }
7197 }
7198 "ieee_support_datatype" | "ieee_support_denormal" => {
7199 // These return .TRUE. for supported types on ARM64.
7200 Some(b.const_bool(true))
7201 }
7202
7203 _ => None,
7204 }
7205 }
7206
/// Resolve a generic interface call to a specific procedure.
/// Returns Some(specific_name) if the callee is a NamedInterface
/// and a matching specific is found. Returns None otherwise.
///
/// * `callee` — lowercase generic name (or `assignment(=)` /
///   `operator(...)` key) to look up in the symbol table.
/// * `arg_vals` — already-lowered actual arguments; their IR types
///   (queried from the builder) drive overload selection.
///
/// A specific matches when its arity equals the actual count and
/// every declared dummy agrees with its actual per
/// `arg_matches_declared` (category AND kind width).
fn resolve_generic_call(
    st: &SymbolTable,
    b: &FuncBuilder,
    callee: &str,
    arg_vals: &[ValueId],
) -> Option<String> {
    // Find the generic interface symbol in any scope.
    let sym = st.find_symbol_any_scope(callee)?;
    if sym.kind != crate::sema::symtab::SymbolKind::NamedInterface {
        return None;
    }
    // For a NamedInterface, `arg_names` holds the specific-procedure
    // names rather than dummy-argument names.
    if sym.arg_names.is_empty() {
        return None;
    }

    // Classify actual argument types.
    let actual_types: Vec<Option<IrType>> =
        arg_vals.iter().map(|v| b.func().value_type(*v)).collect();

    // For each specific procedure, look up its declared argument types
    // from the symbol table and check if they match the actual types.
    // Match on the full IR type (category AND width) so real vs double
    // precision resolve to the right specific. Also require the arity
    // to match exactly before attempting per-arg checks.
    for specific in &sym.arg_names {
        // Find the specific's own scope; its lowercased scope name is
        // compared against the specific's name (NOTE(review): this
        // assumes specifics are stored lowercase in `arg_names` —
        // confirm against the sema layer).
        let scope = st.all_scopes().iter().find(|s| match &s.kind {
            crate::sema::symtab::ScopeKind::Function(n)
            | crate::sema::symtab::ScopeKind::Subroutine(n) => n.to_lowercase() == *specific,
            _ => false,
        });
        let Some(scope) = scope else {
            continue;
        };

        // Use the ordered argument list from the scope, not the
        // arbitrary symbols-map iteration order (which doesn't honor
        // declaration order and was previously confusing position with
        // hash order on multi-arg specifics).
        let declared_args: Vec<&crate::sema::symtab::Symbol> = scope
            .arg_order
            .iter()
            .filter_map(|n| scope.symbols.get(n))
            .collect();

        if declared_args.len() != actual_types.len() {
            continue;
        }

        // Per-argument check. A dummy without recorded type info, or
        // an actual whose IR type can't be determined, disqualifies
        // the specific rather than matching permissively.
        let mut type_match = true;
        for ((decl_sym, actual_ty), arg_val) in declared_args
            .iter()
            .zip(actual_types.iter())
            .zip(arg_vals.iter())
        {
            let Some(ti) = decl_sym.type_info.as_ref() else {
                type_match = false;
                break;
            };
            let Some(at) = actual_ty.as_ref() else {
                type_match = false;
                break;
            };
            if !arg_matches_declared(ti, at, *arg_val, b) {
                type_match = false;
                break;
            }
        }

        if type_match {
            return Some(specific.clone());
        }
    }

    // No specific matched the actual argument types. Return None so
    // the caller can report a diagnostic instead of silently dispatching
    // to an arbitrary specific (which mis-interprets bits across kinds).
    None
}
7288
/// Try to lower an assignment through a user-defined `INTERFACE
/// ASSIGNMENT(=)`. Returns true when a specific subroutine matches
/// the (LHS info, RHS expression) type pair and the call was emitted.
/// The LHS is passed by reference (its address) to the assignment
/// subroutine so the callee can store into the caller's storage.
///
/// * `lhs_key` — lowercase local-variable key for the assignment
///   target; must name an entry in `ctx.locals`.
/// * `rhs` — the assignment's right-hand-side expression.
///
/// Returns false (leaving the intrinsic assignment path to handle
/// it) when no `assignment(=)` interface exists, the LHS isn't a
/// known local, the types match trivially, or no specific matches.
fn try_defined_assignment(
    b: &mut FuncBuilder,
    ctx: &mut LowerCtx,
    lhs_key: &str,
    rhs: &crate::ast::expr::SpannedExpr,
) -> bool {
    let sym = match ctx.st.find_symbol_any_scope("assignment(=)") {
        Some(s) if s.kind == crate::sema::symtab::SymbolKind::NamedInterface => s,
        _ => return false,
    };
    if sym.arg_names.is_empty() {
        return false;
    }

    let lhs_info = match ctx.locals.get(lhs_key) {
        Some(i) => i.clone(),
        None => return false,
    };

    // Lower the RHS to an SSA value so resolve_generic_call can see
    // its IR type. If the RHS lowering ends up unused because we
    // instead take a default path, the IR instructions are still
    // emitted — that's harmless (dead-code elim removes them).
    let rhs_val = lower_expr_ctx_tl(b, ctx, rhs);
    let lhs_val = lhs_info.addr;

    // Only attempt overload resolution when the LHS and RHS types
    // differ in a way the intrinsic assignment can't handle — e.g.
    // derived-type vs scalar, derived-type with user-defined store.
    // For strict type equality the built-in paths are correct and
    // cheaper; defer to them.
    let lhs_ty = lhs_info.ty.clone();
    let rhs_ty = b.func().value_type(rhs_val);
    // Skip for plain scalar/scalar of matching category.
    if let Some(rt) = rhs_ty.as_ref() {
        if ir_types_dispatch_equal(&lhs_ty, rt) && lhs_info.derived_type.is_none() {
            return false;
        }
    }

    // For ASSIGNMENT(=) resolution, the conceptual argument list is
    // (lhs, rhs). The LHS is represented by its address (matching the
    // subroutine's intent(out) dummy), the RHS by its value.
    let arg_vals = vec![lhs_val, rhs_val];
    let resolved = match resolve_generic_call(ctx.st, b, "assignment(=)", &arg_vals) {
        Some(r) => r,
        None => return false,
    };
    let rk = resolved.to_lowercase();
    let (call_name, _) = resolved_symbol_call_target(ctx.st, &rk, &resolved);
    // Prefer the internal-function index (same compilation unit) over
    // an external symbol reference.
    let func_ref = ctx
        .internal_funcs
        .get(&rk)
        .copied()
        .map(FuncRef::Internal)
        .unwrap_or_else(|| FuncRef::External(call_name));
    // RHS for ASSIGNMENT(=) is passed by reference to match
    // intent(in) dummy semantics — build a temp slot for scalar
    // values (the common case for user assignment from integer etc).
    let rhs_for_call = if rhs_ty.as_ref().map(|t| !t.is_ptr()).unwrap_or(true) {
        let ty = rhs_ty.clone().unwrap_or(IrType::Int(IntWidth::I32));
        let slot = b.alloca(ty);
        b.store(rhs_val, slot);
        slot
    } else {
        rhs_val
    };
    b.call(func_ref, vec![lhs_val, rhs_for_call], IrType::Void);
    true
}
7364
7365 /// Map a `BinaryOp` to the Fortran operator-interface name used by
7366 /// `INTERFACE OPERATOR(<op>)`. Returns None for operators that have no
7367 /// user-defined form in this compiler (the Defined(name) variant uses
7368 /// `operator(.name.)` and is handled by the caller via the embedded
7369 /// string).
7370 fn binary_op_interface_name(op: &BinaryOp) -> Option<String> {
7371 Some(match op {
7372 BinaryOp::Add => "operator(+)".into(),
7373 BinaryOp::Sub => "operator(-)".into(),
7374 BinaryOp::Mul => "operator(*)".into(),
7375 BinaryOp::Div => "operator(/)".into(),
7376 BinaryOp::Pow => "operator(**)".into(),
7377 BinaryOp::Concat => "operator(//)".into(),
7378 BinaryOp::Eq => "operator(==)".into(),
7379 BinaryOp::Ne => "operator(/=)".into(),
7380 BinaryOp::Lt => "operator(<)".into(),
7381 BinaryOp::Le => "operator(<=)".into(),
7382 BinaryOp::Gt => "operator(>)".into(),
7383 BinaryOp::Ge => "operator(>=)".into(),
7384 BinaryOp::And => "operator(.and.)".into(),
7385 BinaryOp::Or => "operator(.or.)".into(),
7386 BinaryOp::Eqv => "operator(.eqv.)".into(),
7387 BinaryOp::Neqv => "operator(.neqv.)".into(),
7388 BinaryOp::Defined(name) => format!("operator(.{}.)", name.to_lowercase()),
7389 })
7390 }
7391
7392 /// Attempt to resolve a binary expression through a user-defined
7393 /// `INTERFACE OPERATOR(<op>)`. Returns `(specific_name, [lhs, rhs])`
7394 /// when a specific matches both operand types; `None` when no
7395 /// operator interface exists or no specific matches. Intrinsic ops
7396 /// between numerics keep their fast path by returning None for
7397 /// numeric × numeric combinations even if a generic is registered
7398 /// under the same operator (the language allows user overloads only
7399 /// for types not already handled by the intrinsic operator).
7400 fn resolve_operator_overload(
7401 st: &SymbolTable,
7402 b: &FuncBuilder,
7403 op: &BinaryOp,
7404 lty: &IrType,
7405 rty: &IrType,
7406 lhs: ValueId,
7407 rhs: ValueId,
7408 ) -> Option<(String, Vec<ValueId>)> {
7409 // Both operands numeric: fall through to intrinsic arithmetic.
7410 // Derived-type and character overloads are the common case for
7411 // user operators; numeric-on-numeric keeps the cheap path.
7412 if lty.is_numeric() && rty.is_numeric() {
7413 return None;
7414 }
7415 let iface_name = binary_op_interface_name(op)?;
7416 let sym = st.find_symbol_any_scope(&iface_name)?;
7417 if sym.kind != crate::sema::symtab::SymbolKind::NamedInterface {
7418 return None;
7419 }
7420 if sym.arg_names.is_empty() {
7421 return None;
7422 }
7423
7424 let arg_vals = vec![lhs, rhs];
7425 let resolved = resolve_generic_call(st, b, &iface_name, &arg_vals)?;
7426 Some((resolved, arg_vals))
7427 }
7428
7429 /// Reorder a call's actual-arg list so keyword arguments land in
7430 /// the positions declared by the callee. Per F2003 §12.4.1.2:
7431 /// positional args come first (matching the callee's first N
7432 /// params), then any keyword arg can appear in any order but each
7433 /// must reference a param past the last positional. When the
7434 /// callee's signature isn't resolvable (e.g. external BIND(C)),
7435 /// the original list passes through unchanged.
7436 fn reorder_args_by_keyword(
7437 args: &[crate::ast::expr::Argument],
7438 callee_key: &str,
7439 st: &SymbolTable,
7440 ) -> Vec<crate::ast::expr::Argument> {
7441 // Fast path: no keyword args anywhere → pass through.
7442 if args.iter().all(|a| a.keyword.is_none()) {
7443 return args.to_vec();
7444 }
7445 // Look up the callee's declared param order. NamedInterface
7446 // symbols live in scope.symbols keyed by lowercase name and
7447 // carry the specifics in arg_names; we want the actual
7448 // procedure's arg_order, which lives on its scope.
7449 use crate::sema::symtab::ScopeKind;
7450 let arg_order: Vec<String> = {
7451 let mut found: Option<Vec<String>> = None;
7452 for scope in st.all_scopes() {
7453 if let ScopeKind::Function(n) | ScopeKind::Subroutine(n) = &scope.kind {
7454 if n.eq_ignore_ascii_case(callee_key) {
7455 found = Some(scope.arg_order.clone());
7456 break;
7457 }
7458 }
7459 }
7460 match found {
7461 Some(v) if !v.is_empty() => v,
7462 _ => return args.to_vec(), // no signature info → no reorder
7463 }
7464 };
7465 // Build slot list the size of the callee's declared params.
7466 // Positional actuals fill slots 0..K. Keyword actuals look up
7467 // their slot by name. Unused slots stay None (OPTIONAL args
7468 // get their runtime null treatment in the subsequent per-call
7469 // hidden-arg logic).
7470 let mut slots: Vec<Option<crate::ast::expr::Argument>> = vec![None; arg_order.len()];
7471 let mut last_positional = 0usize;
7472 for a in args {
7473 if let Some(kw) = &a.keyword {
7474 let key = kw.to_lowercase();
7475 if let Some(idx) = arg_order.iter().position(|n| n.to_lowercase() == key) {
7476 slots[idx] = Some(a.clone());
7477 continue;
7478 }
7479 // Unknown keyword — leave it at the end to preserve
7480 // error-reporting locality; sema should have rejected
7481 // this earlier.
7482 slots.push(Some(a.clone()));
7483 continue;
7484 }
7485 if last_positional < slots.len() {
7486 slots[last_positional] = Some(a.clone());
7487 last_positional += 1;
7488 } else {
7489 slots.push(Some(a.clone()));
7490 }
7491 }
7492 slots.into_iter().flatten().collect()
7493 }
7494
7495 /// Generic SUBROUTINE call-site resolver. Mirror of the generic
7496 /// function-call logic in `lower_expr_full`. If `name` is a
7497 /// NamedInterface, use the actual arg types to pick a specific;
7498 /// otherwise pass the name through unchanged. Failed resolution
7499 /// emits a compile-time diagnostic and exits, same as the
7500 /// function-call path, so silent miscompiles don't slip through.
7501 fn resolve_subroutine_call_name(
7502 st: &SymbolTable,
7503 b: &FuncBuilder,
7504 orig_name: &str,
7505 key: &str,
7506 arg_vals: &[ValueId],
7507 span: crate::lexer::Span,
7508 ) -> (String, String) {
7509 if let Some(sym) = st.find_symbol_any_scope(key) {
7510 if sym.kind == crate::sema::symtab::SymbolKind::NamedInterface {
7511 match resolve_generic_call(st, b, key, arg_vals) {
7512 Some(resolved) => {
7513 let rk = resolved.to_lowercase();
7514 let (call_name, _) = resolved_symbol_call_target(st, &rk, &resolved);
7515 return (call_name, rk);
7516 }
7517 None => {
7518 let specifics = sym.arg_names.join(", ");
7519 eprintln!(
7520 "armfortas: error: {}:{}: no specific procedure of generic '{}' matches the actual arguments; candidates: [{}]",
7521 span.start.line,
7522 span.start.col,
7523 orig_name,
7524 specifics,
7525 );
7526 let _ = std::io::stderr().flush();
7527 std::process::exit(1);
7528 }
7529 }
7530 }
7531 }
7532 resolved_symbol_call_target(st, key, orig_name)
7533 }
7534
/// Link-time symbol name for a procedure hosted by a module:
/// `afs_modproc_<module>_<proc>`, with only the module component
/// case-folded (the procedure name is used verbatim).
fn module_procedure_symbol_name(module_name: &str, proc_name: &str) -> String {
    let mut sym = String::from("afs_modproc_");
    sym.push_str(&module_name.to_lowercase());
    sym.push('_');
    sym.push_str(proc_name);
    sym
}
7538
/// Flatten a host link name into a symbol-safe prefix: ASCII
/// alphanumerics are lowercased, every other character becomes '_'.
fn sanitize_internal_host_symbol(host_link_name: &str) -> String {
    let mut out = String::with_capacity(host_link_name.len());
    for ch in host_link_name.chars() {
        if ch.is_ascii_alphanumeric() {
            out.push(ch.to_ascii_lowercase());
        } else {
            out.push('_');
        }
    }
    out
}
7551
7552 fn lowered_procedure_symbol_name(
7553 name: &str,
7554 bind: Option<&crate::ast::unit::BindInfo>,
7555 host_link_name: Option<&str>,
7556 host_module: Option<&str>,
7557 internal_only: bool,
7558 internal_funcs: &HashMap<String, u32>,
7559 ) -> String {
7560 if let Some(bind) = bind {
7561 return bind
7562 .name
7563 .as_deref()
7564 .unwrap_or(name)
7565 .trim_matches('\'')
7566 .trim_matches('"')
7567 .to_string();
7568 }
7569 if internal_only {
7570 if let Some(idx) = internal_funcs.get(&name.to_lowercase()) {
7571 let host_prefix = host_link_name
7572 .map(sanitize_internal_host_symbol)
7573 .filter(|prefix| !prefix.is_empty())
7574 .unwrap_or_else(|| "local".into());
7575 return format!("afs_internal_{}_{}", host_prefix, idx);
7576 }
7577 }
7578 if let Some(module_name) = host_module {
7579 return module_procedure_symbol_name(module_name, name);
7580 }
7581 name.to_string()
7582 }
7583
7584 fn symbol_link_name(st: &SymbolTable, sym: &crate::sema::symtab::Symbol) -> String {
7585 if let Some(binding_label) = &sym.attrs.binding_label {
7586 return binding_label.clone();
7587 }
7588 if matches!(
7589 sym.kind,
7590 crate::sema::symtab::SymbolKind::Function | crate::sema::symtab::SymbolKind::Subroutine
7591 ) {
7592 match &st.scope(sym.scope).kind {
7593 crate::sema::symtab::ScopeKind::Module(module_name)
7594 | crate::sema::symtab::ScopeKind::Submodule(module_name) => {
7595 return module_procedure_symbol_name(module_name, &sym.name);
7596 }
7597 _ => {}
7598 }
7599 }
7600 sym.name.clone()
7601 }
7602
7603 fn resolved_symbol_call_target(
7604 st: &SymbolTable,
7605 key: &str,
7606 fallback_name: &str,
7607 ) -> (String, String) {
7608 if let Some(sym) = st.find_symbol_any_scope(key) {
7609 let call_name = symbol_link_name(st, sym);
7610 return (call_name, sym.name.to_lowercase());
7611 }
7612 (fallback_name.to_string(), key.to_string())
7613 }
7614
/// Emit a call to a named (non-intrinsic) function and return the
/// call's result value.
///
/// Pipeline: reorder keyword actuals into declared positions →
/// resolve a generic interface to a specific → classify each actual
/// against the callee's by-value / array-descriptor /
/// string-descriptor masks → append hidden i64 length args for
/// `character(*)` dummies → append host-closure args for
/// CONTAINS-internal callees → emit the call (internal index when
/// known, external symbol otherwise).
///
/// `hidden_result`, when present, is a caller-allocated result slot
/// passed as the very first argument (hidden-result ABI).
fn emit_named_function_call(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    st: &SymbolTable,
    type_layouts: Option<&crate::sema::type_layout::TypeLayoutRegistry>,
    internal_funcs: Option<&HashMap<String, u32>>,
    contained_host_refs: Option<&HashMap<String, Vec<String>>>,
    descriptor_params: Option<&HashMap<String, Vec<bool>>>,
    callee_name: &str,
    args: &[crate::ast::expr::Argument],
    hidden_result: Option<ValueId>,
    ret_ty: IrType,
) -> ValueId {
    let key = callee_name.to_lowercase();
    let reordered = reorder_args_by_keyword(args, &key, st);
    let args: &[crate::ast::expr::Argument] = &reordered;

    // Lower element actuals once purely so generic resolution can see
    // their IR types. The real call re-lowers them below; the first
    // lowering becomes dead IR (same pattern as try_defined_assignment)
    // and is left to dead-code elimination.
    let intrinsic_arg_vals: Vec<ValueId> = args
        .iter()
        .map(|a| match &a.value {
            crate::ast::expr::SectionSubscript::Element(e) => lower_expr_full(
                b,
                locals,
                e,
                st,
                type_layouts,
                internal_funcs,
                contained_host_refs,
                descriptor_params,
            ),
            _ => b.const_i32(0),
        })
        .collect();

    let resolved_name = match resolve_generic_call(st, b, &key, &intrinsic_arg_vals) {
        Some(name) => name,
        None => callee_name.to_string(),
    };
    let resolved_key = resolved_name.to_lowercase();
    let (call_name, callee_key) = resolved_symbol_call_target(st, &resolved_key, &resolved_name);

    // Per-parameter ABI masks. Each lookup tries the resolved specific
    // first, then falls back to the original (generic) key.
    let callee_value_args =
        callee_value_arg_mask(st, &callee_key).or_else(|| callee_value_arg_mask(st, &key));
    let callee_descriptor_args = descriptor_params
        .and_then(|m| m.get(&callee_key).cloned().or_else(|| m.get(&key).cloned()));
    let callee_string_descriptor_args = callee_string_descriptor_arg_mask(st, &callee_key)
        .or_else(|| callee_string_descriptor_arg_mask(st, &key));

    let mut call_args = Vec::new();
    if let Some(desc) = hidden_result {
        call_args.push(desc);
    }
    // Lower each positional actual according to its mask bits:
    // by-value expression, array descriptor, string descriptor, or
    // (default) by-reference address.
    call_args.extend(args.iter().enumerate().map(|(i, a)| {
        let is_value = callee_value_args
            .as_ref()
            .map(|mask| i < mask.len() && mask[i])
            .unwrap_or(false);
        let wants_descriptor = callee_descriptor_args
            .as_ref()
            .map(|mask| i < mask.len() && mask[i])
            .unwrap_or(false);
        let wants_string_descriptor = callee_string_descriptor_args
            .as_ref()
            .map(|mask| i < mask.len() && mask[i])
            .unwrap_or(false);
        match &a.value {
            crate::ast::expr::SectionSubscript::Element(e) => {
                if is_value {
                    lower_expr_full(
                        b,
                        locals,
                        e,
                        st,
                        type_layouts,
                        internal_funcs,
                        contained_host_refs,
                        descriptor_params,
                    )
                } else if wants_descriptor {
                    lower_arg_descriptor(b, locals, e, st)
                } else if wants_string_descriptor {
                    lower_arg_string_descriptor(b, locals, e, st, type_layouts)
                } else {
                    lower_arg_by_ref(b, locals, e, st)
                }
            }
            _ => b.const_i32(0),
        }
    }));

    // character(*) dummies receive a hidden trailing i64 length for
    // each flagged parameter, in parameter order. Unresolvable lengths
    // degrade to 0 rather than aborting.
    if let Some(cls_flags) =
        callee_char_len_star_mask(st, &callee_key).or_else(|| callee_char_len_star_mask(st, &key))
    {
        for (i, flag) in cls_flags.iter().enumerate() {
            if !*flag || i >= args.len() {
                continue;
            }
            if let crate::ast::expr::SectionSubscript::Element(e) = &args[i].value {
                if let Some((_ptr, len)) = char_addr_and_runtime_len(b, e, locals) {
                    call_args.push(len);
                } else if let Expr::StringLiteral { value, .. } = &e.node {
                    call_args.push(b.const_i64(value.len() as i64));
                } else if expr_is_character_expr(b, locals, e, st, type_layouts) {
                    let (_ptr, len) = lower_string_expr_full(
                        b,
                        locals,
                        e,
                        st,
                        type_layouts,
                        internal_funcs,
                        contained_host_refs,
                        descriptor_params,
                    );
                    call_args.push(len);
                } else {
                    call_args.push(b.const_i64(0));
                }
            } else {
                call_args.push(b.const_i64(0));
            }
        }
    }

    // Host-closure args: use whichever key the closure map actually
    // knows about (resolved specific first, original key otherwise).
    let closure_key = if contained_host_refs
        .map(|m| m.contains_key(&callee_key))
        .unwrap_or(false)
    {
        &callee_key
    } else {
        &key
    };
    append_host_closure_args_raw(b, locals, contained_host_refs, closure_key, &mut call_args);

    // Same-unit callees go through their internal index; everything
    // else becomes an external symbol reference.
    let func_ref = internal_funcs
        .and_then(|map| map.get(&callee_key).or_else(|| map.get(&key)).copied())
        .map(FuncRef::Internal)
        .unwrap_or_else(|| FuncRef::External(call_name));
    b.call(func_ref, call_args, ret_ty)
}
7754
/// Emit a call whose result is written directly into the
/// caller-provided descriptor at `desc_addr` (hidden-result ABI).
///
/// Mirrors `emit_named_function_call` but works from a `LowerCtx`,
/// always passes `desc_addr` as the hidden first argument, and
/// discards the (void) call result.
fn lower_alloc_return_call_into_desc(
    b: &mut FuncBuilder,
    ctx: &LowerCtx,
    desc_addr: ValueId,
    callee_name: &str,
    args: &[crate::ast::expr::Argument],
) {
    let key = callee_name.to_lowercase();
    let reordered = reorder_args_by_keyword(args, &key, ctx.st);
    let args: &[crate::ast::expr::Argument] = &reordered;

    // Lower element actuals once so generic resolution can inspect
    // their IR types; any unused lowering is dead IR removed by DCE.
    let intrinsic_arg_vals: Vec<ValueId> = args
        .iter()
        .map(|a| match &a.value {
            crate::ast::expr::SectionSubscript::Element(e) => lower_expr_ctx(b, ctx, e),
            _ => b.const_i32(0),
        })
        .collect();

    let resolved_name = match resolve_generic_call(ctx.st, b, &key, &intrinsic_arg_vals) {
        Some(name) => name,
        None => callee_name.to_string(),
    };
    let resolved_key = resolved_name.to_lowercase();
    let (call_name, callee_key) =
        resolved_symbol_call_target(ctx.st, &resolved_key, &resolved_name);

    // Per-parameter ABI masks; resolved specific first, then the
    // original (generic) key as fallback.
    let callee_value_args =
        callee_value_arg_mask(ctx.st, &callee_key).or_else(|| callee_value_arg_mask(ctx.st, &key));
    let callee_descriptor_args = ctx
        .descriptor_params
        .get(&callee_key)
        .cloned()
        .or_else(|| ctx.descriptor_params.get(&key).cloned());
    let callee_string_descriptor_args = callee_string_descriptor_arg_mask(ctx.st, &callee_key)
        .or_else(|| callee_string_descriptor_arg_mask(ctx.st, &key));

    // Hidden result descriptor always occupies argument slot 0.
    let mut call_args = vec![desc_addr];
    call_args.extend(args.iter().enumerate().map(|(i, a)| {
        let is_value = callee_value_args
            .as_ref()
            .map(|mask| i < mask.len() && mask[i])
            .unwrap_or(false);
        let wants_descriptor = callee_descriptor_args
            .as_ref()
            .map(|mask| i < mask.len() && mask[i])
            .unwrap_or(false);
        let wants_string_descriptor = callee_string_descriptor_args
            .as_ref()
            .map(|mask| i < mask.len() && mask[i])
            .unwrap_or(false);
        match &a.value {
            crate::ast::expr::SectionSubscript::Element(e) => {
                if is_value {
                    lower_expr_ctx(b, ctx, e)
                // NOTE(review): string-descriptor is checked before
                // array-descriptor here — the reverse of
                // emit_named_function_call. Confirm the two masks are
                // mutually exclusive per parameter.
                } else if wants_string_descriptor {
                    lower_arg_string_descriptor(b, &ctx.locals, e, ctx.st, Some(ctx.type_layouts))
                } else if wants_descriptor {
                    lower_arg_descriptor(b, &ctx.locals, e, ctx.st)
                } else {
                    lower_arg_by_ref_ctx(b, ctx, e)
                }
            }
            _ => b.const_i32(0),
        }
    }));

    // Hidden trailing i64 lengths for character(*) dummies; lengths
    // that can't be computed degrade to 0.
    if let Some(cls_flags) = ctx
        .char_len_star_params
        .get(&callee_key)
        .or_else(|| ctx.char_len_star_params.get(&key))
    {
        for (i, flag) in cls_flags.iter().enumerate() {
            if !*flag || i >= args.len() {
                continue;
            }
            if let crate::ast::expr::SectionSubscript::Element(e) = &args[i].value {
                call_args.push(
                    actual_char_arg_runtime_len(b, &ctx.locals, e, ctx.st, Some(ctx.type_layouts))
                        .unwrap_or_else(|| b.const_i64(0)),
                );
            } else {
                call_args.push(b.const_i64(0));
            }
        }
    }

    append_host_closure_args(b, ctx, &callee_key, &mut call_args);

    // Same-unit callees use their internal index; otherwise link
    // against the external symbol.
    let func_ref = ctx
        .internal_funcs
        .get(&callee_key)
        .or_else(|| ctx.internal_funcs.get(&key))
        .copied()
        .map(FuncRef::Internal)
        .unwrap_or_else(|| FuncRef::External(call_name));
    b.call(func_ref, call_args, IrType::Void);
}
7853
7854 fn procedure_pointer_signature_key(st: &SymbolTable, key: &str) -> Option<String> {
7855 let sym = st.find_symbol_any_scope(key)?;
7856 if sym.kind != crate::sema::symtab::SymbolKind::ProcedurePointer {
7857 return None;
7858 }
7859 Some(
7860 sym.attrs
7861 .procedure_iface
7862 .as_deref()
7863 .unwrap_or(key)
7864 .to_lowercase(),
7865 )
7866 }
7867
7868 fn procedure_pointer_call_target(
7869 b: &mut FuncBuilder,
7870 locals: &HashMap<String, LocalInfo>,
7871 st: &SymbolTable,
7872 key: &str,
7873 ) -> Option<(ValueId, String)> {
7874 let signature_key = procedure_pointer_signature_key(st, key)?;
7875 let info = locals.get(key)?;
7876 let load_ty = if info.ty.is_ptr() {
7877 info.ty.clone()
7878 } else {
7879 IrType::Ptr(Box::new(info.ty.clone()))
7880 };
7881 Some((b.load_typed(info.addr, load_ty), signature_key))
7882 }
7883
7884 fn procedure_pointer_symbol_addr_elem_type(info: &LocalInfo) -> IrType {
7885 match &info.ty {
7886 IrType::Ptr(inner) => inner.as_ref().clone(),
7887 ty => ty.clone(),
7888 }
7889 }
7890
7891 /// Does the actual IR value at `arg_val` satisfy the declared
7892 /// parameter's `TypeInfo`? Derived types need special handling because
7893 /// we lower them to `Ptr(Array(i8, N))` and the name of the struct
7894 /// only lives in the sema layer — look the declared name up on the
7895 /// caller-side local (via its `derived_type` slot) to confirm the
7896 /// match. Non-derived types reduce to `ir_types_dispatch_equal` on
7897 /// the declared IR type.
7898 fn arg_matches_declared(
7899 decl_ti: &crate::sema::symtab::TypeInfo,
7900 actual_ir: &IrType,
7901 arg_val: ValueId,
7902 b: &FuncBuilder,
7903 ) -> bool {
7904 use crate::sema::symtab::TypeInfo;
7905 if let TypeInfo::Derived(decl_name) = decl_ti {
7906 // Walk through any pointer wrappers — the caller passes an
7907 // address to the struct and dispatch has to look through it.
7908 // Derived-type arguments arrive in two shapes:
7909 // * A local alloca: Ptr(Array(i8, N)) → peel → Array
7910 // * A function call result: Ptr(i8) → peel → Int(I8)
7911 // Both are valid struct-reference encodings. The sema layer
7912 // already rejected mismatched derived-type assignments
7913 // before reaching lowering, so we can trust any pointer
7914 // shape at this point.
7915 let mut peeled = actual_ir.clone();
7916 while let IrType::Ptr(inner) = peeled {
7917 peeled = *inner;
7918 }
7919 let _ = decl_name;
7920 return matches!(peeled, IrType::Array(_, _) | IrType::Int(IntWidth::I8));
7921 }
7922 let decl_ir = type_info_to_ir_type(decl_ti);
7923 let _ = arg_val;
7924 let _ = b;
7925 ir_types_dispatch_equal(&decl_ir, actual_ir)
7926 }
7927
7928 /// Kind-aware equality for generic dispatch: an IR value matches a
7929 /// declared parameter type when both category AND width agree. For
7930 /// pointers the pointee type is compared recursively so that a `real`
7931 /// scalar and a `real, dimension(:)` array argument dispatch to
7932 /// different specifics even though both arrive as `Ptr(Float)` at the
7933 /// call site.
7934 fn ir_types_dispatch_equal(decl: &IrType, actual: &IrType) -> bool {
7935 match (decl, actual) {
7936 (IrType::Int(a), IrType::Int(c)) => a == c,
7937 (IrType::Float(a), IrType::Float(c)) => a == c,
7938 (IrType::Bool, IrType::Bool) => true,
7939 // Callers pass by-reference for non-VALUE dummies, so the
7940 // actual IR type is often Ptr(T) while the declared is T.
7941 (decl, IrType::Ptr(p)) => ir_types_dispatch_equal(decl, p),
7942 (IrType::Ptr(p), actual) => ir_types_dispatch_equal(p, actual),
7943 (IrType::Array(e1, _), IrType::Array(e2, _)) => ir_types_dispatch_equal(e1, e2),
7944 _ => false,
7945 }
7946 }
7947
7948 /// Resolve a kind suffix (literal integer or named constant) to a kind width.
7949 /// Returns 4 as the default when unresolvable.
7950 fn real_kind_to_width(kind_str: &str, st: &SymbolTable) -> u8 {
7951 // Try parsing as a literal integer first.
7952 if let Ok(n) = kind_str.parse::<u8>() {
7953 return n;
7954 }
7955 // Otherwise look up the named constant in the symbol table.
7956 if let Some(sym) = st.find_symbol_any_scope(&kind_str.to_lowercase()) {
7957 if let Some(v) = sym.const_value {
7958 return v as u8;
7959 }
7960 }
7961 4 // default
7962 }
7963
7964 /// Extract a compile-time integer constant from a ValueId by
7965 /// looking up its defining instruction in the function.
7966 fn extract_const_int_from_value(b: &FuncBuilder, id: ValueId) -> Option<i64> {
7967 let inst = b.func().find_defining_inst(id)?;
7968 match &inst.kind {
7969 InstKind::ConstInt(v, _) => Some(*v as i64),
7970 _ => None,
7971 }
7972 }
7973
/// Lower an intrinsic subroutine call (CALL system_clock, CALL date_and_time, etc.).
/// Returns true if the name was recognized and lowered, false otherwise —
/// a `false` return lets the caller fall back to lowering a user-defined
/// subroutine call with the same name.
fn lower_intrinsic_subroutine(
    b: &mut FuncBuilder,
    ctx: &mut LowerCtx,
    name: &str,
    args: &[crate::ast::expr::Argument],
) -> bool {
    /// Helper: get the nth positional arg as a by-ref pointer, or null if absent.
    fn nth_arg_ref(
        b: &mut FuncBuilder,
        ctx: &LowerCtx,
        args: &[crate::ast::expr::Argument],
        n: usize,
    ) -> ValueId {
        if n < args.len() {
            if let crate::ast::expr::SectionSubscript::Element(e) = &args[n].value {
                return lower_arg_by_ref_ctx(b, ctx, e);
            }
        }
        b.const_i64(0) // null pointer for missing optional arg
    }

    /// Helper: get the nth positional arg as a by-value expression, or default.
    fn nth_arg_val(
        b: &mut FuncBuilder,
        ctx: &LowerCtx,
        args: &[crate::ast::expr::Argument],
        n: usize,
        default: i32,
    ) -> ValueId {
        if n < args.len() {
            if let crate::ast::expr::SectionSubscript::Element(e) = &args[n].value {
                return lower_expr_ctx(b, ctx, e);
            }
        }
        b.const_i32(default)
    }

    /// Helper: get the nth positional arg as a (ptr, len) string pair, or (null, 0).
    fn nth_arg_str(
        b: &mut FuncBuilder,
        ctx: &LowerCtx,
        args: &[crate::ast::expr::Argument],
        n: usize,
    ) -> (ValueId, ValueId) {
        if n < args.len() {
            if let crate::ast::expr::SectionSubscript::Element(e) = &args[n].value {
                // Check if it's a character variable — pass ptr+len.
                if let Expr::Name { name } = &e.node {
                    if let Some(info) = ctx.locals.get(&name.to_lowercase()) {
                        if info.char_kind != CharKind::None {
                            return lower_string_expr_with_layouts(
                                b,
                                &ctx.locals,
                                e,
                                ctx.st,
                                Some(ctx.type_layouts),
                            );
                        }
                    }
                }
                // Otherwise pass as ref + zero length.
                let ptr = lower_arg_by_ref_ctx(b, ctx, e);
                let zero = b.const_i64(0);
                return (ptr, zero);
            }
        }
        let z = b.const_i64(0);
        (z, z)
    }

    /// Resolve a MOVE_ALLOC argument to its descriptor address.
    /// Returns `(descriptor_addr, is_string_descriptor)`; `None` when the
    /// expression is not a descriptor-backed allocatable (name or
    /// derived-type component).
    fn move_alloc_target(
        b: &mut FuncBuilder,
        ctx: &LowerCtx,
        expr: &crate::ast::expr::SpannedExpr,
    ) -> Option<(ValueId, bool)> {
        match &expr.node {
            // Parentheses are transparent for target resolution.
            Expr::ParenExpr { inner } => move_alloc_target(b, ctx, inner),
            Expr::Name { name } => {
                let info = ctx.locals.get(&name.to_lowercase())?;
                if matches!(info.char_kind, CharKind::Deferred) {
                    Some((string_descriptor_addr(b, info), true))
                } else if local_uses_array_descriptor(info) {
                    Some((array_descriptor_addr(b, info), false))
                } else {
                    None
                }
            }
            Expr::ComponentAccess { .. } => {
                if let Some(info) =
                    component_array_local_info(b, &ctx.locals, expr, ctx.st, ctx.type_layouts)
                {
                    return Some((array_descriptor_addr(b, &info), false));
                }
                // A 32-byte deferred-character field is a StringDescriptor.
                resolve_component_field_access(b, &ctx.locals, expr, ctx.st, ctx.type_layouts)
                    .and_then(|(field_ptr, field)| {
                        if matches!(field_char_kind(&field), CharKind::Deferred) && field.size == 32
                        {
                            Some((field_ptr, true))
                        } else {
                            None
                        }
                    })
            }
            _ => None,
        }
    }

    match name {
        "move_alloc" => {
            // call move_alloc(from, to): transfer the allocation from one
            // descriptor to the other via the runtime.
            let from_expr = args.first().and_then(|arg| {
                if let crate::ast::expr::SectionSubscript::Element(e) = &arg.value {
                    Some(e)
                } else {
                    None
                }
            });
            let to_expr = args.get(1).and_then(|arg| {
                if let crate::ast::expr::SectionSubscript::Element(e) = &arg.value {
                    Some(e)
                } else {
                    None
                }
            });
            let Some((from_desc, from_is_string)) =
                from_expr.and_then(|e| move_alloc_target(b, ctx, e))
            else {
                eprintln!(
                    "armfortas: error: MOVE_ALLOC source must be a descriptor-backed allocatable variable"
                );
                std::process::exit(1);
            };
            let Some((to_desc, to_is_string)) = to_expr.and_then(|e| move_alloc_target(b, ctx, e))
            else {
                eprintln!(
                    "armfortas: error: MOVE_ALLOC destination must be a descriptor-backed allocatable variable"
                );
                std::process::exit(1);
            };
            // Array↔string descriptor mixing is rejected up front.
            if from_is_string != to_is_string {
                eprintln!(
                    "armfortas: error: MOVE_ALLOC source and destination must use matching descriptor kinds"
                );
                std::process::exit(1);
            }
            let runtime = if from_is_string {
                "afs_move_alloc_string"
            } else {
                "afs_move_alloc"
            };
            b.call(
                FuncRef::External(runtime.into()),
                vec![from_desc, to_desc],
                IrType::Void,
            );
            true
        }
        "system_clock" => {
            // call system_clock(count, count_rate, count_max) — all optional
            let count = nth_arg_ref(b, ctx, args, 0);
            let rate = nth_arg_ref(b, ctx, args, 1);
            let max = nth_arg_ref(b, ctx, args, 2);
            b.call(
                FuncRef::External("afs_system_clock".into()),
                vec![count, rate, max],
                IrType::Void,
            );
            true
        }
        "cpu_time" => {
            let time = nth_arg_ref(b, ctx, args, 0);
            b.call(
                FuncRef::External("afs_cpu_time".into()),
                vec![time],
                IrType::Void,
            );
            true
        }
        "date_and_time" => {
            // call date_and_time(date, time, zone, values) — all optional strings/array
            // Runtime: afs_date_and_time(date_buf, date_len, time_buf, time_len, zone_buf, zone_len, values)
            let (date_ptr, date_len) = nth_arg_str(b, ctx, args, 0);
            let (time_ptr, time_len) = nth_arg_str(b, ctx, args, 1);
            let (zone_ptr, zone_len) = nth_arg_str(b, ctx, args, 2);
            let values = nth_arg_ref(b, ctx, args, 3);
            b.call(
                FuncRef::External("afs_date_and_time".into()),
                vec![
                    date_ptr, date_len, time_ptr, time_len, zone_ptr, zone_len, values,
                ],
                IrType::Void,
            );
            true
        }
        "get_command_argument" => {
            // call get_command_argument(number, value, length, status)
            // Runtime: afs_get_command_argument(number, value, value_len, length, status)
            let number = nth_arg_val(b, ctx, args, 0, 0);
            let (val_ptr, val_len) = nth_arg_str(b, ctx, args, 1);
            let length = nth_arg_ref(b, ctx, args, 2);
            let status = nth_arg_ref(b, ctx, args, 3);
            b.call(
                FuncRef::External("afs_get_command_argument".into()),
                vec![number, val_ptr, val_len, length, status],
                IrType::Void,
            );
            true
        }
        "command_argument_count" => {
            // This is a function, not a subroutine — handled in lower_intrinsic.
            false
        }
        "get_command" => {
            // call get_command(command, length, status)
            let (cmd_ptr, cmd_len) = nth_arg_str(b, ctx, args, 0);
            let length = nth_arg_ref(b, ctx, args, 1);
            let status = nth_arg_ref(b, ctx, args, 2);
            b.call(
                FuncRef::External("afs_get_command".into()),
                vec![cmd_ptr, cmd_len, length, status],
                IrType::Void,
            );
            true
        }
        "get_environment_variable" => {
            // call get_environment_variable(name, value, length, status)
            // Runtime: afs_get_environment_variable(name, name_len, value, value_len, length, status)
            let (name_ptr, name_len) = nth_arg_str(b, ctx, args, 0);
            let (val_ptr, val_len) = nth_arg_str(b, ctx, args, 1);
            let length = nth_arg_ref(b, ctx, args, 2);
            let status = nth_arg_ref(b, ctx, args, 3);
            b.call(
                FuncRef::External("afs_get_environment_variable".into()),
                vec![name_ptr, name_len, val_ptr, val_len, length, status],
                IrType::Void,
            );
            true
        }
        "random_number" => {
            let harvest = nth_arg_ref(b, ctx, args, 0);
            b.call(
                FuncRef::External("afs_random_number_f64".into()),
                vec![harvest],
                IrType::Void,
            );
            true
        }
        "random_seed" => {
            // NOTE(review): the seed is lowered by value and sign-extended;
            // assumes the lowered expression is a 32-bit integer — confirm
            // the extend is a no-op for wider seed expressions.
            let seed = nth_arg_val(b, ctx, args, 0, 0);
            let widened = b.int_extend(seed, IntWidth::I64, true);
            b.call(
                FuncRef::External("afs_random_seed".into()),
                vec![widened],
                IrType::Void,
            );
            true
        }
        "execute_command_line" => {
            // WAIT defaults to 1 (true) when the argument is absent.
            let (cmd_ptr, cmd_len) = nth_arg_str(b, ctx, args, 0);
            let wait = nth_arg_val(b, ctx, args, 1, 1);
            let exitstat = nth_arg_ref(b, ctx, args, 2);
            let cmdstat = nth_arg_ref(b, ctx, args, 3);
            b.call(
                FuncRef::External("afs_execute_command_line".into()),
                vec![cmd_ptr, cmd_len, wait, exitstat, cmdstat],
                IrType::Void,
            );
            true
        }

        // ---- iso_c_binding subroutines ----
        "c_f_pointer" => {
            // call c_f_pointer(cptr, fptr [, shape])
            //
            // Scalar pointers store the raw address directly into the
            // pointer slot. Array pointers are descriptor-backed in
            // armfortas, so we must populate the 384-byte descriptor
            // with base_addr/elem_size/rank/bounds instead of
            // treating the second argument like a plain Ptr<T>.
            let raw_cptr = nth_arg_val(b, ctx, args, 0, 0);
            // Normalize the C address to i64 before reinterpreting it.
            let cptr = match b.func().value_type(raw_cptr) {
                Some(IrType::Int(IntWidth::I64)) => raw_cptr,
                _ => b.int_extend(raw_cptr, IntWidth::I64, false),
            };

            let target_expr = args.get(1).and_then(|arg| {
                if let crate::ast::expr::SectionSubscript::Element(expr) = &arg.value {
                    Some(expr)
                } else {
                    None
                }
            });
            if let Some(expr) = target_expr {
                if let Some((target_addr, elem_ty, descriptor_backed)) =
                    c_f_pointer_target(b, ctx, expr)
                {
                    if descriptor_backed {
                        // Zero the whole descriptor first so unset fields
                        // (unused dims, padding) read as zero.
                        let zero32 = b.const_i32(0);
                        let sz384 = b.const_i64(384);
                        b.call(
                            FuncRef::External("memset".into()),
                            vec![target_addr, zero32, sz384],
                            IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
                        );

                        // Descriptor field at byte offset 0: base address.
                        let base_ptr = b.int_to_ptr(cptr, elem_ty.clone());
                        store_byte_aggregate_field(
                            b,
                            target_addr,
                            0,
                            IrType::Ptr(Box::new(elem_ty.clone())),
                            base_ptr,
                        );
                        // Offset 8: element size in bytes.
                        let elem_size = b.const_i64(ir_scalar_byte_size(&elem_ty));
                        store_byte_aggregate_field(
                            b,
                            target_addr,
                            8,
                            IrType::Int(IntWidth::I64),
                            elem_size,
                        );

                        // Offset 16: rank, taken from the SHAPE argument
                        // (0 when SHAPE is absent).
                        let shape_vals = c_f_pointer_shape_values(args);
                        let rank = shape_vals.map_or(0, |vals| vals.len());
                        let rank_val = b.const_i32(rank as i32);
                        store_byte_aggregate_field(
                            b,
                            target_addr,
                            16,
                            IrType::Int(IntWidth::I32),
                            rank_val,
                        );

                        // Offset 20: flags — 2 when the C pointer is
                        // non-null (associated), 0 otherwise.
                        let null_cptr = b.const_i64(0);
                        let is_associated = b.icmp(CmpOp::Ne, cptr, null_cptr);
                        let assoc_flag = b.const_i32(2);
                        let disassoc_flag = b.const_i32(0);
                        let flags = b.select(is_associated, assoc_flag, disassoc_flag);
                        store_byte_aggregate_field(
                            b,
                            target_addr,
                            20,
                            IrType::Int(IntWidth::I32),
                            flags,
                        );

                        // Per-dimension triplets start at offset 24, 24
                        // bytes each: lower bound = 1, then the SHAPE
                        // extent, then stride = 1 (presumably in element
                        // units — matches the contiguous layout assumed
                        // by base_addr/elem_size above).
                        if let Some(values) = shape_vals {
                            for (i, value) in values.iter().enumerate() {
                                let crate::ast::expr::AcValue::Expr(extent_expr) = value else {
                                    continue;
                                };
                                let raw_extent = lower_expr_ctx(b, ctx, extent_expr);
                                let extent = match b.func().value_type(raw_extent) {
                                    Some(IrType::Int(IntWidth::I64)) => raw_extent,
                                    _ => b.int_extend(raw_extent, IntWidth::I64, true),
                                };
                                let base = 24 + (i as i64) * 24;
                                let one64 = b.const_i64(1);
                                store_byte_aggregate_field(
                                    b,
                                    target_addr,
                                    base,
                                    IrType::Int(IntWidth::I64),
                                    one64,
                                );
                                store_byte_aggregate_field(
                                    b,
                                    target_addr,
                                    base + 8,
                                    IrType::Int(IntWidth::I64),
                                    extent,
                                );
                                let stride64 = b.const_i64(1);
                                store_byte_aggregate_field(
                                    b,
                                    target_addr,
                                    base + 16,
                                    IrType::Int(IntWidth::I64),
                                    stride64,
                                );
                            }
                        }
                        return true;
                    }

                    // Scalar pointer: the slot just holds the reinterpreted
                    // C address.
                    let ptr_val = b.int_to_ptr(cptr, elem_ty);
                    b.store(ptr_val, target_addr);
                    return true;
                }
            }

            // Fallback: target not resolvable as a known pointer local —
            // take the argument by reference and infer the pointee type
            // from its IR type, defaulting to i8.
            let fptr = nth_arg_ref(b, ctx, args, 1);
            let inner_pointee = b
                .func()
                .value_type(fptr)
                .and_then(|ty| {
                    if let IrType::Ptr(inner) = ty {
                        if let IrType::Ptr(elem) = inner.as_ref() {
                            Some(elem.as_ref().clone())
                        } else {
                            Some(inner.as_ref().clone())
                        }
                    } else {
                        None
                    }
                })
                .unwrap_or(IrType::Int(IntWidth::I8));
            let ptr_val = b.int_to_ptr(cptr, inner_pointee);
            b.store(ptr_val, fptr);
            true
        }

        _ => false,
    }
}
8390
/// Resolve the second argument of `c_f_pointer` (the Fortran pointer
/// target) to `(address, element type, descriptor_backed)`.
///
/// For a named POINTER local: descriptor-backed pointers yield the
/// address of their array descriptor, plain scalar pointers yield the
/// pointer slot (loading through the spill slot first for by-ref
/// dummies). For a derived-type component, the field is resolved via
/// the type layouts; a 384-byte field is taken to be an array
/// descriptor. Returns `None` when the expression does not name a
/// POINTER variable/component.
fn c_f_pointer_target(
    b: &mut FuncBuilder,
    ctx: &LowerCtx,
    expr: &crate::ast::expr::SpannedExpr,
) -> Option<(ValueId, IrType, bool)> {
    match &expr.node {
        Expr::Name { name } => {
            let info = ctx.locals.get(&name.to_lowercase())?;
            if !info.is_pointer {
                return None;
            }
            if local_uses_array_descriptor(info) {
                Some((array_descriptor_addr(b, info), info.ty.clone(), true))
            } else {
                // By-ref dummy: the slot holds the caller's pointer, so
                // load once to reach the actual pointer storage.
                let addr = if info.by_ref {
                    b.load(info.addr)
                } else {
                    info.addr
                };
                Some((addr, info.ty.clone(), false))
            }
        }
        Expr::ComponentAccess { .. } => {
            let (field_ptr, field) =
                resolve_component_field_access(b, &ctx.locals, expr, ctx.st, ctx.type_layouts)?;
            if !field.pointer {
                return None;
            }
            let elem_ty = type_info_to_storage_ir_type(&field.type_info, ctx.type_layouts);
            // size == 384 marks a descriptor-backed array field.
            Some((field_ptr, elem_ty, field.size == 384))
        }
        _ => None,
    }
}
8427
8428 fn c_f_pointer_shape_values(
8429 args: &[crate::ast::expr::Argument],
8430 ) -> Option<&[crate::ast::expr::AcValue]> {
8431 let arg = args.get(2)?;
8432 let crate::ast::expr::SectionSubscript::Element(expr) = &arg.value else {
8433 return None;
8434 };
8435 if let Expr::ArrayConstructor { values, .. } = &expr.node {
8436 Some(values.as_slice())
8437 } else {
8438 None
8439 }
8440 }
8441
8442 /// Determine the CharKind for a dummy argument from its declaration.
8443 ///
8444 /// Returns `CharKind::Fixed(n)` if the declaration is
8445 /// `character(len=n)`, `CharKind::None` otherwise. Assumed-length
8446 /// dummies (`character(len=*)`) currently return `CharKind::None`
8447 /// because the hidden-length ABI parameter that would supply the
8448 /// runtime length is not yet implemented.
8449 fn arg_char_kind_from_decls(
8450 arg_name: &str,
8451 decls: &[crate::ast::decl::SpannedDecl],
8452 st: &SymbolTable,
8453 ) -> CharKind {
8454 let key = arg_name.to_lowercase();
8455 for decl in decls {
8456 if let Decl::TypeDecl {
8457 type_spec,
8458 entities,
8459 ..
8460 } = &decl.node
8461 {
8462 for entity in entities {
8463 if entity.name.to_lowercase() == key {
8464 match type_spec {
8465 TypeSpec::Character(Some(sel)) => {
8466 if matches!(sel.len, Some(crate::ast::decl::LenSpec::Colon))
8467 && arg_uses_string_descriptor_from_decls(arg_name, decls)
8468 {
8469 return CharKind::Deferred;
8470 }
8471 if let Some(crate::ast::decl::LenSpec::Expr(e)) = &sel.len {
8472 if let Some(n) =
8473 eval_const_int_in_scope_or_any_scope(e, &HashMap::new(), st)
8474 {
8475 return CharKind::Fixed(n);
8476 }
8477 }
8478 }
8479 TypeSpec::Character(None) => return CharKind::Fixed(1),
8480 _ => {}
8481 }
8482 return CharKind::None;
8483 }
8484 }
8485 }
8486 }
8487 CharKind::None
8488 }
8489
8490 fn arg_runtime_char_len_expr_from_decls(
8491 arg_name: &str,
8492 decls: &[crate::ast::decl::SpannedDecl],
8493 st: &SymbolTable,
8494 ) -> Option<crate::ast::expr::SpannedExpr> {
8495 let key = arg_name.to_lowercase();
8496 for decl in decls {
8497 if let Decl::TypeDecl {
8498 type_spec,
8499 entities,
8500 ..
8501 } = &decl.node
8502 {
8503 for entity in entities {
8504 if entity.name.to_lowercase() == key {
8505 if let TypeSpec::Character(Some(sel)) = type_spec {
8506 if let Some(crate::ast::decl::LenSpec::Expr(e)) = &sel.len {
8507 if eval_const_int_in_scope_or_any_scope(e, &HashMap::new(), st)
8508 .is_none()
8509 {
8510 return Some(e.clone());
8511 }
8512 }
8513 }
8514 return None;
8515 }
8516 }
8517 }
8518 }
8519 None
8520 }
8521
8522 fn arg_type_from_decls(
8523 arg_name: &str,
8524 decls: &[crate::ast::decl::SpannedDecl],
8525 st: Option<&SymbolTable>,
8526 ) -> IrType {
8527 let key = arg_name.to_lowercase();
8528 for decl in decls {
8529 if let Decl::TypeDecl {
8530 type_spec,
8531 entities,
8532 ..
8533 } = &decl.node
8534 {
8535 for entity in entities {
8536 if entity.name.to_lowercase() == key {
8537 return lower_type_spec_st(type_spec, st);
8538 }
8539 }
8540 }
8541 }
8542 IrType::Int(IntWidth::I32) // fallback
8543 }
8544
8545 fn arg_dims_from_decls(
8546 arg_name: &str,
8547 decls: &[crate::ast::decl::SpannedDecl],
8548 visible_param_consts: &HashMap<String, ConstScalar>,
8549 ) -> Vec<(i64, i64)> {
8550 let key = arg_name.to_lowercase();
8551 let param_consts = collect_decl_param_consts_with_host(decls, visible_param_consts);
8552 for decl in decls {
8553 if let Decl::TypeDecl {
8554 attrs, entities, ..
8555 } = &decl.node
8556 {
8557 let attr_dims: Option<&Vec<ArraySpec>> = attrs.iter().find_map(|a| {
8558 if let crate::ast::decl::Attribute::Dimension(specs) = a {
8559 Some(specs)
8560 } else {
8561 None
8562 }
8563 });
8564 for entity in entities {
8565 if entity.name.to_lowercase() == key {
8566 let array_spec = entity.array_spec.as_ref().or(attr_dims);
8567 return array_spec
8568 .map(|specs| extract_array_dims(specs, &param_consts))
8569 .unwrap_or_default();
8570 }
8571 }
8572 }
8573 }
8574 Vec::new()
8575 }
8576
/// Post-pass run after normal dummy-arg locals are registered. For
/// every by_ref dummy whose declared array_spec is `ArraySpec::Explicit`
/// with a non-const upper bound (typically another dummy, e.g. `xs(n)`),
/// lower the bound expression and store the i64 result into the dummy's
/// `runtime_dim_upper`. Subsequent bounds checks and stride computation
/// consult this value instead of the (1, 1) fallback that
/// `arg_dims_from_decls` emits when a bound is not compile-time
/// resolvable.
fn install_runtime_dim_bounds(
    b: &mut FuncBuilder,
    locals: &mut HashMap<String, LocalInfo>,
    decls: &[crate::ast::decl::SpannedDecl],
    visible_param_consts: &HashMap<String, ConstScalar>,
    st: &SymbolTable,
) {
    use crate::ast::decl::{ArraySpec, Attribute};
    for decl in decls {
        let Decl::TypeDecl {
            attrs, entities, ..
        } = &decl.node
        else {
            continue;
        };
        // DIMENSION attribute specs apply to entities without their own.
        let attr_dims: Option<&Vec<ArraySpec>> = attrs.iter().find_map(|a| {
            if let Attribute::Dimension(specs) = a {
                Some(specs)
            } else {
                None
            }
        });
        for entity in entities {
            let key = entity.name.to_lowercase();
            let Some(info) = locals.get(&key) else {
                continue;
            };
            // Only by-ref dummies need this; locals own their bounds.
            if !info.by_ref {
                continue;
            }
            // Descriptor-backed dummies carry their bounds in the
            // descriptor itself — nothing to install here.
            if info.descriptor_arg {
                continue;
            }
            let Some(specs) = entity.array_spec.as_ref().or(attr_dims) else {
                continue;
            };

            // One slot per dimension; None means "use the static dims".
            let mut runtime = Vec::with_capacity(specs.len());
            let mut any_runtime = false;
            for spec in specs.iter() {
                let upper_expr = match spec {
                    ArraySpec::Explicit { upper, .. } => Some(upper),
                    _ => None,
                };
                let Some(upper_expr) = upper_expr else {
                    runtime.push(None);
                    continue;
                };
                // Skip compile-time-resolvable bounds — the static
                // path through `info.dims` already has the right value.
                // That covers both literals and PARAMETER references
                // (e.g. `xs(n)` where n is a module parameter).
                if matches!(upper_expr.node, Expr::IntegerLiteral { .. }) {
                    runtime.push(None);
                    continue;
                }
                if eval_const_scalar(upper_expr, visible_param_consts).is_some() {
                    runtime.push(None);
                    continue;
                }
                // Only emit runtime bounds when the referenced name
                // is a dummy already in `locals`. Anything else (host
                // globals, uninstalled host_param_consts, unresolved
                // names) would lower to const_i32(0) here and poison
                // the bounds check.
                let resolvable = match &upper_expr.node {
                    Expr::Name { name } => locals.contains_key(&name.to_lowercase()),
                    _ => true, // arithmetic on dummies — best-effort
                };
                if !resolvable {
                    runtime.push(None);
                    continue;
                }
                // Lower the bound now (entry block context) and widen to
                // i64 so later comparisons don't re-extend per use.
                let val = lower_expr(b, locals, upper_expr, st);
                let as_i64 = match b.func().value_type(val) {
                    Some(IrType::Int(IntWidth::I64)) => val,
                    Some(IrType::Int(_)) => b.int_extend(val, IntWidth::I64, true),
                    _ => val,
                };
                runtime.push(Some(as_i64));
                any_runtime = true;
            }
            if any_runtime {
                if let Some(slot) = locals.get_mut(&key) {
                    slot.runtime_dim_upper = runtime;
                }
            }
        }
    }
}
8675
8676 /// Check if a dummy argument is a derived type, returning the type name if so.
8677 fn arg_derived_type_name(
8678 arg_name: &str,
8679 decls: &[crate::ast::decl::SpannedDecl],
8680 ) -> Option<String> {
8681 let key = arg_name.to_lowercase();
8682 for decl in decls {
8683 if let Decl::TypeDecl {
8684 type_spec,
8685 entities,
8686 ..
8687 } = &decl.node
8688 {
8689 for entity in entities {
8690 if entity.name.to_lowercase() == key {
8691 if let TypeSpec::Type(ref name) = type_spec {
8692 return Some(name.clone());
8693 }
8694 }
8695 }
8696 }
8697 }
8698 None
8699 }
8700
8701 /// Walk a statement and gather every Name reference appearing in
8702 /// expression position. Used by the Stmt::Block lowering to discover
8703 /// implicitly-typed locals introduced by a block-scope IMPLICIT
8704 /// statement so they can be allocated alongside the explicit decls.
8705 fn collect_referenced_names(stmt: &SpannedStmt, out: &mut Vec<String>) {
8706 fn walk_expr(expr: &crate::ast::expr::SpannedExpr, out: &mut Vec<String>) {
8707 use crate::ast::expr::Expr;
8708 match &expr.node {
8709 Expr::Name { name } => out.push(name.clone()),
8710 Expr::BinaryOp { left, right, .. } => {
8711 walk_expr(left, out);
8712 walk_expr(right, out);
8713 }
8714 Expr::UnaryOp { operand, .. } => walk_expr(operand, out),
8715 Expr::ParenExpr { inner } => walk_expr(inner, out),
8716 Expr::ComponentAccess { base, .. } => walk_expr(base, out),
8717 Expr::FunctionCall { callee, args } => {
8718 walk_expr(callee, out);
8719 for a in args {
8720 if let crate::ast::expr::SectionSubscript::Element(e) = &a.value {
8721 walk_expr(e, out);
8722 }
8723 }
8724 }
8725 _ => {}
8726 }
8727 }
8728 macro_rules! walk {
8729 ($e:expr) => {
8730 walk_expr($e, out)
8731 };
8732 }
8733 macro_rules! recurse {
8734 ($s:expr) => {
8735 collect_referenced_names($s, out)
8736 };
8737 }
8738 match &stmt.node {
8739 Stmt::Assignment { target, value } => {
8740 walk!(target);
8741 walk!(value);
8742 }
8743 Stmt::PointerAssignment { target, value, .. } => {
8744 walk!(target);
8745 walk!(value);
8746 }
8747 Stmt::Print { items, .. } => {
8748 for i in items {
8749 walk!(i);
8750 }
8751 }
8752 Stmt::Write {
8753 items, controls, ..
8754 } => {
8755 for i in items {
8756 walk!(i);
8757 }
8758 for c in controls {
8759 walk!(&c.value);
8760 }
8761 }
8762 Stmt::Read {
8763 items, controls, ..
8764 } => {
8765 for i in items {
8766 walk!(i);
8767 }
8768 for c in controls {
8769 walk!(&c.value);
8770 }
8771 }
8772 Stmt::IfConstruct {
8773 condition,
8774 then_body,
8775 else_ifs,
8776 else_body,
8777 ..
8778 } => {
8779 walk!(condition);
8780 for s in then_body {
8781 recurse!(s);
8782 }
8783 for (cond, body) in else_ifs {
8784 walk!(cond);
8785 for s in body {
8786 recurse!(s);
8787 }
8788 }
8789 if let Some(body) = else_body {
8790 for s in body {
8791 recurse!(s);
8792 }
8793 }
8794 }
8795 Stmt::IfStmt { condition, action } => {
8796 walk!(condition);
8797 recurse!(action);
8798 }
8799 Stmt::DoLoop {
8800 body,
8801 var,
8802 start,
8803 end,
8804 step,
8805 ..
8806 } => {
8807 if let Some(v) = var {
8808 out.push(v.clone());
8809 }
8810 if let Some(s) = start {
8811 walk!(s);
8812 }
8813 if let Some(s) = end {
8814 walk!(s);
8815 }
8816 if let Some(s) = step {
8817 walk!(s);
8818 }
8819 for s in body {
8820 recurse!(s);
8821 }
8822 }
8823 Stmt::DoWhile {
8824 body, condition, ..
8825 } => {
8826 walk!(condition);
8827 for s in body {
8828 recurse!(s);
8829 }
8830 }
8831 Stmt::DoConcurrent { body, .. } => {
8832 for s in body {
8833 recurse!(s);
8834 }
8835 }
8836 Stmt::Block { body, .. } => {
8837 for s in body {
8838 recurse!(s);
8839 }
8840 }
8841 Stmt::SelectCase {
8842 selector, cases, ..
8843 } => {
8844 walk!(selector);
8845 for c in cases {
8846 for s in &c.body {
8847 recurse!(s);
8848 }
8849 }
8850 }
8851 Stmt::Call { args, .. } => {
8852 for a in args {
8853 if let crate::ast::expr::SectionSubscript::Element(e) = &a.value {
8854 walk!(e);
8855 }
8856 }
8857 }
8858 Stmt::Labeled { stmt: inner, .. } => recurse!(inner),
8859 _ => {}
8860 }
8861 }
8862
8863 /// Extract the derived-type name from a function `return_type`
8864 /// declaration. Returns `Some("vec")` for `type(vec) function f()` and
8865 /// `None` for intrinsic-typed returns. Used by the Function lowering
8866 /// arm to decide whether the result variable needs derived-type
8867 /// storage and metadata.
8868 fn derived_type_name_for_return(return_type: &Option<TypeSpec>) -> Option<String> {
8869 if let Some(TypeSpec::Type(name)) = return_type {
8870 let lower = name.to_lowercase();
8871 if lower == "c_ptr" || lower == "c_funptr" {
8872 return None;
8873 }
8874 Some(name.clone())
8875 } else {
8876 None
8877 }
8878 }
8879
8880 /// Same as `derived_type_name_for_return` but also looks at the
8881 /// body's declaration of the result variable. A function written as
8882 /// `function add_t(a, b) result(r); type(t) :: r` has no return type
8883 /// on the header — the info lives in `decls` keyed by `result_name`
8884 /// (or the function name if `result(...)` wasn't specified). Without
8885 /// this second lookup the Function lowering arm treats the return
8886 /// type as an intrinsic pointer, allocates an 8-byte ptr slot for
8887 /// `r`, and silently drops every component-assignment in the body.
8888 fn derived_type_name_for_result_var(
8889 return_type: &Option<TypeSpec>,
8890 result_name: &str,
8891 decls: &[crate::ast::decl::SpannedDecl],
8892 ) -> Option<String> {
8893 if let Some(n) = derived_type_name_for_return(return_type) {
8894 return Some(n);
8895 }
8896 let key = result_name.to_lowercase();
8897 for decl in decls {
8898 if let Decl::TypeDecl {
8899 type_spec,
8900 entities,
8901 ..
8902 } = &decl.node
8903 {
8904 for entity in entities {
8905 if entity.name.to_lowercase() == key {
8906 if let TypeSpec::Type(name) = type_spec {
8907 let lower = name.to_lowercase();
8908 if lower == "c_ptr" || lower == "c_funptr" {
8909 return None;
8910 }
8911 return Some(name.clone());
8912 }
8913 }
8914 }
8915 }
8916 }
8917 None
8918 }
8919
8920 /// Does the named variable in `decls` carry the ALLOCATABLE attribute
8921 /// (either on the type-decl attrs or the entity)? Used by host-association
8922 /// closure-passing to decide whether the hidden pointer param should
8923 /// carry a descriptor (384 bytes) or a raw element pointer.
8924 fn decl_is_allocatable(name: &str, decls: &[crate::ast::decl::SpannedDecl]) -> bool {
8925 use crate::ast::decl::Attribute;
8926 let key = name.to_lowercase();
8927 for decl in decls {
8928 if let Decl::TypeDecl {
8929 attrs, entities, ..
8930 } = &decl.node
8931 {
8932 for entity in entities {
8933 if entity.name.to_lowercase() == key {
8934 return attrs.iter().any(|a| matches!(a, Attribute::Allocatable));
8935 }
8936 }
8937 }
8938 }
8939 false
8940 }
8941
8942 /// Does the named variable in `decls` carry the POINTER attribute?
8943 fn decl_is_pointer(name: &str, decls: &[crate::ast::decl::SpannedDecl]) -> bool {
8944 use crate::ast::decl::Attribute;
8945 let key = name.to_lowercase();
8946 for decl in decls {
8947 if let Decl::TypeDecl {
8948 attrs, entities, ..
8949 } = &decl.node
8950 {
8951 for entity in entities {
8952 if entity.name.to_lowercase() == key {
8953 return attrs.iter().any(|a| matches!(a, Attribute::Pointer));
8954 }
8955 }
8956 }
8957 }
8958 false
8959 }
8960
/// Metadata for one host-associated variable threaded into a contained
/// procedure via closure passing. Assembled from the host's declaration
/// list once per (callee, host-var) pair; the contained proc uses it to
/// allocate a spill slot and insert a by_ref LocalInfo so all name
/// references inside the body go through the host's storage via the
/// hidden pointer parameter.
#[derive(Clone)]
struct HostRefParamInfo {
    /// Lowercase host-var name (also the name inside the contained
    /// proc, since Fortran host association preserves the identifier).
    name: String,
    /// SSA value id of the hidden parameter carrying the pointer.
    id: ValueId,
    /// Element type of the host variable (scalar type, or the element
    /// type of an array).
    elem_ty: IrType,
    /// Array bounds. Empty for scalars.
    dims: Vec<(i64, i64)>,
    /// Character storage classification of the host variable.
    char_kind: CharKind,
    /// Derived-type name when the host var is a derived type.
    derived_type: Option<String>,
    /// True when the host var is represented by a 384-byte
    /// ArrayDescriptor (assumed-shape, deferred, allocatable array).
    descriptor_arg: bool,
    /// ALLOCATABLE attribute present on the host declaration.
    allocatable: bool,
    /// POINTER attribute present on the host declaration.
    is_pointer: bool,
}
8987
8988 /// Build the ordered list of host-association hidden parameters for
8989 /// `callee_name` against `host_decls`. The order must match
8990 /// `contained_host_refs[callee_name]` exactly so call sites and the
8991 /// callee agree on positional assignment. `starting_id` is the next
8992 /// free SSA value id after any other params (normal + hidden-length).
8993 fn build_host_ref_params(
8994 callee_name: &str,
8995 host_decls: &[crate::ast::decl::SpannedDecl],
8996 host_param_consts: &HashMap<String, ConstScalar>,
8997 contained_host_refs: &HashMap<String, Vec<String>>,
8998 starting_id: u32,
8999 st: &SymbolTable,
9000 out_params: &mut Vec<Param>,
9001 ) -> Vec<HostRefParamInfo> {
9002 let refs = match contained_host_refs.get(&callee_name.to_lowercase()) {
9003 Some(r) if !r.is_empty() => r,
9004 _ => return Vec::new(),
9005 };
9006 if host_decls.is_empty() {
9007 // No host decls visible — nothing we can resolve. Refuse
9008 // silently so modules with module procedures still lower.
9009 return Vec::new();
9010 }
9011 let host_visible = collect_decl_param_consts_with_host(host_decls, host_param_consts);
9012 let mut infos = Vec::with_capacity(refs.len());
9013 for (idx, hname) in refs.iter().enumerate() {
9014 let elem_ty = arg_type_from_decls(hname, host_decls, Some(st));
9015 let uses_desc = arg_uses_descriptor_from_decls(hname, host_decls);
9016 let alloc = decl_is_allocatable(hname, host_decls);
9017 let ptr_is_pointer = decl_is_pointer(hname, host_decls);
9018 let descriptor_arg = uses_desc || alloc;
9019 let ptr_ty = if descriptor_arg {
9020 IrType::Ptr(Box::new(IrType::Array(
9021 Box::new(IrType::Int(IntWidth::I8)),
9022 384,
9023 )))
9024 } else {
9025 IrType::Ptr(Box::new(elem_ty.clone()))
9026 };
9027 let pid = ValueId(starting_id + idx as u32);
9028 out_params.push(Param {
9029 name: format!("__host_{}", hname),
9030 ty: ptr_ty,
9031 id: pid,
9032 fortran_noalias: false,
9033 });
9034 infos.push(HostRefParamInfo {
9035 name: hname.clone(),
9036 id: pid,
9037 elem_ty,
9038 dims: arg_dims_from_decls(hname, host_decls, &host_visible),
9039 char_kind: arg_char_kind_from_decls(hname, host_decls, st),
9040 derived_type: arg_derived_type_name(hname, host_decls),
9041 descriptor_arg,
9042 allocatable: alloc,
9043 is_pointer: ptr_is_pointer,
9044 });
9045 }
9046 infos
9047 }
9048
9049 /// Append trailing pointer args to `arg_vals` matching the callee's
9050 /// host-association closure signature. For each host-local variable the
9051 /// callee references, look up its address in the caller's locals and
9052 /// push it. For caller locals flagged `by_ref` (i.e. the caller is a
9053 /// sibling contained proc forwarding the same host var), load the
9054 /// spill slot to recover the original host address. For caller-owned
9055 /// allocas or descriptor-backed arrays, push the address directly.
9056 fn append_host_closure_args(
9057 b: &mut FuncBuilder,
9058 ctx: &LowerCtx,
9059 callee_key: &str,
9060 arg_vals: &mut Vec<ValueId>,
9061 ) {
9062 append_host_closure_args_raw(
9063 b,
9064 &ctx.locals,
9065 Some(ctx.contained_host_refs),
9066 callee_key,
9067 arg_vals,
9068 );
9069 }
9070
9071 fn append_host_closure_args_raw(
9072 b: &mut FuncBuilder,
9073 locals: &HashMap<String, LocalInfo>,
9074 contained_host_refs: Option<&HashMap<String, Vec<String>>>,
9075 callee_key: &str,
9076 arg_vals: &mut Vec<ValueId>,
9077 ) {
9078 let refs = match contained_host_refs.and_then(|m| m.get(callee_key)) {
9079 Some(r) if !r.is_empty() => r,
9080 _ => return,
9081 };
9082 for hname in refs {
9083 let info = match locals.get(hname) {
9084 Some(i) => i.clone(),
9085 None => {
9086 // Should never happen: the host-refs analysis only
9087 // records names declared in the host, and every host
9088 // scope installs those as locals. Push a null to keep
9089 // the ABI alignment consistent rather than panic.
9090 let zero = b.const_i64(0);
9091 arg_vals.push(zero);
9092 continue;
9093 }
9094 };
9095 // Caller-side address resolution: for by_ref locals (we are
9096 // a sibling forwarding), load the spill slot to get the host
9097 // address. For normal allocas (we are the host), the alloca
9098 // id IS the address.
9099 let addr = if info.by_ref {
9100 b.load(info.addr)
9101 } else {
9102 info.addr
9103 };
9104 arg_vals.push(addr);
9105 }
9106 }
9107
9108 /// After a contained proc's normal params are installed, set up a
9109 /// by_ref LocalInfo for each host-association hidden pointer. The
9110 /// slot holds the address passed by the caller; reads/writes through
9111 /// `name` inside the body route through `info.addr → load → element`.
9112 fn install_host_ref_locals(
9113 b: &mut FuncBuilder,
9114 locals: &mut HashMap<String, LocalInfo>,
9115 infos: &[HostRefParamInfo],
9116 ) {
9117 for info in infos {
9118 let slot_ty = if info.descriptor_arg {
9119 IrType::Ptr(Box::new(IrType::Array(
9120 Box::new(IrType::Int(IntWidth::I8)),
9121 384,
9122 )))
9123 } else {
9124 IrType::Ptr(Box::new(info.elem_ty.clone()))
9125 };
9126 let slot = b.alloca(slot_ty);
9127 b.store(info.id, slot);
9128 locals.insert(
9129 info.name.clone(),
9130 LocalInfo {
9131 addr: slot,
9132 ty: info.elem_ty.clone(),
9133 dims: info.dims.clone(),
9134 allocatable: info.allocatable,
9135 descriptor_arg: info.descriptor_arg,
9136 by_ref: true,
9137 char_kind: info.char_kind.clone(),
9138 derived_type: info.derived_type.clone(),
9139 inline_const: None,
9140 is_pointer: info.is_pointer,
9141 runtime_dim_upper: vec![],
9142 },
9143 );
9144 }
9145 }
9146
9147 /// Check if a callee has VALUE-attributed arguments via its scope in the symbol table.
9148 /// Returns a Vec<bool> per argument position — true if that arg is VALUE.
9149 /// Returns None if callee scope not found or no VALUE args.
9150 fn callee_value_arg_mask(st: &SymbolTable, callee_name: &str) -> Option<Vec<bool>> {
9151 use crate::sema::symtab::ScopeKind;
9152 let callee_scope = st.scopes.iter().find(|s| match &s.kind {
9153 ScopeKind::Function(n) | ScopeKind::Subroutine(n) => n.to_lowercase() == callee_name,
9154 _ => false,
9155 })?;
9156 if !callee_scope.symbols.values().any(|sym| sym.attrs.value) {
9157 return None;
9158 }
9159 // Use arg_order to build a positional mask.
9160 let mask: Vec<bool> = callee_scope
9161 .arg_order
9162 .iter()
9163 .map(|arg_name| {
9164 callee_scope
9165 .symbols
9166 .get(arg_name)
9167 .map(|sym| sym.attrs.value)
9168 .unwrap_or(false)
9169 })
9170 .collect();
9171 Some(mask)
9172 }
9173
9174 /// Check if a callee has `character(len=*)` dummies via its scope in the
9175 /// symbol table. Returns a positional bitmap for the visible arguments.
9176 fn callee_char_len_star_mask(st: &SymbolTable, callee_name: &str) -> Option<Vec<bool>> {
9177 use crate::sema::symtab::{ScopeKind, TypeInfo};
9178 let callee_scope = st.scopes.iter().find(|s| match &s.kind {
9179 ScopeKind::Function(n) | ScopeKind::Subroutine(n) => n.to_lowercase() == callee_name,
9180 _ => false,
9181 })?;
9182 let mask: Vec<bool> = callee_scope
9183 .arg_order
9184 .iter()
9185 .map(|arg_name| {
9186 callee_scope
9187 .symbols
9188 .get(arg_name)
9189 .map(|sym| {
9190 matches!(sym.type_info, Some(TypeInfo::Character { len: None, .. }))
9191 && !sym.attrs.allocatable
9192 && !sym.attrs.pointer
9193 })
9194 .unwrap_or(false)
9195 })
9196 .collect();
9197 if mask.iter().any(|flag| *flag) {
9198 Some(mask)
9199 } else {
9200 None
9201 }
9202 }
9203
9204 /// Check if a callee has deferred-length allocatable/pointer character dummies
9205 /// that are passed via StringDescriptor pointers.
9206 fn callee_string_descriptor_arg_mask(st: &SymbolTable, callee_name: &str) -> Option<Vec<bool>> {
9207 use crate::sema::symtab::{ScopeKind, TypeInfo};
9208 let callee_scope = st.scopes.iter().find(|s| match &s.kind {
9209 ScopeKind::Function(n) | ScopeKind::Subroutine(n) => n.to_lowercase() == callee_name,
9210 _ => false,
9211 })?;
9212 let mask: Vec<bool> = callee_scope
9213 .arg_order
9214 .iter()
9215 .map(|arg_name| {
9216 callee_scope
9217 .symbols
9218 .get(arg_name)
9219 .map(|sym| {
9220 matches!(sym.type_info, Some(TypeInfo::Character { len: None, .. }))
9221 && (sym.attrs.allocatable || sym.attrs.pointer)
9222 })
9223 .unwrap_or(false)
9224 })
9225 .collect();
9226 if mask.iter().any(|flag| *flag) {
9227 Some(mask)
9228 } else {
9229 None
9230 }
9231 }
9232
9233 /// Look up the derived-type NAME a callee's result variable carries, if
9234 /// any. Used by `Expr::ComponentAccess` when the base is a function
9235 /// call returning a derived type — the call evaluates to a pointer to
9236 /// a result struct, and we need the type name to resolve the
9237 /// subsequent `%field` through the layout registry. Returns None for
9238 /// intrinsic returns, unresolved callees, or subroutines.
9239 fn callee_return_derived_type_name(st: &SymbolTable, callee_name: &str) -> Option<String> {
9240 use crate::sema::symtab::{ScopeKind, TypeInfo};
9241 let key = callee_name.to_lowercase();
9242 if let Some(TypeInfo::Derived(name)) = st
9243 .scopes
9244 .iter()
9245 .find_map(|scope| scope.symbols.get(&key))
9246 .and_then(|sym| sym.type_info.as_ref())
9247 {
9248 return Some(name.clone());
9249 }
9250 let callee_scope = st.scopes.iter().find(
9251 |scope| matches!(&scope.kind, ScopeKind::Function(name) if name.to_lowercase() == key),
9252 )?;
9253 for sym in callee_scope.symbols.values() {
9254 if callee_scope
9255 .arg_order
9256 .iter()
9257 .any(|arg| arg == &sym.name.to_lowercase())
9258 {
9259 continue;
9260 }
9261 if let Some(TypeInfo::Derived(name)) = sym.type_info.as_ref() {
9262 return Some(name.clone());
9263 }
9264 }
9265 None
9266 }
9267
9268 fn callee_return_ir_type(st: &SymbolTable, callee_name: &str) -> Option<IrType> {
9269 use crate::sema::symtab::ScopeKind;
9270
9271 let key = callee_name.to_lowercase();
9272 if let Some(type_info) = st
9273 .scopes
9274 .iter()
9275 .find_map(|scope| scope.symbols.get(&key))
9276 .and_then(|sym| sym.type_info.as_ref())
9277 {
9278 return Some(type_info_to_ir_type(type_info));
9279 }
9280
9281 let callee_scope = st.scopes.iter().find(
9282 |scope| matches!(&scope.kind, ScopeKind::Function(name) if name.to_lowercase() == key),
9283 )?;
9284
9285 let mut result_type = None;
9286 for sym in callee_scope.symbols.values() {
9287 if callee_scope
9288 .arg_order
9289 .iter()
9290 .any(|arg| arg == &sym.name.to_lowercase())
9291 {
9292 continue;
9293 }
9294 if let Some(type_info) = sym.type_info.as_ref() {
9295 if result_type.is_some() {
9296 return None;
9297 }
9298 result_type = Some(type_info_to_ir_type(type_info));
9299 }
9300 }
9301 result_type
9302 }
9303
/// How a character-valued callee returns its result.
#[derive(Clone, Copy, PartialEq, Eq)]
enum CharacterReturnAbi {
    /// Result buffer pointer returned directly; `Some(len)` when the
    /// length is known at compile time, `None` when it is not.
    Direct(Option<i64>),
    /// Allocatable/pointer character result: the caller passes a
    /// hidden pointer to a StringDescriptor that the callee fills in.
    HiddenDescriptor,
}
9309
9310 fn callee_character_return_abi(st: &SymbolTable, callee_name: &str) -> Option<CharacterReturnAbi> {
9311 use crate::sema::symtab::{SymbolKind, TypeInfo};
9312
9313 let key = callee_name.to_ascii_lowercase();
9314 let sym = st.scopes.iter().find_map(|scope| scope.symbols.get(&key))?;
9315 match sym.kind {
9316 SymbolKind::Function
9317 | SymbolKind::ExternalProc
9318 | SymbolKind::IntrinsicProc
9319 | SymbolKind::ProcedurePointer => {}
9320 _ => return None,
9321 }
9322 let TypeInfo::Character { len, .. } = sym.type_info.as_ref()? else {
9323 return None;
9324 };
9325 if sym.attrs.allocatable || sym.attrs.pointer {
9326 Some(CharacterReturnAbi::HiddenDescriptor)
9327 } else {
9328 Some(CharacterReturnAbi::Direct(*len))
9329 }
9330 }
9331
9332 fn local_fixed_char_allocatable_scalar_len(info: &LocalInfo) -> Option<i64> {
9333 if !info.allocatable || !info.dims.is_empty() || info.char_kind != CharKind::None {
9334 return None;
9335 }
9336 match &info.ty {
9337 IrType::Array(inner, len)
9338 if matches!(inner.as_ref(), IrType::Int(IntWidth::I8)) && *len > 1 =>
9339 {
9340 Some(*len as i64)
9341 }
9342 _ => None,
9343 }
9344 }
9345
9346 fn local_is_string_scalar(info: &LocalInfo) -> bool {
9347 (info.allocatable && info.dims.is_empty() && matches!(info.char_kind, CharKind::Deferred))
9348 || local_fixed_char_allocatable_scalar_len(info).is_some()
9349 }
9350
9351 fn local_is_array_like(info: &LocalInfo) -> bool {
9352 (!info.dims.is_empty() || info.allocatable) && !local_is_string_scalar(info)
9353 }
9354
9355 fn named_expr_callable_character_return_abi(
9356 st: &SymbolTable,
9357 locals: &HashMap<String, LocalInfo>,
9358 callee_name: &str,
9359 ) -> Option<CharacterReturnAbi> {
9360 let key = callee_name.to_ascii_lowercase();
9361 if locals.contains_key(&key) {
9362 let sym = st.scopes.iter().find_map(|scope| scope.symbols.get(&key))?;
9363 if sym.kind != crate::sema::symtab::SymbolKind::ProcedurePointer {
9364 return None;
9365 }
9366 }
9367 callee_character_return_abi(st, &key)
9368 }
9369
9370 /// Check if a dummy argument has the VALUE attribute in its declaration.
9371 fn arg_has_value_attr(arg_name: &str, decls: &[crate::ast::decl::SpannedDecl]) -> bool {
9372 let key = arg_name.to_lowercase();
9373 for decl in decls {
9374 if let Decl::TypeDecl {
9375 attrs, entities, ..
9376 } = &decl.node
9377 {
9378 for entity in entities {
9379 if entity.name.to_lowercase() == key {
9380 return attrs
9381 .iter()
9382 .any(|a| matches!(a, crate::ast::decl::Attribute::Value));
9383 }
9384 }
9385 }
9386 }
9387 false
9388 }
9389
9390 fn arg_is_fortran_noalias(arg_name: &str, decls: &[crate::ast::decl::SpannedDecl]) -> bool {
9391 let key = arg_name.to_lowercase();
9392 for decl in decls {
9393 if let Decl::TypeDecl {
9394 attrs, entities, ..
9395 } = &decl.node
9396 {
9397 for entity in entities {
9398 if entity.name.to_lowercase() == key {
9399 return !attrs.iter().any(|attr| {
9400 matches!(
9401 attr,
9402 crate::ast::decl::Attribute::Pointer
9403 | crate::ast::decl::Attribute::Target
9404 | crate::ast::decl::Attribute::Value
9405 )
9406 });
9407 }
9408 }
9409 }
9410 }
9411 false
9412 }
9413
9414 /// Lower a string expression, returning (ptr, len) as ValueIds.
9415 /// String literals return (const_string_ptr, const_len).
9416 /// Character variables return (buffer_addr, known_len).
9417 /// Deferred-length variables load ptr and len from the StringDescriptor.
9418 fn lower_string_expr(
9419 b: &mut FuncBuilder,
9420 locals: &HashMap<String, LocalInfo>,
9421 expr: &crate::ast::expr::SpannedExpr,
9422 st: &SymbolTable,
9423 ) -> (ValueId, ValueId) {
9424 lower_string_expr_full(b, locals, expr, st, None, None, None, None)
9425 }
9426
9427 fn lower_string_expr_with_layouts(
9428 b: &mut FuncBuilder,
9429 locals: &HashMap<String, LocalInfo>,
9430 expr: &crate::ast::expr::SpannedExpr,
9431 st: &SymbolTable,
9432 type_layouts: Option<&crate::sema::type_layout::TypeLayoutRegistry>,
9433 ) -> (ValueId, ValueId) {
9434 lower_string_expr_full(b, locals, expr, st, type_layouts, None, None, None)
9435 }
9436
9437 fn lower_string_expr_ctx(
9438 b: &mut FuncBuilder,
9439 ctx: &LowerCtx,
9440 expr: &crate::ast::expr::SpannedExpr,
9441 ) -> (ValueId, ValueId) {
9442 lower_string_expr_full(
9443 b,
9444 &ctx.locals,
9445 expr,
9446 ctx.st,
9447 Some(ctx.type_layouts),
9448 Some(ctx.internal_funcs),
9449 Some(ctx.contained_host_refs),
9450 Some(ctx.descriptor_params),
9451 )
9452 }
9453
/// Full string-expression lowering: evaluate `expr` to a `(ptr, len)`
/// pair of ValueIds.
///
/// Covers string literals, character locals/dummies, derived-type
/// component access, substrings (both parser shapes), character
/// array elements, the string intrinsics handled inline here
/// (TRIM / ADJUSTL / ADJUSTR / CHAR / ACHAR / NEW_LINE / MERGE /
/// COMPILER_VERSION / COMPILER_OPTIONS), character-returning user
/// calls (direct and hidden-descriptor ABIs), and concatenation.
/// Anything without a recognizable string shape falls back to
/// `lower_expr_full` with the literal length (or 0).
///
/// The four trailing `Option` tables are threaded unchanged through
/// every recursive call; passing `None` simply disables the
/// corresponding resolution paths.
fn lower_string_expr_full(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    expr: &crate::ast::expr::SpannedExpr,
    st: &SymbolTable,
    type_layouts: Option<&crate::sema::type_layout::TypeLayoutRegistry>,
    internal_funcs: Option<&HashMap<String, u32>>,
    contained_host_refs: Option<&HashMap<String, Vec<String>>>,
    descriptor_params: Option<&HashMap<String, Vec<bool>>>,
) -> (ValueId, ValueId) {
    match &expr.node {
        // Literal: interned constant bytes plus compile-time length.
        Expr::StringLiteral { value, .. } => {
            let ptr = b.const_string(value.as_bytes());
            let len = b.const_i64(value.len() as i64);
            (ptr, len)
        }
        // obj%field where the field may be a character component.
        Expr::ComponentAccess { .. } => {
            if let Some(tl) = type_layouts {
                if let Some((field_ptr, field)) =
                    resolve_component_field_access(b, locals, expr, st, tl)
                {
                    match field_char_kind(&field) {
                        // Fixed-length component: address + declared length.
                        CharKind::Fixed(len) => {
                            return (field_ptr, b.const_i64(len));
                        }
                        // Deferred-length component stored as a 32-byte
                        // StringDescriptor: load (ptr, len) out of it.
                        CharKind::Deferred if field.size == 32 => {
                            return load_string_descriptor_view(b, field_ptr);
                        }
                        _ => {}
                    }
                }
            }
            // Not resolvable as a character component: evaluate as a
            // plain value with length 0.
            let val = lower_expr_full(
                b,
                locals,
                expr,
                st,
                type_layouts,
                internal_funcs,
                contained_host_refs,
                descriptor_params,
            );
            let zero = b.const_i64(0);
            (val, zero)
        }
        // Bare variable reference.
        Expr::Name { name } => {
            let key = name.to_lowercase();
            if let Some(info) = locals.get(&key) {
                match &info.char_kind {
                    CharKind::Fixed(_)
                    | CharKind::FixedRuntime { .. }
                    | CharKind::Deferred
                    | CharKind::AssumedLen { .. } => {
                        // Delegate to char_addr_and_runtime_len which
                        // correctly handles local buffers, by_ref
                        // dummies, and assumed-length dummies.
                        if let Some((ptr, len)) = char_addr_and_runtime_len(b, expr, locals) {
                            (ptr, len)
                        } else {
                            let val = lower_expr_full(
                                b,
                                locals,
                                expr,
                                st,
                                type_layouts,
                                internal_funcs,
                                contained_host_refs,
                                descriptor_params,
                            );
                            let zero = b.const_i64(0);
                            (val, zero)
                        }
                    }
                    CharKind::None => {
                        // Same delegation: char_addr_and_runtime_len may
                        // still recognize e.g. fixed-buffer allocatables
                        // not tracked via char_kind.
                        if let Some((ptr, len)) = char_addr_and_runtime_len(b, expr, locals) {
                            (ptr, len)
                        } else {
                            // Not a character variable — shouldn't happen but fall back.
                            let val = lower_expr_full(
                                b,
                                locals,
                                expr,
                                st,
                                type_layouts,
                                internal_funcs,
                                contained_host_refs,
                                descriptor_params,
                            );
                            let zero = b.const_i64(0);
                            (val, zero)
                        }
                    }
                }
            } else {
                // Unknown name (not a local): plain value, length 0.
                let val = lower_expr_full(
                    b,
                    locals,
                    expr,
                    st,
                    type_layouts,
                    internal_funcs,
                    contained_host_refs,
                    descriptor_params,
                );
                let zero = b.const_i64(0);
                (val, zero)
            }
        }
        // Call-shaped syntax: substring, array element, intrinsic, or
        // a genuine character-returning function call.
        Expr::FunctionCall { callee, args } => {
            // name(lo:hi) or name(i) on a character scalar designator
            // is a substring, not a call/element access.
            if args.len() == 1
                && expr_is_character_expr(b, locals, callee, st, type_layouts)
                && !expr_is_array_designator(b, locals, callee, st, type_layouts)
            {
                match &args[0].value {
                    crate::ast::expr::SectionSubscript::Range { start, end, .. } => {
                        let (base_ptr, base_len) = lower_string_expr_full(
                            b,
                            locals,
                            callee,
                            st,
                            type_layouts,
                            internal_funcs,
                            contained_host_refs,
                            descriptor_params,
                        );
                        return lower_substring(
                            b,
                            locals,
                            st,
                            base_ptr,
                            base_len,
                            start.as_ref(),
                            end.as_ref(),
                        );
                    }
                    crate::ast::expr::SectionSubscript::Element(idx_expr) => {
                        // Single-char "substring": s(i) ≡ s(i:i).
                        let (base_ptr, base_len) = lower_string_expr_full(
                            b,
                            locals,
                            callee,
                            st,
                            type_layouts,
                            internal_funcs,
                            contained_host_refs,
                            descriptor_params,
                        );
                        return lower_substring(
                            b,
                            locals,
                            st,
                            base_ptr,
                            base_len,
                            Some(idx_expr),
                            Some(idx_expr),
                        );
                    }
                }
            }
            if let Expr::Name { name } = &callee.node {
                let key = name.to_lowercase();
                // First positional Element argument, if present — the
                // common shape for single-argument intrinsics.
                let first_char_arg = args.first().and_then(|a| {
                    if let crate::ast::expr::SectionSubscript::Element(e) = &a.value {
                        Some(e)
                    } else {
                        None
                    }
                });
                // Inline lowering of string intrinsics.
                match key.as_str() {
                    "trim" => {
                        if let Some(arg) = first_char_arg {
                            let (src_ptr, len_val) = lower_string_expr_full(
                                b,
                                locals,
                                arg,
                                st,
                                type_layouts,
                                internal_funcs,
                                contained_host_refs,
                                descriptor_params,
                            );
                            // Copy the full source into a fresh buffer,
                            // then report only the trimmed length.
                            let buf = b.runtime_call(
                                RuntimeFunc::Allocate,
                                vec![len_val],
                                IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
                            );
                            b.call(
                                FuncRef::External("memcpy".into()),
                                vec![buf, src_ptr, len_val],
                                IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
                            );
                            let trimmed_len = b.call(
                                FuncRef::External("afs_len_trim".into()),
                                vec![src_ptr, len_val],
                                IrType::Int(IntWidth::I64),
                            );
                            return (buf, trimmed_len);
                        }
                    }
                    "adjustl" => {
                        if let Some(arg) = first_char_arg {
                            let (src_ptr, len_val) = lower_string_expr_full(
                                b,
                                locals,
                                arg,
                                st,
                                type_layouts,
                                internal_funcs,
                                contained_host_refs,
                                descriptor_params,
                            );
                            // Runtime shifts left into a same-size buffer.
                            let buf = b.runtime_call(
                                RuntimeFunc::Allocate,
                                vec![len_val],
                                IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
                            );
                            b.call(
                                FuncRef::External("afs_adjustl".into()),
                                vec![buf, src_ptr, len_val],
                                IrType::Void,
                            );
                            return (buf, len_val);
                        }
                    }
                    "adjustr" => {
                        if let Some(arg) = first_char_arg {
                            let (src_ptr, len_val) = lower_string_expr_full(
                                b,
                                locals,
                                arg,
                                st,
                                type_layouts,
                                internal_funcs,
                                contained_host_refs,
                                descriptor_params,
                            );
                            // Runtime shifts right into a same-size buffer.
                            let buf = b.runtime_call(
                                RuntimeFunc::Allocate,
                                vec![len_val],
                                IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
                            );
                            b.call(
                                FuncRef::External("afs_adjustr".into()),
                                vec![buf, src_ptr, len_val],
                                IrType::Void,
                            );
                            return (buf, len_val);
                        }
                    }
                    "char" | "achar" => {
                        // CHAR/ACHAR(i) -> 1-byte buffer.
                        if let Some(arg) = first_char_arg {
                            let int_val = lower_expr(b, locals, arg, st);
                            // Narrow i64 args to the i32 the runtime takes.
                            let i32_val = match b.func().value_type(int_val) {
                                Some(IrType::Int(IntWidth::I64)) => {
                                    b.int_trunc(int_val, IntWidth::I32)
                                }
                                _ => int_val,
                            };
                            let byte_val = b.call(
                                FuncRef::External("afs_char".into()),
                                vec![i32_val],
                                IrType::Int(IntWidth::I8),
                            );
                            let buf =
                                b.alloca(IrType::Array(Box::new(IrType::Int(IntWidth::I8)), 1));
                            let zero = b.const_i64(0);
                            let byte_ptr = b.gep(buf, vec![zero], IrType::Int(IntWidth::I8));
                            b.store(byte_val, byte_ptr);
                            let one = b.const_i64(1);
                            return (buf, one);
                        }
                    }
                    "new_line" => {
                        // NEW_LINE(..) is always a single '\n' byte.
                        let ptr = b.const_string(b"\n");
                        let len = b.const_i64(1);
                        return (ptr, len);
                    }
                    "merge" => {
                        // MERGE(tsrc, fsrc, mask) on characters: select
                        // both pointer and length by the mask.
                        let second_char_arg = args.get(1).and_then(|a| {
                            if let crate::ast::expr::SectionSubscript::Element(e) = &a.value {
                                Some(e)
                            } else {
                                None
                            }
                        });
                        let mask_arg = args.get(2).and_then(|a| {
                            if let crate::ast::expr::SectionSubscript::Element(e) = &a.value {
                                Some(e)
                            } else {
                                None
                            }
                        });
                        if let (Some(tsrc), Some(fsrc), Some(mask_expr)) =
                            (first_char_arg, second_char_arg, mask_arg)
                        {
                            let (t_ptr, t_len) = lower_string_expr_full(
                                b,
                                locals,
                                tsrc,
                                st,
                                type_layouts,
                                internal_funcs,
                                contained_host_refs,
                                descriptor_params,
                            );
                            let (f_ptr, f_len) = lower_string_expr_full(
                                b,
                                locals,
                                fsrc,
                                st,
                                type_layouts,
                                internal_funcs,
                                contained_host_refs,
                                descriptor_params,
                            );
                            let mask_raw = lower_expr(b, locals, mask_expr, st);
                            let mask = coerce_to_type(b, mask_raw, &IrType::Bool);
                            let ptr = b.select(mask, t_ptr, f_ptr);
                            let len = b.select(mask, t_len, f_len);
                            return (ptr, len);
                        }
                    }
                    "compiler_version" => {
                        let s = b"armfortas 0.1.0";
                        let ptr = b.const_string(s);
                        let len = b.const_i64(s.len() as i64);
                        return (ptr, len);
                    }
                    "compiler_options" => {
                        let s = b"";
                        let ptr = b.const_string(s);
                        let len = b.const_i64(s.len() as i64);
                        return (ptr, len);
                    }
                    _ => {}
                }

                // Character array element: words(i) — return the
                // element's string pointer and element length.
                if args.len() == 1 {
                    if let crate::ast::expr::SectionSubscript::Element(_) = &args[0].value {
                        if let Some(info) = locals.get(&key) {
                            if (info.char_kind != CharKind::None
                                || descriptor_backed_runtime_char_array(info))
                                && (!info.dims.is_empty() || local_uses_array_descriptor(info))
                            {
                                if let Some(result) =
                                    char_array_element_ptr_and_len(b, locals, info, args, st)
                                {
                                    return result;
                                }
                            }
                        }
                    }
                }
                // Alternate parser shape for substring of a character
                // array element: arr(i, lo:hi) encoded as a single
                // FunctionCall with an element subscript followed by
                // a substring range.
                if args.len() == 2 {
                    if let crate::ast::expr::SectionSubscript::Element(_) = &args[0].value {
                        if let crate::ast::expr::SectionSubscript::Range {
                            ref start,
                            ref end,
                            ..
                        } = args[1].value
                        {
                            if let Some(info) = locals.get(&key) {
                                if (info.char_kind != CharKind::None
                                    || descriptor_backed_runtime_char_array(info))
                                    && (!info.dims.is_empty() || local_uses_array_descriptor(info))
                                {
                                    // Resolve the element with the first
                                    // subscript only, then substring it.
                                    if let Some((elem_ptr, elem_len)) =
                                        char_array_element_ptr_and_len(
                                            b,
                                            locals,
                                            info,
                                            &args[..1],
                                            st,
                                        )
                                    {
                                        return lower_substring(
                                            b,
                                            locals,
                                            st,
                                            elem_ptr,
                                            elem_len,
                                            start.as_ref(),
                                            end.as_ref(),
                                        );
                                    }
                                }
                            }
                        }
                    }
                }

                // A genuine character-returning call.
                if let Some(ret_abi) = named_expr_callable_character_return_abi(st, locals, &key) {
                    match ret_abi {
                        // Allocatable/pointer result: pass a zeroed
                        // 32-byte StringDescriptor by hidden pointer,
                        // then read (ptr, len) back out of it.
                        CharacterReturnAbi::HiddenDescriptor => {
                            let desc =
                                b.alloca(IrType::Array(Box::new(IrType::Int(IntWidth::I8)), 32));
                            let zero_i32 = b.const_i32(0);
                            let size32 = b.const_i64(32);
                            b.call(
                                FuncRef::External("memset".into()),
                                vec![desc, zero_i32, size32],
                                IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
                            );
                            emit_named_function_call(
                                b,
                                locals,
                                st,
                                type_layouts,
                                internal_funcs,
                                contained_host_refs,
                                descriptor_params,
                                name,
                                args,
                                Some(desc),
                                IrType::Void,
                            );
                            return load_string_descriptor_view(b, desc);
                        }
                        // Direct ABI: the call returns the buffer
                        // pointer; length comes from the declaration.
                        // NOTE(review): an unknown declared length maps
                        // to 0 here — confirm callers treat 0 as
                        // "unknown" rather than "empty".
                        CharacterReturnAbi::Direct(len) => {
                            let ptr = emit_named_function_call(
                                b,
                                locals,
                                st,
                                type_layouts,
                                internal_funcs,
                                contained_host_refs,
                                descriptor_params,
                                name,
                                args,
                                None,
                                IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
                            );
                            return (ptr, b.const_i64(len.unwrap_or(0)));
                        }
                    }
                }
            }
            // obj%arr(i) / obj%arr(i, lo:hi): character element (and
            // optional substring) of a derived-type array component.
            if let Expr::ComponentAccess { .. } = &callee.node {
                if let Some(tl) = type_layouts {
                    if let Some(info) = component_array_local_info(b, locals, callee, st, tl) {
                        if args.len() == 1 {
                            if let crate::ast::expr::SectionSubscript::Element(_) = &args[0].value {
                                if info.char_kind != CharKind::None
                                    || descriptor_backed_runtime_char_array(&info)
                                {
                                    if let Some(result) =
                                        char_array_element_ptr_and_len(b, locals, &info, args, st)
                                    {
                                        return result;
                                    }
                                }
                            }
                        }
                        if args.len() == 2 {
                            if let crate::ast::expr::SectionSubscript::Element(_) = &args[0].value {
                                if let crate::ast::expr::SectionSubscript::Range {
                                    ref start,
                                    ref end,
                                    ..
                                } = args[1].value
                                {
                                    if info.char_kind != CharKind::None
                                        || descriptor_backed_runtime_char_array(&info)
                                    {
                                        if let Some((elem_ptr, elem_len)) =
                                            char_array_element_ptr_and_len(
                                                b,
                                                locals,
                                                &info,
                                                &args[..1],
                                                st,
                                            )
                                        {
                                            return lower_substring(
                                                b,
                                                locals,
                                                st,
                                                elem_ptr,
                                                elem_len,
                                                start.as_ref(),
                                                end.as_ref(),
                                            );
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
            // Unrecognized call shape: plain value + literal length.
            let val = lower_expr_full(
                b,
                locals,
                expr,
                st,
                type_layouts,
                internal_funcs,
                contained_host_refs,
                descriptor_params,
            );
            let len = b.const_i64(string_literal_len(expr));
            (val, len)
        }
        Expr::BinaryOp {
            op: BinaryOp::Concat,
            left,
            right,
        } => {
            // Concatenation: get both sides as (ptr, len), allocate temp, call afs_concat.
            let (a_ptr, a_len) = lower_string_expr_full(
                b,
                locals,
                left,
                st,
                type_layouts,
                internal_funcs,
                contained_host_refs,
                descriptor_params,
            );
            let (b_ptr, b_len) = lower_string_expr_full(
                b,
                locals,
                right,
                st,
                type_layouts,
                internal_funcs,
                contained_host_refs,
                descriptor_params,
            );
            let total_len = b.iadd(a_len, b_len);
            // Allocate temp buffer for the result.
            let result_buf = b.runtime_call(
                RuntimeFunc::Allocate,
                vec![total_len],
                IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
            );
            // Call afs_concat(result, a, a_len, b, b_len).
            b.call(
                FuncRef::External("afs_concat".into()),
                vec![result_buf, a_ptr, a_len, b_ptr, b_len],
                IrType::Void,
            );
            (result_buf, total_len)
        }
        _ => {
            // For other expressions, evaluate as value and use literal length if available.
            let val = lower_expr_full(
                b,
                locals,
                expr,
                st,
                type_layouts,
                internal_funcs,
                contained_host_refs,
                descriptor_params,
            );
            let len = b.const_i64(string_literal_len(expr));
            (val, len)
        }
    }
}
10021
10022 /// Get the length of a string literal expression (for PRINT).
10023 fn string_literal_len(expr: &crate::ast::expr::SpannedExpr) -> i64 {
10024 match &expr.node {
10025 Expr::StringLiteral { value, .. } => value.len() as i64,
10026 _ => 0,
10027 }
10028 }
10029
10030 /// True if `ty` is the complex representation: `[f32/f64 x 2]` or `ptr<[f32/f64 x 2]>`.
10031 /// Complex allocas have pointer type in the IR; the underlying element type is the array.
10032 fn is_complex_ty(ty: &IrType) -> bool {
10033 match ty {
10034 IrType::Array(ref e, 2) => matches!(e.as_ref(), IrType::Float(_)),
10035 IrType::Ptr(ref inner) => {
10036 matches!(inner.as_ref(), IrType::Array(ref e, 2) if matches!(e.as_ref(), IrType::Float(_)))
10037 }
10038 _ => false,
10039 }
10040 }
10041
10042 /// Float width of a complex type, whether `[f32/f64 x 2]` or `ptr<[f32/f64 x 2]>`.
10043 fn complex_float_width(ty: &IrType) -> FloatWidth {
10044 let elem = match ty {
10045 IrType::Array(ref e, 2) => e.as_ref(),
10046 IrType::Ptr(ref inner) => match inner.as_ref() {
10047 IrType::Array(ref e, 2) => e.as_ref(),
10048 _ => return FloatWidth::F32,
10049 },
10050 _ => return FloatWidth::F32,
10051 };
10052 match elem {
10053 IrType::Float(FloatWidth::F64) => FloatWidth::F64,
10054 _ => FloatWidth::F32,
10055 }
10056 }
10057
10058 /// Byte size of a complex value stored as `[f32 x 2]` (8) or `[f64 x 2]` (16).
10059 fn complex_byte_size(ty: &IrType) -> i64 {
10060 if complex_float_width(ty) == FloatWidth::F64 {
10061 16
10062 } else {
10063 8
10064 }
10065 }
10066
10067 /// Insert implicit deallocation calls for all local allocatable variables.
10068 /// Uses a dummy STAT variable so already-deallocated arrays don't abort.
10069 ///
10070 /// Iterates locals in alphabetical order by name to make the emitted
10071 /// IR (and therefore the assembly) deterministic across runs. The
10072 /// previous version walked `locals.values()` directly, picking up the
10073 /// HashMap's randomized iteration order — surfaced as non-reproducible
10074 /// builds for any function with multiple allocatable locals.
10075 /// When `skip_addr` is `Some(addr)`, skip deallocation for any local whose
10076 /// `info.addr` matches. Used to preserve sret result ownership: the result
10077 /// variable of an allocatable-returning function is allocated inside the
10078 /// callee but ownership is transferred to the caller — the callee must
10079 /// NOT free it. Audit6 BLOCKING-1.
10080 fn emit_final_proc_call(
10081 b: &mut FuncBuilder,
10082 st: &SymbolTable,
10083 internal_funcs: &HashMap<String, u32>,
10084 contained_host_refs: Option<&HashMap<String, Vec<String>>>,
10085 closure_locals: &HashMap<String, LocalInfo>,
10086 final_proc: &str,
10087 finalized_addr: ValueId,
10088 ) {
10089 let key = final_proc.to_lowercase();
10090 let (call_name, resolved_key) = resolved_symbol_call_target(st, &key, final_proc);
10091 let mut call_args = vec![finalized_addr];
10092 append_host_closure_args_raw(
10093 b,
10094 closure_locals,
10095 contained_host_refs,
10096 &resolved_key,
10097 &mut call_args,
10098 );
10099 let func_ref = internal_funcs
10100 .get(&resolved_key)
10101 .or_else(|| internal_funcs.get(&key))
10102 .copied()
10103 .map(FuncRef::Internal)
10104 .unwrap_or_else(|| FuncRef::External(call_name));
10105 b.call(func_ref, call_args, IrType::Void);
10106 }
10107
10108 fn insert_implicit_dealloc(
10109 b: &mut FuncBuilder,
10110 owned_locals: &HashMap<String, LocalInfo>,
10111 closure_locals: &HashMap<String, LocalInfo>,
10112 type_layouts: &crate::sema::type_layout::TypeLayoutRegistry,
10113 st: &SymbolTable,
10114 internal_funcs: &HashMap<String, u32>,
10115 contained_host_refs: Option<&HashMap<String, Vec<String>>>,
10116 skip_addr: Option<ValueId>,
10117 ) {
10118 // Audit Med-2: only allocate the stat_addr scratch slot if we
10119 // actually need it for an `afs_deallocate_array` call. Without
10120 // this guard every function (even one with no allocatables)
10121 // got a zombie i32 alloca right before its ret, bloating the
10122 // frame and the IR — and DCE couldn't drop it because allocas
10123 // are classified as side-effecting.
10124 let needs_dealloc = owned_locals.values().any(|info| {
10125 info.allocatable
10126 || matches!(
10127 info.char_kind,
10128 CharKind::Deferred | CharKind::FixedRuntime { .. }
10129 )
10130 });
10131 let needs_stat = owned_locals.values().any(|info| info.allocatable);
10132 if !needs_dealloc
10133 && !owned_locals.values().any(|info| {
10134 !info.by_ref
10135 && info
10136 .derived_type
10137 .as_ref()
10138 .and_then(|tn| type_layouts.get(tn))
10139 .is_some_and(|l| !l.final_procs.is_empty())
10140 })
10141 {
10142 return;
10143 }
10144
10145 let stat_addr = if needs_stat {
10146 Some(b.alloca(IrType::Int(IntWidth::I32)))
10147 } else {
10148 None
10149 };
10150 // Module globals installed via install_globals_as_locals have
10151 // `allocatable: true` but their storage lives in the module's
10152 // .data section — freeing it at procedure return would destroy
10153 // state that the caller still needs. Detect them by checking
10154 // whether info.addr was produced by a GlobalAddr instruction.
10155 let global_addrs = collect_global_addr_values(b);
10156
10157 let mut sorted: Vec<(&String, &LocalInfo)> = owned_locals.iter().collect();
10158 sorted.sort_by(|a, b| a.0.cmp(b.0));
10159 for (_name, info) in sorted {
10160 // Skip caller-owned allocatables (sret result variables).
10161 if skip_addr == Some(info.addr) {
10162 continue;
10163 }
10164 // Skip module globals — they outlive this procedure.
10165 if global_addrs.contains(&info.addr) {
10166 continue;
10167 }
10168 // Skip pointers: a POINTER variable does not own its target.
10169 if info.is_pointer {
10170 continue;
10171 }
10172 if matches!(info.char_kind, CharKind::Deferred) {
10173 if info.by_ref {
10174 continue;
10175 }
10176 b.call(
10177 FuncRef::External("afs_dealloc_string".into()),
10178 vec![info.addr],
10179 IrType::Void,
10180 );
10181 } else if let CharKind::FixedRuntime { .. } = info.char_kind {
10182 if !info.by_ref {
10183 let ptr = b.load_typed(info.addr, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
10184 b.runtime_call(RuntimeFunc::Deallocate, vec![ptr], IrType::Void);
10185 }
10186 } else if info.allocatable {
10187 b.call(
10188 FuncRef::External("afs_deallocate_array".into()),
10189 vec![info.addr, stat_addr.unwrap()],
10190 IrType::Void,
10191 );
10192 }
10193 // Finalization: call FINAL procedures for locally-owned derived type variables.
10194 // Skip by-ref params (they're owned by the caller, not the callee).
10195 if !info.by_ref {
10196 if let Some(ref type_name) = info.derived_type {
10197 if let Some(layout) = type_layouts.get(type_name) {
10198 for final_proc in &layout.final_procs {
10199 emit_final_proc_call(
10200 b,
10201 st,
10202 internal_funcs,
10203 contained_host_refs,
10204 closure_locals,
10205 final_proc,
10206 info.addr,
10207 );
10208 }
10209 }
10210 }
10211 }
10212 }
10213 }
10214
10215 /// Ensure a block has a terminator.
10216 fn ensure_termination(b: &mut FuncBuilder, result_addr: Option<ValueId>) {
10217 if b.func().block(b.current_block()).terminator.is_none() {
10218 if let Some(addr) = result_addr {
10219 let rv = b.load(addr);
10220 b.ret(Some(rv));
10221 } else {
10222 b.ret_void();
10223 }
10224 }
10225 }
10226
/// Extract the kind value from a KindSelector, defaulting if absent.
/// Resolves named constants (e.g., c_double, real64) via the symbol table.
///
/// Convenience wrapper around `extract_kind_with_st` that passes no
/// symbol table, so named-constant kinds simply fall back to `default`.
fn extract_kind(sel: &Option<crate::ast::decl::KindSelector>, default: u8) -> u8 {
    extract_kind_with_st(sel, default, None)
}
10232
10233 fn extract_kind_with_st(
10234 sel: &Option<crate::ast::decl::KindSelector>,
10235 default: u8,
10236 st: Option<&SymbolTable>,
10237 ) -> u8 {
10238 use crate::ast::decl::KindSelector;
10239 use crate::ast::expr::Expr;
10240 match sel {
10241 Some(KindSelector::Expr(e)) | Some(KindSelector::Star(e)) => {
10242 match &e.node {
10243 Expr::IntegerLiteral { text, .. } => text.parse().unwrap_or(default),
10244 Expr::Name { name } => {
10245 if let Some(st) = st {
10246 let key = name.to_lowercase();
10247 // Search all scopes for the named constant — the
10248 // current scope may not be set correctly during lowering
10249 // (sema has already popped all scopes by this point).
10250 st.find_symbol_any_scope(&key)
10251 .and_then(|sym| sym.const_value.map(|v| v as u8))
10252 .unwrap_or(default)
10253 } else {
10254 default
10255 }
10256 }
10257 _ => default,
10258 }
10259 }
10260 None => default,
10261 }
10262 }
10263
/// Lower a Fortran type specifier to an IR type.
///
/// Convenience wrapper around `lower_type_spec_st` with no symbol
/// table, so kind selectors that name constants fall back to defaults.
fn lower_type_spec(ts: &TypeSpec) -> IrType {
    lower_type_spec_st(ts, None)
}
10268
10269 fn fixed_char_storage_ir_type(len: i64) -> IrType {
10270 if len <= 1 {
10271 IrType::Int(IntWidth::I8)
10272 } else {
10273 IrType::Array(Box::new(IrType::Int(IntWidth::I8)), len as u64)
10274 }
10275 }
10276
10277 fn lower_type_spec_st(ts: &TypeSpec, st: Option<&SymbolTable>) -> IrType {
10278 match ts {
10279 TypeSpec::Integer(sel) => IrType::int_from_kind(extract_kind_with_st(
10280 sel,
10281 crate::driver::defaults::default_int_kind(),
10282 st,
10283 )),
10284 TypeSpec::Real(sel) => IrType::float_from_kind(extract_kind_with_st(
10285 sel,
10286 crate::driver::defaults::default_real_kind(),
10287 st,
10288 )),
10289 TypeSpec::DoublePrecision => IrType::Float(FloatWidth::F64),
10290 TypeSpec::Complex(sel) => {
10291 let fw = match extract_kind_with_st(sel, 4, st) {
10292 8 => FloatWidth::F64,
10293 _ => FloatWidth::F32,
10294 };
10295 IrType::Array(Box::new(IrType::Float(fw)), 2)
10296 }
10297 TypeSpec::DoubleComplex => IrType::Array(Box::new(IrType::Float(FloatWidth::F64)), 2),
10298 TypeSpec::Logical(_) => IrType::Bool,
10299 TypeSpec::Character(_) => IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
10300 TypeSpec::Type(name) | TypeSpec::Class(name) => {
10301 // c_ptr and c_funptr are opaque address types — i64 on ARM64.
10302 let lower_name = name.to_lowercase();
10303 if lower_name == "c_ptr" || lower_name == "c_funptr" {
10304 IrType::Int(IntWidth::I64)
10305 } else {
10306 // User-defined derived types are byte pointers (struct layout resolved elsewhere).
10307 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8)))
10308 }
10309 }
10310 _ => IrType::Int(IntWidth::I32), // fallback
10311 }
10312 }
10313
/// Pre-scan a body of statements and create one IR basic block per
/// Fortran statement label. Must be called before `lower_stmts` so
/// that both forward and backward `GOTO` targets can branch to an
/// already-existing block.
///
/// Recurses into labeled statements, IF constructs, logical IF
/// actions, and all DO forms. NOTE(review): bodies nested inside
/// SELECT CASE and WHERE constructs are not scanned here, so a label
/// placed inside one would get no block — confirm whether labels can
/// occur in those positions for the supported input set.
fn collect_label_blocks(
    b: &mut FuncBuilder,
    stmts: &[SpannedStmt],
    out: &mut HashMap<u64, BlockId>,
) {
    for stmt in stmts {
        match &stmt.node {
            Stmt::Labeled { label, stmt: inner } => {
                // A block is created unconditionally; if the label was
                // already seen, the map keeps the first block and this
                // one simply goes unused.
                let bb = b.create_block(&format!("label_{}", label));
                out.entry(*label).or_insert(bb);
                // Recurse into the inner statement (e.g., a DO or IF block with labels inside).
                collect_label_blocks(b, std::slice::from_ref(inner.as_ref()), out);
            }
            Stmt::Continue { label: Some(lbl) } => {
                // Labeled CONTINUE — the classic DO-loop terminator target.
                let bb = b.create_block(&format!("label_{}", lbl));
                out.entry(*lbl).or_insert(bb);
            }
            Stmt::IfConstruct {
                then_body,
                else_ifs,
                else_body,
                ..
            } => {
                collect_label_blocks(b, then_body, out);
                for (_, body) in else_ifs {
                    collect_label_blocks(b, body, out);
                }
                if let Some(body) = else_body {
                    collect_label_blocks(b, body, out);
                }
            }
            Stmt::IfStmt { action, .. } => {
                // Logical IF: the single action statement may itself be labeled.
                collect_label_blocks(b, std::slice::from_ref(action.as_ref()), out);
            }
            Stmt::DoLoop { body, .. }
            | Stmt::DoWhile { body, .. }
            | Stmt::DoConcurrent { body, .. } => {
                collect_label_blocks(b, body, out);
            }
            _ => {}
        }
    }
}
10362
10363 fn lower_stmts(b: &mut FuncBuilder, ctx: &mut LowerCtx, stmts: &[SpannedStmt]) {
10364 for stmt in stmts {
10365 // Labeled statements and labeled CONTINUEs create new basic blocks; they must be
10366 // processed even after a branch/goto terminates the current block. All other dead
10367 // code (statements after a terminator in an unlabeled position) is skipped.
10368 let is_label_creating = matches!(
10369 &stmt.node,
10370 Stmt::Labeled { .. } | Stmt::Continue { label: Some(_) }
10371 );
10372 if !is_label_creating && b.func().block(b.current_block()).terminator.is_some() {
10373 continue; // dead code — but keep looping so we can find the next label
10374 }
10375 lower_stmt(b, ctx, stmt);
10376 }
10377 }
10378
10379 /// Lower a single statement.
10380 fn lower_stmt(b: &mut FuncBuilder, ctx: &mut LowerCtx, stmt: &SpannedStmt) {
10381 match &stmt.node {
10382 Stmt::Assignment { target, value } => {
10383 match &target.node {
10384 Expr::Name { name } => {
10385 let key = name.to_lowercase();
10386 // Defined assignment: INTERFACE ASSIGNMENT(=) covers
10387 // cases where the LHS and RHS types differ or the
10388 // user defined a custom store semantics. When we
10389 // resolve a specific, lower the call and return —
10390 // the default type-matched paths below would either
10391 // memcpy garbage or fall through silently.
10392 if try_defined_assignment(b, ctx, &key, value) {
10393 return;
10394 }
10395 if let Some(info) = ctx.locals.get(&key).cloned() {
10396 if local_is_array_like(&info)
10397 && (info.char_kind != CharKind::None
10398 || descriptor_backed_runtime_char_array(&info))
10399 && local_uses_array_descriptor(&info)
10400 {
10401 lower_array_assign(b, ctx, name, &info, value);
10402 return;
10403 }
10404 match &info.char_kind {
10405 CharKind::Fixed(len) => {
10406 // Fixed-length character assignment: copy with space padding.
10407 // Get source pointer and length from the expression.
10408 let (src_ptr, src_len) = lower_string_expr_ctx(b, ctx, value);
10409 let dest_len = b.const_i64(*len);
10410 b.call(
10411 FuncRef::External("afs_assign_char_fixed".into()),
10412 vec![info.addr, dest_len, src_ptr, src_len],
10413 IrType::Void,
10414 );
10415 }
10416 CharKind::FixedRuntime { len_addr } => {
10417 let (src_ptr, src_len) = lower_string_expr_ctx(b, ctx, value);
10418 let (dest_ptr, dest_len) =
10419 fixed_runtime_char_ptr_and_len(b, &info, *len_addr);
10420 b.call(
10421 FuncRef::External("afs_assign_char_fixed".into()),
10422 vec![dest_ptr, dest_len, src_ptr, src_len],
10423 IrType::Void,
10424 );
10425 }
10426 CharKind::Deferred => {
10427 // Deferred-length: call afs_assign_char_deferred.
10428 let (src_ptr, src_len) = lower_string_expr_ctx(b, ctx, value);
10429 let desc = string_descriptor_addr(b, &info);
10430 b.call(
10431 FuncRef::External("afs_assign_char_deferred".into()),
10432 vec![desc, src_ptr, src_len],
10433 IrType::Void,
10434 );
10435 }
10436 CharKind::AssumedLen { len_addr } => {
10437 // Assumed-length dummy assignment: use
10438 // the hidden-length param as the
10439 // destination length.
10440 let (src_ptr, src_len) = lower_string_expr_ctx(b, ctx, value);
10441 let outer = b.load(info.addr);
10442 let dest_ptr = b.load_typed(
10443 outer,
10444 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
10445 );
10446 let dest_len = b.load(*len_addr);
10447 b.call(
10448 FuncRef::External("afs_assign_char_fixed".into()),
10449 vec![dest_ptr, dest_len, src_ptr, src_len],
10450 IrType::Void,
10451 );
10452 }
10453 CharKind::None => {
10454 if local_fixed_char_allocatable_scalar_len(&info).is_some() {
10455 let (src_ptr, src_len) = lower_string_expr_ctx(b, ctx, value);
10456 if let Some((dest_ptr, dest_len)) =
10457 char_addr_and_runtime_len(b, target, &ctx.locals)
10458 {
10459 b.call(
10460 FuncRef::External("afs_assign_char_fixed".into()),
10461 vec![dest_ptr, dest_len, src_ptr, src_len],
10462 IrType::Void,
10463 );
10464 }
10465 } else if !info.dims.is_empty() || info.allocatable {
10466 if try_lower_elemental_array_assign(b, ctx, name, &info, value)
10467 {
10468 return;
10469 }
10470 if let Expr::FunctionCall {
10471 callee,
10472 args: call_args,
10473 } = &value.node
10474 {
10475 if let Expr::Name { name: callee_name } = &callee.node {
10476 let callee_key = callee_name.to_lowercase();
10477 if ctx.alloc_return_funcs.contains(&callee_key) {
10478 // Audit6 BLOCKING-1: sret call — pass info.addr as
10479 // the hidden first arg so the function writes its
10480 // result directly into the destination descriptor.
10481 // No temp descriptor or afs_assign_allocatable needed.
10482 lower_alloc_return_call_into_desc(
10483 b,
10484 ctx,
10485 info.addr,
10486 callee_name,
10487 call_args,
10488 );
10489 } else {
10490 // Non-sret: function returns a temp descriptor.
10491 let src_desc = lower_expr_ctx_tl(b, ctx, value);
10492 b.call(
10493 FuncRef::External(
10494 "afs_assign_allocatable".into(),
10495 ),
10496 vec![info.addr, src_desc],
10497 IrType::Void,
10498 );
10499 let stat = b.alloca(IrType::Int(IntWidth::I32));
10500 b.call(
10501 FuncRef::External(
10502 "afs_deallocate_array".into(),
10503 ),
10504 vec![src_desc, stat],
10505 IrType::Void,
10506 );
10507 }
10508 } else {
10509 // Indirect callee: fall back to assign path.
10510 let src_desc = lower_expr_ctx_tl(b, ctx, value);
10511 b.call(
10512 FuncRef::External("afs_assign_allocatable".into()),
10513 vec![info.addr, src_desc],
10514 IrType::Void,
10515 );
10516 let stat = b.alloca(IrType::Int(IntWidth::I32));
10517 b.call(
10518 FuncRef::External("afs_deallocate_array".into()),
10519 vec![src_desc, stat],
10520 IrType::Void,
10521 );
10522 }
10523 } else {
10524 lower_array_assign(b, ctx, name, &info, value);
10525 }
10526 } else if info.derived_type.is_some() {
10527 let val = lower_expr_ctx_tl(b, ctx, value);
10528 let size = if let Some(ref tn) = info.derived_type {
10529 ctx.type_layouts.get(tn).map(|l| l.size).unwrap_or(8)
10530 } else {
10531 8
10532 };
10533 let size_val = b.const_i64(size as i64);
10534 b.call(
10535 FuncRef::External("memcpy".into()),
10536 vec![info.addr, val, size_val],
10537 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
10538 );
10539 } else if info.is_pointer {
10540 // Plain `=` on a POINTER dereferences:
10541 // load the target address out of the
10542 // pointer slot, then store through it.
10543 let val = lower_expr_ctx_tl(b, ctx, value);
10544 let coerced = coerce_to_type(b, val, &info.ty);
10545 let tgt = b.load_typed(
10546 info.addr,
10547 IrType::Ptr(Box::new(info.ty.clone())),
10548 );
10549 b.store(coerced, tgt);
10550 } else if is_complex_ty(&info.ty) {
10551 // Complex assignment: RHS returns a ptr to [f32/f64 x 2] buffer.
10552 // Memcpy the 8 or 16 bytes into the destination slot.
10553 let src = lower_expr_ctx_tl(b, ctx, value);
10554 let bytes = complex_byte_size(&info.ty);
10555 let sz = b.const_i64(bytes);
10556 if info.by_ref {
10557 let dst = b.load(info.addr);
10558 b.call(
10559 FuncRef::External("memcpy".into()),
10560 vec![dst, src, sz],
10561 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
10562 );
10563 } else {
10564 b.call(
10565 FuncRef::External("memcpy".into()),
10566 vec![info.addr, src, sz],
10567 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
10568 );
10569 }
10570 } else if info.by_ref {
10571 let val = lower_expr_ctx_tl(b, ctx, value);
10572 let coerced = coerce_to_type(b, val, &info.ty);
10573 let ptr = b.load(info.addr);
10574 b.store(coerced, ptr);
10575 } else {
10576 let val = lower_expr_ctx_tl(b, ctx, value);
10577 let coerced = coerce_to_type(b, val, &info.ty);
10578 b.store(coerced, info.addr);
10579 }
10580 }
10581 }
10582 }
10583 }
10584 Expr::FunctionCall { callee, args } => {
10585 if let Expr::Name { name } = &callee.node {
10586 let akey = name.to_lowercase();
10587 if let Some(info) = ctx.locals.get(&akey).cloned() {
10588 let is_scalar_fixed_alloc_char =
10589 local_fixed_char_allocatable_scalar_len(&info).is_some();
10590 if local_is_array_like(&info)
10591 && !is_scalar_fixed_alloc_char
10592 && lower_1d_section_assign(b, ctx, &info, args, value)
10593 {
10594 return;
10595 }
10596 // Substring LHS: s(lo:hi) = rhs where s is a
10597 // scalar character. Compute the target substring
10598 // pointer+length, get the RHS as (ptr, len), and
10599 // call afs_assign_char_fixed to do the bounded
10600 // copy with space-padding.
10601 if (info.char_kind != CharKind::None || is_scalar_fixed_alloc_char)
10602 && info.dims.is_empty()
10603 && args.len() == 1
10604 && matches!(
10605 args[0].value,
10606 crate::ast::expr::SectionSubscript::Range { .. }
10607 )
10608 {
10609 if let crate::ast::expr::SectionSubscript::Range {
10610 ref start,
10611 ref end,
10612 ..
10613 } = args[0].value
10614 {
10615 if let Some((base_ptr, base_len)) =
10616 char_addr_and_runtime_len(b, callee, &ctx.locals)
10617 {
10618 let (dest_ptr, dest_len) = lower_substring(
10619 b,
10620 &ctx.locals,
10621 ctx.st,
10622 base_ptr,
10623 base_len,
10624 start.as_ref(),
10625 end.as_ref(),
10626 );
10627 let (src_ptr, src_len) =
10628 lower_string_expr_ctx(b, ctx, value);
10629 b.call(
10630 FuncRef::External("afs_assign_char_fixed".into()),
10631 vec![dest_ptr, dest_len, src_ptr, src_len],
10632 IrType::Void,
10633 );
10634 }
10635 }
10636 } else if !is_scalar_fixed_alloc_char && local_is_array_like(&info) {
10637 // Array element assignment: a(i) = val
10638 if info.char_kind != CharKind::None
10639 || descriptor_backed_runtime_char_array(&info)
10640 {
10641 lower_char_array_store(
10642 b,
10643 &ctx.locals,
10644 &info,
10645 args,
10646 value,
10647 ctx.st,
10648 );
10649 } else {
10650 let arr_val = lower_expr_ctx(b, ctx, value);
10651 if matches!(
10652 b.func().value_type(arr_val),
10653 Some(IrType::Array(inner, 4096))
10654 if matches!(inner.as_ref(), IrType::Int(IntWidth::I8))
10655 ) && matches!(info.ty, IrType::Ptr(ref inner) if matches!(inner.as_ref(), IrType::Int(IntWidth::I8)))
10656 {
10657 eprintln!(
10658 "DEBUG suspicious array store target={} dims={:?} alloc={} by_ref={} descriptor={} ty={:?}",
10659 name,
10660 info.dims,
10661 info.allocatable,
10662 info.by_ref,
10663 info.descriptor_arg,
10664 info.ty
10665 );
10666 }
10667 lower_array_store(b, &ctx.locals, &info, args, arr_val, ctx.st);
10668 }
10669 }
10670 }
10671 } else if let Expr::ComponentAccess { .. } = &callee.node {
10672 if let Some(info) = component_array_local_info(
10673 b,
10674 &ctx.locals,
10675 callee,
10676 ctx.st,
10677 ctx.type_layouts,
10678 ) {
10679 if local_is_array_like(&info)
10680 && lower_1d_section_assign(b, ctx, &info, args, value)
10681 {
10682 return;
10683 }
10684 if local_is_array_like(&info) {
10685 if info.char_kind != CharKind::None
10686 || descriptor_backed_runtime_char_array(&info)
10687 {
10688 lower_char_array_store(
10689 b,
10690 &ctx.locals,
10691 &info,
10692 args,
10693 value,
10694 ctx.st,
10695 );
10696 } else {
10697 let arr_val = lower_expr_ctx(b, ctx, value);
10698 lower_array_store(b, &ctx.locals, &info, args, arr_val, ctx.st);
10699 }
10700 return;
10701 }
10702 }
10703 if args.len() == 1
10704 && matches!(
10705 args[0].value,
10706 crate::ast::expr::SectionSubscript::Range { .. }
10707 )
10708 {
10709 if let crate::ast::expr::SectionSubscript::Range {
10710 ref start,
10711 ref end,
10712 ..
10713 } = args[0].value
10714 {
10715 if let Some((field_ptr, field)) = resolve_component_field_access(
10716 b,
10717 &ctx.locals,
10718 callee,
10719 ctx.st,
10720 ctx.type_layouts,
10721 ) {
10722 if is_deferred_char_component_field(&field) {
10723 let (base_ptr, base_len) =
10724 load_string_descriptor_view(b, field_ptr);
10725 let (dest_ptr, dest_len) = lower_substring(
10726 b,
10727 &ctx.locals,
10728 ctx.st,
10729 base_ptr,
10730 base_len,
10731 start.as_ref(),
10732 end.as_ref(),
10733 );
10734 let (src_ptr, src_len) =
10735 lower_string_expr_ctx(b, ctx, value);
10736 b.call(
10737 FuncRef::External("afs_assign_char_fixed".into()),
10738 vec![dest_ptr, dest_len, src_ptr, src_len],
10739 IrType::Void,
10740 );
10741 }
10742 }
10743 }
10744 }
10745 }
10746 }
10747 Expr::ComponentAccess { base, component } => {
10748 // x%field = val (supports chained: x%a%b = val).
10749 if let Some((base_addr, type_name)) =
10750 resolve_component_base(b, &ctx.locals, base, ctx.st, ctx.type_layouts)
10751 {
10752 if let Some(layout) = ctx.type_layouts.get(&type_name) {
10753 if let Some(field) = layout.field(component) {
10754 let offset = b.const_i64(field.offset as i64);
10755 let field_ptr =
10756 b.gep(base_addr, vec![offset], IrType::Int(IntWidth::I8));
10757
10758 // Character field: copy string data with space padding.
10759 if let crate::sema::symtab::TypeInfo::Character {
10760 len: Some(flen),
10761 ..
10762 } = &field.type_info
10763 {
10764 let (src_ptr, src_len) = lower_string_expr_ctx(b, ctx, value);
10765 let dest_len = b.const_i64(*flen);
10766 b.call(
10767 FuncRef::External("afs_assign_char_fixed".into()),
10768 vec![field_ptr, dest_len, src_ptr, src_len],
10769 IrType::Void,
10770 );
10771 } else if is_deferred_char_component_field(field) {
10772 let (src_ptr, src_len) = lower_string_expr_ctx(b, ctx, value);
10773 b.call(
10774 FuncRef::External("afs_assign_char_deferred".into()),
10775 vec![field_ptr, src_ptr, src_len],
10776 IrType::Void,
10777 );
10778 } else {
10779 let val = lower_expr_ctx_tl(b, ctx, value);
10780 let coerced = coerce_to_type(
10781 b,
10782 val,
10783 &type_info_to_ir_type(&field.type_info),
10784 );
10785 b.store(coerced, field_ptr);
10786 }
10787 }
10788 }
10789 }
10790 }
10791 _ => {}
10792 }
10793 }
10794
10795 Stmt::Print { items, .. } => {
10796 // PRINT * → unit 6 (stdout).
10797 let unit = b.const_i32(6);
10798 lower_write_items(b, ctx, items, unit);
10799 }
10800
10801 Stmt::Write { controls, items } => {
10802 // Check for format specifier (second positional control).
10803 // * means list-directed; a string literal means formatted.
10804 let fmt_control = controls
10805 .iter()
10806 .skip(1)
10807 .find(|c| c.keyword.is_none()) // positional, not keyword=
10808 .or_else(|| {
10809 controls.iter().find(|c| {
10810 c.keyword
10811 .as_deref()
10812 .map(|k| k.eq_ignore_ascii_case("fmt"))
10813 .unwrap_or(false)
10814 })
10815 });
10816
10817 let is_list_directed = match fmt_control {
10818 None => true,
10819 Some(ctrl) => matches!(&ctrl.value.node, Expr::Name { name } if name == "*"),
10820 };
10821
10822 // Check for ADVANCE='NO'.
10823 let advance = controls
10824 .iter()
10825 .find(|c| {
10826 c.keyword
10827 .as_deref()
10828 .map(|k| k.eq_ignore_ascii_case("advance"))
10829 .unwrap_or(false)
10830 })
10831 .map(|c| {
10832 if let Expr::StringLiteral { value, .. } = &c.value.node {
10833 !value.eq_ignore_ascii_case("no")
10834 } else {
10835 true
10836 }
10837 })
10838 .unwrap_or(true);
10839
10840 if let Some(ctrl) = controls.first() {
10841 if let Some((buf_ptr, buf_len)) = internal_io_buffer(b, ctx, ctrl) {
10842 if is_list_directed {
10843 lower_internal_write_items(b, ctx, items, buf_ptr, buf_len);
10844 } else {
10845 let (fmt_ptr, fmt_len) = lower_string_expr_with_layouts(
10846 b,
10847 &ctx.locals,
10848 &fmt_control.unwrap().value,
10849 ctx.st,
10850 Some(ctx.type_layouts),
10851 );
10852 b.call(
10853 FuncRef::External("afs_fmt_begin_internal".into()),
10854 vec![buf_ptr, buf_len, fmt_ptr, fmt_len],
10855 IrType::Void,
10856 );
10857 for item in items {
10858 lower_fmt_push(b, ctx, item);
10859 }
10860 let adv = b.const_i32(if advance { 1 } else { 0 });
10861 b.call(
10862 FuncRef::External("afs_fmt_end".into()),
10863 vec![adv],
10864 IrType::Void,
10865 );
10866 }
10867 return;
10868 }
10869 }
10870
10871 // Extract unit (first control). * means stdout (unit 6).
10872 let unit = if let Some(ctrl) = controls.first() {
10873 if matches!(&ctrl.value.node, Expr::Name { name } if name == "*") {
10874 b.const_i32(6)
10875 } else {
10876 lower_expr_ctx(b, ctx, &ctrl.value)
10877 }
10878 } else {
10879 b.const_i32(6)
10880 };
10881
10882 if is_list_directed {
10883 lower_write_items_adv(b, ctx, items, unit, advance);
10884 } else {
10885 // Formatted I/O: use push-based API.
10886 let (fmt_ptr, fmt_len) = lower_string_expr_with_layouts(
10887 b,
10888 &ctx.locals,
10889 &fmt_control.unwrap().value,
10890 ctx.st,
10891 Some(ctx.type_layouts),
10892 );
10893 b.call(
10894 FuncRef::External("afs_fmt_begin".into()),
10895 vec![unit, fmt_ptr, fmt_len],
10896 IrType::Void,
10897 );
10898
10899 for item in items {
10900 lower_fmt_push(b, ctx, item);
10901 }
10902
10903 let adv = b.const_i32(if advance { 1 } else { 0 });
10904 b.call(
10905 FuncRef::External("afs_fmt_end".into()),
10906 vec![adv],
10907 IrType::Void,
10908 );
10909 }
10910 }
10911
10912 Stmt::Call { callee, args } => {
10913 // Handle type-bound procedure calls: call obj%method(args)
10914 if let Expr::ComponentAccess { base, component } = &callee.node {
10915 if let Some((obj_addr, type_name)) = resolve_component_base_for_method(
10916 b,
10917 &ctx.locals,
10918 base,
10919 ctx.st,
10920 ctx.type_layouts,
10921 ) {
10922 if let Some(layout) = ctx.type_layouts.get(&type_name) {
10923 if let Some(bp) = layout.bound_proc(component) {
10924 let target = bp.target_name.clone();
10925 let nopass = bp.nopass;
10926
10927 // Build argument list: obj as first arg (PASS), then explicit args.
10928 let mut call_args = Vec::new();
10929 if !nopass {
10930 call_args.push(obj_addr); // PASS: object address
10931 }
10932 for a in args {
10933 if let crate::ast::expr::SectionSubscript::Element(e) = &a.value {
10934 call_args.push(lower_arg_by_ref_ctx(b, ctx, e));
10935 }
10936 }
10937 b.call(FuncRef::External(target), call_args, IrType::Void);
10938 }
10939 }
10940 }
10941 } else if let Expr::Name { name } = &callee.node {
10942 let key = name.to_lowercase();
10943
10944 // Try intrinsic subroutine lowering first.
10945 if !lower_intrinsic_subroutine(b, ctx, &key, args) {
10946 let procptr_target =
10947 procedure_pointer_call_target(b, &ctx.locals, ctx.st, &key);
10948 let signature_key = procptr_target
10949 .as_ref()
10950 .map(|(_, sig_key)| sig_key.clone())
10951 .unwrap_or_else(|| key.clone());
10952 // Not an intrinsic — general subroutine call.
10953 // Keyword-argument reordering (F2003 §12.4.1.2).
10954 // `call sub(b=10, a=20)` must bind by name, not
10955 // position. reorder_args_by_keyword permutes the
10956 // actual-arg list to match the callee's declared
10957 // param order; the rest of the call-site code
10958 // then runs positionally against that reordered
10959 // list.
10960 let reordered = reorder_args_by_keyword(args, &signature_key, ctx.st);
10961 let args: &[crate::ast::expr::Argument] = &reordered;
10962 let mut arg_vals: Vec<ValueId> = args
10963 .iter()
10964 .map(|a| match &a.value {
10965 crate::ast::expr::SectionSubscript::Element(e) => {
10966 lower_arg_by_ref_ctx(b, ctx, e)
10967 }
10968 _ => b.const_i32(0),
10969 })
10970 .collect();
10971 // Generic SUBROUTINE dispatch: if the callee name
10972 // resolves to a NamedInterface symbol, replace it
10973 // with the specific matched by the actual argument
10974 // types. On failure, emit a diagnostic — the same
10975 // rule as generic function-call resolution.
10976 let (resolved_name, resolved_key) = if procptr_target.is_some() {
10977 (name.clone(), signature_key.clone())
10978 } else {
10979 resolve_subroutine_call_name(ctx.st, b, name, &key, &arg_vals, callee.span)
10980 };
10981 if let Some(value_mask) = callee_value_arg_mask(ctx.st, &resolved_key)
10982 .or_else(|| callee_value_arg_mask(ctx.st, &signature_key))
10983 .or_else(|| callee_value_arg_mask(ctx.st, &key))
10984 {
10985 for (i, a) in args.iter().enumerate() {
10986 if !value_mask.get(i).copied().unwrap_or(false) {
10987 continue;
10988 }
10989 arg_vals[i] = match &a.value {
10990 crate::ast::expr::SectionSubscript::Element(e) => lower_expr_full(
10991 b,
10992 &ctx.locals,
10993 e,
10994 ctx.st,
10995 Some(ctx.type_layouts),
10996 Some(ctx.internal_funcs),
10997 Some(ctx.contained_host_refs),
10998 Some(ctx.descriptor_params),
10999 ),
11000 _ => b.const_i32(0),
11001 };
11002 }
11003 }
11004 let desc_mask = ctx
11005 .descriptor_params
11006 .get(&resolved_key)
11007 .or_else(|| ctx.descriptor_params.get(&signature_key))
11008 .or_else(|| ctx.descriptor_params.get(&key));
11009 if let Some(desc_mask) = desc_mask {
11010 for (i, a) in args.iter().enumerate() {
11011 if !desc_mask.get(i).copied().unwrap_or(false) {
11012 continue;
11013 }
11014 arg_vals[i] = match &a.value {
11015 crate::ast::expr::SectionSubscript::Element(e) => {
11016 lower_arg_descriptor(b, &ctx.locals, e, ctx.st)
11017 }
11018 _ => b.const_i64(0),
11019 };
11020 }
11021 }
11022 if let Some(string_desc_mask) =
11023 callee_string_descriptor_arg_mask(ctx.st, &resolved_key)
11024 .or_else(|| callee_string_descriptor_arg_mask(ctx.st, &signature_key))
11025 .or_else(|| callee_string_descriptor_arg_mask(ctx.st, &key))
11026 {
11027 for (i, a) in args.iter().enumerate() {
11028 if desc_mask
11029 .map(|mask| mask.get(i).copied().unwrap_or(false))
11030 .unwrap_or(false)
11031 {
11032 continue;
11033 }
11034 if !string_desc_mask.get(i).copied().unwrap_or(false) {
11035 continue;
11036 }
11037 arg_vals[i] = match &a.value {
11038 crate::ast::expr::SectionSubscript::Element(e) => {
11039 lower_arg_string_descriptor(
11040 b,
11041 &ctx.locals,
11042 e,
11043 ctx.st,
11044 Some(ctx.type_layouts),
11045 )
11046 }
11047 _ => b.const_i64(0),
11048 };
11049 }
11050 }
11051 // If the callee has more parameters than provided args, and the
11052 // trailing ones are OPTIONAL, pass null pointers so PRESENT() works.
11053 if let Some(opt_flags) = ctx
11054 .optional_params
11055 .get(&resolved_key)
11056 .or_else(|| ctx.optional_params.get(&signature_key))
11057 .or_else(|| ctx.optional_params.get(&key))
11058 {
11059 for flag in opt_flags.iter().skip(arg_vals.len()) {
11060 if *flag {
11061 arg_vals.push(b.const_i64(0)); // null → absent
11062 }
11063 }
11064 }
11065 // Hidden character-length ABI: for each callee
11066 // param that is character(len=*), append the
11067 // actual argument's string length as an i64.
11068 if let Some(cls_flags) = ctx
11069 .char_len_star_params
11070 .get(&resolved_key)
11071 .or_else(|| ctx.char_len_star_params.get(&signature_key))
11072 .or_else(|| ctx.char_len_star_params.get(&key))
11073 {
11074 for (i, flag) in cls_flags.iter().enumerate() {
11075 if *flag && i < args.len() {
11076 if let crate::ast::expr::SectionSubscript::Element(e) =
11077 &args[i].value
11078 {
11079 arg_vals.push(
11080 actual_char_arg_runtime_len(
11081 b,
11082 &ctx.locals,
11083 e,
11084 ctx.st,
11085 Some(ctx.type_layouts),
11086 )
11087 .unwrap_or_else(|| b.const_i64(0)),
11088 );
11089 } else {
11090 arg_vals.push(b.const_i64(0));
11091 }
11092 }
11093 }
11094 }
11095 // Host-association closure-passing ABI: if the
11096 // callee is a contained procedure, append one
11097 // address per host-local variable it reads or
11098 // writes. Caller must hold the matching variable
11099 // in its own locals — this is guaranteed by the
11100 // host-refs analysis that drove the callee
11101 // signature, since both caller and callee share
11102 // the same enclosing host.
11103 if procptr_target.is_none() {
11104 append_host_closure_args(b, ctx, &resolved_key, &mut arg_vals);
11105 }
11106 let func_ref = if let Some((target, _)) = procptr_target {
11107 FuncRef::Indirect(target)
11108 } else {
11109 ctx.internal_funcs
11110 .get(&resolved_key)
11111 .copied()
11112 .map(FuncRef::Internal)
11113 .unwrap_or_else(|| FuncRef::External(resolved_name))
11114 };
11115 b.call(func_ref, arg_vals, IrType::Void);
11116 }
11117 }
11118 }
11119
11120 // ---- Control flow ----
11121 Stmt::IfConstruct {
11122 condition,
11123 then_body,
11124 else_ifs,
11125 else_body,
11126 ..
11127 } => {
11128 lower_if(b, ctx, condition, then_body, else_ifs, else_body);
11129 }
11130
11131 Stmt::IfStmt { condition, action } => {
11132 let cond = lower_expr_ctx(b, ctx, condition);
11133 let bb_then = b.create_block("if_then");
11134 let bb_end = b.create_block("if_end");
11135 b.cond_branch(cond, bb_then, vec![], bb_end, vec![]);
11136
11137 b.set_block(bb_then);
11138 lower_stmt(b, ctx, action);
11139 if b.func().block(b.current_block()).terminator.is_none() {
11140 b.branch(bb_end, vec![]);
11141 }
11142
11143 b.set_block(bb_end);
11144 }
11145
11146 Stmt::DoLoop {
11147 name,
11148 var,
11149 start,
11150 end,
11151 step,
11152 body,
11153 } => {
11154 lower_do_loop(
11155 b,
11156 ctx,
11157 DoLoopFields {
11158 name,
11159 var,
11160 start,
11161 end,
11162 step,
11163 body,
11164 concurrent: false,
11165 },
11166 );
11167 }
11168
11169 Stmt::DoConcurrent {
11170 name,
11171 controls,
11172 mask,
11173 body,
11174 locality: _,
11175 ..
11176 } => {
11177 lower_do_concurrent(b, ctx, name, controls, mask.as_ref(), body, stmt.span);
11178 }
11179
11180 Stmt::DoWhile {
11181 name,
11182 condition,
11183 body,
11184 } => {
11185 let bb_header = b.create_block("do_while_header");
11186 let bb_body = b.create_block("do_while_body");
11187 let bb_exit = b.create_block("do_while_exit");
11188 b.branch(bb_header, vec![]);
11189
11190 ctx.push_loop(name.clone(), bb_header, bb_exit);
11191
11192 b.set_block(bb_header);
11193 let cond = lower_expr_ctx(b, ctx, condition);
11194 b.cond_branch(cond, bb_body, vec![], bb_exit, vec![]);
11195
11196 b.set_block(bb_body);
11197 lower_stmts(b, ctx, body);
11198 if b.func().block(b.current_block()).terminator.is_none() {
11199 b.branch(bb_header, vec![]);
11200 }
11201
11202 ctx.pop_loop();
11203 b.set_block(bb_exit);
11204 }
11205
11206 Stmt::SelectCase {
11207 selector, cases, ..
11208 } => {
11209 lower_select_case(b, ctx, selector, cases);
11210 }
11211
        Stmt::WhereConstruct {
            mask,
            body,
            elsewhere,
            ..
        } => {
            // WHERE(mask) body [ELSEWHERE body] END WHERE
            // Collect ALL array names referenced in mask or body.
            let mut array_names: Vec<String> = Vec::new();
            collect_array_names(mask, &ctx.locals, &mut array_names);
            for s in body {
                collect_array_names_stmt(s, &ctx.locals, &mut array_names);
            }

            if array_names.is_empty() {
                // No arrays — fall back to scalar IF-THEN-ELSE.
                let cond = lower_expr_ctx_tl(b, ctx, mask);
                let bb_then = b.create_block("where_then");
                let bb_else = if !elsewhere.is_empty() {
                    Some(b.create_block("where_else"))
                } else {
                    None
                };
                let bb_end = b.create_block("where_end");
                b.cond_branch(cond, bb_then, vec![], bb_else.unwrap_or(bb_end), vec![]);

                b.set_block(bb_then);
                lower_stmts(b, ctx, body);
                if b.func().block(b.current_block()).terminator.is_none() {
                    b.branch(bb_end, vec![]);
                }
                if let Some(bb_e) = bb_else {
                    b.set_block(bb_e);
                    // NOTE(review): only the first ELSEWHERE clause is used
                    // and its own mask (if present) is ignored — masked
                    // ELSEWHERE chains are not supported here yet.
                    if let Some((_m, else_body)) = elsewhere.first() {
                        lower_stmts(b, ctx, else_body);
                    }
                    if b.func().block(b.current_block()).terminator.is_none() {
                        b.branch(bb_end, vec![]);
                    }
                }
                b.set_block(bb_end);
                return;
            }

            // Array-level WHERE: iterate over elements.
            // Use the first array to determine the iteration count. For
            // stack arrays `info.addr` is the raw element buffer — calling
            // afs_array_size on that would read garbage out of the rank
            // slot. array_total_elems_value picks the right source: it
            // materialises a descriptor query for descriptor-backed locals
            // and folds dims to a constant for explicit-shape stack arrays.
            let first_arr_name = &array_names[0];
            let first_arr = ctx
                .locals
                .get(first_arr_name)
                .cloned()
                .expect("array must exist");
            let n = array_total_elems_value(b, &first_arr);

            // Get base addresses for all arrays (loaded once outside the loop).
            // Allocatables store a heap pointer in their slot, so that slot
            // must be dereferenced; plain stack arrays use the slot directly.
            let mut array_bases: HashMap<String, ValueId> = HashMap::new();
            for arr_name in &array_names {
                if let Some(info) = ctx.locals.get(arr_name) {
                    let base = if info.allocatable {
                        b.load_typed(info.addr, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))))
                    } else {
                        info.addr
                    };
                    array_bases.insert(arr_name.clone(), base);
                }
            }

            // Induction variable i in [0, n), kept in a stack slot.
            let i_addr = b.alloca(IrType::Int(IntWidth::I64));
            let i_zero = b.const_i64(0);
            b.store(i_zero, i_addr);

            let bb_check = b.create_block("where_check");
            let bb_body = b.create_block("where_body");
            let bb_exit = b.create_block("where_exit");
            b.branch(bb_check, vec![]);

            b.set_block(bb_check);
            let i = b.load(i_addr);
            let done = b.icmp(CmpOp::Ge, i, n);
            b.cond_branch(done, bb_exit, vec![], bb_body, vec![]);

            b.set_block(bb_body);
            let i_val = b.load(i_addr);

            // Substitute each array variable with a scalar local bound to element i.
            // Save original locals for restoration.
            let mut saved_locals: Vec<(String, Option<LocalInfo>)> = Vec::new();
            for arr_name in &array_names {
                saved_locals.push((arr_name.clone(), ctx.locals.get(arr_name).cloned()));
                if let Some(orig_info) = ctx.locals.get(arr_name).cloned() {
                    let base = *array_bases.get(arr_name).unwrap();
                    // Compute element address: base + i * elem_bytes.
                    // Element size is derived from the IR type; anything not
                    // listed (i32, f32, bool, ...) is assumed to be 4 bytes.
                    let elem_bytes_val = match &orig_info.ty {
                        IrType::Int(IntWidth::I64) | IrType::Float(FloatWidth::F64) => {
                            b.const_i64(8)
                        }
                        IrType::Int(IntWidth::I16) => b.const_i64(2),
                        IrType::Int(IntWidth::I8) => b.const_i64(1),
                        _ => b.const_i64(4),
                    };
                    let byte_off = b.imul(i_val, elem_bytes_val);
                    let elem_ptr = b.gep(base, vec![byte_off], IrType::Int(IntWidth::I8));
                    // Replace the local with a scalar pointing to this element.
                    ctx.locals.insert(
                        arr_name.clone(),
                        LocalInfo {
                            addr: elem_ptr,
                            ty: orig_info.ty.clone(),
                            dims: vec![],
                            allocatable: false,
                            descriptor_arg: false,
                            by_ref: false,
                            char_kind: CharKind::None,
                            derived_type: None,
                            inline_const: None,
                            is_pointer: false,
                            runtime_dim_upper: vec![],
                        },
                    );
                }
            }

            // Evaluate mask with element-level bindings.
            let cond = lower_expr_ctx_tl(b, ctx, mask);

            let bb_then = b.create_block("where_then");
            let bb_else = b.create_block("where_else");
            let bb_incr = b.create_block("where_incr");
            b.cond_branch(cond, bb_then, vec![], bb_else, vec![]);

            b.set_block(bb_then);
            lower_stmts(b, ctx, body);
            if b.func().block(b.current_block()).terminator.is_none() {
                b.branch(bb_incr, vec![]);
            }

            b.set_block(bb_else);
            // NOTE(review): as in the scalar path, only the first ELSEWHERE
            // clause is honoured and its mask is dropped — confirm whether
            // masked ELSEWHERE needs support before relying on this.
            if let Some((_else_mask, else_body)) = elsewhere.first() {
                lower_stmts(b, ctx, else_body);
            }
            if b.func().block(b.current_block()).terminator.is_none() {
                b.branch(bb_incr, vec![]);
            }

            b.set_block(bb_incr);
            // Restore original locals.
            for (name, orig) in saved_locals {
                if let Some(info) = orig {
                    ctx.locals.insert(name, info);
                } else {
                    ctx.locals.remove(&name);
                }
            }

            // i += 1; loop back to the bounds check.
            let i_cur = b.load(i_addr);
            let one = b.const_i64(1);
            let next = b.iadd(i_cur, one);
            b.store(next, i_addr);
            b.branch(bb_check, vec![]);

            b.set_block(bb_exit);
        }
11379
        Stmt::WhereStmt { mask, stmt } => {
            // Single-line WHERE: where (cond) assignment
            // Lowered as a plain conditional around the one statement.
            let cond = lower_expr_ctx_tl(b, ctx, mask);
            let bb_then = b.create_block("where_stmt");
            let bb_end = b.create_block("where_stmt_end");
            b.cond_branch(cond, bb_then, vec![], bb_end, vec![]);
            b.set_block(bb_then);
            lower_stmt(b, ctx, stmt);
            if b.func().block(b.current_block()).terminator.is_none() {
                b.branch(bb_end, vec![]);
            }
            b.set_block(bb_end);
        }

        Stmt::ForallConstruct {
            specs, mask, body, ..
        } => {
            // FORALL: nest loops. The body goes inside the innermost loop.
            // Build the body statements including optional mask as a closure-like pattern.
            // The innermost loop gets the real body; outer loops wrap it.
            lower_forall_nested(b, ctx, specs, mask.as_ref(), body);
        }

        // Single-statement FORALL: reuse the construct path with a
        // one-element body.
        Stmt::ForallStmt { specs, mask, stmt } => {
            let body_vec = vec![(**stmt).clone()];
            lower_forall_nested(b, ctx, specs, mask.as_ref(), &body_vec);
        }
11407
        Stmt::SelectType {
            selector,
            guards,
            assoc_name: _,
            ..
        } => {
            // SELECT TYPE: compare the type tag of a polymorphic variable.
            // For now, support basic pattern where selector is a local derived type
            // variable and TYPE IS guards match by type tag.
            let bb_end = b.create_block("select_type_end");

            // Get the selector's type tag. For non-polymorphic variables,
            // we know the static type and can match directly.
            let static_type = if let Expr::Name { name } = &selector.node {
                let key = name.to_lowercase();
                ctx.locals
                    .get(&key)
                    .and_then(|info| info.derived_type.clone())
            } else {
                None
            };

            // If the selector has no known derived type (or isn't a simple
            // name), no guard body is emitted at all — we just branch to end.
            if let Some(ref type_name) = static_type {
                if let Some(layout) = ctx.type_layouts.get(type_name) {
                    let tag_val = b.const_i64(layout.type_tag as i64);

                    for guard in guards {
                        match guard {
                            crate::ast::stmt::TypeGuard::TypeIs {
                                type_name: guard_type,
                                body,
                            } => {
                                if let Some(guard_layout) = ctx.type_layouts.get(guard_type) {
                                    // Known layout: emit a runtime tag compare
                                    // (both tags are constants here, so this
                                    // folds trivially downstream).
                                    let guard_tag = b.const_i64(guard_layout.type_tag as i64);
                                    let matches = b.icmp(CmpOp::Eq, tag_val, guard_tag);
                                    let bb_match = b.create_block("type_is_match");
                                    let bb_next = b.create_block("type_is_next");
                                    b.cond_branch(matches, bb_match, vec![], bb_next, vec![]);

                                    b.set_block(bb_match);
                                    lower_stmts(b, ctx, body);
                                    if b.func().block(b.current_block()).terminator.is_none() {
                                        b.branch(bb_end, vec![]);
                                    }

                                    b.set_block(bb_next);
                                } else {
                                    // Unknown guard type — skip.
                                    // Fallback: match purely by case-insensitive
                                    // name at compile time.
                                    let tag_matches = type_name.eq_ignore_ascii_case(guard_type);
                                    if tag_matches {
                                        lower_stmts(b, ctx, body);
                                        if b.func().block(b.current_block()).terminator.is_none() {
                                            b.branch(bb_end, vec![]);
                                        }
                                        break;
                                    }
                                }
                            }
                            crate::ast::stmt::TypeGuard::ClassIs {
                                type_name: guard_type,
                                body,
                            } => {
                                // CLASS IS matches the type or any extension.
                                // Check if static type is or extends the guard type.
                                // This is resolved entirely at compile time.
                                let is_match =
                                    is_type_or_extends(type_name, guard_type, ctx.type_layouts);
                                if is_match {
                                    lower_stmts(b, ctx, body);
                                    if b.func().block(b.current_block()).terminator.is_none() {
                                        b.branch(bb_end, vec![]);
                                    }
                                    break; // CLASS IS matched, skip remaining guards.
                                }
                            }
                            crate::ast::stmt::TypeGuard::ClassDefault { body } => {
                                // CLASS DEFAULT: lowered unconditionally in the
                                // fall-through chain after earlier TYPE IS tests.
                                lower_stmts(b, ctx, body);
                                if b.func().block(b.current_block()).terminator.is_none() {
                                    b.branch(bb_end, vec![]);
                                }
                            }
                        }
                    }
                }
            }

            // Join: whatever block we ended in falls through to bb_end.
            if b.func().block(b.current_block()).terminator.is_none() {
                b.branch(bb_end, vec![]);
            }
            b.set_block(bb_end);
        }
11498
        // EXIT [name]: branch to the matching loop's exit block. If no
        // matching loop is found, nothing is emitted (sema should have
        // rejected that case).
        Stmt::Exit { name } => {
            if let Some(lp) = ctx.find_loop(name) {
                let exit = lp.exit;
                b.branch(exit, vec![]);
            }
        }

        // CYCLE [name]: branch to the matching loop's header (increment /
        // condition re-test point).
        Stmt::Cycle { name } => {
            if let Some(lp) = ctx.find_loop(name) {
                let header = lp.header;
                b.branch(header, vec![]);
            }
        }

        Stmt::Return { .. } => {
            // When the result is returned via a hidden sret pointer
            // (ValueId(0) is the hidden first parameter), it must be
            // excluded from implicit deallocation.
            let skip = if ctx.is_alloc_return {
                Some(ValueId(0))
            } else {
                None
            };
            // Run end-of-scope deallocation/finalization before returning.
            insert_implicit_dealloc(
                b,
                &ctx.locals,
                &ctx.locals,
                ctx.type_layouts,
                ctx.st,
                ctx.internal_funcs,
                Some(ctx.contained_host_refs),
                skip,
            );
            if ctx.is_alloc_return {
                // sret convention: result was written into the hidden first param.
                b.ret(None);
            } else if let Some(addr) = ctx.result_addr {
                // Function: load the result variable and return it.
                let rv = b.load(addr);
                b.ret(Some(rv));
            } else {
                // Subroutine: plain void return.
                b.ret_void();
            }
        }

        Stmt::Stop { .. } => {
            // Same sret-skip logic as RETURN above.
            let skip = if ctx.is_alloc_return {
                Some(ValueId(0))
            } else {
                None
            };
            insert_implicit_dealloc(
                b,
                &ctx.locals,
                &ctx.locals,
                ctx.type_layouts,
                ctx.st,
                ctx.internal_funcs,
                Some(ctx.contained_host_refs),
                skip,
            );
            // afs_stop does not return; mark the block unreachable.
            b.runtime_call(RuntimeFunc::Stop, vec![], IrType::Void);
            b.unreachable();
        }
        Stmt::ErrorStop { .. } => {
            // ERROR STOP: identical to STOP but exits with error status.
            let skip = if ctx.is_alloc_return {
                Some(ValueId(0))
            } else {
                None
            };
            insert_implicit_dealloc(
                b,
                &ctx.locals,
                &ctx.locals,
                ctx.type_layouts,
                ctx.st,
                ctx.internal_funcs,
                Some(ctx.contained_host_refs),
                skip,
            );
            b.runtime_call(RuntimeFunc::ErrorStop, vec![], IrType::Void);
            b.unreachable();
        }
11578
        Stmt::Allocate {
            type_spec,
            items,
            opts,
        } => {
            // Resolve STAT= option: find the user's stat variable address.
            // The runtime writes 0 on success or a nonzero error code to this slot.
            // If absent, use a private scratch slot (allocation failure aborts).
            let stat_addr: ValueId = {
                let stat_expr = opts.iter().find(|o| {
                    o.keyword
                        .as_deref()
                        .map(|k| k.eq_ignore_ascii_case("stat"))
                        .unwrap_or(false)
                });
                if let Some(stat_io) = stat_expr {
                    if let Expr::Name { name } = &stat_io.value.node {
                        if let Some(stat_info) = ctx.locals.get(&name.to_lowercase()) {
                            // Pass the user's variable address directly: runtime writes
                            // 0 (success) or error code into it, so the variable is set.
                            stat_info.addr
                        } else {
                            b.alloca(IrType::Int(IntWidth::I32))
                        }
                    } else {
                        // STAT= expression is not a simple name: scratch slot.
                        b.alloca(IrType::Int(IntWidth::I32))
                    }
                } else {
                    b.alloca(IrType::Int(IntWidth::I32))
                }
            };
            // Typed allocation `allocate(character(n) :: x)` — runtime
            // length value, if the type-spec provides one.
            let typed_char_len =
                typed_allocate_char_len(b, &ctx.locals, type_spec.as_ref(), ctx.st);

            for item in items {
                // First, handle derived-type component targets:
                // allocate(obj%comp) / allocate(obj%comp(n)).
                if let Expr::FunctionCall { callee, args } = &item.node {
                    if let Expr::ComponentAccess { .. } = &callee.node {
                        if let Some((field_ptr, field)) = resolve_component_field_access(
                            b,
                            &ctx.locals,
                            callee,
                            ctx.st,
                            ctx.type_layouts,
                        ) {
                            // Deferred-length character component: 32-byte
                            // StringDescriptor; needs a typed length.
                            if matches!(field_char_kind(&field), CharKind::Deferred)
                                && field.size == 32
                            {
                                let Some(len_val) = typed_char_len else {
                                    eprintln!(
                                        "armfortas: error: {}:{}: deferred-length character ALLOCATE requires a typed length or SOURCE/MOLD support",
                                        stmt.span.start.line, stmt.span.start.col
                                    );
                                    let _ = std::io::stderr().flush();
                                    std::process::exit(1);
                                };
                                init_allocated_string_descriptor(b, field_ptr, len_val);
                                continue;
                            }
                            // Allocatable/pointer array component: 384-byte
                            // ArrayDescriptor; build a DimDescriptor buffer
                            // (24 bytes per dim: lower, upper, stride) and
                            // call afs_allocate_array.
                            if field.size == 384 && (field.allocatable || field.pointer) {
                                let elem_ty = type_info_to_storage_ir_type(
                                    &field.type_info,
                                    ctx.type_layouts,
                                );
                                // Synthesise a LocalInfo so the shared
                                // element-size helper can be reused.
                                let elem_size_bytes = descriptor_element_size_bytes(&LocalInfo {
                                    addr: field_ptr,
                                    ty: elem_ty.clone(),
                                    dims: vec![],
                                    allocatable: true,
                                    descriptor_arg: false,
                                    by_ref: false,
                                    char_kind: field_char_kind(&field),
                                    derived_type: field_derived_type_name(&field),
                                    inline_const: None,
                                    is_pointer: field.pointer,
                                    runtime_dim_upper: vec![],
                                });
                                // Deferred-length character elements take the
                                // typed length as element size when available.
                                let es = if matches!(
                                    field.type_info,
                                    crate::sema::symtab::TypeInfo::Character { len: None, .. }
                                ) {
                                    typed_char_len.unwrap_or_else(|| b.const_i64(elem_size_bytes))
                                } else {
                                    b.const_i64(elem_size_bytes)
                                };
                                let rank = args.len();
                                let one_i64 = b.const_i64(1);
                                // Rank 0 (scalar allocatable) passes a null
                                // dim buffer; otherwise fill one 24-byte
                                // entry per dimension with stride 1.
                                let dim_buf = if rank == 0 {
                                    b.const_i64(0)
                                } else {
                                    let dim_buf_bytes = (rank * 24) as u64;
                                    let dim_buf = b.alloca(IrType::Array(
                                        Box::new(IrType::Int(IntWidth::I8)),
                                        dim_buf_bytes,
                                    ));
                                    for (i, arg) in args.iter().enumerate() {
                                        let (lo64, up64) = lower_alloc_bounds(b, ctx, &arg.value);
                                        let base = (i * 24) as i64;
                                        let off_lo = b.const_i64(base);
                                        let off_up = b.const_i64(base + 8);
                                        let off_st = b.const_i64(base + 16);
                                        let p_lo =
                                            b.gep(dim_buf, vec![off_lo], IrType::Int(IntWidth::I8));
                                        let p_up =
                                            b.gep(dim_buf, vec![off_up], IrType::Int(IntWidth::I8));
                                        let p_st =
                                            b.gep(dim_buf, vec![off_st], IrType::Int(IntWidth::I8));
                                        b.store(lo64, p_lo);
                                        b.store(up64, p_up);
                                        b.store(one_i64, p_st);
                                    }
                                    dim_buf
                                };
                                let rank_val = b.const_i32(rank as i32);
                                b.call(
                                    FuncRef::External("afs_allocate_array".into()),
                                    vec![field_ptr, es, rank_val, dim_buf, stat_addr],
                                    IrType::Void,
                                );
                                continue;
                            }
                        }
                    }
                }
                // Plain local targets: allocate(x) / allocate(x(n,...)).
                let (base_name, args): (Option<String>, &[crate::ast::expr::Argument]) =
                    match &item.node {
                        Expr::FunctionCall { callee, args } => (extract_base_name(callee), args),
                        Expr::Name { name } => (Some(name.clone()), &[]),
                        _ => (None, &[]),
                    };
                if let Some(name) = base_name {
                    if let Some(info) = ctx.locals.get(&name.to_lowercase()).cloned() {
                        // Deferred-length character local: initialise its
                        // StringDescriptor with the typed length.
                        if matches!(info.char_kind, CharKind::Deferred) {
                            let Some(len_val) = typed_char_len else {
                                eprintln!(
                                    "armfortas: error: {}:{}: deferred-length character ALLOCATE requires a typed length or SOURCE/MOLD support",
                                    stmt.span.start.line, stmt.span.start.col
                                );
                                let _ = std::io::stderr().flush();
                                std::process::exit(1);
                            };
                            let desc = string_descriptor_addr(b, &info);
                            init_allocated_string_descriptor(b, desc, len_val);
                            continue;
                        }
                        let elem_size_bytes = descriptor_element_size_bytes(&info);

                        if info.allocatable || info.descriptor_arg {
                            // Build a stack DimDescriptor[rank] honoring
                            // each subscript's actual (lower, upper) bounds,
                            // then call afs_allocate_array. Descriptor-backed
                            // dummy arrays use the caller-owned descriptor
                            // rather than the local spill slot that holds its
                            // address. Scalar allocatables lower as a rank-0
                            // descriptor allocation.
                            let es = if descriptor_backed_runtime_char_array(&info) {
                                typed_char_len.unwrap_or_else(|| b.const_i64(elem_size_bytes))
                            } else {
                                b.const_i64(elem_size_bytes)
                            };
                            let desc = array_descriptor_addr(b, &info);
                            let rank = args.len();
                            let one_i64 = b.const_i64(1);
                            let dim_buf = if rank == 0 {
                                b.const_i64(0)
                            } else {
                                // 24 bytes per dimension: lower/upper/stride.
                                let dim_buf_bytes = (rank * 24) as u64;
                                let dim_buf = b.alloca(IrType::Array(
                                    Box::new(IrType::Int(IntWidth::I8)),
                                    dim_buf_bytes,
                                ));
                                for (i, arg) in args.iter().enumerate() {
                                    let (lo64, up64) = lower_alloc_bounds(b, ctx, &arg.value);
                                    let base = (i * 24) as i64;
                                    let off_lo = b.const_i64(base);
                                    let off_up = b.const_i64(base + 8);
                                    let off_st = b.const_i64(base + 16);
                                    let p_lo =
                                        b.gep(dim_buf, vec![off_lo], IrType::Int(IntWidth::I8));
                                    let p_up =
                                        b.gep(dim_buf, vec![off_up], IrType::Int(IntWidth::I8));
                                    let p_st =
                                        b.gep(dim_buf, vec![off_st], IrType::Int(IntWidth::I8));
                                    b.store(lo64, p_lo);
                                    b.store(up64, p_up);
                                    b.store(one_i64, p_st);
                                }
                                dim_buf
                            };
                            let rank_val = b.const_i32(rank as i32);
                            b.call(
                                FuncRef::External("afs_allocate_array".into()),
                                vec![desc, es, rank_val, dim_buf, stat_addr],
                                IrType::Void,
                            );
                        } else {
                            // Non-allocatable array: old path (shouldn't happen for ALLOCATE).
                            let size_val = b.const_i32(elem_size_bytes as i32);
                            let ptr = b.runtime_call(
                                RuntimeFunc::Allocate,
                                vec![size_val],
                                IrType::Ptr(Box::new(info.ty.clone())),
                            );
                            b.store(ptr, info.addr);
                        }
                    }
                }
            }
        }
11787
        Stmt::Deallocate { items, .. } => {
            for item in items {
                // Derived-type component targets: deallocate(obj%comp).
                if let Expr::ComponentAccess { .. } = &item.node {
                    if let Some((field_ptr, field)) = resolve_component_field_access(
                        b,
                        &ctx.locals,
                        item,
                        ctx.st,
                        ctx.type_layouts,
                    ) {
                        // Deferred-length character component: free via the
                        // string runtime.
                        if is_deferred_char_component_field(&field) {
                            b.call(
                                FuncRef::External("afs_dealloc_string".into()),
                                vec![field_ptr],
                                IrType::Void,
                            );
                            continue;
                        }
                        // Allocatable/pointer array component (384-byte
                        // ArrayDescriptor): free via the array runtime with a
                        // throwaway STAT slot.
                        if field.size == 384 && (field.allocatable || field.pointer) {
                            let stat_slot = b.alloca(IrType::Int(IntWidth::I32));
                            b.call(
                                FuncRef::External("afs_deallocate_array".into()),
                                vec![field_ptr, stat_slot],
                                IrType::Void,
                            );
                            continue;
                        }
                    }
                }
                // Plain local targets.
                let base_name = extract_base_name(item);
                if let Some(name) = base_name {
                    if let Some(info) = ctx.locals.get(&name.to_lowercase()) {
                        if info.allocatable || info.descriptor_arg {
                            // Pass descriptor address to runtime with null STAT.
                            // Alloca a dummy STAT to avoid abort on already-deallocated.
                            let stat_slot = b.alloca(IrType::Int(IntWidth::I32));
                            let desc = array_descriptor_addr(b, info);
                            b.call(
                                FuncRef::External("afs_deallocate_array".into()),
                                vec![desc, stat_slot],
                                IrType::Void,
                            );
                        } else {
                            // Legacy raw-pointer path: slot holds a heap
                            // pointer; hand it straight to the deallocator.
                            let ptr = b.load(info.addr);
                            b.runtime_call(RuntimeFunc::Deallocate, vec![ptr], IrType::Void);
                        }
                    }
                }
            }
        }
11838
        Stmt::Block {
            uses,
            implicit,
            decls,
            body,
            ..
        } => {
            // F2008 BLOCK: declarations are scoped to the body.
            // Save any locals that the BLOCK's decls shadow, run
            // the body, then restore the originals. F2018 §11.1.4
            // also gives the BLOCK its own implicit-typing
            // environment: an `implicit integer (i-n)` here introduces
            // `n` as an integer local even when the enclosing scope
            // is IMPLICIT NONE. Synthesise TypeDecl entries for any
            // name the body references that isn't in ctx.locals and
            // whose first letter falls in a block-local implicit
            // range, then run alloc_decls / init_decls over the
            // combined list.
            let pre_block_keys: HashSet<String> = ctx.locals.keys().cloned().collect();
            let mut effective_decls: Vec<crate::ast::decl::SpannedDecl> = decls.clone();
            // Map from first-letter -> type spec built out of the BLOCK's
            // own IMPLICIT statements.
            let mut implicit_map: std::collections::HashMap<char, crate::ast::decl::TypeSpec> =
                std::collections::HashMap::new();
            for d in implicit {
                if let crate::ast::decl::Decl::ImplicitStmt { specs } = &d.node {
                    for spec in specs {
                        for &(start, end) in &spec.ranges {
                            for letter_byte in start as u8..=end as u8 {
                                let letter = (letter_byte as char).to_ascii_lowercase();
                                implicit_map.insert(letter, spec.type_spec.clone());
                            }
                        }
                    }
                }
            }
            if !implicit_map.is_empty() {
                // Names with an explicit declaration in this BLOCK never get
                // an implicit synthesised decl.
                let mut already_decl: std::collections::HashSet<String> = decls
                    .iter()
                    .flat_map(|d| {
                        if let crate::ast::decl::Decl::TypeDecl { entities, .. } = &d.node {
                            entities
                                .iter()
                                .map(|e| e.name.to_lowercase())
                                .collect::<Vec<_>>()
                        } else {
                            vec![]
                        }
                    })
                    .collect();
                let mut referenced: Vec<String> = Vec::new();
                for s in body {
                    collect_referenced_names(s, &mut referenced);
                }
                for name in referenced {
                    let key = name.to_lowercase();
                    if already_decl.contains(&key) {
                        continue;
                    }
                    // Names visible from the host scope keep their host
                    // binding; only truly new names are implicitly typed.
                    if ctx.locals.contains_key(&key) {
                        continue;
                    }
                    let Some(first) = key.chars().next() else {
                        continue;
                    };
                    let Some(type_spec) = implicit_map.get(&first.to_ascii_lowercase()) else {
                        continue;
                    };
                    already_decl.insert(key.clone());
                    // Synthesise a bare scalar TypeDecl for the implicit name.
                    let synth = crate::ast::decl::Decl::TypeDecl {
                        type_spec: type_spec.clone(),
                        attrs: Vec::new(),
                        entities: vec![crate::ast::decl::EntityDecl {
                            name: name.clone(),
                            array_spec: None,
                            char_len: None,
                            init: None,
                            ptr_init: None,
                        }],
                    };
                    effective_decls.push(crate::ast::Spanned {
                        node: synth,
                        span: stmt.span,
                    });
                }
            }
            // All names declared (explicitly or implicitly) by this BLOCK.
            let block_keys: Vec<String> = effective_decls
                .iter()
                .flat_map(|d| {
                    if let crate::ast::decl::Decl::TypeDecl { entities, .. } = &d.node {
                        entities
                            .iter()
                            .map(|e| e.name.to_lowercase())
                            .collect::<Vec<_>>()
                    } else {
                        vec![]
                    }
                })
                .collect();
            // Snapshot shadowed host locals (None = name was new).
            let saved: Vec<(String, Option<LocalInfo>)> = block_keys
                .iter()
                .map(|k| (k.clone(), ctx.locals.get(k).cloned()))
                .collect();
            if !effective_decls.is_empty() {
                // Remove shadowed keys so alloc_decls creates fresh allocas.
                for k in &block_keys {
                    ctx.locals.remove(k);
                }
                alloc_decls(
                    b,
                    &mut ctx.locals,
                    &effective_decls,
                    &HashMap::new(),
                    ctx.type_layouts,
                    &mut Vec::new(),
                    "",
                    ctx.st,
                );
                init_decls(b, &ctx.locals, &effective_decls, ctx.st);
            }
            // Block-local USE statements pull module globals into scope.
            if !uses.is_empty() {
                install_globals_as_locals(b, &mut ctx.locals, ctx.globals, uses, None, ctx.st);
            }
            lower_stmts(b, ctx, body);
            // F2018 §7.5.6.3 / §9.7.3.2: at END BLOCK, finalize derived-type
            // locals that have FINAL subroutines and deallocate
            // block-scoped allocatables. Only do this for keys that were
            // newly introduced by the block (not shadowed outer locals).
            if b.func().block(b.current_block()).terminator.is_none() {
                let block_only: HashMap<String, LocalInfo> = block_keys
                    .iter()
                    .filter(|k| ctx.locals.contains_key(*k))
                    .filter(|k| !saved.iter().any(|(sk, so)| sk == *k && so.is_some()))
                    .filter_map(|k| ctx.locals.get(k).map(|v| (k.clone(), v.clone())))
                    .collect();
                if !block_only.is_empty() {
                    insert_implicit_dealloc(
                        b,
                        &block_only,
                        &ctx.locals,
                        ctx.type_layouts,
                        ctx.st,
                        ctx.internal_funcs,
                        Some(ctx.contained_host_refs),
                        None,
                    );
                }
            }
            // Restore the outer scope's locals.
            for (k, orig) in saved {
                if let Some(info) = orig {
                    ctx.locals.insert(k, info);
                } else {
                    ctx.locals.remove(&k);
                }
            }
            // Drop anything else the BLOCK introduced (e.g. via USE).
            ctx.locals.retain(|k, _| pre_block_keys.contains(k));
        }
11995
        Stmt::Associate { assocs, body, .. } => {
            // Associate names are scoped — they only exist within the body.
            // NOTE(review): the selector is evaluated BY VALUE into a fresh
            // slot, so assignments through the associate-name inside the
            // body do not write back to the original variable — confirm
            // whether by-reference association is needed for lvalue uses.
            let added_keys: Vec<String> =
                assocs.iter().map(|(name, _)| name.to_lowercase()).collect();

            for (name, expr) in assocs {
                let val = lower_expr_ctx(b, ctx, expr);
                let ty = b
                    .func()
                    .value_type(val)
                    .unwrap_or(IrType::Int(IntWidth::I32));
                let addr = b.alloca(ty.clone());
                b.store(val, addr);
                // Bind the associate-name as a plain scalar local.
                ctx.locals.insert(
                    name.to_lowercase(),
                    LocalInfo {
                        addr,
                        ty,
                        dims: vec![],
                        allocatable: false,
                        descriptor_arg: false,
                        by_ref: false,
                        char_kind: CharKind::None,
                        derived_type: None,
                        inline_const: None,
                        is_pointer: false,
                        runtime_dim_upper: vec![],
                    },
                );
            }
            lower_stmts(b, ctx, body);

            // Remove associate names from scope.
            for key in &added_keys {
                ctx.locals.remove(key);
            }
        }

        Stmt::Continue { label: Some(lbl) } => {
            // Labeled CONTINUE: fall through to the label's block.
            if let Some(&label_bb) = ctx.label_blocks.get(lbl) {
                if b.func().block(b.current_block()).terminator.is_none() {
                    b.branch(label_bb, vec![]);
                }
                b.set_block(label_bb);
            }
        }
        Stmt::Continue { label: None } => {} // no-op

        // GOTO: unconditional branch to a pre-created label block.
        Stmt::Goto { label } => {
            if let Some(&target_bb) = ctx.label_blocks.get(label) {
                b.branch(target_bb, vec![]);
            }
        }

        Stmt::Labeled { label, stmt: inner } => {
            // Create an edge from the current block into the label's block (fall-through),
            // then switch to the label's block and lower the inner statement.
            if let Some(&label_bb) = ctx.label_blocks.get(label) {
                if b.func().block(b.current_block()).terminator.is_none() {
                    b.branch(label_bb, vec![]);
                }
                b.set_block(label_bb);
            }
            lower_stmt(b, ctx, inner);
        }
12062
        Stmt::Open { specs } => {
            // Extract UNIT and FILE from specs. Simplified: first spec is unit, second is file.
            // NOTE(review): the first positional spec is assumed to be
            // UNIT= — a keyword-only OPEN (e.g. `open(file=..., unit=...)`)
            // would mis-bind here; confirm against the parser's ordering.
            let unit = if let Some(s) = specs.first() {
                lower_expr_ctx(b, ctx, &s.value)
            } else {
                b.const_i32(6)
            };

            // Find FILE= spec. Each string-valued specifier lowers to a
            // (ptr, len) pair; absent specifiers pass (0, 0).
            let (file_ptr, file_len) = specs
                .iter()
                .find(|s| {
                    s.keyword
                        .as_deref()
                        .map(|k| k.eq_ignore_ascii_case("file"))
                        .unwrap_or(false)
                })
                .map(|s| {
                    lower_string_expr_with_layouts(
                        b,
                        &ctx.locals,
                        &s.value,
                        ctx.st,
                        Some(ctx.type_layouts),
                    )
                })
                .unwrap_or_else(|| {
                    let z = b.const_i64(0);
                    (z, z)
                });

            // Find STATUS= spec.
            let (status_ptr, status_len) = specs
                .iter()
                .find(|s| {
                    s.keyword
                        .as_deref()
                        .map(|k| k.eq_ignore_ascii_case("status"))
                        .unwrap_or(false)
                })
                .map(|s| {
                    lower_string_expr_with_layouts(
                        b,
                        &ctx.locals,
                        &s.value,
                        ctx.st,
                        Some(ctx.type_layouts),
                    )
                })
                .unwrap_or_else(|| {
                    let z = b.const_i64(0);
                    (z, z)
                });

            // Find ACTION= spec.
            let (action_ptr, action_len) = specs
                .iter()
                .find(|s| {
                    s.keyword
                        .as_deref()
                        .map(|k| k.eq_ignore_ascii_case("action"))
                        .unwrap_or(false)
                })
                .map(|s| {
                    lower_string_expr_with_layouts(
                        b,
                        &ctx.locals,
                        &s.value,
                        ctx.st,
                        Some(ctx.type_layouts),
                    )
                })
                .unwrap_or_else(|| {
                    let z = b.const_i64(0);
                    (z, z)
                });

            // Find ACCESS= spec.
            let (access_ptr, access_len) = specs
                .iter()
                .find(|s| {
                    s.keyword
                        .as_deref()
                        .map(|k| k.eq_ignore_ascii_case("access"))
                        .unwrap_or(false)
                })
                .map(|s| {
                    lower_string_expr_with_layouts(
                        b,
                        &ctx.locals,
                        &s.value,
                        ctx.st,
                        Some(ctx.type_layouts),
                    )
                })
                .unwrap_or_else(|| {
                    let z = b.const_i64(0);
                    (z, z)
                });

            // Find FORM= spec.
            let (form_ptr, form_len) = specs
                .iter()
                .find(|s| {
                    s.keyword
                        .as_deref()
                        .map(|k| k.eq_ignore_ascii_case("form"))
                        .unwrap_or(false)
                })
                .map(|s| {
                    lower_string_expr_with_layouts(
                        b,
                        &ctx.locals,
                        &s.value,
                        ctx.st,
                        Some(ctx.type_layouts),
                    )
                })
                .unwrap_or_else(|| {
                    let z = b.const_i64(0);
                    (z, z)
                });

            // Find RECL= spec (integer-valued, defaults to 0 = unset).
            let recl_val = specs
                .iter()
                .find(|s| {
                    s.keyword
                        .as_deref()
                        .map(|k| k.eq_ignore_ascii_case("recl"))
                        .unwrap_or(false)
                })
                .map(|s| lower_expr_ctx(b, ctx, &s.value))
                .unwrap_or_else(|| b.const_i64(0));

            let null = b.const_i64(0);
            // Normalise to the widths the runtime ABI expects.
            let unit_i32 = coerce_to_type(b, unit, &IrType::Int(IntWidth::I32));
            let recl_i64 = coerce_to_type(b, recl_val, &IrType::Int(IntWidth::I64));

            // Check if we have any extended specifiers beyond the basic 7-arg set.
            let has_access = specs.iter().any(|s| {
                s.keyword
                    .as_deref()
                    .map(|k| k.eq_ignore_ascii_case("access"))
                    .unwrap_or(false)
            });
            let has_form = specs.iter().any(|s| {
                s.keyword
                    .as_deref()
                    .map(|k| k.eq_ignore_ascii_case("form"))
                    .unwrap_or(false)
            });
            let has_recl = specs.iter().any(|s| {
                s.keyword
                    .as_deref()
                    .map(|k| k.eq_ignore_ascii_case("recl"))
                    .unwrap_or(false)
            });
            let has_position = specs.iter().any(|s| {
                s.keyword
                    .as_deref()
                    .map(|k| k.eq_ignore_ascii_case("position"))
                    .unwrap_or(false)
            });

            if !has_access && !has_form && !has_recl && !has_position {
                // Simple case: use 7-arg afs_open_simple (unit + 3 string pairs).
                b.call(
                    FuncRef::External("afs_open_simple".into()),
                    vec![
                        unit_i32, file_ptr, file_len, status_ptr, status_len, action_ptr,
                        action_len,
                    ],
                    IrType::Void,
                );
            } else {
                // Extended case: build OpenControlBlock on the stack.
                // Find POSITION= spec.
                let (position_ptr, position_len) = specs
                    .iter()
                    .find(|s| {
                        s.keyword
                            .as_deref()
                            .map(|k| k.eq_ignore_ascii_case("position"))
                            .unwrap_or(false)
                    })
                    .map(|s| {
                        lower_string_expr_with_layouts(
                            b,
                            &ctx.locals,
                            &s.value,
                            ctx.st,
                            Some(ctx.type_layouts),
                        )
                    })
                    .unwrap_or_else(|| {
                        let z = b.const_i64(0);
                        (z, z)
                    });

                // Layout matches repr(C) OpenControlBlock (128 bytes):
                // 0: unit(i32) + 4 pad, 8: filename(ptr), 16: filename_len(i64),
                // 24: status(ptr), 32: status_len(i64), 40: action(ptr), 48: action_len(i64),
                // 56: access(ptr), 64: access_len(i64), 72: form(ptr), 80: form_len(i64),
                // 88: recl(i64), 96: iostat(ptr), 104: newunit(ptr),
                // 112: position(ptr), 120: position_len(i64)
                let cb_ty = IrType::Array(Box::new(IrType::Int(IntWidth::I8)), 128);
                let cb = b.alloca(cb_ty);

                // Store `val` at byte `offset` inside the control block.
                // GEP indexes are in units of field_ty, hence offset /
                // field_bytes; the assert guards natural alignment.
                let store_at = |b: &mut crate::ir::builder::FuncBuilder,
                                base,
                                offset: i64,
                                field_ty: IrType,
                                val| {
                    let field_bytes = field_ty.size_bytes() as i64;
                    debug_assert!(field_bytes > 0 && offset % field_bytes == 0);
                    let slot = b.const_i64(offset / field_bytes);
                    let ptr = b.gep(base, vec![slot], field_ty.clone());
                    let stored = match field_ty {
                        IrType::Int(_) | IrType::Float(_) | IrType::Bool => {
                            coerce_to_type(b, val, &field_ty)
                        }
                        _ => val,
                    };
                    b.store(stored, ptr);
                };

                // Absent string specs were lowered to i64 zeros, so query
                // each pointer's actual IR type rather than assuming ptr.
                let file_ptr_ty = b
                    .func()
                    .value_type(file_ptr)
                    .unwrap_or(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
                let status_ptr_ty = b
                    .func()
                    .value_type(status_ptr)
                    .unwrap_or(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
                let action_ptr_ty = b
                    .func()
                    .value_type(action_ptr)
                    .unwrap_or(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
                let access_ptr_ty = b
                    .func()
                    .value_type(access_ptr)
                    .unwrap_or(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
                let form_ptr_ty = b
                    .func()
                    .value_type(form_ptr)
                    .unwrap_or(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
                let position_ptr_ty = b
                    .func()
                    .value_type(position_ptr)
                    .unwrap_or(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));

                store_at(b, cb, 0, IrType::Int(IntWidth::I32), unit_i32);
                store_at(b, cb, 8, file_ptr_ty, file_ptr);
                store_at(b, cb, 16, IrType::Int(IntWidth::I64), file_len);
                store_at(b, cb, 24, status_ptr_ty, status_ptr);
                store_at(b, cb, 32, IrType::Int(IntWidth::I64), status_len);
                store_at(b, cb, 40, action_ptr_ty, action_ptr);
                store_at(b, cb, 48, IrType::Int(IntWidth::I64), action_len);
                store_at(b, cb, 56, access_ptr_ty, access_ptr);
                store_at(b, cb, 64, IrType::Int(IntWidth::I64), access_len);
                store_at(b, cb, 72, form_ptr_ty, form_ptr);
                store_at(b, cb, 80, IrType::Int(IntWidth::I64), form_len);
                store_at(b, cb, 88, IrType::Int(IntWidth::I64), recl_i64);
                store_at(b, cb, 96, IrType::Int(IntWidth::I64), null); // iostat = null
                store_at(b, cb, 104, IrType::Int(IntWidth::I64), null); // newunit = null
                store_at(b, cb, 112, position_ptr_ty, position_ptr);
                store_at(b, cb, 120, IrType::Int(IntWidth::I64), position_len);

                b.call(FuncRef::External("afs_open".into()), vec![cb], IrType::Void);
            }
        }
12335
12336 Stmt::Close { specs } => {
12337 let unit = if let Some(s) = specs.first() {
12338 lower_expr_ctx(b, ctx, &s.value)
12339 } else {
12340 b.const_i32(6)
12341 };
12342 let null = b.const_i64(0);
12343 b.call(
12344 FuncRef::External("afs_close".into()),
12345 vec![unit, null],
12346 IrType::Void,
12347 );
12348 }
12349
12350 Stmt::Read { controls, items } => {
12351 let fmt_control = controls
12352 .iter()
12353 .skip(1)
12354 .find(|c| c.keyword.is_none())
12355 .or_else(|| {
12356 controls.iter().find(|c| {
12357 c.keyword
12358 .as_deref()
12359 .map(|k| k.eq_ignore_ascii_case("fmt"))
12360 .unwrap_or(false)
12361 })
12362 });
12363
12364 let is_list_directed = match fmt_control {
12365 None => true,
12366 Some(ctrl) => matches!(&ctrl.value.node, Expr::Name { name } if name == "*"),
12367 };
12368
12369 if let Some(ctrl) = controls.first() {
12370 if let Some((buf_ptr, buf_len)) = internal_io_buffer(b, ctx, ctrl) {
12371 if is_list_directed {
12372 lower_internal_read_items(b, ctx, items, buf_ptr, buf_len);
12373 } else {
12374 let (fmt_ptr, fmt_len) = lower_string_expr_with_layouts(
12375 b,
12376 &ctx.locals,
12377 &fmt_control.unwrap().value,
12378 ctx.st,
12379 Some(ctx.type_layouts),
12380 );
12381 lower_formatted_internal_read_items(
12382 b, ctx, items, buf_ptr, buf_len, fmt_ptr, fmt_len,
12383 );
12384 }
12385 return;
12386 }
12387 }
12388
12389 // READ(unit, *) items — simplified: first control is unit.
12390 let unit = if let Some(ctrl) = controls.first() {
12391 lower_expr_ctx(b, ctx, &ctrl.value)
12392 } else {
12393 b.const_i32(5) // default stdin
12394 };
12395 if is_list_directed {
12396 lower_list_read_items(b, ctx, items, unit);
12397 } else {
12398 let (fmt_ptr, fmt_len) = lower_string_expr_with_layouts(
12399 b,
12400 &ctx.locals,
12401 &fmt_control.unwrap().value,
12402 ctx.st,
12403 Some(ctx.type_layouts),
12404 );
12405 lower_formatted_read_items(b, ctx, items, unit, fmt_ptr, fmt_len);
12406 }
12407 }
12408
12409 Stmt::Inquire { specs, .. } => {
12410 // Simplified INQUIRE: extract UNIT or FILE, and EXIST.
12411 let null = b.const_i64(0);
12412 let file_spec = specs.iter().find(|s| {
12413 s.keyword
12414 .as_deref()
12415 .map(|k| k.eq_ignore_ascii_case("file"))
12416 .unwrap_or(false)
12417 });
12418 let exist_spec = specs.iter().find(|s| {
12419 s.keyword
12420 .as_deref()
12421 .map(|k| k.eq_ignore_ascii_case("exist"))
12422 .unwrap_or(false)
12423 });
12424
12425 if let Some(fs) = file_spec {
12426 let (fptr, flen) = lower_string_expr_with_layouts(
12427 b,
12428 &ctx.locals,
12429 &fs.value,
12430 ctx.st,
12431 Some(ctx.type_layouts),
12432 );
12433 let exist_addr = if let Some(es) = exist_spec {
12434 if let Expr::Name { name } = &es.value.node {
12435 ctx.locals
12436 .get(&name.to_lowercase())
12437 .map(|i| i.addr)
12438 .unwrap_or(null)
12439 } else {
12440 null
12441 }
12442 } else {
12443 null
12444 };
12445 b.call(
12446 FuncRef::External("afs_inquire_file".into()),
12447 vec![fptr, flen, exist_addr, null, null],
12448 IrType::Void,
12449 );
12450 }
12451 }
12452
12453 Stmt::Flush { specs } => {
12454 let unit = if let Some(s) = specs.first() {
12455 lower_expr_ctx(b, ctx, &s.value)
12456 } else {
12457 b.const_i32(6)
12458 };
12459 let null = b.const_i64(0);
12460 b.call(
12461 FuncRef::External("afs_flush".into()),
12462 vec![unit, null],
12463 IrType::Void,
12464 );
12465 }
12466
12467 Stmt::Rewind { specs } => {
12468 let unit = if let Some(s) = specs.first() {
12469 lower_expr_ctx(b, ctx, &s.value)
12470 } else {
12471 b.const_i32(6)
12472 };
12473 let null = b.const_i64(0);
12474 b.call(
12475 FuncRef::External("afs_rewind".into()),
12476 vec![unit, null],
12477 IrType::Void,
12478 );
12479 }
12480
12481 Stmt::Nullify { items } => {
12482 // Zero each pointer slot so ASSOCIATED returns false.
12483 for item in items {
12484 let Expr::Name { name } = &item.node else {
12485 continue;
12486 };
12487 let Some(info) = ctx.locals.get(&name.to_lowercase()) else {
12488 continue;
12489 };
12490 if !info.is_pointer {
12491 continue;
12492 }
12493 // Array pointers use the 384-byte descriptor (allocatable=true);
12494 // scalar and DT pointers use an 8-byte slot.
12495 let size = if info.allocatable { 384i64 } else { 8i64 };
12496 let zero_byte = b.const_i32(0);
12497 let sz = b.const_i64(size);
12498 b.call(
12499 FuncRef::External("memset".into()),
12500 vec![info.addr, zero_byte, sz],
12501 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
12502 );
12503 }
12504 }
12505
12506 Stmt::PointerAssignment { target, value } => {
12507 // `p => q` or `p => x`: rebind the pointer slot `p` to the
12508 // address of the RHS designator. Three shapes:
12509 //
12510 // * scalar + derived-type pointer: slot holds an 8-byte
12511 // pointer, `=>` stores the target's address into it.
12512 // * array pointer: slot holds a 384-byte ArrayDescriptor,
12513 // `=>` materialises a descriptor of the target and
12514 // memcpy's it into the slot.
12515 //
12516 // In both cases the target must be a simple Name for now;
12517 // component-access and slice targets are follow-up work.
12518 if let Some((tgt_field_ptr, tgt_field)) =
12519 resolve_component_field_access(b, &ctx.locals, target, ctx.st, ctx.type_layouts)
12520 {
12521 if !tgt_field.pointer {
12522 return;
12523 }
12524 if is_deferred_char_component_field(&tgt_field) {
12525 if let Expr::FunctionCall { callee, .. } = &value.node {
12526 if let Expr::Name { name } = &callee.node {
12527 if name.eq_ignore_ascii_case("null") {
12528 let zero = b.const_i64(0);
12529 let null = b.int_to_ptr(zero, IrType::Int(IntWidth::I8));
12530 store_string_descriptor_view(b, tgt_field_ptr, null, zero);
12531 return;
12532 }
12533 }
12534 }
12535 if let Expr::FunctionCall { callee, args } = &value.node {
12536 if let Expr::FunctionCall {
12537 callee: inner_callee,
12538 args: inner_args,
12539 } = &callee.node
12540 {
12541 if let Expr::Name { name: arr_name } = &inner_callee.node {
12542 let akey = arr_name.to_lowercase();
12543 if let Some(info) = ctx.locals.get(&akey) {
12544 if matches!(info.char_kind, CharKind::Fixed(_))
12545 && (!info.dims.is_empty() || info.allocatable)
12546 && args.len() == 1
12547 {
12548 if let crate::ast::expr::SectionSubscript::Range {
12549 ref start,
12550 ref end,
12551 ..
12552 } = args[0].value
12553 {
12554 let idx64 = if inner_args.len() == 1 {
12555 if let crate::ast::expr::SectionSubscript::Element(
12556 idx_expr,
12557 ) = &inner_args[0].value
12558 {
12559 let idx = lower_expr_ctx(b, ctx, idx_expr);
12560 let idx_wide = match b.func().value_type(idx) {
12561 Some(IrType::Int(IntWidth::I64)) => idx,
12562 _ => b.int_extend(idx, IntWidth::I64, true),
12563 };
12564 let one = b.const_i64(1);
12565 b.isub(idx_wide, one)
12566 } else {
12567 b.const_i64(0)
12568 }
12569 } else {
12570 b.const_i64(0)
12571 };
12572 let base = array_base_addr(b, info);
12573 let elem_slot =
12574 b.gep(base, vec![idx64], info.ty.clone());
12575 let elem_ptr = b.load_typed(
12576 elem_slot,
12577 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
12578 );
12579 let elem_len = match info.char_kind {
12580 CharKind::Fixed(n) => b.const_i64(n),
12581 _ => b.const_i64(0),
12582 };
12583 let (ptr, len) = lower_substring(
12584 b,
12585 &ctx.locals,
12586 ctx.st,
12587 elem_ptr,
12588 elem_len,
12589 start.as_ref(),
12590 end.as_ref(),
12591 );
12592 store_string_descriptor_view(
12593 b,
12594 tgt_field_ptr,
12595 ptr,
12596 len,
12597 );
12598 return;
12599 }
12600 }
12601 }
12602 }
12603 }
12604 }
12605 if let Some((src_field_ptr, src_field)) = resolve_component_field_access(
12606 b,
12607 &ctx.locals,
12608 value,
12609 ctx.st,
12610 ctx.type_layouts,
12611 ) {
12612 if is_deferred_char_component_field(&src_field) {
12613 let (ptr, len) = load_string_descriptor_view(b, src_field_ptr);
12614 store_string_descriptor_view(b, tgt_field_ptr, ptr, len);
12615 return;
12616 }
12617 }
12618 let (ptr, len) = lower_string_expr_with_layouts(
12619 b,
12620 &ctx.locals,
12621 value,
12622 ctx.st,
12623 Some(ctx.type_layouts),
12624 );
12625 store_string_descriptor_view(b, tgt_field_ptr, ptr, len);
12626 return;
12627 }
12628 }
12629
12630 let Expr::Name { name: tgt_name } = &target.node else {
12631 return;
12632 };
12633 let tgt_key = tgt_name.to_lowercase();
12634 let Some(tgt_info) = ctx.locals.get(&tgt_key).cloned() else {
12635 return;
12636 };
12637 if !tgt_info.is_pointer {
12638 return;
12639 }
12640 if matches!(tgt_info.char_kind, CharKind::Deferred) {
12641 let tgt_desc = string_descriptor_addr(b, &tgt_info);
12642 if let Expr::FunctionCall { callee, .. } = &value.node {
12643 if let Expr::Name { name } = &callee.node {
12644 if name.eq_ignore_ascii_case("null") {
12645 let zero = b.const_i64(0);
12646 let null = b.int_to_ptr(zero, IrType::Int(IntWidth::I8));
12647 store_string_descriptor_view(b, tgt_desc, null, zero);
12648 return;
12649 }
12650 }
12651 }
12652 if let Some((src_field_ptr, src_field)) =
12653 resolve_component_field_access(b, &ctx.locals, value, ctx.st, ctx.type_layouts)
12654 {
12655 if is_deferred_char_component_field(&src_field) {
12656 let (ptr, len) = load_string_descriptor_view(b, src_field_ptr);
12657 store_string_descriptor_view(b, tgt_desc, ptr, len);
12658 return;
12659 }
12660 }
12661 if let Expr::Name { name: src_name } = &value.node {
12662 if let Some(src_info) = ctx.locals.get(&src_name.to_lowercase()) {
12663 if matches!(src_info.char_kind, CharKind::Deferred) {
12664 let src_desc = string_descriptor_addr(b, src_info);
12665 let (ptr, len) = load_string_descriptor_view(b, src_desc);
12666 store_string_descriptor_view(b, tgt_desc, ptr, len);
12667 return;
12668 }
12669 }
12670 }
12671 let (ptr, len) = lower_string_expr_with_layouts(
12672 b,
12673 &ctx.locals,
12674 value,
12675 ctx.st,
12676 Some(ctx.type_layouts),
12677 );
12678 store_string_descriptor_view(b, tgt_desc, ptr, len);
12679 return;
12680 }
12681
12682 // Handle section-RHS: pa => ia(lo:hi). The RHS is a
12683 // FunctionCall{Name(arr), [Range(lo,hi)]}. Build a
12684 // descriptor pointing at arr(lo) with extent hi-lo+1.
12685 if let Expr::FunctionCall {
12686 callee,
12687 args: val_args,
12688 } = &value.node
12689 {
12690 if let Expr::Name { name: arr_name } = &callee.node {
12691 let arr_key = arr_name.to_lowercase();
12692 if let Some(arr_info) = ctx.locals.get(&arr_key).cloned() {
12693 if (!arr_info.dims.is_empty() || arr_info.allocatable)
12694 && val_args.len() == 1
12695 {
12696 if let crate::ast::expr::SectionSubscript::Range {
12697 start,
12698 end,
12699 stride: _,
12700 } = &val_args[0].value
12701 {
12702 let base = array_data_ptr_for_call(b, &arr_info);
12703 let lo = if let Some(se) = start {
12704 let v = lower_expr_ctx(b, ctx, se);
12705 match b.func().value_type(v) {
12706 Some(IrType::Int(IntWidth::I64)) => v,
12707 _ => b.int_extend(v, IntWidth::I64, true),
12708 }
12709 } else {
12710 b.const_i64(1)
12711 };
12712 let hi = if let Some(ee) = end {
12713 let v = lower_expr_ctx(b, ctx, ee);
12714 match b.func().value_type(v) {
12715 Some(IrType::Int(IntWidth::I64)) => v,
12716 _ => b.int_extend(v, IntWidth::I64, true),
12717 }
12718 } else {
12719 array_total_elems_value(b, &arr_info)
12720 };
12721 // Build a descriptor in the pointer's slot.
12722 let desc = tgt_info.addr;
12723 let zero32 = b.const_i32(0);
12724 let sz384 = b.const_i64(384);
12725 b.call(
12726 FuncRef::External("memset".into()),
12727 vec![desc, zero32, sz384],
12728 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
12729 );
12730 // base_addr: base + (lo - 1) * elem_size
12731 let one = b.const_i64(1);
12732 let lo_0 = b.isub(lo, one);
12733 let elem_bytes = b.const_i64(ir_scalar_byte_size(&arr_info.ty));
12734 let byte_off = b.imul(lo_0, elem_bytes);
12735 let slice_base =
12736 b.gep(base, vec![byte_off], IrType::Int(IntWidth::I8));
12737 store_byte_aggregate_field(
12738 b,
12739 desc,
12740 0,
12741 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
12742 slice_base,
12743 );
12744 store_byte_aggregate_field(
12745 b,
12746 desc,
12747 8,
12748 IrType::Int(IntWidth::I64),
12749 elem_bytes,
12750 );
12751 let rank = b.const_i32(1);
12752 store_byte_aggregate_field(
12753 b,
12754 desc,
12755 16,
12756 IrType::Int(IntWidth::I32),
12757 rank,
12758 );
12759 let flags = b.const_i32(2);
12760 store_byte_aggregate_field(
12761 b,
12762 desc,
12763 20,
12764 IrType::Int(IntWidth::I32),
12765 flags,
12766 );
12767 // dim[0]: lower=1, upper=extent, stride=1
12768 store_byte_aggregate_field(
12769 b,
12770 desc,
12771 24,
12772 IrType::Int(IntWidth::I64),
12773 one,
12774 );
12775 let extent = b.isub(hi, lo);
12776 let extent1 = b.iadd(extent, one);
12777 store_byte_aggregate_field(
12778 b,
12779 desc,
12780 32,
12781 IrType::Int(IntWidth::I64),
12782 extent1,
12783 );
12784 store_byte_aggregate_field(
12785 b,
12786 desc,
12787 40,
12788 IrType::Int(IntWidth::I64),
12789 one,
12790 );
12791 return;
12792 }
12793 }
12794 }
12795 }
12796 }
12797
12798 // Array element target: p => arr(i) — compute the
12799 // element's address via GEP and store into the pointer slot.
12800 if let Expr::FunctionCall {
12801 callee,
12802 args: val_args,
12803 } = &value.node
12804 {
12805 if let Expr::Name { name: arr_name } = &callee.node {
12806 let arr_key = arr_name.to_lowercase();
12807 if let Some(arr_info) = ctx.locals.get(&arr_key).cloned() {
12808 if (!arr_info.dims.is_empty() || arr_info.allocatable)
12809 && val_args.len() == 1
12810 && matches!(
12811 val_args[0].value,
12812 crate::ast::expr::SectionSubscript::Element(_)
12813 )
12814 {
12815 if let crate::ast::expr::SectionSubscript::Element(idx_expr) =
12816 &val_args[0].value
12817 {
12818 let base = array_data_ptr_for_call(b, &arr_info);
12819 let idx = lower_expr_ctx(b, ctx, idx_expr);
12820 let idx64 = match b.func().value_type(idx) {
12821 Some(IrType::Int(IntWidth::I64)) => idx,
12822 _ => b.int_extend(idx, IntWidth::I64, true),
12823 };
12824 let one = b.const_i64(1);
12825 let idx0 = b.isub(idx64, one);
12826 let elem_ptr = b.gep(base, vec![idx0], arr_info.ty.clone());
12827 b.store(elem_ptr, tgt_info.addr);
12828 return;
12829 }
12830 }
12831 }
12832 }
12833 }
12834
12835 // Component access target: p => dt%field — resolve the
12836 // field's address and store into the pointer slot.
12837 if let Expr::ComponentAccess { base, component } = &value.node {
12838 if let Some((base_addr, type_name)) =
12839 resolve_component_base(b, &ctx.locals, base, ctx.st, ctx.type_layouts)
12840 {
12841 if let Some(layout) = ctx.type_layouts.get(&type_name) {
12842 if let Some(field) = layout.field(component) {
12843 let offset = b.const_i64(field.offset as i64);
12844 let field_ptr =
12845 b.gep(base_addr, vec![offset], IrType::Int(IntWidth::I8));
12846 if is_deferred_char_component_field(field) {
12847 let (ptr, _len) = load_string_descriptor_view(b, field_ptr);
12848 b.store(ptr, tgt_info.addr);
12849 return;
12850 }
12851 // Cast ptr<i8> → ptr<tgt_ty> via zero-offset GEP
12852 // so the store type matches the pointer slot.
12853 let zero = b.const_i64(0);
12854 let typed_ptr = b.gep(field_ptr, vec![zero], tgt_info.ty.clone());
12855 b.store(typed_ptr, tgt_info.addr);
12856 return;
12857 }
12858 }
12859 }
12860 }
12861
12862 let Expr::Name { name: src_name } = &value.node else {
12863 return;
12864 };
12865 let src_key = src_name.to_lowercase();
12866 let Some(src_info) = ctx.locals.get(&src_key).cloned() else {
12867 if let Some(sym) = ctx.st.find_symbol_any_scope(&src_key) {
12868 if matches!(
12869 sym.kind,
12870 crate::sema::symtab::SymbolKind::Function
12871 | crate::sema::symtab::SymbolKind::Subroutine
12872 ) {
12873 let (link_name, _) =
12874 resolved_symbol_call_target(ctx.st, &src_key, src_name);
12875 let addr = b.global_addr(
12876 &link_name,
12877 procedure_pointer_symbol_addr_elem_type(&tgt_info),
12878 );
12879 b.store(addr, tgt_info.addr);
12880 }
12881 }
12882 return;
12883 };
12884
12885 // Array pointer path: materialise a descriptor from the
12886 // target and memcpy 384 bytes into the pointer's slot.
12887 // Both explicit-shape stack arrays and descriptor-backed
12888 // allocatables are supported via array_data_ptr_for_call.
12889 let target_is_array =
12890 !src_info.dims.is_empty() || src_info.allocatable || src_info.descriptor_arg;
12891 if target_is_array {
12892 let src_desc = if local_uses_array_descriptor(&src_info) {
12893 array_descriptor_addr(b, &src_info)
12894 } else {
12895 materialize_array_descriptor_for_info(b, &src_info)
12896 };
12897 let size = b.const_i64(384);
12898 b.call(
12899 FuncRef::External("memcpy".into()),
12900 vec![tgt_info.addr, src_desc, size],
12901 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
12902 );
12903 return;
12904 }
12905
12906 // Scalar / derived-type pointer path.
12907 let addr = if src_info.is_pointer {
12908 // Copy the current association of another pointer
12909 // (pointer-to-pointer, including derived-type pointer
12910 // chains). For scalar pointers (ty = i32) the stored
12911 // value is Ptr<i32>; for DT pointers (ty = Ptr<i8>)
12912 // the stored value is already Ptr<i8> — wrapping
12913 // again would produce Ptr<Ptr<i8>> and fail the
12914 // verifier. Use ty directly when it's already a
12915 // pointer.
12916 let load_ty = if src_info.ty.is_ptr() {
12917 src_info.ty.clone()
12918 } else {
12919 IrType::Ptr(Box::new(src_info.ty.clone()))
12920 };
12921 b.load_typed(src_info.addr, load_ty)
12922 } else if src_info.derived_type.is_some() {
12923 // Derived-type TARGET. src_info.addr is a
12924 // ptr<[i8 x size]>; the pointer slot expects ptr<i8>.
12925 // A zero-offset GEP with element type i8 produces
12926 // the element-pointer view and round-trips through
12927 // the verifier.
12928 let zero = b.const_i64(0);
12929 b.gep(src_info.addr, vec![zero], IrType::Int(IntWidth::I8))
12930 } else {
12931 // Plain TARGET or ordinary scalar local: the alloca
12932 // address IS the associated target.
12933 src_info.addr
12934 };
12935 b.store(addr, tgt_info.addr);
12936 }
12937
12938 _ => {} // remaining statements (FORALL, WHERE, etc.) deferred
12939 }
12940 }
12941
12942 /// Returns true if an expression contains no function calls, I/O, or
12943 /// other side effects. Used by Select lowering to ensure both branches
12944 /// are safe to evaluate unconditionally.
12945 fn is_pure_expr(expr: &crate::ast::expr::Expr) -> bool {
12946 use crate::ast::expr::Expr;
12947 match expr {
12948 // Leaf nodes — always pure.
12949 Expr::IntegerLiteral { .. }
12950 | Expr::RealLiteral { .. }
12951 | Expr::LogicalLiteral { .. }
12952 | Expr::StringLiteral { .. }
12953 | Expr::ComplexLiteral { .. }
12954 | Expr::BozLiteral { .. }
12955 | Expr::Name { .. } => true,
12956
12957 // Binary/unary — pure if operands are pure.
12958 Expr::BinaryOp { left, right, .. } => is_pure_expr(&left.node) && is_pure_expr(&right.node),
12959 Expr::UnaryOp { operand, .. } => is_pure_expr(&operand.node),
12960 Expr::ParenExpr { inner } => is_pure_expr(&inner.node),
12961
12962 // Function calls, array constructors, component access — not pure
12963 // (function calls have side effects; component access and array
12964 // constructors may involve complex lowering).
12965 _ => false,
12966 }
12967 }
12968
12969 /// Try to lower `if (cond) x = a; else x = b` as a Select instruction
12970 /// instead of a diamond of basic blocks. Returns true on success.
12971 ///
12972 /// Detection criteria (all must hold):
12973 /// 1. No else-ifs.
12974 /// 2. Then body has exactly 1 statement — a scalar assignment to a Name.
12975 /// 3. Else body has exactly 1 statement — a scalar assignment to the
12976 /// **same** Name.
12977 /// 4. The target variable is a non-character, non-array, non-allocatable
12978 /// local (a simple alloca scalar).
12979 ///
12980 /// When this fires, the result is a single `Select` + `Store`, enabling
12981 /// ARM64 `CSEL` instruction selection.
12982 fn try_lower_select(
12983 b: &mut FuncBuilder,
12984 ctx: &mut LowerCtx,
12985 condition: &crate::ast::expr::SpannedExpr,
12986 then_body: &[SpannedStmt],
12987 else_ifs: &[(crate::ast::expr::SpannedExpr, Vec<SpannedStmt>)],
12988 else_body: &Option<Vec<SpannedStmt>>,
12989 ) -> bool {
12990 use crate::ast::expr::Expr;
12991 use crate::ast::stmt::Stmt;
12992
12993 // Only simple if/else — no else-if chain.
12994 if !else_ifs.is_empty() {
12995 return false;
12996 }
12997 let eb = match else_body {
12998 Some(eb) => eb,
12999 None => return false,
13000 };
13001
13002 // Exactly one statement in each branch.
13003 if then_body.len() != 1 || eb.len() != 1 {
13004 return false;
13005 }
13006
13007 // Both must be simple scalar assignments (Stmt::Assignment to a Name).
13008 let (then_name, then_val_expr) = match &then_body[0].node {
13009 Stmt::Assignment { target, value } => match &target.node {
13010 Expr::Name { name } => (name.to_lowercase(), value),
13011 _ => return false,
13012 },
13013 _ => return false,
13014 };
13015 let (else_name, else_val_expr) = match &eb[0].node {
13016 Stmt::Assignment { target, value } => match &target.node {
13017 Expr::Name { name } => (name.to_lowercase(), value),
13018 _ => return false,
13019 },
13020 _ => return false,
13021 };
13022
13023 // Both must assign to the same variable.
13024 if then_name != else_name {
13025 return false;
13026 }
13027
13028 // The variable must be a simple scalar local (not character, not array,
13029 // not allocatable). These constraints ensure a plain store suffices.
13030 let info = match ctx.locals.get(&then_name) {
13031 Some(info) => info.clone(),
13032 None => return false,
13033 };
13034 if !info.dims.is_empty() || info.allocatable {
13035 return false;
13036 }
13037 if !matches!(info.char_kind, CharKind::None) {
13038 return false;
13039 }
13040
13041 // Both RHS expressions must be side-effect-free (no function calls,
13042 // no I/O). Select evaluates BOTH branches unconditionally, so a
13043 // call like `fib(n-1)` in an else branch would execute even when
13044 // the condition is true — causing infinite recursion.
13045 if !is_pure_expr(&then_val_expr.node) {
13046 return false;
13047 }
13048 if !is_pure_expr(&else_val_expr.node) {
13049 return false;
13050 }
13051
13052 // Lower condition, then both value expressions, then emit Select + Store.
13053 let cond = lower_expr_ctx(b, ctx, condition);
13054 let tv = lower_expr_ctx(b, ctx, then_val_expr);
13055 let fv = lower_expr_ctx(b, ctx, else_val_expr);
13056 let tv = coerce_to_type(b, tv, &info.ty);
13057 let fv = coerce_to_type(b, fv, &info.ty);
13058 let selected = b.select(cond, tv, fv);
13059 b.store(selected, info.addr);
13060 true
13061 }
13062
13063 /// Lower IF construct with else-if chain and optional else.
13064 fn lower_if(
13065 b: &mut FuncBuilder,
13066 ctx: &mut LowerCtx,
13067 condition: &crate::ast::expr::SpannedExpr,
13068 then_body: &[SpannedStmt],
13069 else_ifs: &[(crate::ast::expr::SpannedExpr, Vec<SpannedStmt>)],
13070 else_body: &Option<Vec<SpannedStmt>>,
13071 ) {
13072 // Fast path: simple diamond `if (cond) x = a; else x = b` → Select.
13073 if try_lower_select(b, ctx, condition, then_body, else_ifs, else_body) {
13074 return;
13075 }
13076
13077 let bb_end = b.create_block("if_end");
13078
13079 let cond = lower_expr_ctx(b, ctx, condition);
13080 let bb_then = b.create_block("if_then");
13081 let bb_next = if !else_ifs.is_empty() || else_body.is_some() {
13082 b.create_block("if_else")
13083 } else {
13084 bb_end
13085 };
13086 b.cond_branch(cond, bb_then, vec![], bb_next, vec![]);
13087
13088 // Then block.
13089 b.set_block(bb_then);
13090 lower_stmts(b, ctx, then_body);
13091 if b.func().block(b.current_block()).terminator.is_none() {
13092 b.branch(bb_end, vec![]);
13093 }
13094
13095 // Else-if chain.
13096 let mut current_else = bb_next;
13097 for (i, (ei_cond, ei_body)) in else_ifs.iter().enumerate() {
13098 b.set_block(current_else);
13099 let ei_cond_val = lower_expr_ctx(b, ctx, ei_cond);
13100 let bb_ei_then = b.create_block(&format!("elseif_{}_then", i));
13101 let bb_ei_next = if i + 1 < else_ifs.len() || else_body.is_some() {
13102 b.create_block(&format!("elseif_{}_else", i))
13103 } else {
13104 bb_end
13105 };
13106 b.cond_branch(ei_cond_val, bb_ei_then, vec![], bb_ei_next, vec![]);
13107
13108 b.set_block(bb_ei_then);
13109 lower_stmts(b, ctx, ei_body);
13110 if b.func().block(b.current_block()).terminator.is_none() {
13111 b.branch(bb_end, vec![]);
13112 }
13113
13114 current_else = bb_ei_next;
13115 }
13116
13117 // Else block.
13118 if let Some(eb) = else_body {
13119 b.set_block(current_else);
13120 lower_stmts(b, ctx, eb);
13121 if b.func().block(b.current_block()).terminator.is_none() {
13122 b.branch(bb_end, vec![]);
13123 }
13124 }
13125
13126 b.set_block(bb_end);
13127 }
13128
/// DO loop fields bundled for passing without too many args.
struct DoLoopFields<'a> {
    /// Construct name, if the loop is labelled (EXIT/CYCLE targeting).
    name: &'a Option<String>,
    /// Loop variable; `None` together with `start`/`end` being `None`
    /// means an infinite `do ... end do`.
    var: &'a Option<String>,
    /// Initial value expression for the loop variable.
    start: &'a Option<crate::ast::expr::SpannedExpr>,
    /// Final value expression (inclusive bound).
    end: &'a Option<crate::ast::expr::SpannedExpr>,
    /// Step expression; defaults to 1 when absent.
    step: &'a Option<crate::ast::expr::SpannedExpr>,
    /// Loop body statements.
    body: &'a [SpannedStmt],
    /// True when lowering DO CONCURRENT (only changes block-name
    /// prefixes, e.g. `doconc_check` instead of `do_check`).
    concurrent: bool,
}
13139
13140 fn try_lower_bulk_do_concurrent(
13141 b: &mut FuncBuilder,
13142 ctx: &mut LowerCtx,
13143 controls: &[ConcurrentControl],
13144 mask: Option<&crate::ast::expr::SpannedExpr>,
13145 body: &[SpannedStmt],
13146 ) -> bool {
13147 if mask.is_some() || controls.len() != 1 || body.len() != 1 {
13148 return false;
13149 }
13150 let ctrl = &controls[0];
13151 let Stmt::Assignment { target, value } = &body[0].node else {
13152 return false;
13153 };
13154 let Some(dest) = loop_indexed_array_ref(&ctx.locals, target, &ctrl.var) else {
13155 return false;
13156 };
13157 if !control_covers_full_array(ctrl, &dest) {
13158 return false;
13159 }
13160 let Some(plan) = build_loop_bulk_plan(&ctx.locals, &dest.info, &ctrl.var, value) else {
13161 return false;
13162 };
13163 let n = array_total_elems_value(b, &dest.info);
13164 emit_bulk_array_plan(b, ctx, &dest.info, n, plan);
13165 true
13166 }
13167
/// Lower DO CONCURRENT by peeling one control per recursion level.
///
/// Tries the single-statement bulk fast path first. Otherwise the
/// outermost control is lowered as an ordinary counted DO loop whose
/// body is either the user body (innermost level, wrapped in an IF
/// when a mask is present) or a synthesized nested `DoConcurrent`
/// statement carrying the remaining controls.
fn lower_do_concurrent(
    b: &mut FuncBuilder,
    ctx: &mut LowerCtx,
    name: &Option<String>,
    controls: &[ConcurrentControl],
    mask: Option<&crate::ast::expr::SpannedExpr>,
    body: &[SpannedStmt],
    span: crate::lexer::Span,
) {
    // Fast path: whole-array assignment patterns become a bulk plan.
    if try_lower_bulk_do_concurrent(b, ctx, controls, mask, body) {
        return;
    }

    let Some((ctrl, rest)) = controls.split_first() else {
        return;
    };

    let var_opt = Some(ctrl.var.clone());
    let start_opt = Some(ctrl.start.clone());
    let end_opt = Some(ctrl.end.clone());

    // Deferred initialization: exactly one of these storages is
    // assigned on any path where `lowered_body` borrows from it, which
    // keeps the borrow alive until the lower_do_loop call below.
    let nested_body_storage;
    let masked_body_storage;
    let lowered_body = if rest.is_empty() {
        if let Some(mask_expr) = mask {
            // Innermost level with a mask: wrap the body in
            // `if (mask) then ... end if`.
            masked_body_storage = vec![crate::ast::Spanned::new(
                Stmt::IfConstruct {
                    name: None,
                    condition: mask_expr.clone(),
                    then_body: body.to_vec(),
                    else_ifs: vec![],
                    else_body: None,
                },
                mask_expr.span,
            )];
            masked_body_storage.as_slice()
        } else {
            body
        }
    } else {
        // More controls remain: synthesize a nested DO CONCURRENT with
        // the rest of the controls; the mask is carried down so it is
        // checked at the innermost level.
        nested_body_storage = vec![crate::ast::Spanned::new(
            Stmt::DoConcurrent {
                name: None,
                controls: rest.to_vec(),
                mask: mask.cloned(),
                locality: vec![],
                body: body.to_vec(),
            },
            span,
        )];
        nested_body_storage.as_slice()
    };

    // Lower this level as an ordinary counted loop (concurrent names).
    lower_do_loop(
        b,
        ctx,
        DoLoopFields {
            name,
            var: &var_opt,
            start: &start_opt,
            end: &end_opt,
            step: &ctrl.step,
            body: lowered_body,
            concurrent: true,
        },
    );
}
13235
/// Lower DO loop (counted loop with variable, start, end, step).
///
/// Counted form emits: init, `check` (direction-aware bound test),
/// `body`, `incr`, back-edge to `check`, and `exit`. `end` and `step`
/// are evaluated once before the loop. Without loop control, emits an
/// infinite loop (`body` branching to itself) escapable only via EXIT.
/// CYCLE targets the increment block; EXIT targets the exit block.
fn lower_do_loop(b: &mut FuncBuilder, ctx: &mut LowerCtx, fields: DoLoopFields) {
    let DoLoopFields {
        name,
        var,
        start,
        end,
        step,
        body,
        concurrent,
    } = fields;
    // Same CFG shape for DO and DO CONCURRENT; only block names differ.
    let (check_name, body_name, incr_name, exit_name, neg_check_name, pos_check_name) =
        if concurrent {
            (
                "doconc_check",
                "doconc_body",
                "doconc_incr",
                "doconc_exit",
                "doconc_neg_check",
                "doconc_pos_check",
            )
        } else {
            (
                "do_check",
                "do_body",
                "do_incr",
                "do_exit",
                "do_neg_check",
                "do_pos_check",
            )
        };
    if let (Some(var_name), Some(start_expr), Some(end_expr)) = (var, start, end) {
        // Counted DO loop.
        let key = var_name.to_lowercase();
        // The loop variable's type drives coercion of start/end/step;
        // default to i32 when the variable is not a known local.
        let var_ty = ctx
            .locals
            .get(&key)
            .map(|info| info.ty.clone())
            .unwrap_or(IrType::Int(IntWidth::I32));
        // Reuse the declared local's slot, or synthesize one (e.g. an
        // implied/concurrent index with no prior declaration).
        let var_addr = ctx
            .locals
            .get(&key)
            .map(|info| info.addr)
            .unwrap_or_else(|| {
                let addr = b.alloca(var_ty.clone());
                ctx.locals.insert(
                    key.clone(),
                    LocalInfo {
                        addr,
                        ty: var_ty.clone(),
                        dims: vec![],
                        allocatable: false,
                        descriptor_arg: false,
                        by_ref: false,
                        char_kind: CharKind::None,
                        derived_type: None,
                        inline_const: None,
                        is_pointer: false,
                        runtime_dim_upper: vec![],
                    },
                );
                addr
            });

        // Initialize loop variable.
        let init_raw = lower_expr_ctx(b, ctx, start_expr);
        let init_val = coerce_to_type(b, init_raw, &var_ty);
        b.store(init_val, var_addr);

        // End and step are evaluated once, before the loop runs.
        let end_raw = lower_expr_ctx(b, ctx, end_expr);
        let end_val = coerce_to_type(b, end_raw, &var_ty);
        let step_val = if let Some(step_expr) = step {
            let step_raw = lower_expr_ctx(b, ctx, step_expr);
            coerce_to_type(b, step_raw, &var_ty)
        } else {
            let one = b.const_i32(1);
            coerce_to_type(b, one, &var_ty)
        };

        let bb_check = b.create_block(check_name);
        let bb_body = b.create_block(body_name);
        let bb_incr = b.create_block(incr_name);
        let bb_exit = b.create_block(exit_name);

        b.branch(bb_check, vec![]);

        // Check: i <= end for positive step, i >= end for negative step.
        b.set_block(bb_check);
        let cur = b.load(var_addr);

        let const_step = step.as_ref().and_then(eval_const_int);
        if let Some(sv) = const_step {
            // Compile-time known step direction: single comparison.
            let cmp_op = if sv < 0 { CmpOp::Ge } else { CmpOp::Le };
            let cond = b.icmp(cmp_op, cur, end_val);
            b.cond_branch(cond, bb_body, vec![], bb_exit, vec![]);
        } else {
            // Runtime step: branch on the step's sign, then use the
            // matching comparison in a dedicated check block.
            let zero_const = b.const_i32(0);
            let zero = coerce_to_type(b, zero_const, &var_ty);
            let step_neg = b.icmp(CmpOp::Lt, step_val, zero);
            let bb_neg_check = b.create_block(neg_check_name);
            let bb_pos_check = b.create_block(pos_check_name);
            b.cond_branch(step_neg, bb_neg_check, vec![], bb_pos_check, vec![]);

            b.set_block(bb_neg_check);
            let cond_neg = b.icmp(CmpOp::Ge, cur, end_val);
            b.cond_branch(cond_neg, bb_body, vec![], bb_exit, vec![]);

            b.set_block(bb_pos_check);
            let cond_pos = b.icmp(CmpOp::Le, cur, end_val);
            b.cond_branch(cond_pos, bb_body, vec![], bb_exit, vec![]);
        }

        // Body. CYCLE jumps to bb_incr, EXIT to bb_exit.
        ctx.push_loop(name.clone(), bb_incr, bb_exit);
        b.set_block(bb_body);
        lower_stmts(b, ctx, body);
        // Fall through to the increment unless the body already
        // terminated (RETURN/EXIT/GOTO).
        if b.func().block(b.current_block()).terminator.is_none() {
            b.branch(bb_incr, vec![]);
        }
        ctx.pop_loop();

        // Increment: i += step, then re-check.
        b.set_block(bb_incr);
        let cur2 = b.load(var_addr);
        let next = b.iadd(cur2, step_val);
        b.store(next, var_addr);
        b.branch(bb_check, vec![]);

        b.set_block(bb_exit);
    } else {
        // Infinite DO (no variable) — `do ... end do` without loop control.
        // CYCLE re-enters the body; only EXIT reaches bb_exit.
        let bb_body = b.create_block(body_name);
        let bb_exit = b.create_block(exit_name);
        b.branch(bb_body, vec![]);

        ctx.push_loop(name.clone(), bb_body, bb_exit);
        b.set_block(bb_body);
        lower_stmts(b, ctx, body);
        if b.func().block(b.current_block()).terminator.is_none() {
            b.branch(bb_body, vec![]);
        }
        ctx.pop_loop();

        b.set_block(bb_exit);
    }
}
13384
13385 /// Lower SELECT CASE.
13386 fn lower_select_case(
13387 b: &mut FuncBuilder,
13388 ctx: &mut LowerCtx,
13389 selector: &crate::ast::expr::SpannedExpr,
13390 cases: &[CaseBlock],
13391 ) {
13392 let sel_val = lower_expr_ctx(b, ctx, selector);
13393 let bb_end = b.create_block("select_end");
13394
13395 // For simplicity, lower as a chain of if-else comparisons.
13396 // (Switch terminator would be ideal for integer constants, but the
13397 // general case needs range checks and DEFAULT handling.)
13398 let mut bb_current = b.current_block();
13399
13400 for (i, case) in cases.iter().enumerate() {
13401 let is_default = case
13402 .selectors
13403 .iter()
13404 .any(|s| matches!(s, CaseSelector::Default));
13405
13406 if is_default {
13407 // Default case — always taken.
13408 b.set_block(bb_current);
13409 let bb_body = b.create_block(&format!("case_{}_body", i));
13410 b.branch(bb_body, vec![]);
13411
13412 b.set_block(bb_body);
13413 lower_stmts(b, ctx, &case.body);
13414 if b.func().block(b.current_block()).terminator.is_none() {
13415 b.branch(bb_end, vec![]);
13416 }
13417 // After default, no more cases matter.
13418 b.set_block(bb_end);
13419 return;
13420 }
13421
13422 let bb_body = b.create_block(&format!("case_{}_body", i));
13423 let bb_next = b.create_block(&format!("case_{}_next", i));
13424
13425 b.set_block(bb_current);
13426
13427 // Build condition from selectors (OR them together).
13428 let mut combined_cond: Option<ValueId> = None;
13429 for sel in &case.selectors {
13430 let cond = match sel {
13431 CaseSelector::Value(expr) => {
13432 let val = lower_expr_ctx(b, ctx, expr);
13433 b.icmp(CmpOp::Eq, sel_val, val)
13434 }
13435 CaseSelector::Range { low, high } => {
13436 let low_ok = if let Some(lo) = low {
13437 let lo_val = lower_expr_ctx(b, ctx, lo);
13438 Some(b.icmp(CmpOp::Ge, sel_val, lo_val))
13439 } else {
13440 None
13441 };
13442 let high_ok = if let Some(hi) = high {
13443 let hi_val = lower_expr_ctx(b, ctx, hi);
13444 Some(b.icmp(CmpOp::Le, sel_val, hi_val))
13445 } else {
13446 None
13447 };
13448 match (low_ok, high_ok) {
13449 (Some(l), Some(h)) => b.and(l, h),
13450 (Some(c), None) | (None, Some(c)) => c,
13451 (None, None) => b.const_bool(true),
13452 }
13453 }
13454 CaseSelector::Default => unreachable!(), // handled above
13455 };
13456 combined_cond = Some(match combined_cond {
13457 Some(prev) => b.or(prev, cond),
13458 None => cond,
13459 });
13460 }
13461
13462 let cond = combined_cond.unwrap_or_else(|| b.const_bool(false));
13463 b.cond_branch(cond, bb_body, vec![], bb_next, vec![]);
13464
13465 b.set_block(bb_body);
13466 lower_stmts(b, ctx, &case.body);
13467 if b.func().block(b.current_block()).terminator.is_none() {
13468 b.branch(bb_end, vec![]);
13469 }
13470
13471 bb_current = bb_next;
13472 }
13473
13474 // If no case matched and no default, fall through.
13475 b.set_block(bb_current);
13476 b.branch(bb_end, vec![]);
13477
13478 b.set_block(bb_end);
13479 }
13480
13481 /// Lower an array element access: compute flat offset from subscripts, GEP, load.
13482 /// Fortran column-major: a(i, j) in a(m, n) → offset = (i - lower1) + (j - lower2) * m
13483 fn lower_array_element(
13484 b: &mut FuncBuilder,
13485 locals: &HashMap<String, LocalInfo>,
13486 info: &LocalInfo,
13487 args: &[crate::ast::expr::Argument],
13488 st: &SymbolTable,
13489 ) -> ValueId {
13490 let elem_ptr = lower_array_element_addr(b, locals, info, args, st);
13491 if info.derived_type.is_some() {
13492 elem_ptr
13493 } else {
13494 b.load(elem_ptr)
13495 }
13496 }
13497
13498 fn lower_array_element_addr(
13499 b: &mut FuncBuilder,
13500 locals: &HashMap<String, LocalInfo>,
13501 info: &LocalInfo,
13502 args: &[crate::ast::expr::Argument],
13503 st: &SymbolTable,
13504 ) -> ValueId {
13505 let idx64 = compute_flat_elem_offset(b, locals, info, args, st);
13506 let base = array_base_addr(b, info);
13507 b.gep(base, vec![idx64], info.ty.clone())
13508 }
13509
13510 fn emit_bounds_check(b: &mut FuncBuilder, index: ValueId, lower: ValueId, upper: ValueId) {
13511 b.runtime_call(
13512 RuntimeFunc::CheckBounds,
13513 vec![index, lower, upper],
13514 IrType::Void,
13515 );
13516 }
13517
/// Compute the column-major flat ELEMENT offset (i64) for an array
/// subscript expression, returning a value suitable for `b.gep` (which
/// scales by the GEP result element size).
///
/// Two paths:
/// * Static-shape arrays (`info.dims` populated): fold strides at
///   compile time.
/// * Allocatable arrays (rank/extents only known at runtime): load
///   lower_bound and upper_bound from the runtime descriptor and
///   accumulate the cumulative stride as a runtime i64.
///
/// Every subscript is bounds-checked via `emit_bounds_check` before
/// it is folded into the offset.
///
/// Audit5 MAJOR-1: previously both lower_array_element and
/// lower_array_store fell back to `(1, 1)` for every dim of an
/// allocatable, leaving cumulative stride = 1. m(i, j) for a 3x4
/// allocatable then computed `(i-1) + (j-1)` instead of
/// `(i-1) + (j-1)*3`, so writes clobbered each other and reads
/// returned garbage.
fn compute_flat_elem_offset(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    info: &LocalInfo,
    args: &[crate::ast::expr::Argument],
    st: &SymbolTable,
) -> ValueId {
    if local_uses_array_descriptor(info) {
        // Runtime descriptor path. Each DimDescriptor is 24 bytes
        // starting at descriptor offset 24:
        //   +0  lower_bound : i64
        //   +8  upper_bound : i64
        //   +16 stride      : i64 (we use 1)
        let desc = array_descriptor_addr(b, info);
        let mut flat: Option<ValueId> = None;
        let mut cum_stride: Option<ValueId> = None; // i64
        let one64 = b.const_i64(1);
        for (dim_idx, arg) in args.iter().enumerate() {
            // Non-element subscripts (ranges) shouldn't reach this
            // element-offset helper; lower them as index 0 rather
            // than crash.
            let sub_raw = match &arg.value {
                crate::ast::expr::SectionSubscript::Element(e) => lower_expr(b, locals, e, st),
                _ => b.const_i64(0),
            };
            let sub = widen_idx_to_i64(b, sub_raw);

            // Byte offsets of this dim's lower/upper bounds inside
            // the descriptor header.
            let dim_base = 24 + (dim_idx as i64) * 24;
            let off_lo = b.const_i64(dim_base);
            let off_up = b.const_i64(dim_base + 8);
            let p_lo = b.gep(desc, vec![off_lo], IrType::Int(IntWidth::I8));
            let p_up = b.gep(desc, vec![off_up], IrType::Int(IntWidth::I8));
            let lo = b.load_typed(p_lo, IrType::Int(IntWidth::I64));
            let up = b.load_typed(p_up, IrType::Int(IntWidth::I64));
            emit_bounds_check(b, sub, lo, up);

            // Zero-base the subscript against this dim's lower bound.
            let adjusted = b.isub(sub, lo);

            let dim_offset = match cum_stride {
                None => adjusted, // first dim has cumulative stride 1
                Some(s) => b.imul(adjusted, s),
            };
            flat = Some(match flat {
                Some(prev) => b.iadd(prev, dim_offset),
                None => dim_offset,
            });

            // cum_stride *= (upper - lower + 1)
            let span = b.isub(up, lo);
            let extent = b.iadd(span, one64);
            cum_stride = Some(match cum_stride {
                None => extent,
                Some(prev) => b.imul(prev, extent),
            });
        }
        // Zero subscripts (shouldn't happen for an element access)
        // degrade to offset 0.
        return flat.unwrap_or_else(|| b.const_i64(0));
    }

    // Static-shape path: fold strides at compile time, but fall back
    // to runtime values for any dim whose upper bound is non-const
    // (e.g. `xs(n)` where `n` is a dummy arg). See
    // `install_runtime_dim_bounds`.
    let mut flat_offset: Option<ValueId> = None;
    let mut stride_static: i64 = 1;
    let mut stride_dynamic: Option<ValueId> = None;

    for (dim_idx, arg) in args.iter().enumerate() {
        let subscript = match &arg.value {
            crate::ast::expr::SectionSubscript::Element(e) => lower_expr(b, locals, e, st),
            _ => b.const_i32(0),
        };
        let subscript64 = widen_idx_to_i64(b, subscript);

        // Declared (lower, extent) for this dim; dims beyond the
        // declared shape default to (1, 1).
        let (lower, extent) = if dim_idx < info.dims.len() {
            info.dims[dim_idx]
        } else {
            (1, 1)
        };
        let lower_val = b.const_i64(lower);

        // A runtime upper bound (dummy-arg-sized dim) overrides the
        // compile-time `lower + extent - 1`.
        let runtime_upper = info.runtime_dim_upper.get(dim_idx).and_then(|u| *u);

        let upper_val = match runtime_upper {
            Some(v) => v,
            None => b.const_i64(lower + extent - 1),
        };
        emit_bounds_check(b, subscript64, lower_val, upper_val);
        let adjusted = b.isub(subscript64, lower_val);

        // Accumulate the offset. If we have any runtime stride on
        // the cumulative product, we're in runtime-stride territory
        // for the rest of the dims too.
        let dim_offset = match (&stride_dynamic, stride_static) {
            (None, 1) => adjusted,
            (None, s) => {
                let sval = b.const_i64(s);
                b.imul(adjusted, sval)
            }
            (Some(sdyn), _) => b.imul(adjusted, *sdyn),
        };

        flat_offset = Some(match flat_offset {
            Some(prev) => b.iadd(prev, dim_offset),
            None => dim_offset,
        });

        // Update the cumulative stride for the NEXT dim.
        // stride_next = stride * (upper - lower + 1)
        match runtime_upper {
            Some(up) => {
                // Transition to (or stay in) runtime-stride.
                let span = b.isub(up, lower_val);
                let one64 = b.const_i64(1);
                let ext = b.iadd(span, one64);
                let new_dyn = match stride_dynamic {
                    Some(prev) => b.imul(prev, ext),
                    None if stride_static == 1 => ext,
                    None => {
                        let sprev = b.const_i64(stride_static);
                        b.imul(sprev, ext)
                    }
                };
                stride_dynamic = Some(new_dyn);
                stride_static = 1; // retired; dynamic path takes over
            }
            None => {
                if stride_dynamic.is_some() {
                    let ext = b.const_i64(extent);
                    stride_dynamic = Some(b.imul(stride_dynamic.unwrap(), ext));
                } else {
                    stride_static *= extent;
                }
            }
        }
    }

    flat_offset.unwrap_or_else(|| b.const_i64(0))
}
13670
13671 /// Widen a pair of integer operands to match the wider one. Used
13672 /// for mixed-kind bit-manipulation intrinsics (iand/ior/ieor) which
13673 /// F2018 allows between different integer kinds but the IR verifier
13674 /// requires same-width operands. Sign-extends the narrower operand
13675 /// since the intrinsics themselves are kind-neutral; gfortran picks
13676 /// the wider result type so zero-extending would produce a different
13677 /// bit pattern for negative narrow operands.
13678 fn unify_int_widths(b: &mut FuncBuilder, lhs: ValueId, rhs: ValueId) -> (ValueId, ValueId) {
13679 let lty = b.func().value_type(lhs);
13680 let rty = b.func().value_type(rhs);
13681 let lw = match lty.as_ref() {
13682 Some(IrType::Int(w)) => Some(w.bits()),
13683 Some(IrType::Bool) => Some(8),
13684 _ => None,
13685 };
13686 let rw = match rty.as_ref() {
13687 Some(IrType::Int(w)) => Some(w.bits()),
13688 Some(IrType::Bool) => Some(8),
13689 _ => None,
13690 };
13691 let (Some(lw), Some(rw)) = (lw, rw) else {
13692 return (lhs, rhs);
13693 };
13694 if lw == rw {
13695 return (lhs, rhs);
13696 }
13697 let target = if lw >= rw { lw } else { rw };
13698 let to_iw = match target {
13699 8 => IntWidth::I8,
13700 16 => IntWidth::I16,
13701 32 => IntWidth::I32,
13702 64 => IntWidth::I64,
13703 _ => return (lhs, rhs),
13704 };
13705 let l = if lw < rw {
13706 b.int_extend(lhs, to_iw, true)
13707 } else {
13708 lhs
13709 };
13710 let r = if rw < lw {
13711 b.int_extend(rhs, to_iw, true)
13712 } else {
13713 rhs
13714 };
13715 (l, r)
13716 }
13717
13718 /// Widen an i32 (or smaller) index value to i64 for pointer
13719 /// arithmetic. Pass through for values already i64 or larger.
13720 fn widen_idx_to_i64(b: &mut FuncBuilder, idx: ValueId) -> ValueId {
13721 match b.func().value_type(idx) {
13722 Some(IrType::Int(IntWidth::I64)) => idx,
13723 Some(IrType::Int(_)) => b.int_extend(idx, IntWidth::I64, true),
13724 _ => idx,
13725 }
13726 }
13727
13728 /// Lower an ALLOCATE bound subscript to (lower_bound, upper_bound)
13729 /// as i64 values. Both forms are valid:
13730 ///
13731 /// allocate(a(N)) → Element(N) → (1, N)
13732 /// allocate(a(0:N)) → Range(0, N) → (0, N)
13733 /// allocate(a(lo:hi)) → Range(lo, hi) → (lo, hi)
13734 ///
13735 /// A bare `Range { start: None, .. }` defaults the lower bound
13736 /// to 1 (Fortran convention). A `Range` with no `end` is
13737 /// invalid in ALLOCATE — defaults to 1 to keep the runtime
13738 /// from segfaulting and let the verifier catch it.
13739 ///
13740 /// Audit6 BLOCKING-4: the previous Stmt::Allocate code only
13741 /// handled Element subscripts and silently dropped the Range
13742 /// case to const_i64(1), causing heap corruption on
13743 /// `allocate(m(0:2, 0:3))`.
13744 fn lower_alloc_bounds(
13745 b: &mut FuncBuilder,
13746 ctx: &LowerCtx,
13747 sub: &crate::ast::expr::SectionSubscript,
13748 ) -> (ValueId, ValueId) {
13749 use crate::ast::expr::SectionSubscript;
13750 match sub {
13751 SectionSubscript::Element(e) => {
13752 let up = lower_expr_ctx(b, ctx, e);
13753 let up64 = widen_idx_to_i64(b, up);
13754 let lo64 = b.const_i64(1);
13755 (lo64, up64)
13756 }
13757 SectionSubscript::Range { start, end, .. } => {
13758 let lo64 = match start {
13759 Some(e) => {
13760 let v = lower_expr_ctx(b, ctx, e);
13761 widen_idx_to_i64(b, v)
13762 }
13763 None => b.const_i64(1),
13764 };
13765 let up64 = match end {
13766 Some(e) => {
13767 let v = lower_expr_ctx(b, ctx, e);
13768 widen_idx_to_i64(b, v)
13769 }
13770 None => b.const_i64(1),
13771 };
13772 (lo64, up64)
13773 }
13774 }
13775 }
13776
13777 /// Element-byte size for an IR scalar type. Used by array
13778 /// constructor lowering to compute byte offsets into a destination
13779 /// buffer. Defaults to 8 for unknown/wide types so we never
13780 /// under-step (a wrong-direction error would silently scribble
13781 /// over adjacent elements).
13782 fn ir_scalar_byte_size(ty: &IrType) -> i64 {
13783 match ty {
13784 IrType::Array(elem, count) => (elem.size_bytes() * count) as i64,
13785 IrType::Int(IntWidth::I8) | IrType::Bool => 1,
13786 IrType::Int(IntWidth::I16) => 2,
13787 IrType::Int(IntWidth::I32) | IrType::Float(FloatWidth::F32) => 4,
13788 IrType::Int(IntWidth::I64) | IrType::Float(FloatWidth::F64) => 8,
13789 IrType::Int(IntWidth::I128) => 16,
13790 _ => 8,
13791 }
13792 }
13793
/// Element-byte size used for runtime array-descriptor arithmetic,
/// keyed off a local's IR element type.
///
/// NOTE(review): `Bool` maps to 4 here but to 1 in
/// `ir_scalar_byte_size` — presumably descriptor-backed LOGICALs use
/// 4-byte storage; confirm the two tables are intentionally
/// different before unifying them.
fn descriptor_element_size_bytes(info: &LocalInfo) -> i64 {
    match &info.ty {
        // Aggregate element (e.g. complex [f x 2]): whole-array size.
        IrType::Array(_, _) => info.ty.size_bytes() as i64,
        IrType::Int(IntWidth::I8) => 1,
        IrType::Int(IntWidth::I16) => 2,
        IrType::Int(IntWidth::I32) | IrType::Float(FloatWidth::F32) => 4,
        IrType::Int(IntWidth::I64) | IrType::Float(FloatWidth::F64) => 8,
        IrType::Int(IntWidth::I128) => 16,
        IrType::Bool => 4,
        // Conservative default — never under-step.
        _ => 8,
    }
}
13806
13807 /// Store the literal values of an array constructor into a
13808 /// destination buffer, one element at a time via byte-level GEP.
13809 ///
13810 /// `dest_base` is a byte pointer to the start of the buffer
13811 /// (already loaded from a descriptor if the dest is allocatable).
13812 /// `elem_ty` is the element type used to coerce/size each value.
13813 ///
13814 /// Handles both literal expressions and implied-do iterators.
13815 /// Literal expressions use a compile-time byte offset; implied-do
13816 /// iterators generate a real runtime loop that advances an
13817 /// alloca-backed offset. The DO variable is installed in a
13818 /// clone of `locals` so the inner expression can reference it.
13819 ///
13820 /// Audit BLOCKING-1: previously the implied-do branch silently
13821 /// skipped all stores and advanced a compile-time counter,
13822 /// leaving the destination buffer with whatever stack bytes
13823 /// happened to be there (the comment lied about allocas being
13824 /// zeroed). Programs that used `[(expr, i=1,n)]` got garbage.
13825 fn store_ac_values_into(
13826 b: &mut FuncBuilder,
13827 locals: &HashMap<String, LocalInfo>,
13828 dest_base: ValueId,
13829 elem_ty: &IrType,
13830 values: &[crate::ast::expr::AcValue],
13831 st: &SymbolTable,
13832 ) {
13833 let elem_bytes = ir_scalar_byte_size(elem_ty);
13834 // Runtime byte offset. Starts at 0 and is bumped by elem_bytes
13835 // after each store. Using an alloca (not a ValueId) lets the
13836 // implied-do loop body update the offset across iterations.
13837 let off_slot = b.alloca(IrType::Int(IntWidth::I64));
13838 let zero64 = b.const_i64(0);
13839 b.store(zero64, off_slot);
13840 let step_bytes = b.const_i64(elem_bytes);
13841
13842 for v in values {
13843 match v {
13844 crate::ast::expr::AcValue::Expr(e) => {
13845 let raw = lower_expr(b, locals, e, st);
13846 let coerced = coerce_to_type(b, raw, elem_ty);
13847 let cur_off = b.load(off_slot);
13848 let p = b.gep(dest_base, vec![cur_off], IrType::Int(IntWidth::I8));
13849 b.store(coerced, p);
13850 let next_off = b.iadd(cur_off, step_bytes);
13851 b.store(next_off, off_slot);
13852 }
13853 crate::ast::expr::AcValue::ImpliedDo(ido) => {
13854 store_ac_implied_do(
13855 b,
13856 locals,
13857 dest_base,
13858 elem_ty,
13859 elem_bytes,
13860 off_slot,
13861 &ido.values,
13862 &ido.var,
13863 &ido.start,
13864 &ido.end,
13865 ido.step.as_ref(),
13866 st,
13867 );
13868 }
13869 }
13870 }
13871 }
13872
/// Lower an implied-do array constructor iterator:
/// `( inner_values, var = start, end [, step] )`
/// produces the sequence `inner_values[var=start], inner_values[var=start+step], …`.
/// Each iteration evaluates the inner value list with `var` bound
/// to the current iteration, stores them at the current offset,
/// and advances the offset. The DO variable is installed into a
/// scratch clone of `locals` for the duration of the iterator.
///
/// `off_slot` is the caller's alloca-backed byte cursor into
/// `dest_base`; it is shared so nested iterators and surrounding
/// literal values all advance the same position.
#[allow(clippy::too_many_arguments)]
fn store_ac_implied_do(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    dest_base: ValueId,
    elem_ty: &IrType,
    elem_bytes: i64,
    off_slot: ValueId,
    inner: &[crate::ast::expr::AcValue],
    var: &str,
    start: &crate::ast::expr::SpannedExpr,
    end: &crate::ast::expr::SpannedExpr,
    step: Option<&crate::ast::expr::SpannedExpr>,
    st: &SymbolTable,
) {
    // DO variable as a fresh i32 alloca, installed in a scratch
    // locals map so the inner expressions can reference it.
    let var_ty = IrType::Int(IntWidth::I32);
    let var_addr = b.alloca(var_ty.clone());
    let start_val = lower_expr(b, locals, start, st);
    let start_coerced = coerce_to_type(b, start_val, &var_ty);
    b.store(start_coerced, var_addr);

    // End and step are evaluated once, before the loop (Fortran DO
    // control expressions are not re-evaluated per iteration).
    let end_val = lower_expr(b, locals, end, st);
    let end_coerced = coerce_to_type(b, end_val, &var_ty);

    let step_val_raw = match step {
        Some(e) => lower_expr(b, locals, e, st),
        None => b.const_i32(1), // omitted step defaults to 1
    };
    let step_val = coerce_to_type(b, step_val_raw, &var_ty);

    // Shadow (or introduce) `var` in a clone of the caller's locals;
    // the caller's map is untouched once this iterator finishes.
    let mut scratch_locals = locals.clone();
    scratch_locals.insert(
        var.to_lowercase(),
        LocalInfo {
            addr: var_addr,
            ty: var_ty.clone(),
            dims: vec![],
            allocatable: false,
            descriptor_arg: false,
            by_ref: false,
            char_kind: CharKind::None,
            derived_type: None,
            inline_const: None,
            is_pointer: false,
            runtime_dim_upper: vec![],
        },
    );

    // Loop skeleton: check → body → exit. Mirrors the regular DO
    // lowerer's sign-of-step handling: if `step` is a compile-time
    // constant, pick `Le` for positive and `Ge` for negative; if
    // it's a runtime value, branch on the sign and emit two check
    // arms. Audit BLOCKING-1: the previous version hardcoded `Le`
    // and `[(i, i=5,1,-1)]` skipped the body entirely.
    let check = b.create_block("ac_impdo_check");
    let body = b.create_block("ac_impdo_body");
    let exit = b.create_block("ac_impdo_exit");
    b.branch(check, vec![]);

    b.set_block(check);
    let cur_var = b.load(var_addr);
    let const_step = step.and_then(eval_const_int);
    if let Some(sv) = const_step {
        let cmp_op = if sv < 0 { CmpOp::Ge } else { CmpOp::Le };
        let cond = b.icmp(cmp_op, cur_var, end_coerced);
        b.cond_branch(cond, body, vec![], exit, vec![]);
    } else {
        // Runtime step: branch on sign at the check site so we
        // pick the correct comparison without recomputing on each
        // iteration. Two check sub-blocks, one per direction.
        let zero = b.const_i32(0);
        let step_neg = b.icmp(CmpOp::Lt, step_val, zero);
        let bb_neg = b.create_block("ac_impdo_neg_check");
        let bb_pos = b.create_block("ac_impdo_pos_check");
        b.cond_branch(step_neg, bb_neg, vec![], bb_pos, vec![]);

        b.set_block(bb_neg);
        let cond_neg = b.icmp(CmpOp::Ge, cur_var, end_coerced);
        b.cond_branch(cond_neg, body, vec![], exit, vec![]);

        b.set_block(bb_pos);
        let cond_pos = b.icmp(CmpOp::Le, cur_var, end_coerced);
        b.cond_branch(cond_pos, body, vec![], exit, vec![]);
    }

    // Body: evaluate each inner value and store at the current
    // offset. Recurses into store_ac_implied_do so nested
    // implied-do works.
    b.set_block(body);
    for iv in inner {
        match iv {
            crate::ast::expr::AcValue::Expr(e) => {
                let raw = lower_expr(b, &scratch_locals, e, st);
                let coerced = coerce_to_type(b, raw, elem_ty);
                let cur_off = b.load(off_slot);
                let p = b.gep(dest_base, vec![cur_off], IrType::Int(IntWidth::I8));
                b.store(coerced, p);
                let step_bytes = b.const_i64(elem_bytes);
                let next_off = b.iadd(cur_off, step_bytes);
                b.store(next_off, off_slot);
            }
            crate::ast::expr::AcValue::ImpliedDo(ido) => {
                // Nested iterator sees the outer DO variable via
                // scratch_locals and shares the same offset slot.
                store_ac_implied_do(
                    b,
                    &scratch_locals,
                    dest_base,
                    elem_ty,
                    elem_bytes,
                    off_slot,
                    &ido.values,
                    &ido.var,
                    &ido.start,
                    &ido.end,
                    ido.step.as_ref(),
                    st,
                );
            }
        }
    }
    // Advance the DO variable and loop.
    let cur_var_end = b.load(var_addr);
    let next_var = b.iadd(cur_var_end, step_val);
    b.store(next_var, var_addr);
    b.branch(check, vec![]);

    // Continue emitting into exit.
    b.set_block(exit);
}
14010
14011 fn lower_char_array_store(
14012 b: &mut FuncBuilder,
14013 locals: &HashMap<String, LocalInfo>,
14014 info: &LocalInfo,
14015 args: &[crate::ast::expr::Argument],
14016 value: &crate::ast::expr::SpannedExpr,
14017 st: &SymbolTable,
14018 ) {
14019 let Some((dest_ptr, dest_len)) = char_array_element_ptr_and_len(b, locals, info, args, st)
14020 else {
14021 return;
14022 };
14023 let (src_ptr, src_len) = lower_string_expr(b, locals, value, st);
14024 b.call(
14025 FuncRef::External("afs_assign_char_fixed".into()),
14026 vec![dest_ptr, dest_len, src_ptr, src_len],
14027 IrType::Void,
14028 );
14029 }
14030
14031 /// Lower an array element store: compute flat offset, GEP, store.
14032 fn lower_array_store(
14033 b: &mut FuncBuilder,
14034 locals: &HashMap<String, LocalInfo>,
14035 info: &LocalInfo,
14036 args: &[crate::ast::expr::Argument],
14037 value: ValueId,
14038 st: &SymbolTable,
14039 ) {
14040 let idx64 = compute_flat_elem_offset(b, locals, info, args, st);
14041 let base = array_base_addr(b, info);
14042 let elem_ptr = b.gep(base, vec![idx64], info.ty.clone());
14043 if info.derived_type.is_some() {
14044 let size = b.const_i64(ir_scalar_byte_size(&info.ty));
14045 b.call(
14046 FuncRef::External("memcpy".into()),
14047 vec![elem_ptr, value, size],
14048 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
14049 );
14050 return;
14051 }
14052 // Audit5 CRITICAL-1: coerce the RHS to the array element
14053 // type before the store. Without this, an i32-typed value
14054 // assigned into an i8 array would emit a 4-byte STR through
14055 // a 1-byte slot, clobbering the next 3 bytes. The verifier's
14056 // store-pointee width check has a `pointee_is_byte` escape
14057 // hatch for derived-type byte-cursor GEPs, so the bad store
14058 // wasn't caught at the IR level either.
14059 let coerced = coerce_to_type(b, value, &info.ty);
14060 b.store(coerced, elem_ptr);
14061 }
14062
14063 /// Lower the items of a PRINT/WRITE statement to unit-based I/O calls.
14064 fn lower_write_items(
14065 b: &mut FuncBuilder,
14066 ctx: &mut LowerCtx,
14067 items: &[crate::ast::expr::SpannedExpr],
14068 unit: ValueId,
14069 ) {
14070 lower_write_items_adv(b, ctx, items, unit, true);
14071 }
14072
14073 fn lower_write_items_adv(
14074 b: &mut FuncBuilder,
14075 ctx: &mut LowerCtx,
14076 items: &[crate::ast::expr::SpannedExpr],
14077 unit: ValueId,
14078 advance: bool,
14079 ) {
14080 for item in items {
14081 let is_char = match &item.node {
14082 Expr::Name { name } => ctx
14083 .locals
14084 .get(&name.to_lowercase())
14085 .map(|i| i.char_kind != CharKind::None)
14086 .unwrap_or(false),
14087 Expr::FunctionCall { callee, args } => {
14088 if let Expr::Name { name } = &callee.node {
14089 let key = name.to_lowercase();
14090 matches!(
14091 key.as_str(),
14092 "trim"
14093 | "adjustl"
14094 | "adjustr"
14095 | "char"
14096 | "achar"
14097 | "compiler_version"
14098 | "compiler_options"
14099 ) || ctx
14100 .locals
14101 .get(&key)
14102 .map(|i| {
14103 i.char_kind != CharKind::None
14104 && (i.dims.is_empty()
14105 || args.iter().all(|a| {
14106 matches!(
14107 a.value,
14108 crate::ast::expr::SectionSubscript::Element(_)
14109 )
14110 }))
14111 })
14112 .unwrap_or(false)
14113 } else if let Expr::FunctionCall { callee: inner, .. } = &callee.node {
14114 // Nested: arr(i)(lo:hi) — substring of char array element.
14115 if let Expr::Name { name } = &inner.node {
14116 let key = name.to_lowercase();
14117 ctx.locals
14118 .get(&key)
14119 .map(|i| {
14120 i.char_kind != CharKind::None
14121 && (!i.dims.is_empty() || i.allocatable)
14122 })
14123 .unwrap_or(false)
14124 && args.iter().any(|a| {
14125 matches!(a.value, crate::ast::expr::SectionSubscript::Range { .. })
14126 })
14127 } else {
14128 false
14129 }
14130 } else {
14131 false
14132 }
14133 }
14134 Expr::BinaryOp {
14135 op: BinaryOp::Concat,
14136 ..
14137 } => true,
14138 Expr::ComponentAccess { base, component } => {
14139 // Check if the component is a character field.
14140 if let Some((_addr, type_name)) =
14141 resolve_component_base(b, &ctx.locals, base, ctx.st, ctx.type_layouts)
14142 {
14143 if let Some(layout) = ctx.type_layouts.get(&type_name) {
14144 layout
14145 .field(component)
14146 .map(|f| {
14147 matches!(
14148 f.type_info,
14149 crate::sema::symtab::TypeInfo::Character { .. }
14150 )
14151 })
14152 .unwrap_or(false)
14153 } else {
14154 false
14155 }
14156 } else {
14157 false
14158 }
14159 }
14160 _ => false,
14161 };
14162
14163 // Whole-array print: a plain Name reference whose local
14164 // has array dims. Iterate elements and call the per-element
14165 // write helper. Without this, the Ptr<_> the item lowers to
14166 // would fall into the IrType::Ptr arm below and dispatch to
14167 // afs_write_string with a bogus length.
14168 //
14169 // Also handles 1-D array slices `a(lo:hi)` and `a(lo:hi:step)`
14170 // by detecting a FunctionCall with a Range subscript on an
14171 // array name. Slices bypass the section-descriptor code
14172 // path (which crashes in afs_create_section for a bare
14173 // write item) and instead lower directly into a bounded
14174 // loop over the underlying base.
14175 if !is_char {
14176 if let Expr::Name { name } = &item.node {
14177 let key = name.to_lowercase();
14178 if let Some(info) = ctx.locals.get(&key).cloned() {
14179 if is_complex_ty(&info.ty) {
14180 // Complex variable: pass pointer to [f32/f64 x 2] buffer.
14181 // For a POINTER complex, the slot holds the
14182 // target buffer address — load it first.
14183 let addr = if info.is_pointer {
14184 b.load_typed(info.addr, IrType::Ptr(Box::new(info.ty.clone())))
14185 } else if info.by_ref {
14186 b.load(info.addr)
14187 } else {
14188 info.addr
14189 };
14190 let func = if matches!(info.ty, IrType::Array(ref e, 2)
14191 if matches!(e.as_ref(), IrType::Float(FloatWidth::F64)))
14192 {
14193 "afs_write_complex_f64"
14194 } else {
14195 "afs_write_complex_f32"
14196 };
14197 b.call(
14198 FuncRef::External(func.into()),
14199 vec![unit, addr],
14200 IrType::Void,
14201 );
14202 continue;
14203 }
14204 if local_is_array_like(&info) {
14205 lower_whole_array_write(b, ctx, &info, unit);
14206 continue;
14207 }
14208 }
14209 }
14210 // Complex literal in print position: detect ptr<[f32/f64 x 2]>
14211 if matches!(item.node, Expr::ComplexLiteral { .. }) {
14212 let addr = lower_expr_ctx_tl(b, ctx, item);
14213 // Default to f32 — if literal had f64 components lower_expr would
14214 // have allocated [f64 x 2]. Check the original node to be precise.
14215 let func = if let Expr::ComplexLiteral { real, imag } = &item.node {
14216 let is_double = |e: &crate::ast::expr::SpannedExpr| {
14217 if let Expr::RealLiteral { text, .. } = &e.node {
14218 text.to_lowercase().contains('d')
14219 } else {
14220 false
14221 }
14222 };
14223 if is_double(real) || is_double(imag) {
14224 "afs_write_complex_f64"
14225 } else {
14226 "afs_write_complex_f32"
14227 }
14228 } else {
14229 "afs_write_complex_f32"
14230 };
14231 b.call(
14232 FuncRef::External(func.into()),
14233 vec![unit, addr],
14234 IrType::Void,
14235 );
14236 continue;
14237 }
14238 if let Expr::FunctionCall { callee, args } = &item.node {
14239 if let Expr::Name { name } = &callee.node {
14240 let key = name.to_lowercase();
14241 if let Some(info) = ctx.locals.get(&key).cloned() {
14242 if local_is_array_like(&info) {
14243 let has_range = args.iter().any(|a| {
14244 matches!(a.value, crate::ast::expr::SectionSubscript::Range { .. })
14245 });
14246 if has_range {
14247 if args.len() == 1 {
14248 lower_1d_slice_write(b, ctx, &info, &args[0], unit);
14249 } else {
14250 // Audit CRITICAL-3: multi-dim
14251 // slice prints used to fall
14252 // through to afs_create_section
14253 // on a bare stack pointer and
14254 // crash. Now lowered as nested
14255 // column-major loops directly.
14256 lower_section_write_nd(b, ctx, &info, args, unit);
14257 }
14258 continue;
14259 }
14260 }
14261 }
14262 }
14263 }
14264 }
14265
14266 if is_char || matches!(item.node, Expr::StringLiteral { .. }) {
14267 // Special case: character field in derived type — get ptr and
14268 // length directly from the type layout since lower_string_expr
14269 // doesn't have access to type_layouts.
14270 if let Expr::ComponentAccess { base, component } = &item.node {
14271 if let Some((base_addr, type_name)) =
14272 resolve_component_base(b, &ctx.locals, base, ctx.st, ctx.type_layouts)
14273 {
14274 if let Some(layout) = ctx.type_layouts.get(&type_name) {
14275 if let Some(field) = layout.field(component) {
14276 if let crate::sema::symtab::TypeInfo::Character {
14277 len: Some(flen),
14278 ..
14279 } = &field.type_info
14280 {
14281 let offset = b.const_i64(field.offset as i64);
14282 let field_ptr =
14283 b.gep(base_addr, vec![offset], IrType::Int(IntWidth::I8));
14284 let len_val = b.const_i64(*flen);
14285 b.call(
14286 FuncRef::External("afs_write_string".into()),
14287 vec![unit, field_ptr, len_val],
14288 IrType::Void,
14289 );
14290 continue;
14291 }
14292 }
14293 }
14294 }
14295 }
14296 let (ptr, len) = lower_string_expr_with_layouts(
14297 b,
14298 &ctx.locals,
14299 item,
14300 ctx.st,
14301 Some(ctx.type_layouts),
14302 );
14303 b.call(
14304 FuncRef::External("afs_write_string".into()),
14305 vec![unit, ptr, len],
14306 IrType::Void,
14307 );
14308 } else {
14309 let val = lower_expr_ctx_tl(b, ctx, item);
14310 let ty = b
14311 .func()
14312 .value_type(val)
14313 .unwrap_or(IrType::Int(IntWidth::I32));
14314 let func_name = match &ty {
14315 IrType::Int(IntWidth::I128) => "afs_write_int128",
14316 IrType::Int(IntWidth::I64) => "afs_write_int64",
14317 IrType::Int(_) => "afs_write_int",
14318 IrType::Float(FloatWidth::F64) => "afs_write_real64",
14319 IrType::Float(_) => "afs_write_real",
14320 IrType::Bool => "afs_write_logical",
14321 IrType::Ptr(ref inner) => {
14322 // Complex expression result: ptr<[f32/f64 x 2]>
14323 if is_complex_ty(&ty) {
14324 let fw = complex_float_width(&ty);
14325 let func = if fw == FloatWidth::F64 {
14326 "afs_write_complex_f64"
14327 } else {
14328 "afs_write_complex_f32"
14329 };
14330 b.call(
14331 FuncRef::External(func.into()),
14332 vec![unit, val],
14333 IrType::Void,
14334 );
14335 continue;
14336 }
14337 // Other pointer — likely a string. Use write_string with literal length.
14338 let _ = inner; // suppress unused warning
14339 let len = string_literal_len(item);
14340 let len_val = b.const_i64(len);
14341 b.call(
14342 FuncRef::External("afs_write_string".into()),
14343 vec![unit, val, len_val],
14344 IrType::Void,
14345 );
14346 continue;
14347 }
14348 _ => "afs_write_int",
14349 };
14350 b.call(
14351 FuncRef::External(func_name.into()),
14352 vec![unit, val],
14353 IrType::Void,
14354 );
14355 }
14356 }
14357 if advance {
14358 b.call(
14359 FuncRef::External("afs_write_newline".into()),
14360 vec![unit],
14361 IrType::Void,
14362 );
14363 }
14364 }
14365
14366 fn internal_io_buffer(
14367 b: &mut FuncBuilder,
14368 ctx: &LowerCtx,
14369 control: &crate::ast::stmt::IoControl,
14370 ) -> Option<(ValueId, ValueId)> {
14371 if control
14372 .keyword
14373 .as_deref()
14374 .map(|k| !k.eq_ignore_ascii_case("unit"))
14375 .unwrap_or(false)
14376 {
14377 return None;
14378 }
14379
14380 match &control.value.node {
14381 Expr::Name { name } => {
14382 let info = ctx.locals.get(&name.to_lowercase())?;
14383 if info.char_kind == CharKind::None {
14384 return None;
14385 }
14386 Some(lower_string_expr_with_layouts(
14387 b,
14388 &ctx.locals,
14389 &control.value,
14390 ctx.st,
14391 Some(ctx.type_layouts),
14392 ))
14393 }
14394 _ => None,
14395 }
14396 }
14397
/// Lower WRITE items targeting an internal file (a character variable
/// used as the unit). `pos` is a stack slot holding the current write
/// cursor into the destination buffer; each `afs_write_internal_*`
/// runtime call is handed the slot so it can advance the cursor.
fn lower_internal_write_items(
    b: &mut FuncBuilder,
    ctx: &mut LowerCtx,
    items: &[crate::ast::expr::SpannedExpr],
    buf_ptr: ValueId,
    buf_len: ValueId,
) {
    // Cursor starts at offset 0 into the buffer.
    let zero = b.const_i64(0);
    let pos = b.alloca(IrType::Int(IntWidth::I64));
    b.store(zero, pos);

    for item in items {
        // Classify the item: character-valued items go through the
        // string path, everything else through the typed numeric path.
        let is_char = match &item.node {
            Expr::Name { name } => ctx
                .locals
                .get(&name.to_lowercase())
                .map(|i| i.char_kind != CharKind::None)
                .unwrap_or(false),
            Expr::FunctionCall { callee, args } => {
                if let Expr::Name { name } = &callee.node {
                    let key = name.to_lowercase();
                    // Known string-returning intrinsics, or a character
                    // local that is scalar / indexed with plain element
                    // subscripts only (i.e. a single character element).
                    matches!(
                        key.as_str(),
                        "trim"
                            | "adjustl"
                            | "adjustr"
                            | "char"
                            | "achar"
                            | "compiler_version"
                            | "compiler_options"
                    ) || ctx
                        .locals
                        .get(&key)
                        .map(|i| {
                            i.char_kind != CharKind::None
                                && (i.dims.is_empty()
                                    || args.iter().all(|a| {
                                        matches!(
                                            a.value,
                                            crate::ast::expr::SectionSubscript::Element(_)
                                        )
                                    }))
                        })
                        .unwrap_or(false)
                } else if let Expr::FunctionCall { callee: inner, .. } = &callee.node {
                    // Nested: arr(i)(lo:hi) — substring of char array element.
                    if let Expr::Name { name } = &inner.node {
                        let key = name.to_lowercase();
                        ctx.locals
                            .get(&key)
                            .map(|i| {
                                i.char_kind != CharKind::None
                                    && (!i.dims.is_empty() || i.allocatable)
                            })
                            .unwrap_or(false)
                            && args.iter().any(|a| {
                                matches!(a.value, crate::ast::expr::SectionSubscript::Range { .. })
                            })
                    } else {
                        false
                    }
                } else {
                    false
                }
            }
            // String concatenation always yields character data.
            Expr::BinaryOp {
                op: BinaryOp::Concat,
                ..
            } => true,
            _ => false,
        };

        if is_char || matches!(item.node, Expr::StringLiteral { .. }) {
            let (ptr, len) = lower_string_expr_with_layouts(
                b,
                &ctx.locals,
                item,
                ctx.st,
                Some(ctx.type_layouts),
            );
            b.call(
                FuncRef::External("afs_write_internal_string".into()),
                vec![buf_ptr, buf_len, ptr, len, pos],
                IrType::Void,
            );
            continue;
        }

        // Numeric path: evaluate the item and dispatch on its IR type.
        let val = lower_expr_ctx_tl(b, ctx, item);
        let ty = b
            .func()
            .value_type(val)
            .unwrap_or(IrType::Int(IntWidth::I32));
        match ty {
            IrType::Int(IntWidth::I128) => {
                b.call(
                    FuncRef::External("afs_write_internal_int128".into()),
                    vec![buf_ptr, buf_len, val, pos],
                    IrType::Void,
                );
            }
            IrType::Int(IntWidth::I64) => {
                b.call(
                    FuncRef::External("afs_write_internal_int64".into()),
                    vec![buf_ptr, buf_len, val, pos],
                    IrType::Void,
                );
            }
            IrType::Int(_) => {
                // Narrow ints (i8/i16) are sign-extended to i32 for the
                // runtime ABI; i32 values pass through unchanged.
                let i32_val = if matches!(ty, IrType::Int(IntWidth::I32)) {
                    val
                } else {
                    b.int_extend(val, IntWidth::I32, true)
                };
                b.call(
                    FuncRef::External("afs_write_internal_int".into()),
                    vec![buf_ptr, buf_len, i32_val, pos],
                    IrType::Void,
                );
            }
            IrType::Float(FloatWidth::F64) => {
                b.call(
                    FuncRef::External("afs_write_internal_real64".into()),
                    vec![buf_ptr, buf_len, val, pos],
                    IrType::Void,
                );
            }
            IrType::Float(_) => {
                // The runtime only takes f64; widen f32 explicitly.
                let widened = b.float_extend(val, FloatWidth::F64);
                b.call(
                    FuncRef::External("afs_write_internal_real64".into()),
                    vec![buf_ptr, buf_len, widened, pos],
                    IrType::Void,
                );
            }
            // NOTE(review): Bool, Ptr and any other types are silently
            // skipped — no internal-write helper call is emitted for
            // them. Confirm whether logicals in internal WRITEs are
            // expected to be supported here.
            _ => {}
        }
    }
}
14537
14538 fn lower_list_read_items(
14539 b: &mut FuncBuilder,
14540 ctx: &mut LowerCtx,
14541 items: &[crate::ast::expr::SpannedExpr],
14542 unit: ValueId,
14543 ) {
14544 let iostat = b.const_i64(0);
14545 let mode = ReadMode::Unit { unit, iostat };
14546
14547 for item in items {
14548 if lower_array_read_item(b, ctx, item, mode) {
14549 continue;
14550 }
14551 let Some((addr, ty)) = lower_read_target_addr(b, ctx, item) else {
14552 continue;
14553 };
14554 let _ = lower_read_into_addr(b, mode, &ty, addr);
14555 }
14556 }
14557
14558 fn lower_internal_read_items(
14559 b: &mut FuncBuilder,
14560 ctx: &mut LowerCtx,
14561 items: &[crate::ast::expr::SpannedExpr],
14562 buf_ptr: ValueId,
14563 buf_len: ValueId,
14564 ) {
14565 let zero = b.const_i64(0);
14566 let pos = b.alloca(IrType::Int(IntWidth::I64));
14567 b.store(zero, pos);
14568 let iostat = b.alloca(IrType::Int(IntWidth::I32));
14569 let mode = ReadMode::Internal {
14570 buf_ptr,
14571 buf_len,
14572 pos,
14573 iostat,
14574 };
14575
14576 for item in items {
14577 if lower_array_read_item(b, ctx, item, mode) {
14578 continue;
14579 }
14580 let Some((addr, ty)) = lower_read_target_addr(b, ctx, item) else {
14581 continue;
14582 };
14583 let _ = lower_read_into_addr(b, mode, &ty, addr);
14584 }
14585 }
14586
/// How a READ statement sources its data; threaded by value through the
/// per-item lowering helpers (`Copy` so one `mode` serves every item).
#[derive(Clone, Copy)]
enum ReadMode {
    /// List-directed read from an external unit.
    Unit {
        unit: ValueId,
        /// IOSTAT destination. The list-directed caller passes a
        /// constant 0 here (see `lower_list_read_items`).
        iostat: ValueId,
    },
    /// List-directed read from an internal character buffer.
    Internal {
        buf_ptr: ValueId,
        buf_len: ValueId,
        /// Stack slot holding the current scan cursor into the buffer.
        pos: ValueId,
        iostat: ValueId,
    },
    /// Formatted read from an external unit.
    FormattedUnit {
        unit: ValueId,
        fmt_ptr: ValueId,
        fmt_len: ValueId,
        /// Stack slot counting items consumed so far, so the runtime
        /// can match each value to its format edit descriptor.
        item_idx: ValueId,
        iostat: ValueId,
    },
    /// Formatted read from an internal character buffer.
    FormattedInternal {
        buf_ptr: ValueId,
        buf_len: ValueId,
        fmt_ptr: ValueId,
        fmt_len: ValueId,
        item_idx: ValueId,
        iostat: ValueId,
    },
}
14615
14616 fn lower_read_into_addr(b: &mut FuncBuilder, mode: ReadMode, ty: &IrType, addr: ValueId) -> bool {
14617 match ty {
14618 IrType::Int(IntWidth::I128) => {
14619 match mode {
14620 ReadMode::Unit { unit, iostat } => {
14621 b.call(
14622 FuncRef::External("afs_read_int128".into()),
14623 vec![unit, addr, iostat],
14624 IrType::Void,
14625 );
14626 }
14627 ReadMode::Internal {
14628 buf_ptr,
14629 buf_len,
14630 pos,
14631 iostat,
14632 } => {
14633 b.call(
14634 FuncRef::External("afs_read_internal_int128".into()),
14635 vec![buf_ptr, buf_len, pos, addr, iostat],
14636 IrType::Void,
14637 );
14638 }
14639 ReadMode::FormattedUnit {
14640 unit,
14641 fmt_ptr,
14642 fmt_len,
14643 item_idx,
14644 iostat,
14645 } => {
14646 let current_idx = b.load_typed(item_idx, IrType::Int(IntWidth::I64));
14647 b.call(
14648 FuncRef::External("afs_fmt_read_int128".into()),
14649 vec![unit, fmt_ptr, fmt_len, current_idx, addr, iostat],
14650 IrType::Void,
14651 );
14652 bump_formatted_read_index(b, item_idx);
14653 }
14654 ReadMode::FormattedInternal {
14655 buf_ptr,
14656 buf_len,
14657 fmt_ptr,
14658 fmt_len,
14659 item_idx,
14660 iostat,
14661 } => {
14662 let current_idx = b.load_typed(item_idx, IrType::Int(IntWidth::I64));
14663 b.call(
14664 FuncRef::External("afs_fmt_read_int128_internal".into()),
14665 vec![
14666 buf_ptr,
14667 buf_len,
14668 fmt_ptr,
14669 fmt_len,
14670 current_idx,
14671 addr,
14672 iostat,
14673 ],
14674 IrType::Void,
14675 );
14676 bump_formatted_read_index(b, item_idx);
14677 }
14678 }
14679 true
14680 }
14681 IrType::Int(IntWidth::I64) => {
14682 match mode {
14683 ReadMode::Unit { unit, iostat } => {
14684 b.call(
14685 FuncRef::External("afs_read_int64".into()),
14686 vec![unit, addr, iostat],
14687 IrType::Void,
14688 );
14689 }
14690 ReadMode::Internal {
14691 buf_ptr,
14692 buf_len,
14693 pos,
14694 iostat,
14695 } => {
14696 b.call(
14697 FuncRef::External("afs_read_internal_int64".into()),
14698 vec![buf_ptr, buf_len, pos, addr, iostat],
14699 IrType::Void,
14700 );
14701 }
14702 ReadMode::FormattedUnit {
14703 unit,
14704 fmt_ptr,
14705 fmt_len,
14706 item_idx,
14707 iostat,
14708 } => {
14709 let current_idx = b.load_typed(item_idx, IrType::Int(IntWidth::I64));
14710 b.call(
14711 FuncRef::External("afs_fmt_read_int64".into()),
14712 vec![unit, fmt_ptr, fmt_len, current_idx, addr, iostat],
14713 IrType::Void,
14714 );
14715 bump_formatted_read_index(b, item_idx);
14716 }
14717 ReadMode::FormattedInternal {
14718 buf_ptr,
14719 buf_len,
14720 fmt_ptr,
14721 fmt_len,
14722 item_idx,
14723 iostat,
14724 } => {
14725 let current_idx = b.load_typed(item_idx, IrType::Int(IntWidth::I64));
14726 b.call(
14727 FuncRef::External("afs_fmt_read_int64_internal".into()),
14728 vec![
14729 buf_ptr,
14730 buf_len,
14731 fmt_ptr,
14732 fmt_len,
14733 current_idx,
14734 addr,
14735 iostat,
14736 ],
14737 IrType::Void,
14738 );
14739 bump_formatted_read_index(b, item_idx);
14740 }
14741 }
14742 true
14743 }
14744 IrType::Int(_) => {
14745 match mode {
14746 ReadMode::Unit { unit, iostat } => {
14747 b.call(
14748 FuncRef::External("afs_read_int".into()),
14749 vec![unit, addr, iostat],
14750 IrType::Void,
14751 );
14752 }
14753 ReadMode::Internal {
14754 buf_ptr,
14755 buf_len,
14756 pos,
14757 iostat,
14758 } => {
14759 b.call(
14760 FuncRef::External("afs_read_internal_int".into()),
14761 vec![buf_ptr, buf_len, pos, addr, iostat],
14762 IrType::Void,
14763 );
14764 }
14765 ReadMode::FormattedUnit {
14766 unit,
14767 fmt_ptr,
14768 fmt_len,
14769 item_idx,
14770 iostat,
14771 } => {
14772 let current_idx = b.load_typed(item_idx, IrType::Int(IntWidth::I64));
14773 b.call(
14774 FuncRef::External("afs_fmt_read_int".into()),
14775 vec![unit, fmt_ptr, fmt_len, current_idx, addr, iostat],
14776 IrType::Void,
14777 );
14778 bump_formatted_read_index(b, item_idx);
14779 }
14780 ReadMode::FormattedInternal {
14781 buf_ptr,
14782 buf_len,
14783 fmt_ptr,
14784 fmt_len,
14785 item_idx,
14786 iostat,
14787 } => {
14788 let current_idx = b.load_typed(item_idx, IrType::Int(IntWidth::I64));
14789 b.call(
14790 FuncRef::External("afs_fmt_read_int_internal".into()),
14791 vec![
14792 buf_ptr,
14793 buf_len,
14794 fmt_ptr,
14795 fmt_len,
14796 current_idx,
14797 addr,
14798 iostat,
14799 ],
14800 IrType::Void,
14801 );
14802 bump_formatted_read_index(b, item_idx);
14803 }
14804 }
14805 true
14806 }
14807 IrType::Float(FloatWidth::F64) => {
14808 match mode {
14809 ReadMode::Unit { unit, iostat } => {
14810 b.call(
14811 FuncRef::External("afs_read_real64".into()),
14812 vec![unit, addr, iostat],
14813 IrType::Void,
14814 );
14815 }
14816 ReadMode::Internal {
14817 buf_ptr,
14818 buf_len,
14819 pos,
14820 iostat,
14821 } => {
14822 b.call(
14823 FuncRef::External("afs_read_internal_real".into()),
14824 vec![buf_ptr, buf_len, pos, addr, iostat],
14825 IrType::Void,
14826 );
14827 }
14828 ReadMode::FormattedUnit {
14829 unit,
14830 fmt_ptr,
14831 fmt_len,
14832 item_idx,
14833 iostat,
14834 } => {
14835 let current_idx = b.load_typed(item_idx, IrType::Int(IntWidth::I64));
14836 b.call(
14837 FuncRef::External("afs_fmt_read_real".into()),
14838 vec![unit, fmt_ptr, fmt_len, current_idx, addr, iostat],
14839 IrType::Void,
14840 );
14841 bump_formatted_read_index(b, item_idx);
14842 }
14843 ReadMode::FormattedInternal {
14844 buf_ptr,
14845 buf_len,
14846 fmt_ptr,
14847 fmt_len,
14848 item_idx,
14849 iostat,
14850 } => {
14851 let current_idx = b.load_typed(item_idx, IrType::Int(IntWidth::I64));
14852 b.call(
14853 FuncRef::External("afs_fmt_read_real_internal".into()),
14854 vec![
14855 buf_ptr,
14856 buf_len,
14857 fmt_ptr,
14858 fmt_len,
14859 current_idx,
14860 addr,
14861 iostat,
14862 ],
14863 IrType::Void,
14864 );
14865 bump_formatted_read_index(b, item_idx);
14866 }
14867 }
14868 true
14869 }
14870 IrType::Float(FloatWidth::F32) => {
14871 let tmp = b.alloca(IrType::Float(FloatWidth::F64));
14872 let handled = match mode {
14873 ReadMode::Unit { unit, iostat } => {
14874 b.call(
14875 FuncRef::External("afs_read_real".into()),
14876 vec![unit, tmp, iostat],
14877 IrType::Void,
14878 );
14879 true
14880 }
14881 ReadMode::Internal {
14882 buf_ptr,
14883 buf_len,
14884 pos,
14885 iostat,
14886 } => {
14887 b.call(
14888 FuncRef::External("afs_read_internal_real".into()),
14889 vec![buf_ptr, buf_len, pos, tmp, iostat],
14890 IrType::Void,
14891 );
14892 true
14893 }
14894 ReadMode::FormattedUnit {
14895 unit,
14896 fmt_ptr,
14897 fmt_len,
14898 item_idx,
14899 iostat,
14900 } => {
14901 let current_idx = b.load_typed(item_idx, IrType::Int(IntWidth::I64));
14902 b.call(
14903 FuncRef::External("afs_fmt_read_real".into()),
14904 vec![unit, fmt_ptr, fmt_len, current_idx, tmp, iostat],
14905 IrType::Void,
14906 );
14907 bump_formatted_read_index(b, item_idx);
14908 true
14909 }
14910 ReadMode::FormattedInternal {
14911 buf_ptr,
14912 buf_len,
14913 fmt_ptr,
14914 fmt_len,
14915 item_idx,
14916 iostat,
14917 } => {
14918 let current_idx = b.load_typed(item_idx, IrType::Int(IntWidth::I64));
14919 b.call(
14920 FuncRef::External("afs_fmt_read_real_internal".into()),
14921 vec![buf_ptr, buf_len, fmt_ptr, fmt_len, current_idx, tmp, iostat],
14922 IrType::Void,
14923 );
14924 bump_formatted_read_index(b, item_idx);
14925 true
14926 }
14927 };
14928 let wide = b.load_typed(tmp, IrType::Float(FloatWidth::F64));
14929 let narrow = b.float_trunc(wide, FloatWidth::F32);
14930 b.store(narrow, addr);
14931 handled
14932 }
14933 _ => false,
14934 }
14935 }
14936
14937 fn bump_formatted_read_index(b: &mut FuncBuilder, item_idx: ValueId) {
14938 let current_idx = b.load_typed(item_idx, IrType::Int(IntWidth::I64));
14939 let one = b.const_i64(1);
14940 let next_idx = b.iadd(current_idx, one);
14941 b.store(next_idx, item_idx);
14942 }
14943
14944 fn lower_array_read_item(
14945 b: &mut FuncBuilder,
14946 ctx: &mut LowerCtx,
14947 item: &crate::ast::expr::SpannedExpr,
14948 mode: ReadMode,
14949 ) -> bool {
14950 match &item.node {
14951 Expr::Name { name } => {
14952 let key = name.to_lowercase();
14953 let Some(info) = ctx.locals.get(&key).cloned() else {
14954 return false;
14955 };
14956 if info.dims.is_empty() && !info.allocatable {
14957 return false;
14958 }
14959 lower_whole_array_read(b, &info, mode);
14960 true
14961 }
14962 Expr::FunctionCall { callee, args } => {
14963 let Expr::Name { name } = &callee.node else {
14964 return false;
14965 };
14966 let key = name.to_lowercase();
14967 let Some(info) = ctx.locals.get(&key).cloned() else {
14968 return false;
14969 };
14970 if info.dims.is_empty() && !info.allocatable {
14971 return false;
14972 }
14973 let has_range = args
14974 .iter()
14975 .any(|arg| matches!(arg.value, crate::ast::expr::SectionSubscript::Range { .. }));
14976 if has_range && info.allocatable {
14977 lower_alloc_section_read(b, ctx, &info, args, mode);
14978 true
14979 } else if has_range && args.len() == 1 {
14980 lower_1d_slice_read(b, ctx, &info, &args[0], mode);
14981 true
14982 } else if has_range && args.len() > 1 && !info.allocatable {
14983 lower_section_read_nd(b, ctx, &info, args, mode);
14984 true
14985 } else {
14986 false
14987 }
14988 }
14989 _ => false,
14990 }
14991 }
14992
14993 fn lower_read_target_addr(
14994 b: &mut FuncBuilder,
14995 ctx: &LowerCtx,
14996 item: &crate::ast::expr::SpannedExpr,
14997 ) -> Option<(ValueId, IrType)> {
14998 match &item.node {
14999 Expr::Name { name } => {
15000 let key = name.to_lowercase();
15001 let info = ctx.locals.get(&key)?;
15002 if local_is_array_like(info) {
15003 return None;
15004 }
15005 let addr = if info.by_ref {
15006 b.load(info.addr)
15007 } else {
15008 info.addr
15009 };
15010 Some((addr, info.ty.clone()))
15011 }
15012 Expr::FunctionCall { callee, args } => {
15013 let Expr::Name { name } = &callee.node else {
15014 return None;
15015 };
15016 let key = name.to_lowercase();
15017 let info = ctx.locals.get(&key)?;
15018 if info.dims.is_empty() && !info.allocatable {
15019 return None;
15020 }
15021 if args
15022 .iter()
15023 .any(|arg| !matches!(arg.value, crate::ast::expr::SectionSubscript::Element(_)))
15024 {
15025 return None;
15026 }
15027 let idx64 = compute_flat_elem_offset(b, &ctx.locals, info, args, ctx.st);
15028 let base = array_base_addr(b, info);
15029 let elem_ptr = b.gep(base, vec![idx64], info.ty.clone());
15030 Some((elem_ptr, info.ty.clone()))
15031 }
15032 Expr::ComponentAccess { base, component } => {
15033 let (base_addr, type_name) =
15034 resolve_component_base(b, &ctx.locals, base, ctx.st, ctx.type_layouts)?;
15035 let layout = ctx.type_layouts.get(&type_name)?;
15036 let field = layout.field(component)?;
15037 if matches!(&field.type_info, crate::sema::symtab::TypeInfo::Derived(_)) {
15038 return None;
15039 }
15040 let offset = b.const_i64(field.offset as i64);
15041 let field_ptr = b.gep(base_addr, vec![offset], IrType::Int(IntWidth::I8));
15042 Some((field_ptr, type_info_to_ir_type(&field.type_info)))
15043 }
15044 _ => None,
15045 }
15046 }
15047
15048 fn lower_formatted_internal_read_items(
15049 b: &mut FuncBuilder,
15050 ctx: &mut LowerCtx,
15051 items: &[crate::ast::expr::SpannedExpr],
15052 buf_ptr: ValueId,
15053 buf_len: ValueId,
15054 fmt_ptr: ValueId,
15055 fmt_len: ValueId,
15056 ) {
15057 let item_idx = b.alloca(IrType::Int(IntWidth::I64));
15058 let iostat = b.alloca(IrType::Int(IntWidth::I32));
15059 let zero = b.const_i64(0);
15060 b.store(zero, item_idx);
15061 let mode = ReadMode::FormattedInternal {
15062 buf_ptr,
15063 buf_len,
15064 fmt_ptr,
15065 fmt_len,
15066 item_idx,
15067 iostat,
15068 };
15069
15070 for item in items {
15071 if lower_array_read_item(b, ctx, item, mode) {
15072 continue;
15073 }
15074 let Some((addr, ty)) = lower_read_target_addr(b, ctx, item) else {
15075 continue;
15076 };
15077 let _ = lower_read_into_addr(b, mode, &ty, addr);
15078 }
15079 }
15080
15081 fn lower_formatted_read_items(
15082 b: &mut FuncBuilder,
15083 ctx: &mut LowerCtx,
15084 items: &[crate::ast::expr::SpannedExpr],
15085 unit: ValueId,
15086 fmt_ptr: ValueId,
15087 fmt_len: ValueId,
15088 ) {
15089 let item_idx = b.alloca(IrType::Int(IntWidth::I64));
15090 let iostat = b.alloca(IrType::Int(IntWidth::I32));
15091 let zero = b.const_i64(0);
15092 b.store(zero, item_idx);
15093 let mode = ReadMode::FormattedUnit {
15094 unit,
15095 fmt_ptr,
15096 fmt_len,
15097 item_idx,
15098 iostat,
15099 };
15100
15101 for item in items {
15102 if lower_array_read_item(b, ctx, item, mode) {
15103 continue;
15104 }
15105 let Some((addr, ty)) = lower_read_target_addr(b, ctx, item) else {
15106 continue;
15107 };
15108 let _ = lower_read_into_addr(b, mode, &ty, addr);
15109 }
15110 }
15111
/// Push a single I/O item value for formatted output via afs_fmt_push_*.
///
/// Character-valued items are pushed as (ptr, len) pairs; all other
/// items are evaluated and dispatched on their IR value type.
fn lower_fmt_push(b: &mut FuncBuilder, ctx: &mut LowerCtx, item: &crate::ast::expr::SpannedExpr) {
    // Character detection here is narrower than the list-directed
    // write path: only plain names and known string intrinsics match.
    let is_char = match &item.node {
        Expr::Name { name } => ctx
            .locals
            .get(&name.to_lowercase())
            .map(|i| i.char_kind != CharKind::None)
            .unwrap_or(false),
        Expr::FunctionCall { callee, .. } => {
            if let Expr::Name { name } = &callee.node {
                matches!(
                    name.to_lowercase().as_str(),
                    "trim"
                        | "adjustl"
                        | "adjustr"
                        | "char"
                        | "achar"
                        | "compiler_version"
                        | "compiler_options"
                )
            } else {
                false
            }
        }
        _ => false,
    };

    if is_char || matches!(item.node, Expr::StringLiteral { .. }) {
        let (ptr, len) =
            lower_string_expr_with_layouts(b, &ctx.locals, item, ctx.st, Some(ctx.type_layouts));
        b.call(
            FuncRef::External("afs_fmt_push_string".into()),
            vec![ptr, len],
            IrType::Void,
        );
    } else {
        let val = lower_expr_ctx(b, ctx, item);
        let ty = b
            .func()
            .value_type(val)
            .unwrap_or(IrType::Int(IntWidth::I32));
        match &ty {
            IrType::Int(IntWidth::I128) => {
                // i128 is passed by address: spill to a slot first.
                let slot = b.alloca(IrType::Int(IntWidth::I128));
                b.store(val, slot);
                b.call(
                    FuncRef::External("afs_fmt_push_int128".into()),
                    vec![slot],
                    IrType::Void,
                );
            }
            IrType::Int(IntWidth::I64) => {
                b.call(
                    FuncRef::External("afs_fmt_push_int".into()),
                    vec![val],
                    IrType::Void,
                );
            }
            IrType::Int(_) => {
                // Widen i32 to i64 for the push API.
                let widened = b.int_extend(val, IntWidth::I64, true);
                b.call(
                    FuncRef::External("afs_fmt_push_int".into()),
                    vec![widened],
                    IrType::Void,
                );
            }
            IrType::Float(FloatWidth::F32) => {
                // afs_fmt_push_real takes f64; explicitly widen f32 → f64.
                // AArch64 does NOT auto-promote floats across the call boundary.
                let widened = b.float_extend(val, FloatWidth::F64);
                b.call(
                    FuncRef::External("afs_fmt_push_real".into()),
                    vec![widened],
                    IrType::Void,
                );
            }
            IrType::Float(_) => {
                b.call(
                    FuncRef::External("afs_fmt_push_real".into()),
                    vec![val],
                    IrType::Void,
                );
            }
            IrType::Bool => {
                // Logical is zero-extended to i32 for the runtime ABI.
                let int_val = b.int_extend(val, IntWidth::I32, false);
                b.call(
                    FuncRef::External("afs_fmt_push_logical".into()),
                    vec![int_val],
                    IrType::Void,
                );
            }
            IrType::Ptr(_) => {
                // Pointer type — likely a string.
                let len = string_literal_len(item);
                let len_val = b.const_i64(len);
                b.call(
                    FuncRef::External("afs_fmt_push_string".into()),
                    vec![val, len_val],
                    IrType::Void,
                );
            }
            _ => {
                // Fallback: treat anything unrecognized as an integer,
                // sign-extended to i64.
                let widened = b.int_extend(val, IntWidth::I64, true);
                b.call(
                    FuncRef::External("afs_fmt_push_int".into()),
                    vec![widened],
                    IrType::Void,
                );
            }
        }
    }
}
15225
/// Lower a 1-D slice write item: `print *, a(lo:hi[:step])`.
/// Iterates the declared range and calls the per-element write
/// helper. Sections with a rank > 1 and non-lower-dim subscripts
/// are not yet supported and fall through to the existing
/// section-descriptor path, which may not format nicely.
///
/// Missing bounds default to the array's declared extents:
///   `a(:)`   → full range
///   `a(lo:)` → lo to end
///   `a(:hi)` → start to hi
fn lower_1d_slice_write(
    b: &mut FuncBuilder,
    ctx: &mut LowerCtx,
    info: &LocalInfo,
    arg: &crate::ast::expr::Argument,
    unit: ValueId,
) {
    // Only range subscripts reach here; a plain element is not a slice.
    let (start_e, end_e, stride_e) = match &arg.value {
        crate::ast::expr::SectionSubscript::Range { start, end, stride } => {
            (start.as_ref(), end.as_ref(), stride.as_ref())
        }
        _ => return,
    };

    // Default to the declared bounds of dimension 0.
    // (1, 0) fallback yields decl_hi < decl_lo, i.e. an empty loop.
    let (decl_lo, decl_ext) = info.dims.first().copied().unwrap_or((1, 0));
    let decl_hi = decl_lo + decl_ext - 1;

    let start_val = match start_e {
        Some(e) => lower_expr_ctx(b, ctx, e),
        None => b.const_i32(decl_lo as i32),
    };
    let end_val = match end_e {
        Some(e) => lower_expr_ctx(b, ctx, e),
        None => b.const_i32(decl_hi as i32),
    };
    let stride_val = match stride_e {
        Some(e) => lower_expr_ctx(b, ctx, e),
        None => b.const_i32(1),
    };

    let base = array_base_addr(b, info);
    let elem_bytes = ir_scalar_byte_size(&info.ty);
    // Per-element runtime writer selected by the element type.
    let writer = match &info.ty {
        IrType::Int(IntWidth::I128) => "afs_write_int128",
        IrType::Int(IntWidth::I64) => "afs_write_int64",
        IrType::Int(_) => "afs_write_int",
        IrType::Float(FloatWidth::F64) => "afs_write_real64",
        IrType::Float(_) => "afs_write_real",
        IrType::Bool => "afs_write_logical",
        _ => "afs_write_int",
    };

    // `i` counter, starts at the slice's first index.
    let i_addr = b.alloca(IrType::Int(IntWidth::I32));
    b.store(start_val, i_addr);

    let bb_check = b.create_block("slice_write_check");
    let bb_body = b.create_block("slice_write_body");
    let bb_exit = b.create_block("slice_write_exit");
    b.branch(bb_check, vec![]);

    // Sign-of-stride handling, mirroring the regular DO lowerer.
    // For ascending stride, exit when `i > end`; for descending,
    // exit when `i < end`. Audit BLOCKING-2: the previous version
    // hardcoded `Gt`, so `print *, a(5:1:-1)` exited on the very
    // first iteration with no elements written.
    b.set_block(bb_check);
    let i = b.load(i_addr);
    let const_stride = stride_e.and_then(eval_const_int);
    if let Some(sv) = const_stride {
        // Stride known at compile time: pick the exit comparison now.
        let done_op = if sv < 0 { CmpOp::Lt } else { CmpOp::Gt };
        let done = b.icmp(done_op, i, end_val);
        b.cond_branch(done, bb_exit, vec![], bb_body, vec![]);
    } else {
        // Runtime stride: branch on sign at the check site.
        let zero = b.const_i32(0);
        let stride_neg = b.icmp(CmpOp::Lt, stride_val, zero);
        let bb_neg = b.create_block("slice_write_neg_check");
        let bb_pos = b.create_block("slice_write_pos_check");
        b.cond_branch(stride_neg, bb_neg, vec![], bb_pos, vec![]);

        b.set_block(bb_neg);
        let done_neg = b.icmp(CmpOp::Lt, i, end_val);
        b.cond_branch(done_neg, bb_exit, vec![], bb_body, vec![]);

        b.set_block(bb_pos);
        let done_pos = b.icmp(CmpOp::Gt, i, end_val);
        b.cond_branch(done_pos, bb_exit, vec![], bb_body, vec![]);
    }

    b.set_block(bb_body);
    let i_val = b.load(i_addr);
    // Translate declared-index `i` → byte offset into base:
    // (i - decl_lo) * elem_bytes
    let lo_const = b.const_i32(decl_lo as i32);
    let zero_based = b.isub(i_val, lo_const);
    let zero_based64 = widen_idx_to_i64(b, zero_based);
    let step = b.const_i64(elem_bytes);
    let byte_off = b.imul(zero_based64, step);
    let p = b.gep(base, vec![byte_off], IrType::Int(IntWidth::I8));
    let elem = b.load_typed(p, info.ty.clone());
    b.call(
        FuncRef::External(writer.into()),
        vec![unit, elem],
        IrType::Void,
    );
    // Advance by the (possibly negative) stride and loop back.
    let next = b.iadd(i_val, stride_val);
    b.store(next, i_addr);
    b.branch(bb_check, vec![]);

    b.set_block(bb_exit);
}
15339
/// Lower a 1-D slice READ target: `read(...) a(lo:hi[:step])`.
/// Mirrors `lower_1d_slice_write`: iterates the slice range and issues
/// one per-element read (via `lower_read_into_addr`) for each index.
/// Missing bounds default to the declared extents of dimension 0, and
/// negative strides get the inverted exit comparison, same as the
/// write path.
fn lower_1d_slice_read(
    b: &mut FuncBuilder,
    ctx: &mut LowerCtx,
    info: &LocalInfo,
    arg: &crate::ast::expr::Argument,
    mode: ReadMode,
) {
    // Only range subscripts reach here; a plain element is not a slice.
    let (start_e, end_e, stride_e) = match &arg.value {
        crate::ast::expr::SectionSubscript::Range { start, end, stride } => {
            (start.as_ref(), end.as_ref(), stride.as_ref())
        }
        _ => return,
    };

    // Declared bounds of dimension 0; (1, 0) fallback → empty loop.
    let (decl_lo, decl_ext) = info.dims.first().copied().unwrap_or((1, 0));
    let decl_hi = decl_lo + decl_ext - 1;

    let start_val = match start_e {
        Some(e) => lower_expr_ctx(b, ctx, e),
        None => b.const_i32(decl_lo as i32),
    };
    let end_val = match end_e {
        Some(e) => lower_expr_ctx(b, ctx, e),
        None => b.const_i32(decl_hi as i32),
    };
    let stride_val = match stride_e {
        Some(e) => lower_expr_ctx(b, ctx, e),
        None => b.const_i32(1),
    };

    let base = array_base_addr(b, info);
    let elem_bytes = ir_scalar_byte_size(&info.ty);

    // Loop counter, starting at the slice's first index.
    let i_addr = b.alloca(IrType::Int(IntWidth::I32));
    b.store(start_val, i_addr);

    let bb_check = b.create_block("slice_read_check");
    let bb_body = b.create_block("slice_read_body");
    let bb_exit = b.create_block("slice_read_exit");
    b.branch(bb_check, vec![]);

    b.set_block(bb_check);
    let i = b.load(i_addr);
    let const_stride = stride_e.and_then(eval_const_int);
    if let Some(sv) = const_stride {
        // Compile-time stride: pick the exit comparison directly.
        let done_op = if sv < 0 { CmpOp::Lt } else { CmpOp::Gt };
        let done = b.icmp(done_op, i, end_val);
        b.cond_branch(done, bb_exit, vec![], bb_body, vec![]);
    } else {
        // Runtime stride: branch on its sign at the check site.
        let zero = b.const_i32(0);
        let stride_neg = b.icmp(CmpOp::Lt, stride_val, zero);
        let bb_neg = b.create_block("slice_read_neg_check");
        let bb_pos = b.create_block("slice_read_pos_check");
        b.cond_branch(stride_neg, bb_neg, vec![], bb_pos, vec![]);

        b.set_block(bb_neg);
        let done_neg = b.icmp(CmpOp::Lt, i, end_val);
        b.cond_branch(done_neg, bb_exit, vec![], bb_body, vec![]);

        b.set_block(bb_pos);
        let done_pos = b.icmp(CmpOp::Gt, i, end_val);
        b.cond_branch(done_pos, bb_exit, vec![], bb_body, vec![]);
    }

    b.set_block(bb_body);
    let i_val = b.load(i_addr);
    // Declared-index `i` → byte offset: (i - decl_lo) * elem_bytes.
    let lo_const = b.const_i32(decl_lo as i32);
    let zero_based = b.isub(i_val, lo_const);
    let zero_based64 = widen_idx_to_i64(b, zero_based);
    let step = b.const_i64(elem_bytes);
    let byte_off = b.imul(zero_based64, step);
    let p = b.gep(base, vec![byte_off], IrType::Int(IntWidth::I8));
    let _ = lower_read_into_addr(b, mode, &info.ty, p);
    let next = b.iadd(i_val, stride_val);
    b.store(next, i_addr);
    b.branch(bb_check, vec![]);

    b.set_block(bb_exit);
}
15419
15420 fn lower_section_read_nd(
15421 b: &mut FuncBuilder,
15422 ctx: &mut LowerCtx,
15423 info: &LocalInfo,
15424 args: &[crate::ast::expr::Argument],
15425 mode: ReadMode,
15426 ) {
15427 use crate::ast::expr::SectionSubscript;
15428
15429 let base = array_base_addr(b, info);
15430 let elem_bytes = ir_scalar_byte_size(&info.ty);
15431
15432 struct DimSlice {
15433 counter: ValueId,
15434 start_val: ValueId,
15435 end_val: ValueId,
15436 stride_val: ValueId,
15437 const_stride: Option<i64>,
15438 decl_lo: i64,
15439 cum_stride: i64,
15440 }
15441
15442 let mut dims: Vec<DimSlice> = Vec::with_capacity(args.len());
15443 let mut cum_stride: i64 = 1;
15444 for (dim_idx, arg) in args.iter().enumerate() {
15445 let (decl_lo, decl_ext) = info.dims.get(dim_idx).copied().unwrap_or((1, 0));
15446 let decl_hi = decl_lo + decl_ext - 1;
15447
15448 let counter = b.alloca(IrType::Int(IntWidth::I32));
15449 let (start_val, end_val, stride_val, const_stride) = match &arg.value {
15450 SectionSubscript::Range { start, end, stride } => {
15451 let start_v = match start {
15452 Some(e) => lower_expr_ctx(b, ctx, e),
15453 None => b.const_i32(decl_lo as i32),
15454 };
15455 let end_v = match end {
15456 Some(e) => lower_expr_ctx(b, ctx, e),
15457 None => b.const_i32(decl_hi as i32),
15458 };
15459 let stride_v = match stride {
15460 Some(e) => lower_expr_ctx(b, ctx, e),
15461 None => b.const_i32(1),
15462 };
15463 let cs = stride.as_ref().and_then(eval_const_int);
15464 (start_v, end_v, stride_v, cs)
15465 }
15466 SectionSubscript::Element(e) => {
15467 let v = lower_expr_ctx(b, ctx, e);
15468 (v, v, b.const_i32(1), Some(1))
15469 }
15470 };
15471 b.store(start_val, counter);
15472 dims.push(DimSlice {
15473 counter,
15474 start_val,
15475 end_val,
15476 stride_val,
15477 const_stride,
15478 decl_lo,
15479 cum_stride,
15480 });
15481 cum_stride *= decl_ext.max(1);
15482 }
15483
15484 let n = dims.len();
15485 let mut checks: Vec<BlockId> = Vec::with_capacity(n);
15486 let mut bodies: Vec<BlockId> = Vec::with_capacity(n);
15487 let mut incrs: Vec<BlockId> = Vec::with_capacity(n);
15488 let mut exits: Vec<BlockId> = Vec::with_capacity(n);
15489 for d in 0..n {
15490 checks.push(b.create_block(&format!("read_sec_check_d{}", d)));
15491 bodies.push(b.create_block(&format!("read_sec_body_d{}", d)));
15492 incrs.push(b.create_block(&format!("read_sec_incr_d{}", d)));
15493 exits.push(b.create_block(&format!("read_sec_exit_d{}", d)));
15494 }
15495
15496 let outer = n - 1;
15497 b.branch(checks[outer], vec![]);
15498
15499 for d_rev in 0..n {
15500 let d = n - 1 - d_rev;
15501
15502 b.set_block(checks[d]);
15503 let cur = b.load(dims[d].counter);
15504 if let Some(sv) = dims[d].const_stride {
15505 let done_op = if sv < 0 { CmpOp::Lt } else { CmpOp::Gt };
15506 let done = b.icmp(done_op, cur, dims[d].end_val);
15507 b.cond_branch(done, exits[d], vec![], bodies[d], vec![]);
15508 } else {
15509 let zero = b.const_i32(0);
15510 let stride_neg = b.icmp(CmpOp::Lt, dims[d].stride_val, zero);
15511 let bb_neg = b.create_block(&format!("read_sec_neg_d{}", d));
15512 let bb_pos = b.create_block(&format!("read_sec_pos_d{}", d));
15513 b.cond_branch(stride_neg, bb_neg, vec![], bb_pos, vec![]);
15514
15515 b.set_block(bb_neg);
15516 let done_neg = b.icmp(CmpOp::Lt, cur, dims[d].end_val);
15517 b.cond_branch(done_neg, exits[d], vec![], bodies[d], vec![]);
15518
15519 b.set_block(bb_pos);
15520 let done_pos = b.icmp(CmpOp::Gt, cur, dims[d].end_val);
15521 b.cond_branch(done_pos, exits[d], vec![], bodies[d], vec![]);
15522 }
15523
15524 b.set_block(bodies[d]);
15525 if d == 0 {
15526 let mut byte_offset: Option<ValueId> = None;
15527 let dim_data: Vec<(ValueId, i64, i64)> = dims
15528 .iter()
15529 .map(|dim| (dim.counter, dim.decl_lo, dim.cum_stride))
15530 .collect();
15531 for (counter, decl_lo, cum_stride_d) in dim_data {
15532 let cnt = b.load(counter);
15533 let lo_const = b.const_i32(decl_lo as i32);
15534 let zero_based = b.isub(cnt, lo_const);
15535 let zero_based64 = widen_idx_to_i64(b, zero_based);
15536 let stride_const = b.const_i64(cum_stride_d * elem_bytes);
15537 let term = b.imul(zero_based64, stride_const);
15538 byte_offset = Some(match byte_offset {
15539 Some(prev) => b.iadd(prev, term),
15540 None => term,
15541 });
15542 }
15543 let off = byte_offset.unwrap_or_else(|| b.const_i64(0));
15544 let ptr = b.gep(base, vec![off], IrType::Int(IntWidth::I8));
15545 let _ = lower_read_into_addr(b, mode, &info.ty, ptr);
15546 b.branch(incrs[0], vec![]);
15547 } else {
15548 b.store(dims[d - 1].start_val, dims[d - 1].counter);
15549 b.branch(checks[d - 1], vec![]);
15550 }
15551
15552 b.set_block(incrs[d]);
15553 let cur2 = b.load(dims[d].counter);
15554 let next = b.iadd(cur2, dims[d].stride_val);
15555 b.store(next, dims[d].counter);
15556 b.branch(checks[d], vec![]);
15557
15558 b.set_block(exits[d]);
15559 if d < n - 1 {
15560 b.branch(incrs[d + 1], vec![]);
15561 }
15562 }
15563
15564 b.set_block(exits[outer]);
15565 }
15566
15567 fn load_array_desc_i64_field(b: &mut FuncBuilder, desc: ValueId, offset: i64) -> ValueId {
15568 let off = b.const_i64(offset);
15569 let ptr = b.gep(desc, vec![off], IrType::Int(IntWidth::I8));
15570 b.load_typed(ptr, IrType::Int(IntWidth::I64))
15571 }
15572
15573 fn lower_alloc_section_read(
15574 b: &mut FuncBuilder,
15575 ctx: &mut LowerCtx,
15576 info: &LocalInfo,
15577 args: &[crate::ast::expr::Argument],
15578 mode: ReadMode,
15579 ) {
15580 use crate::ast::expr::SectionSubscript;
15581
15582 struct DimSlice {
15583 counter: ValueId,
15584 start_val: ValueId,
15585 end_val: ValueId,
15586 stride_val: ValueId,
15587 const_stride: Option<i64>,
15588 lower_bound: ValueId,
15589 mem_stride: ValueId,
15590 cum_extent: ValueId,
15591 }
15592
15593 let elem_bytes = ir_scalar_byte_size(&info.ty);
15594 let base = array_base_addr(b, info);
15595 let one64 = b.const_i64(1);
15596 let zero64 = b.const_i64(0);
15597
15598 let mut dims: Vec<DimSlice> = Vec::with_capacity(args.len());
15599 let mut cum_extent = one64;
15600 for (dim_idx, arg) in args.iter().enumerate() {
15601 let dim_base = 24 + (dim_idx as i64) * 24;
15602 let lo = load_array_desc_i64_field(b, info.addr, dim_base);
15603 let up = load_array_desc_i64_field(b, info.addr, dim_base + 8);
15604 let mem_stride = load_array_desc_i64_field(b, info.addr, dim_base + 16);
15605 let span = b.isub(up, lo);
15606 let extent_raw = b.iadd(span, one64);
15607 let is_empty = b.icmp(CmpOp::Lt, up, lo);
15608 let extent = b.select(is_empty, zero64, extent_raw);
15609 let (start_val, end_val, stride_val, const_stride) = match &arg.value {
15610 SectionSubscript::Range { start, end, stride } => {
15611 let start_v = match start {
15612 Some(e) => {
15613 let raw = lower_expr_ctx(b, ctx, e);
15614 widen_idx_to_i64(b, raw)
15615 }
15616 None => lo,
15617 };
15618 let end_v = match end {
15619 Some(e) => {
15620 let raw = lower_expr_ctx(b, ctx, e);
15621 widen_idx_to_i64(b, raw)
15622 }
15623 None => up,
15624 };
15625 let stride_v = match stride {
15626 Some(e) => {
15627 let raw = lower_expr_ctx(b, ctx, e);
15628 widen_idx_to_i64(b, raw)
15629 }
15630 None => one64,
15631 };
15632 let cs = stride.as_ref().and_then(eval_const_int);
15633 (start_v, end_v, stride_v, cs)
15634 }
15635 SectionSubscript::Element(e) => {
15636 let raw = lower_expr_ctx(b, ctx, e);
15637 let val = widen_idx_to_i64(b, raw);
15638 (val, val, one64, Some(1))
15639 }
15640 };
15641 let counter = b.alloca(IrType::Int(IntWidth::I64));
15642 b.store(start_val, counter);
15643 dims.push(DimSlice {
15644 counter,
15645 start_val,
15646 end_val,
15647 stride_val,
15648 const_stride,
15649 lower_bound: lo,
15650 mem_stride,
15651 cum_extent,
15652 });
15653 cum_extent = b.imul(cum_extent, extent);
15654 }
15655
15656 let n = dims.len();
15657 let mut checks: Vec<BlockId> = Vec::with_capacity(n);
15658 let mut bodies: Vec<BlockId> = Vec::with_capacity(n);
15659 let mut incrs: Vec<BlockId> = Vec::with_capacity(n);
15660 let mut exits: Vec<BlockId> = Vec::with_capacity(n);
15661 for d in 0..n {
15662 checks.push(b.create_block(&format!("read_desc_check_d{}", d)));
15663 bodies.push(b.create_block(&format!("read_desc_body_d{}", d)));
15664 incrs.push(b.create_block(&format!("read_desc_incr_d{}", d)));
15665 exits.push(b.create_block(&format!("read_desc_exit_d{}", d)));
15666 }
15667
15668 let outer = n - 1;
15669 b.branch(checks[outer], vec![]);
15670
15671 for d_rev in 0..n {
15672 let d = n - 1 - d_rev;
15673
15674 b.set_block(checks[d]);
15675 let cur = b.load(dims[d].counter);
15676 if let Some(sv) = dims[d].const_stride {
15677 let done_op = if sv < 0 { CmpOp::Lt } else { CmpOp::Gt };
15678 let done = b.icmp(done_op, cur, dims[d].end_val);
15679 b.cond_branch(done, exits[d], vec![], bodies[d], vec![]);
15680 } else {
15681 let stride_neg = b.icmp(CmpOp::Lt, dims[d].stride_val, zero64);
15682 let bb_neg = b.create_block(&format!("read_alloc_neg_d{}", d));
15683 let bb_pos = b.create_block(&format!("read_alloc_pos_d{}", d));
15684 b.cond_branch(stride_neg, bb_neg, vec![], bb_pos, vec![]);
15685
15686 b.set_block(bb_neg);
15687 let done_neg = b.icmp(CmpOp::Lt, cur, dims[d].end_val);
15688 b.cond_branch(done_neg, exits[d], vec![], bodies[d], vec![]);
15689
15690 b.set_block(bb_pos);
15691 let done_pos = b.icmp(CmpOp::Gt, cur, dims[d].end_val);
15692 b.cond_branch(done_pos, exits[d], vec![], bodies[d], vec![]);
15693 }
15694
15695 b.set_block(bodies[d]);
15696 if d == 0 {
15697 let dim_data: Vec<(ValueId, ValueId, ValueId, ValueId)> = dims
15698 .iter()
15699 .map(|dim| (dim.counter, dim.lower_bound, dim.mem_stride, dim.cum_extent))
15700 .collect();
15701 let mut elem_offset: Option<ValueId> = None;
15702 for (counter, lower_bound, mem_stride, cum_extent_d) in dim_data {
15703 let cnt = b.load(counter);
15704 let adjusted = b.isub(cnt, lower_bound);
15705 let scaled = b.imul(adjusted, cum_extent_d);
15706 let term = b.imul(scaled, mem_stride);
15707 elem_offset = Some(match elem_offset {
15708 Some(prev) => b.iadd(prev, term),
15709 None => term,
15710 });
15711 }
15712 let off_elems = elem_offset.unwrap_or_else(|| b.const_i64(0));
15713 let elem_bytes_v = b.const_i64(elem_bytes);
15714 let byte_off = b.imul(off_elems, elem_bytes_v);
15715 let ptr = b.gep(base, vec![byte_off], IrType::Int(IntWidth::I8));
15716 let _ = lower_read_into_addr(b, mode, &info.ty, ptr);
15717 b.branch(incrs[0], vec![]);
15718 } else {
15719 b.store(dims[d - 1].start_val, dims[d - 1].counter);
15720 b.branch(checks[d - 1], vec![]);
15721 }
15722
15723 b.set_block(incrs[d]);
15724 let cur2 = b.load(dims[d].counter);
15725 let next = b.iadd(cur2, dims[d].stride_val);
15726 b.store(next, dims[d].counter);
15727 b.branch(checks[d], vec![]);
15728
15729 b.set_block(exits[d]);
15730 if d < n - 1 {
15731 b.branch(incrs[d + 1], vec![]);
15732 }
15733 }
15734
15735 b.set_block(exits[outer]);
15736 }
15737
15738 /// Lower an N-dimensional array section write item, e.g.
15739 /// `print *, m(:, 1)` or `print *, m(2:3, 1:2)`. Generates one
15740 /// nested loop per dimension, innermost = dim 0 (Fortran column-
15741 /// major iteration order), and at the leaf computes the flat
15742 /// byte offset into the array's base.
15743 ///
15744 /// Element subscripts (`m(:, 1)`) collapse to a single iteration
15745 /// at the fixed value. Range subscripts iterate from start to
15746 /// end with the given stride (defaults: declared bounds, stride
15747 /// 1). Stride sign is honored both at compile time and at runtime.
15748 ///
15749 /// Audit CRITICAL-3: multi-dim slice prints used to mis-dispatch
15750 /// through afs_create_section on a bare stack pointer and crash
15751 /// at runtime reading 384 bytes of garbage as a descriptor.
15752 fn lower_section_write_nd(
15753 b: &mut FuncBuilder,
15754 ctx: &mut LowerCtx,
15755 info: &LocalInfo,
15756 args: &[crate::ast::expr::Argument],
15757 unit: ValueId,
15758 ) {
15759 use crate::ast::expr::SectionSubscript;
15760
15761 let base = array_base_addr(b, info);
15762 let elem_bytes = ir_scalar_byte_size(&info.ty);
15763 let writer = match &info.ty {
15764 IrType::Int(IntWidth::I128) => "afs_write_int128",
15765 IrType::Int(IntWidth::I64) => "afs_write_int64",
15766 IrType::Int(_) => "afs_write_int",
15767 IrType::Float(FloatWidth::F64) => "afs_write_real64",
15768 IrType::Float(_) => "afs_write_real",
15769 IrType::Bool => "afs_write_logical",
15770 _ => "afs_write_int",
15771 };
15772
15773 // For each dimension we need: a runtime counter alloca plus
15774 // its start/end/stride values, the declared lower bound (for
15775 // base-relative offset arithmetic), and the cumulative stride
15776 // for column-major flat-offset computation. start_val is
15777 // saved so non-innermost loop bodies can RE-init the inner
15778 // counter on each outer iteration.
15779 struct DimSlice {
15780 counter: ValueId,
15781 start_val: ValueId,
15782 end_val: ValueId,
15783 stride_val: ValueId,
15784 const_stride: Option<i64>,
15785 decl_lo: i64,
15786 cum_stride: i64,
15787 }
15788
15789 let mut dims: Vec<DimSlice> = Vec::with_capacity(args.len());
15790 let mut cum_stride: i64 = 1;
15791 for (dim_idx, arg) in args.iter().enumerate() {
15792 let (decl_lo, decl_ext) = info.dims.get(dim_idx).copied().unwrap_or((1, 0));
15793 let decl_hi = decl_lo + decl_ext - 1;
15794
15795 let counter = b.alloca(IrType::Int(IntWidth::I32));
15796 let (start_val, end_val, stride_val, const_stride) = match &arg.value {
15797 SectionSubscript::Range { start, end, stride } => {
15798 let start_v = match start {
15799 Some(e) => lower_expr_ctx(b, ctx, e),
15800 None => b.const_i32(decl_lo as i32),
15801 };
15802 let end_v = match end {
15803 Some(e) => lower_expr_ctx(b, ctx, e),
15804 None => b.const_i32(decl_hi as i32),
15805 };
15806 let stride_v = match stride {
15807 Some(e) => lower_expr_ctx(b, ctx, e),
15808 None => b.const_i32(1),
15809 };
15810 let cs = stride.as_ref().and_then(eval_const_int);
15811 (start_v, end_v, stride_v, cs)
15812 }
15813 SectionSubscript::Element(e) => {
15814 let v = lower_expr_ctx(b, ctx, e);
15815 // Single-element dimension: start == end, stride 1.
15816 (v, v, b.const_i32(1), Some(1))
15817 }
15818 };
15819 b.store(start_val, counter);
15820 dims.push(DimSlice {
15821 counter,
15822 start_val,
15823 end_val,
15824 stride_val,
15825 const_stride,
15826 decl_lo,
15827 cum_stride,
15828 });
15829 cum_stride *= decl_ext.max(1);
15830 }
15831
15832 // Build nested check/body/exit blocks, OUTERMOST first (last
15833 // dim) — we want innermost = dim 0 for column-major iteration.
15834 // Layout per dimension d (counting from outermost):
15835 // check_d → body_d? exit_d
15836 // body_d:
15837 // [if d > 0] init counter[d-1] = start[d-1]; branch check_{d-1}
15838 // [if d == 0] compute offset, GEP, write, branch incr_0
15839 // incr_d: counter[d] += stride[d]; branch check_d
15840 // exit_d
15841 let n = dims.len();
15842 let mut checks: Vec<BlockId> = Vec::with_capacity(n);
15843 let mut bodies: Vec<BlockId> = Vec::with_capacity(n);
15844 let mut incrs: Vec<BlockId> = Vec::with_capacity(n);
15845 let mut exits: Vec<BlockId> = Vec::with_capacity(n);
15846 for d in 0..n {
15847 checks.push(b.create_block(&format!("sec_check_d{}", d)));
15848 bodies.push(b.create_block(&format!("sec_body_d{}", d)));
15849 incrs.push(b.create_block(&format!("sec_incr_d{}", d)));
15850 exits.push(b.create_block(&format!("sec_exit_d{}", d)));
15851 }
15852
15853 // Enter the outermost loop. Walking from outermost (n-1) to
15854 // innermost (0) means index n-1 is the LAST in the dims vec.
15855 let outer = n - 1;
15856 b.branch(checks[outer], vec![]);
15857
15858 // Emit each dimension's check/incr/exit. Body chains down to
15859 // the next inner dim (or to the leaf computation at d == 0).
15860 for d_rev in 0..n {
15861 let d = n - 1 - d_rev; // outermost first
15862
15863 // Check block: load counter, compare against end with the
15864 // appropriate cmp op (sign of stride).
15865 b.set_block(checks[d]);
15866 let cur = b.load(dims[d].counter);
15867 if let Some(sv) = dims[d].const_stride {
15868 let done_op = if sv < 0 { CmpOp::Lt } else { CmpOp::Gt };
15869 let done = b.icmp(done_op, cur, dims[d].end_val);
15870 b.cond_branch(done, exits[d], vec![], bodies[d], vec![]);
15871 } else {
15872 let zero = b.const_i32(0);
15873 let stride_neg = b.icmp(CmpOp::Lt, dims[d].stride_val, zero);
15874 let bb_neg = b.create_block(&format!("sec_neg_d{}", d));
15875 let bb_pos = b.create_block(&format!("sec_pos_d{}", d));
15876 b.cond_branch(stride_neg, bb_neg, vec![], bb_pos, vec![]);
15877
15878 b.set_block(bb_neg);
15879 let done_neg = b.icmp(CmpOp::Lt, cur, dims[d].end_val);
15880 b.cond_branch(done_neg, exits[d], vec![], bodies[d], vec![]);
15881
15882 b.set_block(bb_pos);
15883 let done_pos = b.icmp(CmpOp::Gt, cur, dims[d].end_val);
15884 b.cond_branch(done_pos, exits[d], vec![], bodies[d], vec![]);
15885 }
15886
15887 // Body block. If we're at the innermost dim, compute the
15888 // offset and emit the load+write. Otherwise, init the
15889 // next-inner dim's counter and branch to its check.
15890 b.set_block(bodies[d]);
15891 if d == 0 {
15892 // Innermost: compute flat offset = sum over all dims of
15893 // (counter - decl_lo) * cum_stride * elem_bytes.
15894 let mut byte_offset: Option<ValueId> = None;
15895 // Borrow `dims` immutably while iterating it; the loop
15896 // body needs &mut b so we collect the per-dim values
15897 // first, then emit the IR for the sum afterwards.
15898 let dim_data: Vec<(ValueId, i64, i64)> = dims
15899 .iter()
15900 .map(|d| (d.counter, d.decl_lo, d.cum_stride))
15901 .collect();
15902 for (counter, decl_lo, cum_stride_d) in dim_data {
15903 let cnt = b.load(counter);
15904 let lo_const = b.const_i32(decl_lo as i32);
15905 let zero_based = b.isub(cnt, lo_const);
15906 let zero_based64 = widen_idx_to_i64(b, zero_based);
15907 let stride_const = b.const_i64(cum_stride_d * elem_bytes);
15908 let term = b.imul(zero_based64, stride_const);
15909 byte_offset = Some(match byte_offset {
15910 Some(prev) => b.iadd(prev, term),
15911 None => term,
15912 });
15913 }
15914 let off = byte_offset.unwrap_or_else(|| b.const_i64(0));
15915 let p = b.gep(base, vec![off], IrType::Int(IntWidth::I8));
15916 let elem = b.load_typed(p, info.ty.clone());
15917 b.call(
15918 FuncRef::External(writer.into()),
15919 vec![unit, elem],
15920 IrType::Void,
15921 );
15922 b.branch(incrs[0], vec![]);
15923 } else {
15924 // Not innermost: re-init the next-inner dim's counter
15925 // to its start value (RESET on each outer iteration),
15926 // then branch to its check block.
15927 b.store(dims[d - 1].start_val, dims[d - 1].counter);
15928 b.branch(checks[d - 1], vec![]);
15929 }
15930
15931 // Increment block: counter += stride; branch back to check.
15932 b.set_block(incrs[d]);
15933 let cur2 = b.load(dims[d].counter);
15934 let next = b.iadd(cur2, dims[d].stride_val);
15935 b.store(next, dims[d].counter);
15936 b.branch(checks[d], vec![]);
15937
15938 // Exit block: continue out to next-outer increment, or
15939 // fall through past everything if this was the outermost.
15940 b.set_block(exits[d]);
15941 if d < n - 1 {
15942 b.branch(incrs[d + 1], vec![]);
15943 }
15944 // If d == n-1 (outermost exit), the caller of this helper
15945 // continues emitting after exits[outer]. We leave the
15946 // current block set to exits[outer] below.
15947 }
15948
15949 // The final current block must be exits[outer] so subsequent
15950 // statement lowering continues after the section loop.
15951 b.set_block(exits[outer]);
15952 }
15953
15954 /// Lower a whole-array write item: iterate every element of the
15955 /// array and call the per-element write helper. Used by `print *,
15956 /// arr` and equivalent forms. Without this the array's base
15957 /// pointer leaks into the Ptr<_> arm of the scalar write
15958 /// dispatcher and gets mis-routed to afs_write_string.
15959 fn lower_whole_array_write(
15960 b: &mut FuncBuilder,
15961 _ctx: &mut LowerCtx,
15962 info: &LocalInfo,
15963 unit: ValueId,
15964 ) {
15965 let base = array_base_addr(b, info);
15966 let elem_bytes = ir_scalar_byte_size(&info.ty);
15967 let writer = match &info.ty {
15968 IrType::Int(IntWidth::I128) => "afs_write_int128",
15969 IrType::Int(IntWidth::I64) => "afs_write_int64",
15970 IrType::Int(_) => "afs_write_int",
15971 IrType::Float(FloatWidth::F64) => "afs_write_real64",
15972 IrType::Float(_) => "afs_write_real",
15973 IrType::Bool => "afs_write_logical",
15974 _ => "afs_write_int",
15975 };
15976
15977 // Compile-time-known size for stack arrays; runtime descriptor
15978 // call for allocatables.
15979 let n = array_total_elems_value(b, info);
15980
15981 // Stack-allocated loop counter, like lower_array_assign.
15982 let i_addr = b.alloca(IrType::Int(IntWidth::I64));
15983 let zero = b.const_i64(0);
15984 b.store(zero, i_addr);
15985
15986 let bb_check = b.create_block("write_arr_check");
15987 let bb_body = b.create_block("write_arr_body");
15988 let bb_exit = b.create_block("write_arr_exit");
15989 b.branch(bb_check, vec![]);
15990
15991 b.set_block(bb_check);
15992 let i = b.load(i_addr);
15993 let done = b.icmp(CmpOp::Ge, i, n);
15994 b.cond_branch(done, bb_exit, vec![], bb_body, vec![]);
15995
15996 b.set_block(bb_body);
15997 let i_val = b.load(i_addr);
15998 let elem_bytes_v = b.const_i64(elem_bytes);
15999 let byte_off = b.imul(i_val, elem_bytes_v);
16000 let ptr = b.gep(base, vec![byte_off], IrType::Int(IntWidth::I8));
16001 let elem = b.load_typed(ptr, info.ty.clone());
16002 b.call(
16003 FuncRef::External(writer.into()),
16004 vec![unit, elem],
16005 IrType::Void,
16006 );
16007 let one = b.const_i64(1);
16008 let next = b.iadd(i_val, one);
16009 b.store(next, i_addr);
16010 b.branch(bb_check, vec![]);
16011
16012 b.set_block(bb_exit);
16013 }
16014
16015 fn lower_whole_array_read(b: &mut FuncBuilder, info: &LocalInfo, mode: ReadMode) {
16016 let base = array_base_addr(b, info);
16017 let elem_bytes = ir_scalar_byte_size(&info.ty);
16018 let n = array_total_elems_value(b, info);
16019
16020 let i_addr = b.alloca(IrType::Int(IntWidth::I64));
16021 let zero = b.const_i64(0);
16022 b.store(zero, i_addr);
16023
16024 let bb_check = b.create_block("read_arr_check");
16025 let bb_body = b.create_block("read_arr_body");
16026 let bb_exit = b.create_block("read_arr_exit");
16027 b.branch(bb_check, vec![]);
16028
16029 b.set_block(bb_check);
16030 let i = b.load(i_addr);
16031 let done = b.icmp(CmpOp::Ge, i, n);
16032 b.cond_branch(done, bb_exit, vec![], bb_body, vec![]);
16033
16034 b.set_block(bb_body);
16035 let i_val = b.load(i_addr);
16036 let elem_bytes_v = b.const_i64(elem_bytes);
16037 let byte_off = b.imul(i_val, elem_bytes_v);
16038 let ptr = b.gep(base, vec![byte_off], IrType::Int(IntWidth::I8));
16039 let _ = lower_read_into_addr(b, mode, &info.ty, ptr);
16040 let one = b.const_i64(1);
16041 let next = b.iadd(i_val, one);
16042 b.store(next, i_addr);
16043 b.branch(bb_check, vec![]);
16044
16045 b.set_block(bb_exit);
16046 }
16047
16048 fn local_uses_array_descriptor(info: &LocalInfo) -> bool {
16049 info.allocatable || info.descriptor_arg
16050 }
16051
16052 fn array_descriptor_addr(b: &mut FuncBuilder, info: &LocalInfo) -> ValueId {
16053 if info.allocatable {
16054 info.addr
16055 } else if info.descriptor_arg {
16056 b.load(info.addr)
16057 } else {
16058 info.addr
16059 }
16060 }
16061
16062 fn string_descriptor_addr(b: &mut FuncBuilder, info: &LocalInfo) -> ValueId {
16063 if info.by_ref {
16064 b.load(info.addr)
16065 } else {
16066 info.addr
16067 }
16068 }
16069
16070 fn store_byte_aggregate_field(
16071 b: &mut FuncBuilder,
16072 base: ValueId,
16073 offset: i64,
16074 field_ty: IrType,
16075 val: ValueId,
16076 ) {
16077 let field_bytes = field_ty.size_bytes() as i64;
16078 debug_assert!(field_bytes > 0 && offset % field_bytes == 0);
16079 let slot = b.const_i64(offset / field_bytes);
16080 let ptr = b.gep(base, vec![slot], field_ty.clone());
16081 let stored = match field_ty {
16082 IrType::Int(_) | IrType::Float(_) | IrType::Bool => coerce_to_type(b, val, &field_ty),
16083 _ => val,
16084 };
16085 b.store(stored, ptr);
16086 }
16087
16088 fn array_data_ptr_for_call(b: &mut FuncBuilder, info: &LocalInfo) -> ValueId {
16089 if local_uses_array_descriptor(info) {
16090 let desc = array_descriptor_addr(b, info);
16091 b.load_typed(desc, IrType::Ptr(Box::new(info.ty.clone())))
16092 } else if info.by_ref {
16093 b.load(info.addr)
16094 } else if !info.dims.is_empty() {
16095 let zero = b.const_i64(0);
16096 b.gep(info.addr, vec![zero], info.ty.clone())
16097 } else {
16098 info.addr
16099 }
16100 }
16101
16102 fn materialize_array_descriptor_for_info(b: &mut FuncBuilder, info: &LocalInfo) -> ValueId {
16103 let desc = b.alloca(IrType::Array(Box::new(IrType::Int(IntWidth::I8)), 384));
16104 let zero32 = b.const_i32(0);
16105 let sz384 = b.const_i64(384);
16106 b.call(
16107 FuncRef::External("memset".into()),
16108 vec![desc, zero32, sz384],
16109 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
16110 );
16111
16112 let base_ptr = array_data_ptr_for_call(b, info);
16113 store_byte_aggregate_field(b, desc, 0, IrType::Ptr(Box::new(info.ty.clone())), base_ptr);
16114 let elem_size = b.const_i64(ir_scalar_byte_size(&info.ty));
16115 store_byte_aggregate_field(b, desc, 8, IrType::Int(IntWidth::I64), elem_size);
16116 let rank = b.const_i32(info.dims.len() as i32);
16117 store_byte_aggregate_field(b, desc, 16, IrType::Int(IntWidth::I32), rank);
16118 let flags = b.const_i32(2);
16119 store_byte_aggregate_field(b, desc, 20, IrType::Int(IntWidth::I32), flags);
16120
16121 for (i, (lower, extent)) in info.dims.iter().copied().enumerate() {
16122 let base_offset = 24 + (i as i64) * 24;
16123 let lower_val = b.const_i64(lower);
16124 store_byte_aggregate_field(b, desc, base_offset, IrType::Int(IntWidth::I64), lower_val);
16125 let upper_val = b.const_i64(lower + extent - 1);
16126 store_byte_aggregate_field(
16127 b,
16128 desc,
16129 base_offset + 8,
16130 IrType::Int(IntWidth::I64),
16131 upper_val,
16132 );
16133 let stride_val = b.const_i64(1);
16134 store_byte_aggregate_field(
16135 b,
16136 desc,
16137 base_offset + 16,
16138 IrType::Int(IntWidth::I64),
16139 stride_val,
16140 );
16141 }
16142
16143 desc
16144 }
16145
16146 fn lower_arg_descriptor(
16147 b: &mut FuncBuilder,
16148 locals: &HashMap<String, LocalInfo>,
16149 expr: &crate::ast::expr::SpannedExpr,
16150 _st: &SymbolTable,
16151 ) -> ValueId {
16152 if let Expr::Name { name } = &expr.node {
16153 let key = name.to_lowercase();
16154 if let Some(info) = locals.get(&key) {
16155 if info.allocatable {
16156 return info.addr;
16157 }
16158 if info.descriptor_arg {
16159 return b.load(info.addr);
16160 }
16161 if !info.dims.is_empty() {
16162 return materialize_array_descriptor_for_info(b, info);
16163 }
16164 }
16165 }
16166 b.const_i64(0)
16167 }
16168
16169 /// Get the data base address for an array variable.
16170 /// For fixed arrays, this is the alloca address directly.
16171 /// For allocatable arrays, load base_addr from the descriptor (offset 0).
16172 fn array_base_addr(b: &mut FuncBuilder, info: &LocalInfo) -> ValueId {
16173 if local_uses_array_descriptor(info) {
16174 let desc = array_descriptor_addr(b, info);
16175 b.load_typed(desc, IrType::Ptr(Box::new(info.ty.clone())))
16176 } else if info.by_ref {
16177 // Dummy arrays are stored as "slot holding caller base pointer".
16178 b.load(info.addr)
16179 } else {
16180 info.addr
16181 }
16182 }
16183
16184 fn array_total_elems_value(b: &mut FuncBuilder, info: &LocalInfo) -> ValueId {
16185 if local_uses_array_descriptor(info) {
16186 let desc = array_descriptor_addr(b, info);
16187 b.call(
16188 FuncRef::External("afs_array_size".into()),
16189 vec![desc],
16190 IrType::Int(IntWidth::I64),
16191 )
16192 } else {
16193 let total: i64 = info.dims.iter().map(|(_, extent)| *extent).product();
16194 b.const_i64(total.max(0))
16195 }
16196 }
16197
16198 fn whole_array_expr_info(
16199 locals: &HashMap<String, LocalInfo>,
16200 expr: &crate::ast::expr::SpannedExpr,
16201 ) -> Option<LocalInfo> {
16202 let Expr::Name { name } = &expr.node else {
16203 return None;
16204 };
16205 let key = name.to_lowercase();
16206 locals
16207 .get(&key)
16208 .filter(|info| local_is_array_like(info))
16209 .cloned()
16210 }
16211
16212 fn whole_array_named_info(
16213 locals: &HashMap<String, LocalInfo>,
16214 expr: &crate::ast::expr::SpannedExpr,
16215 ) -> Option<(String, LocalInfo)> {
16216 let Expr::Name { name } = &expr.node else {
16217 return None;
16218 };
16219 let key = name.to_lowercase();
16220 let info = locals
16221 .get(&key)
16222 .filter(|info| local_is_array_like(info))
16223 .cloned()?;
16224 Some((key, info))
16225 }
16226
/// Whole-array assignment shapes that can be dispatched to a bulk
/// runtime kernel (the `afs_*` helpers named by the `bulk_*_runtime_name`
/// lookups) instead of an element-by-element loop.
#[derive(Clone)]
enum BulkArrayPlan {
    /// `a = scalar` — fill every element with one scalar value.
    Fill {
        kernel: &'static str,
        scalar: crate::ast::expr::SpannedExpr,
    },
    /// `a = b OP c` where both operands are whole arrays.
    ArrayBinary {
        kernel: &'static str,
        lhs: LocalInfo,
        rhs: LocalInfo,
    },
    /// `a = b OP scalar` — array on the left of the operator.
    ArrayScalar {
        kernel: &'static str,
        array: LocalInfo,
        scalar: crate::ast::expr::SpannedExpr,
    },
    /// `a = scalar OP b` — scalar on the left; kept separate from
    /// ArrayScalar because non-commutative ops need distinct kernels.
    ScalarArray {
        kernel: &'static str,
        scalar: crate::ast::expr::SpannedExpr,
        array: LocalInfo,
    },
}
16249
/// An array variable reference paired with its resolved `LocalInfo`.
/// NOTE(review): `name` presumably holds the lowercased lookup key
/// used for the `locals` map — confirm against the construction site.
#[derive(Clone)]
struct IndexedArrayRef {
    name: String,
    info: LocalInfo,
}
16255
16256 fn bulk_fill_runtime_name(ty: &IrType) -> Option<&'static str> {
16257 match ty {
16258 IrType::Int(IntWidth::I32) => Some("afs_fill_i32"),
16259 IrType::Float(FloatWidth::F32) => Some("afs_fill_f32"),
16260 IrType::Float(FloatWidth::F64) => Some("afs_fill_f64"),
16261 _ => None,
16262 }
16263 }
16264
16265 fn bulk_array_binary_runtime_name(op: BinaryOp, ty: &IrType) -> Option<&'static str> {
16266 match (op, ty) {
16267 (BinaryOp::Add, IrType::Int(IntWidth::I32)) => Some("afs_array_add_i32"),
16268 (BinaryOp::Add, IrType::Float(FloatWidth::F32)) => Some("afs_array_add_f32"),
16269 (BinaryOp::Add, IrType::Float(FloatWidth::F64)) => Some("afs_array_add_f64"),
16270 (BinaryOp::Sub, IrType::Int(IntWidth::I32)) => Some("afs_array_sub_i32"),
16271 (BinaryOp::Sub, IrType::Float(FloatWidth::F32)) => Some("afs_array_sub_f32"),
16272 (BinaryOp::Sub, IrType::Float(FloatWidth::F64)) => Some("afs_array_sub_f64"),
16273 (BinaryOp::Mul, IrType::Int(IntWidth::I32)) => Some("afs_array_mul_i32"),
16274 (BinaryOp::Mul, IrType::Float(FloatWidth::F32)) => Some("afs_array_mul_f32"),
16275 (BinaryOp::Mul, IrType::Float(FloatWidth::F64)) => Some("afs_array_mul_f64"),
16276 _ => None,
16277 }
16278 }
16279
16280 fn bulk_array_scalar_runtime_name(op: BinaryOp, ty: &IrType) -> Option<&'static str> {
16281 match (op, ty) {
16282 (BinaryOp::Add, IrType::Int(IntWidth::I32)) => Some("afs_array_add_scalar_i32"),
16283 (BinaryOp::Add, IrType::Float(FloatWidth::F32)) => Some("afs_array_add_scalar_f32"),
16284 (BinaryOp::Add, IrType::Float(FloatWidth::F64)) => Some("afs_array_add_scalar_f64"),
16285 (BinaryOp::Sub, IrType::Int(IntWidth::I32)) => Some("afs_array_sub_scalar_i32"),
16286 (BinaryOp::Sub, IrType::Float(FloatWidth::F32)) => Some("afs_array_sub_scalar_f32"),
16287 (BinaryOp::Sub, IrType::Float(FloatWidth::F64)) => Some("afs_array_sub_scalar_f64"),
16288 (BinaryOp::Mul, IrType::Int(IntWidth::I32)) => Some("afs_array_mul_scalar_i32"),
16289 (BinaryOp::Mul, IrType::Float(FloatWidth::F32)) => Some("afs_array_mul_scalar_f32"),
16290 (BinaryOp::Mul, IrType::Float(FloatWidth::F64)) => Some("afs_array_mul_scalar_f64"),
16291 _ => None,
16292 }
16293 }
16294
16295 fn bulk_scalar_array_runtime_name(op: BinaryOp, ty: &IrType) -> Option<&'static str> {
16296 match (op, ty) {
16297 (BinaryOp::Sub, IrType::Int(IntWidth::I32)) => Some("afs_scalar_sub_array_i32"),
16298 (BinaryOp::Sub, IrType::Float(FloatWidth::F32)) => Some("afs_scalar_sub_array_f32"),
16299 (BinaryOp::Sub, IrType::Float(FloatWidth::F64)) => Some("afs_scalar_sub_array_f64"),
16300 _ => None,
16301 }
16302 }
16303
16304 fn expr_contains_array_refs(
16305 expr: &crate::ast::expr::SpannedExpr,
16306 locals: &HashMap<String, LocalInfo>,
16307 ) -> bool {
16308 let mut arrays = Vec::new();
16309 collect_array_names(expr, locals, &mut arrays);
16310 !arrays.is_empty()
16311 }
16312
16313 fn expr_mentions_name(expr: &crate::ast::expr::SpannedExpr, needle: &str) -> bool {
16314 match &expr.node {
16315 Expr::Name { name } => name.eq_ignore_ascii_case(needle),
16316 Expr::BinaryOp { left, right, .. } => {
16317 expr_mentions_name(left, needle) || expr_mentions_name(right, needle)
16318 }
16319 Expr::UnaryOp { operand, .. } => expr_mentions_name(operand, needle),
16320 Expr::ParenExpr { inner } => expr_mentions_name(inner, needle),
16321 Expr::ComponentAccess { base, .. } => expr_mentions_name(base, needle),
16322 Expr::FunctionCall { callee, args } => {
16323 expr_mentions_name(callee, needle)
16324 || args.iter().any(|arg| match &arg.value {
16325 crate::ast::expr::SectionSubscript::Element(e) => expr_mentions_name(e, needle),
16326 crate::ast::expr::SectionSubscript::Range { start, end, stride } => {
16327 start
16328 .as_ref()
16329 .is_some_and(|e| expr_mentions_name(e, needle))
16330 || end.as_ref().is_some_and(|e| expr_mentions_name(e, needle))
16331 || stride
16332 .as_ref()
16333 .is_some_and(|e| expr_mentions_name(e, needle))
16334 }
16335 })
16336 }
16337 Expr::ArrayConstructor { values, .. } => values.iter().any(|v| match v {
16338 crate::ast::expr::AcValue::Expr(e) => expr_mentions_name(e, needle),
16339 crate::ast::expr::AcValue::ImpliedDo(ido) => {
16340 ido.var.eq_ignore_ascii_case(needle)
16341 || expr_mentions_name(&ido.start, needle)
16342 || expr_mentions_name(&ido.end, needle)
16343 || ido
16344 .step
16345 .as_ref()
16346 .is_some_and(|e| expr_mentions_name(e, needle))
16347 || ido.values.iter().any(|inner| match inner {
16348 crate::ast::expr::AcValue::Expr(e) => expr_mentions_name(e, needle),
16349 crate::ast::expr::AcValue::ImpliedDo(_) => false,
16350 })
16351 }
16352 }),
16353 _ => false,
16354 }
16355 }
16356
16357 fn expr_is_size_of_array(expr: &crate::ast::expr::SpannedExpr, array_name: &str) -> bool {
16358 match &expr.node {
16359 Expr::ParenExpr { inner } => expr_is_size_of_array(inner, array_name),
16360 Expr::FunctionCall { callee, args } => {
16361 if let Expr::Name { name } = &callee.node {
16362 if name.eq_ignore_ascii_case("size") && args.len() == 1 {
16363 if let crate::ast::expr::SectionSubscript::Element(arg) = &args[0].value {
16364 return matches!(
16365 &arg.node,
16366 Expr::Name { name } if name.eq_ignore_ascii_case(array_name)
16367 );
16368 }
16369 }
16370 }
16371 false
16372 }
16373 _ => false,
16374 }
16375 }
16376
16377 fn fresh_synth_loop_var(locals: &HashMap<String, LocalInfo>) -> String {
16378 let mut idx = 0usize;
16379 loop {
16380 let name = if idx == 0 {
16381 "afs_elem_i".to_string()
16382 } else {
16383 format!("afs_elem_i{}", idx)
16384 };
16385 if !locals.contains_key(&name) {
16386 return name;
16387 }
16388 idx += 1;
16389 }
16390 }
16391
16392 fn synth_name_expr(name: &str, span: crate::lexer::Span) -> crate::ast::expr::SpannedExpr {
16393 crate::ast::Spanned::new(
16394 Expr::Name {
16395 name: name.to_string(),
16396 },
16397 span,
16398 )
16399 }
16400
16401 fn synth_int_expr(value: i64, span: crate::lexer::Span) -> crate::ast::expr::SpannedExpr {
16402 crate::ast::Spanned::new(
16403 Expr::IntegerLiteral {
16404 text: value.to_string(),
16405 kind: None,
16406 },
16407 span,
16408 )
16409 }
16410
16411 fn synth_indexed_array_expr(
16412 array_name: &str,
16413 index_name: &str,
16414 span: crate::lexer::Span,
16415 ) -> crate::ast::expr::SpannedExpr {
16416 crate::ast::Spanned::new(
16417 Expr::FunctionCall {
16418 callee: Box::new(synth_name_expr(array_name, span)),
16419 args: vec![crate::ast::expr::Argument {
16420 keyword: None,
16421 value: crate::ast::expr::SectionSubscript::Element(synth_name_expr(
16422 index_name, span,
16423 )),
16424 }],
16425 },
16426 span,
16427 )
16428 }
16429
/// Try to lower `dest = elemental_fn(args...)` for a statically-shaped
/// rank-1 destination by rewriting it into a synthesized DO CONCURRENT:
///
///     do concurrent (i = lb:ub)
///         dest(i) = elemental_fn(mapped_args(i)...)
///     end do
///
/// Whole-array arguments conformable with `dest` become `arg(i)`; scalar
/// arguments (no array references) pass through unchanged. Returns `true`
/// when the rewrite was emitted, `false` to fall back to the generic
/// assignment paths.
fn try_lower_elemental_array_assign(
    b: &mut FuncBuilder,
    ctx: &mut LowerCtx,
    dest_name: &str,
    dest_info: &LocalInfo,
    value: &crate::ast::expr::SpannedExpr,
) -> bool {
    // Bounds must be compile-time known to synthesize the loop control.
    if dest_info.allocatable || dest_info.dims.len() != 1 {
        return false;
    }

    let Expr::FunctionCall { callee, args } = &value.node else {
        return false;
    };
    let Expr::Name { name: callee_name } = &callee.node else {
        return false;
    };
    // The callee must be a procedure known to be ELEMENTAL.
    if !ctx.elemental_funcs.contains(&callee_name.to_lowercase()) {
        return false;
    }

    let (dest_lower, dest_extent) = dest_info.dims[0];
    let dest_upper = dest_lower + dest_extent - 1;
    let loop_var = fresh_synth_loop_var(&ctx.locals);
    let mut saw_array_arg = false;
    let mut mapped_args = Vec::with_capacity(args.len());

    for arg in args {
        // Keyword arguments would require matching dummy names; bail out.
        if arg.keyword.is_some() {
            return false;
        }
        let crate::ast::expr::SectionSubscript::Element(actual) = &arg.value else {
            return false;
        };

        if let Some((array_name, array_info)) = whole_array_named_info(&ctx.locals, actual) {
            // Whole-array actual: must be a statically-shaped 1-D array
            // conformable with the destination; rewritten to `arr(i)`.
            if array_info.allocatable
                || array_info.dims.len() != 1
                || !bulk_arrays_compatible(dest_info, &array_info)
            {
                return false;
            }
            saw_array_arg = true;
            mapped_args.push(crate::ast::expr::Argument {
                keyword: None,
                value: crate::ast::expr::SectionSubscript::Element(synth_indexed_array_expr(
                    &array_name,
                    &loop_var,
                    actual.span,
                )),
            });
        } else {
            // Otherwise the actual must be purely scalar; an array
            // reference buried inside would need per-element handling.
            if expr_contains_array_refs(actual, &ctx.locals) {
                return false;
            }
            mapped_args.push(arg.clone());
        }
    }

    // No array argument means this is not an array-valued elemental
    // application; let the scalar paths handle it.
    if !saw_array_arg {
        return false;
    }

    // Synthesize `dest(i) = fn(mapped args)` and wrap it in a
    // single-control DO CONCURRENT over the destination's bounds.
    let target = synth_indexed_array_expr(dest_name, &loop_var, value.span);
    let mapped_value = crate::ast::Spanned::new(
        Expr::FunctionCall {
            callee: Box::new(synth_name_expr(callee_name, callee.span)),
            args: mapped_args,
        },
        value.span,
    );
    let body = vec![crate::ast::Spanned::new(
        Stmt::Assignment {
            target,
            value: mapped_value,
        },
        value.span,
    )];
    let controls = vec![ConcurrentControl {
        var: loop_var,
        start: synth_int_expr(dest_lower, value.span),
        end: synth_int_expr(dest_upper, value.span),
        step: None,
    }];
    lower_do_concurrent(b, ctx, &None, &controls, None, &body, value.span);
    true
}
16517
16518 fn bulk_arrays_compatible(dest_info: &LocalInfo, other_info: &LocalInfo) -> bool {
16519 if dest_info.ty != other_info.ty {
16520 return false;
16521 }
16522 if dest_info.allocatable || other_info.allocatable {
16523 return true;
16524 }
16525 dest_info.dims == other_info.dims
16526 }
16527
/// Classify the RHS of a whole-array assignment into a bulk-kernel plan:
/// array⊕array, array⊕scalar, scalar⊖array, or a scalar broadcast fill.
/// Returns `None` when no runtime kernel applies (the caller falls back
/// to an element-wise loop). Scalar operands are kept as unlowered AST
/// so `emit_bulk_array_plan` can evaluate them exactly once.
fn build_whole_array_bulk_plan(
    locals: &HashMap<String, LocalInfo>,
    dest_info: &LocalInfo,
    value: &crate::ast::expr::SpannedExpr,
) -> Option<BulkArrayPlan> {
    if let Expr::BinaryOp { op, left, right } = &value.node {
        // Case 1: both operands are whole arrays conformable with dest.
        if let Some(kernel) = bulk_array_binary_runtime_name(op.clone(), &dest_info.ty) {
            let lhs_info = whole_array_expr_info(locals, left);
            let rhs_info = whole_array_expr_info(locals, right);
            if let (Some(lhs_info), Some(rhs_info)) = (lhs_info, rhs_info) {
                if bulk_arrays_compatible(dest_info, &lhs_info)
                    && bulk_arrays_compatible(dest_info, &rhs_info)
                {
                    return Some(BulkArrayPlan::ArrayBinary {
                        kernel,
                        lhs: lhs_info,
                        rhs: rhs_info,
                    });
                }
            }
        }

        // Case 2: one operand is a whole array, the other is scalar
        // (no array references anywhere inside it).
        let lhs_info = whole_array_expr_info(locals, left);
        let rhs_info = whole_array_expr_info(locals, right);
        let lhs_scalar = !expr_contains_array_refs(left, locals);
        let rhs_scalar = !expr_contains_array_refs(right, locals);

        // array <op> scalar: direct kernel.
        if let Some(lhs_info) = lhs_info {
            if rhs_scalar && bulk_arrays_compatible(dest_info, &lhs_info) {
                if let Some(kernel) = bulk_array_scalar_runtime_name(op.clone(), &dest_info.ty) {
                    return Some(BulkArrayPlan::ArrayScalar {
                        kernel,
                        array: lhs_info,
                        scalar: (**right).clone(),
                    });
                }
            }
        }

        // scalar <op> array: Add/Mul commute so the array⊕scalar kernel
        // is reused; Sub needs the dedicated scalar-first kernel.
        if let Some(rhs_info) = rhs_info {
            if lhs_scalar && bulk_arrays_compatible(dest_info, &rhs_info) {
                match op {
                    BinaryOp::Add | BinaryOp::Mul => {
                        if let Some(kernel) =
                            bulk_array_scalar_runtime_name(op.clone(), &dest_info.ty)
                        {
                            return Some(BulkArrayPlan::ArrayScalar {
                                kernel,
                                array: rhs_info,
                                scalar: (**left).clone(),
                            });
                        }
                    }
                    BinaryOp::Sub => {
                        if let Some(kernel) =
                            bulk_scalar_array_runtime_name(op.clone(), &dest_info.ty)
                        {
                            return Some(BulkArrayPlan::ScalarArray {
                                kernel,
                                scalar: (**left).clone(),
                                array: rhs_info,
                            });
                        }
                    }
                    _ => {}
                }
            }
        }
    }

    // Case 3: pure scalar RHS → broadcast fill of the whole array.
    if !expr_contains_array_refs(value, locals) {
        if let Some(kernel) = bulk_fill_runtime_name(&dest_info.ty) {
            return Some(BulkArrayPlan::Fill {
                kernel,
                scalar: value.clone(),
            });
        }
    }

    None
}
16609
/// Emit the runtime call for a previously classified bulk plan.
/// `n` is the destination's element count; scalar operands stored in the
/// plan are lowered here exactly once, outside any loop. The order of
/// builder calls within each arm fixes the IR evaluation order.
fn emit_bulk_array_plan(
    b: &mut FuncBuilder,
    ctx: &mut LowerCtx,
    dest_info: &LocalInfo,
    n: ValueId,
    plan: BulkArrayPlan,
) {
    let dest_base = array_base_addr(b, dest_info);
    match plan {
        // dest[i] = scalar, for all i.
        BulkArrayPlan::Fill { kernel, scalar } => {
            let scalar = lower_expr_ctx_tl(b, ctx, &scalar);
            b.call(
                FuncRef::External(kernel.into()),
                vec![dest_base, n, scalar],
                IrType::Void,
            );
        }
        // dest[i] = lhs[i] <op> rhs[i].
        BulkArrayPlan::ArrayBinary { kernel, lhs, rhs } => {
            let lhs_base = array_base_addr(b, &lhs);
            let rhs_base = array_base_addr(b, &rhs);
            b.call(
                FuncRef::External(kernel.into()),
                vec![dest_base, lhs_base, rhs_base, n],
                IrType::Void,
            );
        }
        // dest[i] = array[i] <op> scalar.
        BulkArrayPlan::ArrayScalar {
            kernel,
            array,
            scalar,
        } => {
            let array_base = array_base_addr(b, &array);
            let scalar = lower_expr_ctx_tl(b, ctx, &scalar);
            b.call(
                FuncRef::External(kernel.into()),
                vec![dest_base, array_base, scalar, n],
                IrType::Void,
            );
        }
        // dest[i] = scalar <op> array[i] (non-commutative ops).
        BulkArrayPlan::ScalarArray {
            kernel,
            scalar,
            array,
        } => {
            let scalar = lower_expr_ctx_tl(b, ctx, &scalar);
            let array_base = array_base_addr(b, &array);
            b.call(
                FuncRef::External(kernel.into()),
                vec![dest_base, scalar, array_base, n],
                IrType::Void,
            );
        }
    }
}
16664
/// Produce an array-descriptor pointer plus element IR type for an
/// array-valued expression, when the expression is a plain array variable
/// or an array section. Returns `None` for anything else, in which case
/// the caller treats the expression as scalar.
fn lower_array_expr_descriptor(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    expr: &crate::ast::expr::SpannedExpr,
    st: &SymbolTable,
) -> Option<(ValueId, IrType)> {
    match &expr.node {
        // Parentheses are transparent here.
        Expr::ParenExpr { inner } => lower_array_expr_descriptor(b, locals, inner, st),
        Expr::Name { name } => {
            let info = locals.get(&name.to_lowercase())?;
            if local_is_array_like(info) {
                // Descriptor-backed locals already have a descriptor in
                // memory; otherwise build a temporary one on the fly.
                let desc = if local_uses_array_descriptor(info) {
                    array_descriptor_addr(b, info)
                } else {
                    materialize_array_descriptor_for_info(b, info)
                };
                Some((desc, info.ty.clone()))
            } else {
                None
            }
        }
        // `a(lo:hi[:stride], ...)` — a section reference (subscripts are
        // parsed as FunctionCall nodes; at least one Range marks a section).
        Expr::FunctionCall { callee, args } => {
            let Expr::Name { name } = &callee.node else {
                return None;
            };
            let info = locals.get(&name.to_lowercase())?;
            if local_is_array_like(info)
                && args.iter().any(|arg| {
                    matches!(arg.value, crate::ast::expr::SectionSubscript::Range { .. })
                })
            {
                Some((
                    lower_array_section(b, locals, info, args, st),
                    info.ty.clone(),
                ))
            } else {
                None
            }
        }
        _ => None,
    }
}
16707
/// Lower `dest(lo:hi[:stride]) = value` for a single-range subscript:
/// builds a strided element-copy loop over the destination section.
/// The RHS may be another array/section (element-wise copy, iterating
/// min(dest_n, src_n) elements) or a scalar (broadcast). Character and
/// numeric destinations take different element-store paths. Returns
/// `true` when this shape was handled, `false` to fall back.
fn lower_1d_section_assign(
    b: &mut FuncBuilder,
    ctx: &mut LowerCtx,
    dest_info: &LocalInfo,
    dest_args: &[crate::ast::expr::Argument],
    value: &crate::ast::expr::SpannedExpr,
) -> bool {
    // Only the exactly-one-subscript, range-form destination is handled.
    if dest_args.len() != 1
        || !matches!(
            dest_args[0].value,
            crate::ast::expr::SectionSubscript::Range { .. }
        )
    {
        return false;
    }

    // Descriptor for the destination section, its element count, and its
    // stride (descriptor field at byte offset 24+16 — first dim's stride).
    let dest_desc = lower_array_section(b, &ctx.locals, dest_info, dest_args, ctx.st);
    let dest_n = b.call(
        FuncRef::External("afs_array_size".into()),
        vec![dest_desc],
        IrType::Int(IntWidth::I64),
    );
    let dest_stride = load_array_desc_i64_field(b, dest_desc, 24 + 16);
    let src_desc = lower_array_expr_descriptor(b, &ctx.locals, value, ctx.st);
    let src_n = src_desc.as_ref().map(|(desc, _)| {
        b.call(
            FuncRef::External("afs_array_size".into()),
            vec![*desc],
            IrType::Int(IntWidth::I64),
        )
    });

    // Stack-allocated i64 loop counter, starting at 0.
    let i_addr = b.alloca(IrType::Int(IntWidth::I64));
    let zero64 = b.const_i64(0);
    b.store(zero64, i_addr);

    let bb_check = b.create_block("section_assign_check");
    let bb_body = b.create_block("section_assign_body");
    let bb_exit = b.create_block("section_assign_exit");
    b.branch(bb_check, vec![]);

    // Loop condition: stop at dest length, and also at source length when
    // the RHS is an array (copies min(dest_n, src_n) elements).
    b.set_block(bb_check);
    let i = b.load(i_addr);
    let mut done = b.icmp(CmpOp::Ge, i, dest_n);
    if let Some(src_len) = src_n {
        let src_done = b.icmp(CmpOp::Ge, i, src_len);
        done = b.or(done, src_done);
    }
    b.cond_branch(done, bb_exit, vec![], bb_body, vec![]);

    b.set_block(bb_body);
    let i_val = b.load(i_addr);

    if dest_info.char_kind != CharKind::None || descriptor_backed_runtime_char_array(dest_info) {
        // Character elements: compute byte addresses from stride and
        // per-element length, then delegate padding/truncation to the
        // runtime's fixed-length character assignment.
        let dest_base = b.load_typed(dest_desc, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
        let dest_elem_len = descriptor_elem_size(b, dest_desc);
        let dest_index = b.imul(i_val, dest_stride);
        let dest_off = b.imul(dest_index, dest_elem_len);
        let dest_ptr = b.gep(dest_base, vec![dest_off], IrType::Int(IntWidth::I8));

        let (src_ptr, src_len) = if let Some((desc, _)) = src_desc.as_ref() {
            let src_base = b.load_typed(*desc, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
            let src_stride = load_array_desc_i64_field(b, *desc, 24 + 16);
            let src_index = b.imul(i_val, src_stride);
            let src_elem_len = descriptor_elem_size(b, *desc);
            let src_off = b.imul(src_index, src_elem_len);
            let src_ptr = b.gep(src_base, vec![src_off], IrType::Int(IntWidth::I8));
            (src_ptr, src_elem_len)
        } else {
            // Scalar string RHS, re-evaluated each iteration.
            lower_string_expr_ctx(b, ctx, value)
        };
        b.call(
            FuncRef::External("afs_assign_char_fixed".into()),
            vec![dest_ptr, dest_elem_len, src_ptr, src_len],
            IrType::Void,
        );
    } else {
        // Numeric elements: typed load/store with coercion to dest type.
        let dest_base = b.load_typed(dest_desc, IrType::Ptr(Box::new(dest_info.ty.clone())));
        let elem_bytes = b.const_i64(ir_scalar_byte_size(&dest_info.ty));
        // Scalar RHS is lowered once per iteration only in the broadcast
        // case (src_desc is None).
        let scalar = src_desc.is_none().then(|| {
            let raw = lower_expr_ctx_tl(b, ctx, value);
            coerce_to_type(b, raw, &dest_info.ty)
        });
        let (src_base, src_stride, src_ty) = if let Some((desc, ty)) = src_desc.as_ref() {
            (
                Some(b.load_typed(*desc, IrType::Ptr(Box::new(ty.clone())))),
                Some(load_array_desc_i64_field(b, *desc, 24 + 16)),
                Some(ty.clone()),
            )
        } else {
            (None, None, None)
        };

        let dest_index = b.imul(i_val, dest_stride);
        let dest_off = b.imul(dest_index, elem_bytes);
        let dest_ptr = b.gep(dest_base, vec![dest_off], IrType::Int(IntWidth::I8));
        let stored = if let (Some(src_base), Some(src_stride), Some(src_ty)) =
            (src_base, src_stride, src_ty)
        {
            // NOTE(review): source offsets use the DEST element size
            // (`elem_bytes`); this assumes src and dest element sizes
            // match — confirm for mixed-width array sections.
            let src_index = b.imul(i_val, src_stride);
            let src_off = b.imul(src_index, elem_bytes);
            let src_ptr = b.gep(src_base, vec![src_off], IrType::Int(IntWidth::I8));
            let raw = b.load_typed(src_ptr, src_ty);
            coerce_to_type(b, raw, &dest_info.ty)
        } else {
            scalar.expect("scalar slice assignment should have scalar RHS")
        };
        b.store(stored, dest_ptr);
    }

    // i += 1; back to the condition check.
    let one = b.const_i64(1);
    let next_i = b.iadd(i_val, one);
    b.store(next_i, i_addr);
    b.branch(bb_check, vec![]);

    b.set_block(bb_exit);
    true
}
16826
16827 fn loop_indexed_array_ref(
16828 locals: &HashMap<String, LocalInfo>,
16829 expr: &crate::ast::expr::SpannedExpr,
16830 loop_var: &str,
16831 ) -> Option<IndexedArrayRef> {
16832 match &expr.node {
16833 Expr::ParenExpr { inner } => loop_indexed_array_ref(locals, inner, loop_var),
16834 Expr::FunctionCall { callee, args } => {
16835 if args.len() != 1 {
16836 return None;
16837 }
16838 let Expr::Name { name } = &callee.node else {
16839 return None;
16840 };
16841 let crate::ast::expr::SectionSubscript::Element(index) = &args[0].value else {
16842 return None;
16843 };
16844 let Expr::Name { name: idx_name } = &index.node else {
16845 return None;
16846 };
16847 if !idx_name.eq_ignore_ascii_case(loop_var) {
16848 return None;
16849 }
16850 let key = name.to_lowercase();
16851 let info = locals.get(&key)?.clone();
16852 if info.allocatable || info.dims.len() == 1 {
16853 Some(IndexedArrayRef { name: key, info })
16854 } else {
16855 None
16856 }
16857 }
16858 _ => None,
16859 }
16860 }
16861
16862 fn control_covers_full_array(ctrl: &ConcurrentControl, dest: &IndexedArrayRef) -> bool {
16863 let step_ok = ctrl
16864 .step
16865 .as_ref()
16866 .is_none_or(|step| eval_const_int(step) == Some(1));
16867 if !step_ok {
16868 return false;
16869 }
16870 if dest.info.allocatable {
16871 eval_const_int(&ctrl.start) == Some(1) && expr_is_size_of_array(&ctrl.end, &dest.name)
16872 } else {
16873 let Some((lower, extent)) = dest.info.dims.first().copied() else {
16874 return false;
16875 };
16876 let upper = lower + extent - 1;
16877 eval_const_int(&ctrl.start) == Some(lower) && eval_const_int(&ctrl.end) == Some(upper)
16878 }
16879 }
16880
/// Classify a loop body `dest(i) = value` into a bulk-kernel plan, the
/// loop-based analogue of `build_whole_array_bulk_plan`. Operands must be
/// either arrays indexed directly by `loop_var` or expressions that are
/// scalar AND independent of `loop_var` (otherwise hoisting them out of
/// the loop would change results). Returns `None` when no kernel applies.
fn build_loop_bulk_plan(
    locals: &HashMap<String, LocalInfo>,
    dest_info: &LocalInfo,
    loop_var: &str,
    value: &crate::ast::expr::SpannedExpr,
) -> Option<BulkArrayPlan> {
    if let Expr::BinaryOp { op, left, right } = &value.node {
        // Case 1: dest(i) = a(i) <op> b(i) with conformable a, b.
        if let Some(kernel) = bulk_array_binary_runtime_name(op.clone(), &dest_info.ty) {
            let lhs = loop_indexed_array_ref(locals, left, loop_var);
            let rhs = loop_indexed_array_ref(locals, right, loop_var);
            if let (Some(lhs), Some(rhs)) = (lhs, rhs) {
                if bulk_arrays_compatible(dest_info, &lhs.info)
                    && bulk_arrays_compatible(dest_info, &rhs.info)
                {
                    return Some(BulkArrayPlan::ArrayBinary {
                        kernel,
                        lhs: lhs.info,
                        rhs: rhs.info,
                    });
                }
            }
        }

        // Case 2: one `arr(i)` operand, one loop-invariant scalar operand.
        let lhs = loop_indexed_array_ref(locals, left, loop_var);
        let rhs = loop_indexed_array_ref(locals, right, loop_var);
        let lhs_scalar =
            !expr_contains_array_refs(left, locals) && !expr_mentions_name(left, loop_var);
        let rhs_scalar =
            !expr_contains_array_refs(right, locals) && !expr_mentions_name(right, loop_var);

        if let Some(lhs) = lhs {
            if rhs_scalar && bulk_arrays_compatible(dest_info, &lhs.info) {
                if let Some(kernel) = bulk_array_scalar_runtime_name(op.clone(), &dest_info.ty) {
                    return Some(BulkArrayPlan::ArrayScalar {
                        kernel,
                        array: lhs.info,
                        scalar: (**right).clone(),
                    });
                }
            }
        }

        // scalar <op> arr(i): Add/Mul commute and reuse the array-scalar
        // kernel; Sub needs the dedicated scalar-first kernel.
        if let Some(rhs) = rhs {
            if lhs_scalar && bulk_arrays_compatible(dest_info, &rhs.info) {
                match op {
                    BinaryOp::Add | BinaryOp::Mul => {
                        if let Some(kernel) =
                            bulk_array_scalar_runtime_name(op.clone(), &dest_info.ty)
                        {
                            return Some(BulkArrayPlan::ArrayScalar {
                                kernel,
                                array: rhs.info,
                                scalar: (**left).clone(),
                            });
                        }
                    }
                    BinaryOp::Sub => {
                        if let Some(kernel) =
                            bulk_scalar_array_runtime_name(op.clone(), &dest_info.ty)
                        {
                            return Some(BulkArrayPlan::ScalarArray {
                                kernel,
                                scalar: (**left).clone(),
                                array: rhs.info,
                            });
                        }
                    }
                    _ => {}
                }
            }
        }
    }

    // Case 3: loop-invariant scalar RHS → broadcast fill.
    if !expr_contains_array_refs(value, locals) && !expr_mentions_name(value, loop_var) {
        if let Some(kernel) = bulk_fill_runtime_name(&dest_info.ty) {
            return Some(BulkArrayPlan::Fill {
                kernel,
                scalar: value.clone(),
            });
        }
    }

    None
}
16965
16966 fn try_lower_bulk_array_assign(
16967 b: &mut FuncBuilder,
16968 ctx: &mut LowerCtx,
16969 dest_info: &LocalInfo,
16970 value: &crate::ast::expr::SpannedExpr,
16971 ) -> bool {
16972 if let Some(plan) = build_whole_array_bulk_plan(&ctx.locals, dest_info, value) {
16973 let n = array_total_elems_value(b, dest_info);
16974 emit_bulk_array_plan(b, ctx, dest_info, n, plan);
16975 return true;
16976 }
16977 false
16978 }
16979
16980 /// Extract base variable name from an expression.
16981 fn extract_base_name(expr: &crate::ast::expr::SpannedExpr) -> Option<String> {
16982 match &expr.node {
16983 Expr::Name { name } => Some(name.clone()),
16984 Expr::FunctionCall { callee, .. } => extract_base_name(callee),
16985 _ => None,
16986 }
16987 }
16988
/// Lower FORALL by nesting loops recursively: each `ForallSpec` becomes
/// one counted loop level, and the statement body — optionally guarded by
/// the mask expression — executes inside the innermost level.
///
/// Index variables are created on demand as i32 locals and stay in
/// `ctx.locals` afterwards. NOTE(review): the mask is evaluated per
/// iteration and indices leak into the enclosing scope — confirm no
/// caller depends on their final values.
fn lower_forall_nested(
    b: &mut FuncBuilder,
    ctx: &mut LowerCtx,
    specs: &[crate::ast::stmt::ForallSpec],
    mask: Option<&crate::ast::expr::SpannedExpr>,
    body: &[SpannedStmt],
) {
    if specs.is_empty() {
        // Innermost level: apply mask and execute body.
        if let Some(mask_expr) = mask {
            let cond = lower_expr_ctx_tl(b, ctx, mask_expr);
            let bb_body = b.create_block("forall_body");
            let bb_skip = b.create_block("forall_skip");
            b.cond_branch(cond, bb_body, vec![], bb_skip, vec![]);
            b.set_block(bb_body);
            lower_stmts(b, ctx, body);
            // Only add the fall-through branch if the body didn't already
            // terminate the block (e.g. via RETURN/GOTO).
            if b.func().block(b.current_block()).terminator.is_none() {
                b.branch(bb_skip, vec![]);
            }
            b.set_block(bb_skip);
        } else {
            lower_stmts(b, ctx, body);
        }
    } else {
        // Wrap remaining specs in a DO loop. The loop body recurses to handle inner specs.
        let spec = &specs[0];
        let remaining = &specs[1..];

        // Build the inner body as a FORALL of remaining specs + body.
        // We use lower_do_loop with the body being the recursive FORALL.
        // But lower_do_loop takes &[SpannedStmt], not a closure.
        // Instead, manually build the loop structure.
        // Reuse an existing local for the index variable, or allocate a
        // fresh i32 slot and register it.
        let key = spec.var.to_lowercase();
        let var_addr = ctx
            .locals
            .get(&key)
            .map(|info| info.addr)
            .unwrap_or_else(|| {
                let addr = b.alloca(IrType::Int(IntWidth::I32));
                ctx.locals.insert(
                    key.clone(),
                    LocalInfo {
                        addr,
                        ty: IrType::Int(IntWidth::I32),
                        dims: vec![],
                        allocatable: false,
                        descriptor_arg: false,
                        by_ref: false,
                        char_kind: CharKind::None,
                        derived_type: None,
                        inline_const: None,
                        is_pointer: false,
                        runtime_dim_upper: vec![],
                    },
                );
                addr
            });

        // Bounds and step are evaluated once, before the loop.
        let init_val = lower_expr_ctx(b, ctx, &spec.start);
        b.store(init_val, var_addr);
        let end_val = lower_expr_ctx(b, ctx, &spec.end);
        let step_val = spec
            .step
            .as_ref()
            .map(|s| lower_expr_ctx(b, ctx, s))
            .unwrap_or_else(|| b.const_i32(1));

        let bb_check = b.create_block("forall_check");
        let bb_loop = b.create_block("forall_loop");
        let bb_incr = b.create_block("forall_incr");
        let bb_exit = b.create_block("forall_exit");
        b.branch(bb_check, vec![]);

        b.set_block(bb_check);
        let cur = b.load(var_addr);
        // Handle both positive and negative steps: done = (step >= 0 && cur > end) || (step < 0 && cur < end)
        let zero_const = b.const_i32(0);
        let step_neg = b.icmp(CmpOp::Lt, step_val, zero_const);
        let gt_end = b.icmp(CmpOp::Gt, cur, end_val);
        let lt_end = b.icmp(CmpOp::Lt, cur, end_val);
        let done = b.select(step_neg, lt_end, gt_end);
        b.cond_branch(done, bb_exit, vec![], bb_loop, vec![]);

        b.set_block(bb_loop);
        // Recurse: lower remaining specs + body inside this loop.
        lower_forall_nested(b, ctx, remaining, mask, body);
        if b.func().block(b.current_block()).terminator.is_none() {
            b.branch(bb_incr, vec![]);
        }

        b.set_block(bb_incr);
        let cur2 = b.load(var_addr);
        let next = b.iadd(cur2, step_val);
        b.store(next, var_addr);
        b.branch(bb_check, vec![]);

        b.set_block(bb_exit);
    }
}
17092
/// Lower whole-array assignment `a = <value>`, trying paths in order:
/// 1. array-constructor literal → element-wise stores;
/// 2. descriptor-backed character arrays → runtime char-assign loop;
/// 3. elemental-function rewrite (`try_lower_elemental_array_assign`);
/// 4. bulk runtime kernel (`try_lower_bulk_array_assign`);
/// 5. `a = b` with array RHS → raw `memcpy`;
/// 6. scalar RHS → broadcast store loop.
fn lower_array_assign(
    b: &mut FuncBuilder,
    ctx: &mut LowerCtx,
    dest_name: &str,
    dest_info: &LocalInfo,
    value: &crate::ast::expr::SpannedExpr,
) {
    // a = [v0, v1, v2, ...] — element-wise store of an array
    // constructor's literal values into the destination.
    if let Expr::ArrayConstructor { values, .. } = &value.node {
        let dest_base = array_base_addr(b, dest_info);
        store_ac_values_into(b, &ctx.locals, dest_base, &dest_info.ty, values, ctx.st);
        return;
    }

    // Character destinations with a descriptor: per-element runtime
    // assignment (padding/truncation handled by afs_assign_char_fixed).
    if (dest_info.char_kind != CharKind::None || descriptor_backed_runtime_char_array(dest_info))
        && local_uses_array_descriptor(dest_info)
    {
        let dest_desc = array_descriptor_addr(b, dest_info);
        let dest_base = b.load_typed(dest_desc, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
        let dest_n = b.call(
            FuncRef::External("afs_array_size".into()),
            vec![dest_desc],
            IrType::Int(IntWidth::I64),
        );
        // Stride and per-element byte length come from the descriptor
        // (offset 24+16 is the first dimension's stride field).
        let dest_stride = load_array_desc_i64_field(b, dest_desc, 24 + 16);
        let dest_elem_len = descriptor_elem_size(b, dest_desc);
        let src_desc = lower_array_expr_descriptor(b, &ctx.locals, value, ctx.st);
        let src_n = src_desc.as_ref().map(|(desc, _)| {
            b.call(
                FuncRef::External("afs_array_size".into()),
                vec![*desc],
                IrType::Int(IntWidth::I64),
            )
        });

        // Stack counter loop over min(dest_n, src_n) elements.
        let i_addr = b.alloca(IrType::Int(IntWidth::I64));
        let zero = b.const_i64(0);
        b.store(zero, i_addr);

        let bb_check = b.create_block("char_array_assign_check");
        let bb_body = b.create_block("char_array_assign_body");
        let bb_exit = b.create_block("char_array_assign_exit");
        b.branch(bb_check, vec![]);

        b.set_block(bb_check);
        let i = b.load(i_addr);
        let mut done = b.icmp(CmpOp::Ge, i, dest_n);
        if let Some(src_len) = src_n {
            let src_done = b.icmp(CmpOp::Ge, i, src_len);
            done = b.or(done, src_done);
        }
        b.cond_branch(done, bb_exit, vec![], bb_body, vec![]);

        b.set_block(bb_body);
        let i_val = b.load(i_addr);
        let dest_index = b.imul(i_val, dest_stride);
        let dest_off = b.imul(dest_index, dest_elem_len);
        let dest_ptr = b.gep(dest_base, vec![dest_off], IrType::Int(IntWidth::I8));

        let (src_ptr, src_len) = if let Some((desc, _)) = src_desc.as_ref() {
            let src_base = b.load_typed(*desc, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
            let src_stride = load_array_desc_i64_field(b, *desc, 24 + 16);
            let src_index = b.imul(i_val, src_stride);
            let src_elem_len = descriptor_elem_size(b, *desc);
            let src_off = b.imul(src_index, src_elem_len);
            let src_ptr = b.gep(src_base, vec![src_off], IrType::Int(IntWidth::I8));
            (src_ptr, src_elem_len)
        } else {
            // Scalar string RHS, re-evaluated each iteration.
            lower_string_expr_ctx(b, ctx, value)
        };
        b.call(
            FuncRef::External("afs_assign_char_fixed".into()),
            vec![dest_ptr, dest_elem_len, src_ptr, src_len],
            IrType::Void,
        );

        let one = b.const_i64(1);
        let next_i = b.iadd(i_val, one);
        b.store(next_i, i_addr);
        b.branch(bb_check, vec![]);

        b.set_block(bb_exit);
        return;
    }

    if try_lower_elemental_array_assign(b, ctx, dest_name, dest_info, value) {
        return;
    }

    if try_lower_bulk_array_assign(b, ctx, dest_info, value) {
        return;
    }

    // Check if RHS is also an array variable → element-wise copy via memcpy.
    let rhs_is_array = if let Expr::Name { name } = &value.node {
        ctx.locals
            .get(&name.to_lowercase())
            .map(|i| !i.dims.is_empty() || i.allocatable)
            .unwrap_or(false)
    } else {
        false
    };

    if rhs_is_array {
        // a = b: memcpy from b's data to a's data.
        // NOTE(review): no element-type coercion or size check on the
        // source here — byte count is derived from the DEST type and
        // shape; confirm src conforms before this path is taken.
        let dest_base = array_base_addr(b, dest_info);

        if let Expr::Name { name } = &value.node {
            let key = name.to_lowercase();
            if let Some(src_info) = ctx.locals.get(&key) {
                let src_base = array_base_addr(b, src_info);

                // Compute byte count: size(a) * elem_size.
                let n = array_total_elems_value(b, dest_info);
                let elem_bytes = b.const_i64(ir_scalar_byte_size(&dest_info.ty));
                let byte_count = b.imul(n, elem_bytes);
                b.call(
                    FuncRef::External("memcpy".into()),
                    vec![dest_base, src_base, byte_count],
                    IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
                );
            }
        }
    } else {
        // a = scalar: broadcast scalar to all elements.
        // Generate a loop with stack-allocated counter.
        let scalar = lower_expr_ctx_tl(b, ctx, value);
        let dest_base = array_base_addr(b, dest_info);
        let n = array_total_elems_value(b, dest_info);

        // Stack-allocated loop counter.
        let i_addr = b.alloca(IrType::Int(IntWidth::I64));
        let zero = b.const_i64(0);
        b.store(zero, i_addr);

        let bb_check = b.create_block("broadcast_check");
        let bb_body = b.create_block("broadcast_body");
        let bb_exit = b.create_block("broadcast_exit");
        b.branch(bb_check, vec![]);

        b.set_block(bb_check);
        let i = b.load(i_addr);
        let done = b.icmp(CmpOp::Ge, i, n);
        b.cond_branch(done, bb_exit, vec![], bb_body, vec![]);

        b.set_block(bb_body);
        let i_val = b.load(i_addr);
        // Compute byte offset: i * elem_size. Use byte-level GEP to avoid double multiplication.
        // The catch-all arm covers 4-byte element types (i32/f32).
        let elem_bytes = match &dest_info.ty {
            IrType::Int(IntWidth::I128) => b.const_i64(16),
            IrType::Int(IntWidth::I64) | IrType::Float(FloatWidth::F64) => b.const_i64(8),
            IrType::Int(IntWidth::I16) => b.const_i64(2),
            IrType::Int(IntWidth::I8) => b.const_i64(1),
            _ => b.const_i64(4),
        };
        let byte_offset = b.imul(i_val, elem_bytes);
        let elem_ptr = b.gep(dest_base, vec![byte_offset], IrType::Int(IntWidth::I8));
        b.store(scalar, elem_ptr);
        let one = b.const_i64(1);
        let next_i = b.iadd(i_val, one);
        b.store(next_i, i_addr);
        b.branch(bb_check, vec![]);

        b.set_block(bb_exit);
    }
}
17261
17262 /// Collect all array variable names referenced in an expression.
17263 fn collect_array_names(
17264 expr: &crate::ast::expr::SpannedExpr,
17265 locals: &HashMap<String, LocalInfo>,
17266 out: &mut Vec<String>,
17267 ) {
17268 match &expr.node {
17269 Expr::Name { name } => {
17270 let key = name.to_lowercase();
17271 if let Some(info) = locals.get(&key) {
17272 if local_is_array_like(info) && !out.contains(&key) {
17273 out.push(key);
17274 }
17275 }
17276 }
17277 Expr::BinaryOp { left, right, .. } => {
17278 collect_array_names(left, locals, out);
17279 collect_array_names(right, locals, out);
17280 }
17281 Expr::UnaryOp { operand, .. } => collect_array_names(operand, locals, out),
17282 Expr::ParenExpr { inner } => collect_array_names(inner, locals, out),
17283 Expr::FunctionCall { args, .. } => {
17284 if let Expr::FunctionCall { callee, .. } = &expr.node {
17285 collect_array_names(callee, locals, out);
17286 }
17287 for a in args {
17288 if let crate::ast::expr::SectionSubscript::Element(e) = &a.value {
17289 collect_array_names(e, locals, out);
17290 } else if let crate::ast::expr::SectionSubscript::Range { start, end, stride } =
17291 &a.value
17292 {
17293 if let Some(e) = start {
17294 collect_array_names(e, locals, out);
17295 }
17296 if let Some(e) = end {
17297 collect_array_names(e, locals, out);
17298 }
17299 if let Some(e) = stride {
17300 collect_array_names(e, locals, out);
17301 }
17302 }
17303 }
17304 }
17305 _ => {}
17306 }
17307 }
17308
17309 /// Collect array names referenced in a statement (for WHERE body analysis).
17310 fn collect_array_names_stmt(
17311 stmt: &SpannedStmt,
17312 locals: &HashMap<String, LocalInfo>,
17313 out: &mut Vec<String>,
17314 ) {
17315 if let Stmt::Assignment { target, value } = &stmt.node {
17316 collect_array_names(target, locals, out);
17317 collect_array_names(value, locals, out);
17318 }
17319 }
17320
17321 /// Find the first array variable referenced in an expression (for WHERE mask detection).
17322 fn find_array_in_expr(
17323 expr: &crate::ast::expr::SpannedExpr,
17324 locals: &HashMap<String, LocalInfo>,
17325 ) -> Option<LocalInfo> {
17326 match &expr.node {
17327 Expr::Name { name } => {
17328 let key = name.to_lowercase();
17329 locals
17330 .get(&key)
17331 .filter(|i| !i.dims.is_empty() || i.allocatable)
17332 .cloned()
17333 }
17334 Expr::BinaryOp { left, right, .. } => {
17335 find_array_in_expr(left, locals).or_else(|| find_array_in_expr(right, locals))
17336 }
17337 Expr::UnaryOp { operand, .. } => find_array_in_expr(operand, locals),
17338 Expr::ParenExpr { inner } => find_array_in_expr(inner, locals),
17339 Expr::FunctionCall { callee, args } => find_array_in_expr(callee, locals).or_else(|| {
17340 args.iter().find_map(|a| {
17341 if let crate::ast::expr::SectionSubscript::Element(e) = &a.value {
17342 find_array_in_expr(e, locals)
17343 } else if let crate::ast::expr::SectionSubscript::Range { start, end, stride } =
17344 &a.value
17345 {
17346 start
17347 .as_ref()
17348 .and_then(|e| find_array_in_expr(e, locals))
17349 .or_else(|| end.as_ref().and_then(|e| find_array_in_expr(e, locals)))
17350 .or_else(|| stride.as_ref().and_then(|e| find_array_in_expr(e, locals)))
17351 } else {
17352 None
17353 }
17354 })
17355 }),
17356 _ => None,
17357 }
17358 }
17359
17360 /// Check if the first argument refers to a REAL array (for type dispatch).
17361 fn first_arg_is_real(
17362 args: &[crate::ast::expr::Argument],
17363 locals: &HashMap<String, LocalInfo>,
17364 ) -> bool {
17365 args.first()
17366 .and_then(|a| {
17367 if let crate::ast::expr::SectionSubscript::Element(e) = &a.value {
17368 if let Expr::Name { name } = &e.node {
17369 locals.get(&name.to_lowercase()).map(|i| i.ty.is_float())
17370 } else {
17371 None
17372 }
17373 } else {
17374 None
17375 }
17376 })
17377 .unwrap_or(false)
17378 }
17379
/// Lower an array section expression: a(1:10:2) → create section descriptor.
///
/// Builds a stack array of per-dimension SectionSpecs (start, end,
/// stride — three i64s, 24 bytes per spec), zero-fills a 384-byte
/// result descriptor, then calls `afs_create_section` to populate it
/// from the source array's descriptor. Returns the address of the
/// result descriptor.
///
/// Omitted bounds default to the array's bounds: a missing start uses
/// LBOUND (runtime query for descriptor-backed arrays, declared lower
/// bound otherwise), a missing end uses UBOUND likewise, and a missing
/// stride defaults to 1.
fn lower_array_section(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    info: &LocalInfo,
    args: &[crate::ast::expr::Argument],
    st: &SymbolTable,
) -> ValueId {
    let n_dims = args.len();

    // Allocate SectionSpec array on stack: each spec is 24 bytes (3 x i64).
    let spec_array_size = (n_dims * 24) as u64;
    let specs = b.alloca(IrType::Array(
        Box::new(IrType::Int(IntWidth::I8)),
        spec_array_size,
    ));

    // Fill in each SectionSpec from the subscript ranges.
    for (i, arg) in args.iter().enumerate() {
        let base_offset = (i * 24) as i64;
        match &arg.value {
            crate::ast::expr::SectionSubscript::Range { start, end, stride } => {
                // start: explicit expression widened to i64, else the
                // dimension's lower bound.
                let start_val = start
                    .as_ref()
                    .map(|e| {
                        let raw = lower_expr(b, locals, e, st);
                        widen_idx_to_i64(b, raw)
                    })
                    .unwrap_or_else(|| {
                        if local_uses_array_descriptor(info) {
                            let dim = b.const_i32((i + 1) as i32);
                            let desc = array_descriptor_addr(b, info);
                            b.call(
                                FuncRef::External("afs_array_lbound".into()),
                                vec![desc, dim],
                                IrType::Int(IntWidth::I64),
                            )
                        } else {
                            let lower = info.dims.get(i).copied().map(|(lo, _)| lo).unwrap_or(1);
                            b.const_i64(lower)
                        }
                    });
                // end: explicit expression, else the dimension's upper
                // bound (lower + extent - 1 for fixed-shape arrays).
                let end_val = end
                    .as_ref()
                    .map(|e| {
                        let raw = lower_expr(b, locals, e, st);
                        widen_idx_to_i64(b, raw)
                    })
                    .unwrap_or_else(|| {
                        if local_uses_array_descriptor(info) {
                            let dim = b.const_i32((i + 1) as i32);
                            let desc = array_descriptor_addr(b, info);
                            b.call(
                                FuncRef::External("afs_array_ubound".into()),
                                vec![desc, dim],
                                IrType::Int(IntWidth::I64),
                            )
                        } else {
                            let (lower, extent) = info.dims.get(i).copied().unwrap_or((1, 1));
                            b.const_i64(lower + extent - 1)
                        }
                    });
                let stride_val = stride
                    .as_ref()
                    .map(|e| {
                        let raw = lower_expr(b, locals, e, st);
                        widen_idx_to_i64(b, raw)
                    })
                    .unwrap_or_else(|| b.const_i64(1)); // default stride = 1

                // Store start at offset+0, end at offset+8, stride at offset+16.
                let off0 = b.const_i64(base_offset);
                let off8 = b.const_i64(base_offset + 8);
                let off16 = b.const_i64(base_offset + 16);
                let p0 = b.gep(specs, vec![off0], IrType::Int(IntWidth::I8));
                let p8 = b.gep(specs, vec![off8], IrType::Int(IntWidth::I8));
                let p16 = b.gep(specs, vec![off16], IrType::Int(IntWidth::I8));
                b.store(start_val, p0);
                b.store(end_val, p8);
                b.store(stride_val, p16);
            }
            crate::ast::expr::SectionSubscript::Element(e) => {
                // Single element subscript in a section context — treat as start=end=val, stride=1.
                let raw = lower_expr(b, locals, e, st);
                let val = widen_idx_to_i64(b, raw);
                let off0 = b.const_i64(base_offset);
                let off8 = b.const_i64(base_offset + 8);
                let off16 = b.const_i64(base_offset + 16);
                let p0 = b.gep(specs, vec![off0], IrType::Int(IntWidth::I8));
                let p8 = b.gep(specs, vec![off8], IrType::Int(IntWidth::I8));
                let p16 = b.gep(specs, vec![off16], IrType::Int(IntWidth::I8));
                b.store(val, p0);
                b.store(val, p8);
                let one = b.const_i64(1);
                b.store(one, p16);
            }
        }
    }

    // Allocate result descriptor on stack (384 bytes).
    let result_desc = b.alloca(IrType::Array(Box::new(IrType::Int(IntWidth::I8)), 384));
    let zero = b.const_i32(0);
    let sz384 = b.const_i64(384);
    b.call(
        FuncRef::External("memset".into()),
        vec![result_desc, zero, sz384],
        IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
    );

    // Call afs_create_section(source, result, specs, n_dims).
    // Fixed-shape arrays have no live descriptor, so materialize a
    // temporary one describing them first.
    let ndims = b.const_i32(n_dims as i32);
    let source_desc = if local_uses_array_descriptor(info) {
        array_descriptor_addr(b, info)
    } else {
        materialize_array_descriptor_for_info(b, info)
    };
    b.call(
        FuncRef::External("afs_create_section".into()),
        vec![source_desc, result_desc, specs, ndims],
        IrType::Void,
    );

    result_desc
}
17504
/// Lower pointer-only intrinsics like `ASSOCIATED(p)`. Kept
/// separate from `lower_array_intrinsic` because the argument
/// filter there rejects scalar and derived-type pointers (they
/// don't have array dims) — but ASSOCIATED works on every
/// pointer shape.
///
/// Returns `Some(bool_value)` for `ASSOCIATED(p)` and
/// `ASSOCIATED(p, target)`, `None` for any other name or shape
/// so the caller can fall through.
fn lower_pointer_intrinsic(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    name: &str,
    args: &[crate::ast::expr::Argument],
    st: &SymbolTable,
    type_layouts: Option<&crate::sema::type_layout::TypeLayoutRegistry>,
) -> Option<ValueId> {
    if name != "associated" {
        return None;
    }
    // The first argument must be a plain element expression naming a
    // pointer: either a pointer variable or a derived-type component
    // with the POINTER attribute.
    let first = args.first()?;
    let crate::ast::expr::SectionSubscript::Element(expr) = &first.value else {
        return None;
    };
    // `raw` becomes the pointer's stored target address read as i64.
    let raw = if let Expr::Name { name: ptr_name } = &expr.node {
        let info = locals.get(&ptr_name.to_lowercase())?.clone();
        if !info.is_pointer {
            return None;
        }
        let zero_off = b.const_i64(0);
        // A by-ref dummy's slot holds the caller's slot address;
        // dereference once before reading the stored pointer value.
        let ptr_slot = if info.by_ref {
            b.load(info.addr)
        } else {
            info.addr
        };
        let base_ptr = b.gep(ptr_slot, vec![zero_off], IrType::Int(IntWidth::I64));
        b.load_typed(base_ptr, IrType::Int(IntWidth::I64))
    } else if let Some(tl) = type_layouts {
        // Component access form: base%field with field.pointer set.
        let (field_ptr, field) = resolve_component_field_access(b, locals, expr, st, tl)?;
        if !field.pointer {
            return None;
        }
        if is_deferred_char_component_field(&field) {
            // Deferred-length character pointer: test the descriptor's
            // data pointer instead of a raw pointer slot.
            let (ptr, _len) = load_string_descriptor_view(b, field_ptr);
            coerce_to_type(b, ptr, &IrType::Int(IntWidth::I64))
        } else {
            let zero_off = b.const_i64(0);
            let base_ptr = b.gep(field_ptr, vec![zero_off], IrType::Int(IntWidth::I64));
            b.load_typed(base_ptr, IrType::Int(IntWidth::I64))
        }
    } else {
        return None;
    };
    let zero = b.const_i64(0);

    if args.len() >= 2 {
        // Two-argument form: ASSOCIATED(p, target).
        // True iff p's stored address equals the target's address.
        // Both values are compared as raw i64 representations.
        // When the target is not a resolvable simple name, fall back
        // to the one-argument (non-null) semantics below.
        let second = &args[1];
        let crate::ast::expr::SectionSubscript::Element(tgt_expr) = &second.value else {
            return Some(b.icmp(CmpOp::Ne, raw, zero));
        };
        let Expr::Name { name: tgt_name } = &tgt_expr.node else {
            return Some(b.icmp(CmpOp::Ne, raw, zero));
        };
        let Some(tgt_info) = locals.get(&tgt_name.to_lowercase()) else {
            return Some(b.icmp(CmpOp::Ne, raw, zero));
        };
        // Get the target's address as i64 for comparison.
        // For a pointer: load the stored address from its slot.
        // For a plain variable: write info.addr into a scratch
        // i64 slot and read it back (effectively ptrtoint).
        let tgt_addr = if tgt_info.is_pointer {
            let off = b.const_i64(0);
            let ptr_slot = if tgt_info.by_ref {
                b.load(tgt_info.addr)
            } else {
                tgt_info.addr
            };
            let tgt_slot = b.gep(ptr_slot, vec![off], IrType::Int(IntWidth::I64));
            b.load_typed(tgt_slot, IrType::Int(IntWidth::I64))
        } else {
            let scratch = b.alloca(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
            b.store(tgt_info.addr, scratch);
            b.load_typed(scratch, IrType::Int(IntWidth::I64))
        };
        return Some(b.icmp(CmpOp::Eq, raw, tgt_addr));
    }

    // One-argument form: associated iff the stored address is non-null.
    Some(b.icmp(CmpOp::Ne, raw, zero))
}
17599
/// Lower `ALLOCATED(s)` for deferred-length character scalars.
///
/// Only handles arguments backed by a 32-byte StringDescriptor: a
/// `character(:), allocatable` local, or a deferred-length character
/// component of a derived type. Returns `None` for any other name or
/// argument shape so the caller can try the array form instead.
fn lower_scalar_allocated_intrinsic(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    name: &str,
    args: &[crate::ast::expr::Argument],
    st: &SymbolTable,
    type_layouts: Option<&crate::sema::type_layout::TypeLayoutRegistry>,
) -> Option<ValueId> {
    if name != "allocated" {
        return None;
    }
    let first = args.first()?;
    let crate::ast::expr::SectionSubscript::Element(expr) = &first.value else {
        return None;
    };
    // Resolve the argument to its StringDescriptor address.
    let desc = match &expr.node {
        Expr::Name { name } => {
            let info = locals.get(&name.to_lowercase())?;
            if !matches!(info.char_kind, CharKind::Deferred) {
                return None;
            }
            string_descriptor_addr(b, info)
        }
        Expr::ComponentAccess { .. } => {
            let tl = type_layouts?;
            let (field_ptr, field) = resolve_component_field_access(b, locals, expr, st, tl)?;
            // Require a deferred-length character field stored as a
            // 32-byte string descriptor; anything else falls through.
            if !matches!(field_char_kind(&field), CharKind::Deferred) || field.size != 32 {
                return None;
            }
            field_ptr
        }
        _ => return None,
    };
    // Runtime returns nonzero when the string is currently allocated;
    // convert that i32 to the IR bool.
    let raw = b.call(
        FuncRef::External("afs_string_allocated".into()),
        vec![desc],
        IrType::Int(IntWidth::I32),
    );
    let zero = b.const_i32(0);
    Some(b.icmp(CmpOp::Ne, raw, zero))
}
17641
17642 fn component_intrinsic_local_info(
17643 b: &mut FuncBuilder,
17644 locals: &HashMap<String, LocalInfo>,
17645 expr: &crate::ast::expr::SpannedExpr,
17646 st: &SymbolTable,
17647 tl: &crate::sema::type_layout::TypeLayoutRegistry,
17648 ) -> Option<LocalInfo> {
17649 let (field_ptr, field) = resolve_component_field_access(b, locals, expr, st, tl)?;
17650 if field.size == 384 && (field.allocatable || field.pointer) {
17651 return Some(LocalInfo {
17652 addr: field_ptr,
17653 ty: type_info_to_storage_ir_type(&field.type_info, tl),
17654 dims: vec![],
17655 allocatable: true,
17656 descriptor_arg: false,
17657 by_ref: false,
17658 char_kind: field_char_kind(&field),
17659 derived_type: field_derived_type_name(&field),
17660 inline_const: None,
17661 is_pointer: field.pointer,
17662 runtime_dim_upper: vec![],
17663 });
17664 }
17665 if field.dims.is_empty() {
17666 return None;
17667 }
17668 Some(LocalInfo {
17669 addr: field_ptr,
17670 ty: type_info_to_storage_ir_type(&field.type_info, tl),
17671 dims: field.dims.clone(),
17672 allocatable: false,
17673 descriptor_arg: false,
17674 by_ref: false,
17675 char_kind: field_char_kind(&field),
17676 derived_type: field_derived_type_name(&field),
17677 inline_const: None,
17678 is_pointer: field.pointer,
17679 runtime_dim_upper: vec![],
17680 })
17681 }
17682
/// Lower array intrinsics that need descriptor addresses: SIZE, LBOUND,
/// UBOUND, ALLOCATED, SUM, PRODUCT, MAXVAL, MINVAL, DOT_PRODUCT, MATMUL,
/// TRANSPOSE.
///
/// Fires only when the first argument names an array-like local or a
/// derived-type array component; returns `None` otherwise so the caller
/// can fall through to the generic call path. Fixed-shape arrays answer
/// SIZE/LBOUND/UBOUND from compile-time dims via `select` chains;
/// descriptor-backed arrays query the runtime. Reductions dispatch on
/// the first argument's element type (real vs int).
fn lower_array_intrinsic(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    name: &str,
    args: &[crate::ast::expr::Argument],
    st: &SymbolTable,
    type_layouts: Option<&crate::sema::type_layout::TypeLayoutRegistry>,
) -> Option<ValueId> {
    // Resolve the first argument to the LocalInfo of an array-like
    // entity (plain name, or derived-type component via type_layouts).
    let first_arg_info = args.first().and_then(|a| {
        if let crate::ast::expr::SectionSubscript::Element(e) = &a.value {
            match &e.node {
                Expr::Name { name } => {
                    let key = name.to_lowercase();
                    locals
                        .get(&key)
                        .cloned()
                        .filter(|i| local_uses_array_descriptor(i) || !i.dims.is_empty())
                }
                Expr::ComponentAccess { .. } => {
                    type_layouts.and_then(|tl| component_intrinsic_local_info(b, locals, e, st, tl))
                }
                _ => None,
            }
        } else {
            None
        }
    });

    let info = first_arg_info?;
    // NOTE(review): this descriptor is built unconditionally even though
    // the size/lbound/ubound branches below recompute their own; for
    // fixed-shape arrays this materializes a descriptor some branches
    // never use.
    let desc = if local_uses_array_descriptor(&info) {
        array_descriptor_addr(b, &info)
    } else {
        materialize_array_descriptor_for_info(b, &info)
    };

    match name {
        "size" => {
            if args.len() >= 2 {
                // SIZE(array, dim)
                if let crate::ast::expr::SectionSubscript::Element(e) = &args[1].value {
                    let dim = lower_expr(b, locals, e, st);
                    let result64 = if local_uses_array_descriptor(&info) {
                        let desc = array_descriptor_addr(b, &info);
                        b.call(
                            FuncRef::External("afs_array_size_dim".into()),
                            vec![desc, dim],
                            IrType::Int(IntWidth::I64),
                        )
                    } else {
                        // Fixed shape: fold the extents into a select
                        // chain keyed on the zero-based dim index.
                        let raw_dim = match b.func().value_type(dim) {
                            Some(IrType::Int(IntWidth::I64)) => b.int_trunc(dim, IntWidth::I32),
                            _ => dim,
                        };
                        let one = b.const_i32(1);
                        let zero = b.const_i64(0);
                        let idx0 = b.isub(raw_dim, one);
                        let mut result = zero;
                        for (idx, (_lower, extent)) in info.dims.iter().enumerate() {
                            let cond_idx = b.const_i32(idx as i32);
                            let is_match = b.icmp(CmpOp::Eq, idx0, cond_idx);
                            let extent_val = b.const_i64(*extent);
                            result = b.select(is_match, extent_val, result);
                        }
                        result
                    };
                    Some(b.int_trunc(result64, IntWidth::I32))
                } else {
                    None
                }
            } else {
                // SIZE(array)
                let result64 = if local_uses_array_descriptor(&info) {
                    let desc = array_descriptor_addr(b, &info);
                    b.call(
                        FuncRef::External("afs_array_size".into()),
                        vec![desc],
                        IrType::Int(IntWidth::I64),
                    )
                } else {
                    // Fixed shape: total size is the product of extents,
                    // known at compile time.
                    let total: i64 = info.dims.iter().map(|(_, extent)| *extent).product();
                    b.const_i64(total.max(0))
                };
                Some(b.int_trunc(result64, IntWidth::I32))
            }
        }
        "lbound" => {
            // Only LBOUND(array, dim) is handled here; the dim-less
            // (array-valued) form falls through to the caller.
            if args.len() >= 2 {
                if let crate::ast::expr::SectionSubscript::Element(e) = &args[1].value {
                    let dim = lower_expr(b, locals, e, st);
                    let result64 = if local_uses_array_descriptor(&info) {
                        let desc = array_descriptor_addr(b, &info);
                        b.call(
                            FuncRef::External("afs_array_lbound".into()),
                            vec![desc, dim],
                            IrType::Int(IntWidth::I64),
                        )
                    } else {
                        let raw_dim = match b.func().value_type(dim) {
                            Some(IrType::Int(IntWidth::I64)) => b.int_trunc(dim, IntWidth::I32),
                            _ => dim,
                        };
                        let one = b.const_i32(1);
                        let default = b.const_i64(1);
                        let idx0 = b.isub(raw_dim, one);
                        let mut result = default;
                        for (idx, (lower, _extent)) in info.dims.iter().enumerate() {
                            let cond_idx = b.const_i32(idx as i32);
                            let is_match = b.icmp(CmpOp::Eq, idx0, cond_idx);
                            let lower_val = b.const_i64(*lower);
                            result = b.select(is_match, lower_val, result);
                        }
                        result
                    };
                    Some(b.int_trunc(result64, IntWidth::I32))
                } else {
                    None
                }
            } else {
                None
            }
        }
        "ubound" => {
            // Only UBOUND(array, dim) is handled here, mirroring lbound.
            if args.len() >= 2 {
                if let crate::ast::expr::SectionSubscript::Element(e) = &args[1].value {
                    let dim = lower_expr(b, locals, e, st);
                    let result64 = if local_uses_array_descriptor(&info) {
                        let desc = array_descriptor_addr(b, &info);
                        b.call(
                            FuncRef::External("afs_array_ubound".into()),
                            vec![desc, dim],
                            IrType::Int(IntWidth::I64),
                        )
                    } else {
                        let raw_dim = match b.func().value_type(dim) {
                            Some(IrType::Int(IntWidth::I64)) => b.int_trunc(dim, IntWidth::I32),
                            _ => dim,
                        };
                        let one = b.const_i32(1);
                        let default = b.const_i64(0);
                        let idx0 = b.isub(raw_dim, one);
                        let mut result = default;
                        for (idx, (lower, extent)) in info.dims.iter().enumerate() {
                            let cond_idx = b.const_i32(idx as i32);
                            let is_match = b.icmp(CmpOp::Eq, idx0, cond_idx);
                            let upper_val = b.const_i64(lower + extent - 1);
                            result = b.select(is_match, upper_val, result);
                        }
                        result
                    };
                    Some(b.int_trunc(result64, IntWidth::I32))
                } else {
                    None
                }
            } else {
                None
            }
        }
        "allocated" => {
            // Runtime returns nonzero when the descriptor owns storage.
            let raw = b.call(
                FuncRef::External("afs_array_allocated".into()),
                vec![desc],
                IrType::Int(IntWidth::I32),
            );
            let zero = b.const_i32(0);
            Some(b.icmp(CmpOp::Ne, raw, zero))
        }
        "sum" => {
            let is_real = first_arg_is_real(args, locals);
            if is_real {
                Some(b.call(
                    FuncRef::External("afs_array_sum_real8".into()),
                    vec![desc],
                    IrType::Float(FloatWidth::F64),
                ))
            } else {
                Some(b.call(
                    FuncRef::External("afs_array_sum_int".into()),
                    vec![desc],
                    IrType::Int(IntWidth::I64),
                ))
            }
        }
        "product" => {
            let is_real = first_arg_is_real(args, locals);
            if is_real {
                Some(b.call(
                    FuncRef::External("afs_array_product_real8".into()),
                    vec![desc],
                    IrType::Float(FloatWidth::F64),
                ))
            } else {
                Some(b.call(
                    FuncRef::External("afs_array_product_int".into()),
                    vec![desc],
                    IrType::Int(IntWidth::I64),
                ))
            }
        }
        "maxval" => {
            // Note: the int variants of maxval/minval return i32, unlike
            // sum/product which return i64.
            let is_real = first_arg_is_real(args, locals);
            if is_real {
                Some(b.call(
                    FuncRef::External("afs_array_maxval_real8".into()),
                    vec![desc],
                    IrType::Float(FloatWidth::F64),
                ))
            } else {
                Some(b.call(
                    FuncRef::External("afs_array_maxval_int".into()),
                    vec![desc],
                    IrType::Int(IntWidth::I32),
                ))
            }
        }
        "minval" => {
            let is_real = first_arg_is_real(args, locals);
            if is_real {
                Some(b.call(
                    FuncRef::External("afs_array_minval_real8".into()),
                    vec![desc],
                    IrType::Float(FloatWidth::F64),
                ))
            } else {
                Some(b.call(
                    FuncRef::External("afs_array_minval_int".into()),
                    vec![desc],
                    IrType::Int(IntWidth::I32),
                ))
            }
        }
        "dot_product" => {
            // The second argument must also resolve to an array; build
            // (or fetch) its descriptor the same way as the first.
            let second_desc = args.get(1).and_then(|a| {
                if let crate::ast::expr::SectionSubscript::Element(e) = &a.value {
                    if let Expr::Name { name } = &e.node {
                        locals
                            .get(&name.to_lowercase())
                            .filter(|i| local_uses_array_descriptor(i) || !i.dims.is_empty())
                            .map(|i| {
                                if local_uses_array_descriptor(i) {
                                    array_descriptor_addr(b, i)
                                } else {
                                    materialize_array_descriptor_for_info(b, i)
                                }
                            })
                    } else {
                        None
                    }
                } else {
                    None
                }
            })?;
            // Get the first arg's element type for dispatch.
            let elem_ty = args
                .first()
                .and_then(|a| {
                    if let crate::ast::expr::SectionSubscript::Element(e) = &a.value {
                        if let Expr::Name { name } = &e.node {
                            locals.get(&name.to_lowercase()).map(|i| i.ty.clone())
                        } else {
                            None
                        }
                    } else {
                        None
                    }
                })
                .unwrap_or(IrType::Float(FloatWidth::F64));
            match &elem_ty {
                IrType::Float(FloatWidth::F64) => Some(b.call(
                    FuncRef::External("afs_dot_product_real8".into()),
                    vec![desc, second_desc],
                    IrType::Float(FloatWidth::F64),
                )),
                IrType::Float(FloatWidth::F32) => Some(b.call(
                    FuncRef::External("afs_dot_product_real4".into()),
                    vec![desc, second_desc],
                    IrType::Float(FloatWidth::F32),
                )),
                _ => Some(b.call(
                    FuncRef::External("afs_dot_product_int".into()),
                    vec![desc, second_desc],
                    IrType::Int(IntWidth::I64),
                )),
            }
        }
        "matmul" => {
            // MATMUL(a, b) → allocate result descriptor, dispatch by type.
            let second_desc = args.get(1).and_then(|a| {
                if let crate::ast::expr::SectionSubscript::Element(e) = &a.value {
                    if let Expr::Name { name } = &e.node {
                        locals
                            .get(&name.to_lowercase())
                            .filter(|i| local_uses_array_descriptor(i) || !i.dims.is_empty())
                            .map(|i| {
                                if local_uses_array_descriptor(i) {
                                    array_descriptor_addr(b, i)
                                } else {
                                    materialize_array_descriptor_for_info(b, i)
                                }
                            })
                    } else {
                        None
                    }
                } else {
                    None
                }
            })?;
            let is_real = first_arg_is_real(args, locals);
            // Zero-initialized 384-byte descriptor receives the result.
            let result_desc = b.alloca(IrType::Array(Box::new(IrType::Int(IntWidth::I8)), 384));
            let zero = b.const_i32(0);
            let sz384 = b.const_i64(384);
            b.call(
                FuncRef::External("memset".into()),
                vec![result_desc, zero, sz384],
                IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
            );
            let func = if is_real {
                "afs_matmul_real8"
            } else {
                "afs_matmul_int"
            };
            b.call(
                FuncRef::External(func.into()),
                vec![desc, second_desc, result_desc],
                IrType::Void,
            );
            Some(result_desc)
        }
        "transpose" => {
            // TRANSPOSE(source) → allocate result descriptor, dispatch by type.
            let is_real = first_arg_is_real(args, locals);
            let result_desc = b.alloca(IrType::Array(Box::new(IrType::Int(IntWidth::I8)), 384));
            let zero = b.const_i32(0);
            let sz384 = b.const_i64(384);
            b.call(
                FuncRef::External("memset".into()),
                vec![result_desc, zero, sz384],
                IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
            );
            let func = if is_real {
                "afs_transpose_real8"
            } else {
                "afs_transpose_int"
            };
            b.call(
                FuncRef::External(func.into()),
                vec![desc, result_desc],
                IrType::Void,
            );
            Some(result_desc)
        }
        _ => None,
    }
}
18036
18037 /// Check if `actual_type` is or extends `target_type` (for CLASS IS matching).
18038 fn is_type_or_extends(
18039 actual_type: &str,
18040 target_type: &str,
18041 tl: &crate::sema::type_layout::TypeLayoutRegistry,
18042 ) -> bool {
18043 if actual_type.eq_ignore_ascii_case(target_type) {
18044 return true;
18045 }
18046 // Walk the parent chain.
18047 let mut current = actual_type.to_lowercase();
18048 loop {
18049 let layout = match tl.get(&current) {
18050 Some(l) => l,
18051 None => return false,
18052 };
18053 match &layout.parent {
18054 Some(parent) if parent.eq_ignore_ascii_case(target_type) => return true,
18055 Some(parent) => current = parent.to_lowercase(),
18056 None => return false,
18057 }
18058 }
18059 }
18060
18061 /// Convert TypeInfo to IR type for field loads.
18062 fn type_info_to_ir_type(ti: &crate::sema::symtab::TypeInfo) -> IrType {
18063 use crate::sema::symtab::TypeInfo;
18064 if let TypeInfo::Derived(name) = ti {
18065 let lower = name.to_lowercase();
18066 if lower == "c_ptr" || lower == "c_funptr" {
18067 return IrType::Int(IntWidth::I64);
18068 }
18069 }
18070 // Derived types lower to a byte pointer — the compiler treats
18071 // values of derived types as addresses into struct-shaped
18072 // buffers. Without this case, size_of_type for a small derived
18073 // type (e.g. one real field → 4 bytes) would fall into the
18074 // Int(I32) arm and the generic dispatcher would compare pointers
18075 // as integers.
18076 if matches!(ti, TypeInfo::Derived(_)) {
18077 return IrType::Ptr(Box::new(IrType::Int(IntWidth::I8)));
18078 }
18079 let (size, _) = crate::sema::type_layout::size_of_type(ti);
18080 match size {
18081 1 => IrType::Int(IntWidth::I8),
18082 2 => IrType::Int(IntWidth::I16),
18083 4 => match ti {
18084 TypeInfo::Real { .. } => IrType::Float(FloatWidth::F32),
18085 TypeInfo::Logical { .. } => IrType::Bool,
18086 _ => IrType::Int(IntWidth::I32),
18087 },
18088 8 => match ti {
18089 TypeInfo::Real { .. } | TypeInfo::DoublePrecision => IrType::Float(FloatWidth::F64),
18090 _ => IrType::Int(IntWidth::I64),
18091 },
18092 16 => IrType::Int(IntWidth::I128),
18093 _ => IrType::Int(IntWidth::I32),
18094 }
18095 }
18096
18097 fn derived_storage_ir_type(
18098 type_name: &str,
18099 tl: &crate::sema::type_layout::TypeLayoutRegistry,
18100 ) -> Option<IrType> {
18101 let layout = tl.get(type_name)?;
18102 Some(IrType::Array(
18103 Box::new(IrType::Int(IntWidth::I8)),
18104 layout.size as u64,
18105 ))
18106 }
18107
18108 fn type_info_to_storage_ir_type(
18109 ti: &crate::sema::symtab::TypeInfo,
18110 tl: &crate::sema::type_layout::TypeLayoutRegistry,
18111 ) -> IrType {
18112 if let crate::sema::symtab::TypeInfo::Derived(type_name) = ti {
18113 if let Some(storage_ty) = derived_storage_ir_type(type_name, tl) {
18114 return storage_ty;
18115 }
18116 }
18117 type_info_to_ir_type(ti)
18118 }
18119
/// Compute the address of one element of a fixed-shape array component
/// (e.g. `x%field(i, j)` where `field` has compile-time dims).
///
/// Subscripts are linearized column-major: dimension 0 varies fastest
/// and each later dimension's stride is the product of earlier extents.
/// Every subscript is widened to i64 and bounds-checked against its
/// declared `[lower, lower+extent-1]` range.
///
/// Returns `None` when there are no dims, the subscript count doesn't
/// match the rank, or any subscript is a range rather than an element.
fn lower_fixed_component_array_element_ptr(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    args: &[crate::ast::expr::Argument],
    st: &SymbolTable,
    base_ptr: ValueId,
    dims: &[(i64, i64)],
    elem_bytes: i64,
) -> Option<ValueId> {
    if dims.is_empty() || args.len() != dims.len() {
        return None;
    }
    let mut linear_idx: Option<ValueId> = None;
    // Element stride for the current dimension (column-major).
    let mut stride = 1i64;
    for (arg, (lower, extent)) in args.iter().zip(dims.iter()) {
        let crate::ast::expr::SectionSubscript::Element(idx_expr) = &arg.value else {
            return None;
        };
        let idx = lower_expr(b, locals, idx_expr, st);
        // Sign-extend narrower index types up to i64.
        let idx64 = match b.func().value_type(idx) {
            Some(IrType::Int(IntWidth::I64)) => idx,
            _ => b.int_extend(idx, IntWidth::I64, true),
        };
        let lo = b.const_i64(*lower);
        let hi = b.const_i64(lower + extent - 1);
        b.runtime_call(RuntimeFunc::CheckBounds, vec![idx64, lo, hi], IrType::Void);
        let zero_based = b.isub(idx64, lo);
        // Skip the multiply for the fastest-varying dimension (stride 1).
        let contrib = if stride == 1 {
            zero_based
        } else {
            let stride_val = b.const_i64(stride);
            b.imul(zero_based, stride_val)
        };
        linear_idx = Some(match linear_idx {
            Some(cur) => b.iadd(cur, contrib),
            None => contrib,
        });
        // saturating_mul guards the compile-time stride computation
        // against overflow on absurdly large declared extents.
        stride = stride.saturating_mul(*extent);
    }
    let elem_bytes_val = b.const_i64(elem_bytes);
    let byte_off = b.imul(linear_idx?, elem_bytes_val);
    Some(b.gep(base_ptr, vec![byte_off], IrType::Int(IntWidth::I8)))
}
18163
/// Resolve a component access base expression to (struct_address, type_name).
/// Handles both direct names (x%field) and chained access (x%inner%field).
///
/// Also accepts array-element bases — `arr(i)%field` and
/// `x%comp(i)%field` — via the FunctionCall arm (array indexing shares
/// the call AST shape). Returns `None` when the base does not resolve
/// to a derived-type value, which tells the caller the chain ends here.
fn resolve_component_base(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    base: &crate::ast::expr::SpannedExpr,
    st: &SymbolTable,
    tl: &crate::sema::type_layout::TypeLayoutRegistry,
) -> Option<(ValueId, String)> {
    match &base.node {
        Expr::Name { name } => {
            let key = name.to_lowercase();
            let info = locals.get(&key)?;
            let type_name = info.derived_type.as_ref()?.clone();
            // For a derived-type POINTER, info.addr is a pointer slot
            // whose contents are the associated struct's address.
            // Dereference once to get the struct base.
            let addr = if info.is_pointer {
                b.load_typed(info.addr, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))))
            } else if info.allocatable {
                // Allocatable: fetch the underlying data address through
                // the descriptor helper.
                array_base_addr(b, info)
            } else if info.by_ref {
                // By-ref dummy: the slot holds the caller's address.
                b.load(info.addr)
            } else {
                info.addr
            };
            Some((addr, type_name))
        }
        Expr::ComponentAccess {
            base: inner_base,
            component,
        } => {
            // Recursive: resolve the inner base first.
            let (inner_addr, inner_type) = resolve_component_base(b, locals, inner_base, st, tl)?;
            let layout = tl.get(&inner_type)?;
            let field = layout.field(component)?;
            let offset = b.const_i64(field.offset as i64);
            let field_ptr = b.gep(inner_addr, vec![offset], IrType::Int(IntWidth::I8));
            // The field must be a derived type for chaining to continue.
            if let crate::sema::symtab::TypeInfo::Derived(ref nested_type) = field.type_info {
                Some((field_ptr, nested_type.clone()))
            } else {
                None // Terminal field — caller should load, not chain further.
            }
        }
        Expr::FunctionCall { callee, args } => {
            // An indexed base must use plain element subscripts; sections
            // cannot be chained into component access here.
            if args
                .iter()
                .any(|arg| !matches!(arg.value, crate::ast::expr::SectionSubscript::Element(_)))
            {
                return None;
            }
            if let Expr::Name { name } = &callee.node {
                let info = locals.get(&name.to_lowercase())?;
                let type_name = info.derived_type.as_ref()?.clone();
                if info.dims.is_empty() && !local_uses_array_descriptor(info) {
                    return None;
                }
                let elem_addr = lower_array_element(b, locals, info, args, st);
                return Some((elem_addr, type_name));
            }
            if let Expr::ComponentAccess { .. } = &callee.node {
                // Indexed component base (x%comp(i)…): try a
                // descriptor-backed component array first, then fall back
                // to a fixed-shape component array.
                if let Some(info) = component_array_local_info(b, locals, callee, st, tl) {
                    let type_name = info.derived_type.as_ref()?.clone();
                    let elem_addr = lower_array_element(b, locals, &info, args, st);
                    return Some((elem_addr, type_name));
                }
                let (field_ptr, field) = resolve_component_field_access(b, locals, callee, st, tl)?;
                let crate::sema::symtab::TypeInfo::Derived(type_name) = &field.type_info else {
                    return None;
                };
                let layout = tl.get(type_name)?;
                let elem_addr = lower_fixed_component_array_element_ptr(
                    b,
                    locals,
                    args,
                    st,
                    field_ptr,
                    &field.dims,
                    layout.size as i64,
                )?;
                return Some((elem_addr, type_name.clone()));
            }
            None
        }
        _ => None,
    }
}
18252
18253 fn resolve_component_field_access(
18254 b: &mut FuncBuilder,
18255 locals: &HashMap<String, LocalInfo>,
18256 expr: &crate::ast::expr::SpannedExpr,
18257 st: &SymbolTable,
18258 tl: &crate::sema::type_layout::TypeLayoutRegistry,
18259 ) -> Option<(ValueId, crate::sema::type_layout::FieldLayout)> {
18260 let Expr::ComponentAccess { base, component } = &expr.node else {
18261 return None;
18262 };
18263 let (base_addr, type_name) = resolve_component_base(b, locals, base, st, tl)?;
18264 let layout = tl.get(&type_name)?;
18265 let field = layout.field(component)?.clone();
18266 let offset = b.const_i64(field.offset as i64);
18267 let field_ptr = b.gep(base_addr, vec![offset], IrType::Int(IntWidth::I8));
18268 Some((field_ptr, field))
18269 }
18270
18271 fn is_deferred_char_component_field(field: &crate::sema::type_layout::FieldLayout) -> bool {
18272 (field.pointer || field.allocatable)
18273 && matches!(
18274 field.type_info,
18275 crate::sema::symtab::TypeInfo::Character { len: None, .. }
18276 )
18277 }
18278
18279 fn load_string_descriptor_view(b: &mut FuncBuilder, desc: ValueId) -> (ValueId, ValueId) {
18280 let ptr = b.load_typed(desc, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
18281 let eight = b.const_i64(8);
18282 let len_ptr = b.gep(desc, vec![eight], IrType::Int(IntWidth::I8));
18283 let len = b.load_typed(len_ptr, IrType::Int(IntWidth::I64));
18284 (ptr, len)
18285 }
18286
18287 fn field_char_kind(field: &crate::sema::type_layout::FieldLayout) -> CharKind {
18288 match &field.type_info {
18289 crate::sema::symtab::TypeInfo::Character { len: Some(n), .. } => CharKind::Fixed(*n),
18290 crate::sema::symtab::TypeInfo::Character { len: None, .. } => {
18291 if field.pointer || field.allocatable {
18292 CharKind::Deferred
18293 } else {
18294 CharKind::Fixed(field.size as i64)
18295 }
18296 }
18297 _ => CharKind::None,
18298 }
18299 }
18300
18301 fn field_derived_type_name(field: &crate::sema::type_layout::FieldLayout) -> Option<String> {
18302 match &field.type_info {
18303 crate::sema::symtab::TypeInfo::Derived(name) => Some(name.clone()),
18304 _ => None,
18305 }
18306 }
18307
18308 fn component_array_local_info(
18309 b: &mut FuncBuilder,
18310 locals: &HashMap<String, LocalInfo>,
18311 expr: &crate::ast::expr::SpannedExpr,
18312 st: &SymbolTable,
18313 tl: &crate::sema::type_layout::TypeLayoutRegistry,
18314 ) -> Option<LocalInfo> {
18315 let (field_ptr, field) = resolve_component_field_access(b, locals, expr, st, tl)?;
18316 if field.size != 384 || !(field.allocatable || field.pointer) {
18317 return None;
18318 }
18319 Some(LocalInfo {
18320 addr: field_ptr,
18321 ty: type_info_to_storage_ir_type(&field.type_info, tl),
18322 dims: vec![],
18323 allocatable: true,
18324 descriptor_arg: false,
18325 by_ref: false,
18326 char_kind: field_char_kind(&field),
18327 derived_type: field_derived_type_name(&field),
18328 inline_const: None,
18329 is_pointer: field.pointer,
18330 runtime_dim_upper: vec![],
18331 })
18332 }
18333
18334 fn expr_is_array_designator(
18335 b: &mut FuncBuilder,
18336 locals: &HashMap<String, LocalInfo>,
18337 expr: &crate::ast::expr::SpannedExpr,
18338 st: &SymbolTable,
18339 type_layouts: Option<&crate::sema::type_layout::TypeLayoutRegistry>,
18340 ) -> bool {
18341 match &expr.node {
18342 Expr::Name { name } => locals
18343 .get(&name.to_lowercase())
18344 .map(|info| {
18345 if local_fixed_char_allocatable_scalar_len(info).is_some() {
18346 return false;
18347 }
18348 if matches!(info.char_kind, CharKind::Deferred) {
18349 false
18350 } else {
18351 !info.dims.is_empty() || local_uses_array_descriptor(info)
18352 }
18353 })
18354 .unwrap_or(false),
18355 Expr::ComponentAccess { .. } => type_layouts
18356 .and_then(|tl| component_array_local_info(b, locals, expr, st, tl))
18357 .is_some(),
18358 _ => false,
18359 }
18360 }
18361
/// Does `expr` evaluate to a character (string) value?
///
/// Used to route comparisons and similar operations through the string
/// runtime instead of numeric lowering. Recognizes: string literals,
/// `//` concatenation, character-typed locals, character derived-type
/// components, character-returning intrinsics, character-valued MERGE,
/// callables with a character return ABI, and subscripted/substring
/// references of character locals.
fn expr_is_character_expr(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    expr: &crate::ast::expr::SpannedExpr,
    st: &SymbolTable,
    type_layouts: Option<&crate::sema::type_layout::TypeLayoutRegistry>,
) -> bool {
    match &expr.node {
        Expr::StringLiteral { .. } => true,
        // Concatenation always yields character, whatever the operands.
        Expr::BinaryOp {
            op: BinaryOp::Concat,
            ..
        } => true,
        Expr::Name { name } => locals
            .get(&name.to_lowercase())
            .map(|info| {
                // Ordinary character locals, plus fixed-length allocatable
                // character scalars (which report CharKind differently but
                // are still string-valued).
                info.char_kind != CharKind::None
                    || local_fixed_char_allocatable_scalar_len(info).is_some()
            })
            .unwrap_or(false),
        // obj%field: character iff the field's declared type is character.
        // Without type layouts we cannot tell, so answer false.
        Expr::ComponentAccess { .. } => type_layouts
            .and_then(|tl| resolve_component_field_access(b, locals, expr, st, tl))
            .map(|(_, field)| {
                matches!(
                    field.type_info,
                    crate::sema::symtab::TypeInfo::Character { .. }
                )
            })
            .unwrap_or(false),
        Expr::FunctionCall { callee, args } => {
            if let Expr::Name { name } = &callee.node {
                let key = name.to_lowercase();
                // Intrinsics that always return character.
                matches!(
                    key.as_str(),
                    "trim"
                        | "adjustl"
                        | "adjustr"
                        | "char"
                        | "achar"
                        | "new_line"
                        | "repeat"
                        | "compiler_version"
                        | "compiler_options"
                ) || (key == "merge"
                    // MERGE is character-valued only when both TSOURCE and
                    // FSOURCE (the first two args) are character expressions.
                    && args.len() >= 2
                    && args
                        .first()
                        .and_then(|arg| {
                            if let crate::ast::expr::SectionSubscript::Element(expr) = &arg.value {
                                Some(expr_is_character_expr(b, locals, expr, st, type_layouts))
                            } else {
                                None
                            }
                        })
                        .unwrap_or(false)
                    && args
                        .get(1)
                        .and_then(|arg| {
                            if let crate::ast::expr::SectionSubscript::Element(expr) = &arg.value {
                                Some(expr_is_character_expr(b, locals, expr, st, type_layouts))
                            } else {
                                None
                            }
                        })
                        .unwrap_or(false))
                    // User function with a character return ABI.
                    || named_expr_callable_character_return_abi(st, locals, &key).is_some()
                    // A local character entity referenced with call syntax:
                    // substring s(a:b) or array element s(i) parses as a
                    // "function call" in this AST.
                    || locals
                        .get(&key)
                        .map(|info| {
                            (info.char_kind != CharKind::None
                                || descriptor_backed_runtime_char_array(info)
                                || local_fixed_char_allocatable_scalar_len(info).is_some())
                                && (info.dims.is_empty()
                                    || args.iter().all(|arg| {
                                        matches!(
                                            arg.value,
                                            crate::ast::expr::SectionSubscript::Element(_)
                                                | crate::ast::expr::SectionSubscript::Range { .. }
                                        )
                                    }))
                        })
                        .unwrap_or(false)
            } else if let Expr::ComponentAccess { .. } = &callee.node {
                // obj%field(...) — subscripted or substring component.
                // Try the array-component adapter first, then fall back to
                // wrapping the plain field as a synthetic LocalInfo.
                type_layouts
                    .and_then(|tl| {
                        component_array_local_info(b, locals, callee, st, tl).or_else(|| {
                            resolve_component_field_access(b, locals, callee, st, tl).map(
                                |(field_ptr, field)| LocalInfo {
                                    addr: field_ptr,
                                    ty: type_info_to_storage_ir_type(&field.type_info, tl),
                                    dims: vec![],
                                    allocatable: field.allocatable,
                                    descriptor_arg: false,
                                    by_ref: false,
                                    char_kind: field_char_kind(&field),
                                    derived_type: field_derived_type_name(&field),
                                    inline_const: None,
                                    is_pointer: field.pointer,
                                    runtime_dim_upper: vec![],
                                },
                            )
                        })
                    })
                    .map(|info| {
                        info.char_kind != CharKind::None
                            || descriptor_backed_runtime_char_array(&info)
                    })
                    .unwrap_or(false)
            } else {
                // Nested designator (e.g. substring of something else):
                // character-ness follows the callee.
                expr_is_character_expr(b, locals, callee, st, type_layouts)
            }
        }
        _ => false,
    }
}
18477
18478 fn store_string_descriptor_view(b: &mut FuncBuilder, desc: ValueId, ptr: ValueId, len: ValueId) {
18479 let flags = b.const_i32(2); // STR_DEFERRED without STR_ALLOCATED
18480 store_byte_aggregate_field(
18481 b,
18482 desc,
18483 0,
18484 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
18485 ptr,
18486 );
18487 store_byte_aggregate_field(b, desc, 8, IrType::Int(IntWidth::I64), len);
18488 store_byte_aggregate_field(b, desc, 16, IrType::Int(IntWidth::I64), len);
18489 store_byte_aggregate_field(b, desc, 24, IrType::Int(IntWidth::I32), flags);
18490 }
18491
18492 fn init_allocated_string_descriptor(b: &mut FuncBuilder, desc: ValueId, len: ValueId) {
18493 let buf = b.runtime_call(
18494 RuntimeFunc::Allocate,
18495 vec![len],
18496 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
18497 );
18498 let space = b.const_i32(b' ' as i32);
18499 b.call(
18500 FuncRef::External("memset".into()),
18501 vec![buf, space, len],
18502 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
18503 );
18504 store_byte_aggregate_field(
18505 b,
18506 desc,
18507 0,
18508 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
18509 buf,
18510 );
18511 store_byte_aggregate_field(b, desc, 8, IrType::Int(IntWidth::I64), len);
18512 store_byte_aggregate_field(b, desc, 16, IrType::Int(IntWidth::I64), len);
18513 let flags = b.const_i32(3);
18514 store_byte_aggregate_field(b, desc, 24, IrType::Int(IntWidth::I32), flags);
18515 }
18516
18517 fn typed_allocate_char_len(
18518 b: &mut FuncBuilder,
18519 locals: &HashMap<String, LocalInfo>,
18520 type_spec: Option<&TypeSpec>,
18521 st: &SymbolTable,
18522 ) -> Option<ValueId> {
18523 match type_spec {
18524 Some(TypeSpec::Character(None)) => Some(b.const_i64(1)),
18525 Some(TypeSpec::Character(Some(sel))) => match &sel.len {
18526 Some(crate::ast::decl::LenSpec::Expr(e)) => {
18527 let raw_len = lower_expr(b, locals, e, st);
18528 Some(clamp_nonnegative_i64(b, raw_len))
18529 }
18530 None => Some(b.const_i64(1)),
18531 _ => None,
18532 },
18533 _ => None,
18534 }
18535 }
18536
/// Resolve a base expression for a type-bound procedure call.
/// Returns (object_address, type_name) — the address of the base object.
/// For simple `obj%method()`, base is `obj` → returns (obj.addr, obj.type).
/// For `obj%inner%method()`, base is `obj%inner` → returns (inner.addr, inner.type).
///
/// Also handles array-element bases (`arr(i)%method()`) and fixed-shape
/// component-array elements (`obj%arr(i)%method()`). Returns `None` when
/// the base is not a derived-type designator this walker supports.
fn resolve_component_base_for_method(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    base: &crate::ast::expr::SpannedExpr,
    st: &SymbolTable,
    tl: &crate::sema::type_layout::TypeLayoutRegistry,
) -> Option<(ValueId, String)> {
    match &base.node {
        Expr::Name { name } => {
            let key = name.to_lowercase();
            let info = locals.get(&key)?;
            // Only derived-type objects can be method receivers.
            let type_name = info.derived_type.as_ref()?.clone();
            // Normalize to a direct address of the object's storage:
            // allocatables go through the descriptor's data pointer;
            // by-ref dummies dereference the caller's pointer.
            let addr = if info.allocatable {
                array_base_addr(b, info)
            } else if info.by_ref {
                b.load(info.addr)
            } else {
                info.addr
            };
            Some((addr, type_name))
        }
        Expr::ComponentAccess {
            base: inner_base,
            component,
        } => {
            // Resolve the inner base, then GEP to the component field.
            let (inner_addr, inner_type) =
                resolve_component_base_for_method(b, locals, inner_base, st, tl)?;
            let layout = tl.get(&inner_type)?;
            let field = layout.field(component)?;
            let offset = b.const_i64(field.offset as i64);
            let field_ptr = b.gep(inner_addr, vec![offset], IrType::Int(IntWidth::I8));
            // Chaining can only continue through derived-type fields; an
            // intrinsic-typed field cannot be a method-call receiver.
            if let crate::sema::symtab::TypeInfo::Derived(ref nested_type) = field.type_info {
                Some((field_ptr, nested_type.clone()))
            } else {
                None
            }
        }
        Expr::FunctionCall { callee, args } => {
            // In this AST, `arr(i)` parses as a call. Only plain element
            // subscripts are supported as method-call bases — no sections.
            if args
                .iter()
                .any(|arg| !matches!(arg.value, crate::ast::expr::SectionSubscript::Element(_)))
            {
                return None;
            }
            if let Expr::Name { name } = &callee.node {
                // `arr(i)%method()`: element of a derived-type array local.
                let info = locals.get(&name.to_lowercase())?;
                let type_name = info.derived_type.as_ref()?.clone();
                if info.dims.is_empty() && !local_uses_array_descriptor(info) {
                    return None;
                }
                let elem_addr = lower_array_element(b, locals, info, args, st);
                return Some((elem_addr, type_name));
            }
            if let Expr::ComponentAccess { .. } = &callee.node {
                // `obj%arr(i)%method()`: element of a fixed-shape
                // derived-type component array.
                let (field_ptr, field) = resolve_component_field_access(b, locals, callee, st, tl)?;
                let crate::sema::symtab::TypeInfo::Derived(type_name) = &field.type_info else {
                    return None;
                };
                let layout = tl.get(type_name)?;
                let elem_addr = lower_fixed_component_array_element_ptr(
                    b,
                    locals,
                    args,
                    st,
                    field_ptr,
                    &field.dims,
                    layout.size as i64,
                )?;
                return Some((elem_addr, type_name.clone()));
            }
            None
        }
        _ => None,
    }
}
18617
/// Lower a character actual argument for a by-reference call.
///
/// On success returns the address of a stack slot holding an `i8*` to
/// the character data (the callee receives the extra indirection).
/// Returns `None` when `expr` is not a character entity this path
/// handles — the caller then falls back to generic by-ref lowering.
fn lower_char_arg_by_ref(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    expr: &crate::ast::expr::SpannedExpr,
    st: &SymbolTable,
    type_layouts: Option<&crate::sema::type_layout::TypeLayoutRegistry>,
    internal_funcs: Option<&HashMap<String, u32>>,
    contained_host_refs: Option<&HashMap<String, Vec<String>>>,
    descriptor_params: Option<&HashMap<String, Vec<bool>>>,
) -> Option<ValueId> {
    use crate::ast::expr::Expr;

    match &expr.node {
        Expr::Name { name } => {
            let info = locals.get(&name.to_lowercase())?;
            // Whole arrays are handled elsewhere; only scalars here.
            if !info.dims.is_empty() {
                return None;
            }
            // A by-ref i8* dummy not tracked as character: its slot
            // already holds the caller's pointer — forward the slot.
            if info.by_ref
                && info.char_kind == CharKind::None
                && matches!(
                    info.ty,
                    IrType::Ptr(ref inner) if matches!(inner.as_ref(), IrType::Int(IntWidth::I8))
                )
            {
                return Some(info.addr);
            }
            // Otherwise materialize the data pointer into a fresh slot.
            let (ptr, _len) = char_addr_and_runtime_len(b, expr, locals)?;
            let slot = b.alloca(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
            b.store(ptr, slot);
            Some(slot)
        }
        Expr::StringLiteral { value, .. } => {
            // Copy the literal into a writable, zero-filled stack buffer
            // (one extra byte) so the callee may legally write through
            // its dummy without touching the read-only constant.
            let src = b.const_string(value.as_bytes());
            let buf = b.alloca(IrType::Array(
                Box::new(IrType::Int(IntWidth::I8)),
                (value.len() + 1) as u64,
            ));
            let zero = b.const_i32(0);
            let total = b.const_i64((value.len() + 1) as i64);
            b.call(
                FuncRef::External("memset".into()),
                vec![buf, zero, total],
                IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
            );
            let len = b.const_i64(value.len() as i64);
            b.call(
                FuncRef::External("memcpy".into()),
                vec![buf, src, len],
                IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
            );
            let zero_idx = b.const_i64(0);
            let ptr = b.gep(buf, vec![zero_idx], IrType::Int(IntWidth::I8));
            let slot = b.alloca(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
            b.store(ptr, slot);
            Some(slot)
        }
        Expr::FunctionCall { callee, args } => {
            let Expr::Name { name } = &callee.node else {
                return None;
            };
            let info = locals.get(&name.to_lowercase())?;
            // Scalar character with substring/paren syntax: evaluate the
            // full string expression and pass a pointer to its result.
            if info.char_kind != CharKind::None && info.dims.is_empty() {
                let (ptr, _len) = lower_string_expr_full(
                    b,
                    locals,
                    expr,
                    st,
                    type_layouts,
                    internal_funcs,
                    contained_host_refs,
                    descriptor_params,
                );
                let slot = b.alloca(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
                b.store(ptr, slot);
                return Some(slot);
            }
            // Remaining case: element of a fixed-length character array.
            if !matches!(info.char_kind, CharKind::Fixed(_)) || info.dims.is_empty() {
                return None;
            }
            // Descriptor-backed (or raw-i8-typed) char arrays need the
            // char-specific element addressing; plain arrays use the
            // generic element path.
            let ptr = if local_uses_array_descriptor(info)
                || matches!(info.ty, IrType::Int(IntWidth::I8))
            {
                char_array_element_ptr_and_len(b, locals, info, args, st)?.0
            } else {
                lower_array_element(b, locals, info, args, st)
            };
            let slot = b.alloca(IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))));
            b.store(ptr, slot);
            Some(slot)
        }
        _ => None,
    }
}
18712
18713 fn lower_arg_string_descriptor(
18714 b: &mut FuncBuilder,
18715 locals: &HashMap<String, LocalInfo>,
18716 expr: &crate::ast::expr::SpannedExpr,
18717 st: &SymbolTable,
18718 type_layouts: Option<&crate::sema::type_layout::TypeLayoutRegistry>,
18719 ) -> ValueId {
18720 match &expr.node {
18721 Expr::ParenExpr { inner } => {
18722 lower_arg_string_descriptor(b, locals, inner, st, type_layouts)
18723 }
18724 Expr::Name { name } => {
18725 let key = name.to_lowercase();
18726 if let Some(info) = locals.get(&key) {
18727 if matches!(info.char_kind, CharKind::Deferred) {
18728 return string_descriptor_addr(b, info);
18729 }
18730 }
18731 b.const_i64(0)
18732 }
18733 Expr::ComponentAccess { .. } => {
18734 if let Some(tl) = type_layouts {
18735 if let Some((field_ptr, field)) =
18736 resolve_component_field_access(b, locals, expr, st, tl)
18737 {
18738 if matches!(field_char_kind(&field), CharKind::Deferred) && field.size == 32 {
18739 return field_ptr;
18740 }
18741 }
18742 }
18743 b.const_i64(0)
18744 }
18745 _ => b.const_i64(0),
18746 }
18747 }
18748
/// Lower an actual argument that must be passed by reference.
///
/// Tries, in order:
/// 1. the character-specific path ([`lower_char_arg_by_ref`]);
/// 2. a bare name — forward the entity's address (descriptor data
///    pointer for arrays, the caller's pointer for by-ref dummies);
/// 3. an array element `arr(i)` — the element's in-place address;
/// 4. otherwise evaluate the expression into a temporary and pass the
///    temporary's address (value semantics for general expressions).
fn lower_arg_by_ref_full(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    expr: &crate::ast::expr::SpannedExpr,
    st: &SymbolTable,
    type_layouts: Option<&crate::sema::type_layout::TypeLayoutRegistry>,
    internal_funcs: Option<&HashMap<String, u32>>,
    contained_host_refs: Option<&HashMap<String, Vec<String>>>,
    descriptor_params: Option<&HashMap<String, Vec<bool>>>,
) -> ValueId {
    // Character entities need an extra pointer indirection slot.
    if let Some(ptr_slot) = lower_char_arg_by_ref(
        b,
        locals,
        expr,
        st,
        type_layouts,
        internal_funcs,
        contained_host_refs,
        descriptor_params,
    ) {
        return ptr_slot;
    }
    // If it's a simple name, pass its address.
    if let Expr::Name { name } = &expr.node {
        let key = name.to_lowercase();
        if let Some(info) = locals.get(&key) {
            if info.by_ref {
                if info.descriptor_arg {
                    return array_data_ptr_for_call(b, info);
                }
                // Already a pointer to caller's storage — load and pass it.
                return b.load(info.addr);
            }
            if !info.dims.is_empty() || local_uses_array_descriptor(info) {
                return array_data_ptr_for_call(b, info);
            }
            return info.addr;
        }
    }
    // Array element: arr(i) passed by ref should pass the address
    // of the element within arr, NOT a copy. This enables sequence
    // association (F2018 §15.5.2.11): a callee that declares a
    // larger dummy can walk to successive elements from the passed
    // address.
    if let Expr::FunctionCall { callee, args } = &expr.node {
        if let Expr::Name { name } = &callee.node {
            let key = name.to_lowercase();
            if let Some(info) = locals.get(&key) {
                if !info.dims.is_empty() || local_uses_array_descriptor(info) {
                    // Compute the element address via GEP.
                    if args.len() == 1 {
                        if let crate::ast::expr::SectionSubscript::Element(idx_expr) =
                            &args[0].value
                        {
                            let base = array_data_ptr_for_call(b, info);
                            let idx = lower_expr(b, locals, idx_expr, st);
                            // Widen the subscript to i64 before indexing.
                            let idx64 = match b.func().value_type(idx) {
                                Some(IrType::Int(IntWidth::I64)) => idx,
                                _ => b.int_extend(idx, IntWidth::I64, true),
                            };
                            let one = b.const_i64(1);
                            let idx0 = b.isub(idx64, one); // Fortran 1-indexed → 0-indexed
                            return b.gep(base, vec![idx0], info.ty.clone());
                        }
                    }
                }
            }
        }
    }
    // Otherwise, evaluate and store to a temp.
    let val = lower_expr_full(
        b,
        locals,
        expr,
        st,
        type_layouts,
        internal_funcs,
        contained_host_refs,
        descriptor_params,
    );
    let ty = b
        .func()
        .value_type(val)
        .unwrap_or(IrType::Int(IntWidth::I32));
    let tmp = b.alloca(ty);
    b.store(val, tmp);
    tmp
}
18837
/// Lower a by-reference actual argument with no optional lowering
/// context. Thin wrapper over [`lower_arg_by_ref_full`].
fn lower_arg_by_ref(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    expr: &crate::ast::expr::SpannedExpr,
    st: &SymbolTable,
) -> ValueId {
    lower_arg_by_ref_full(b, locals, expr, st, None, None, None, None)
}
18846
/// Lower a by-reference actual argument using the full lowering context
/// carried by `ctx`. Thin wrapper over [`lower_arg_by_ref_full`].
fn lower_arg_by_ref_ctx(
    b: &mut FuncBuilder,
    ctx: &LowerCtx,
    expr: &crate::ast::expr::SpannedExpr,
) -> ValueId {
    lower_arg_by_ref_full(
        b,
        &ctx.locals,
        expr,
        ctx.st,
        Some(ctx.type_layouts),
        Some(ctx.internal_funcs),
        Some(ctx.contained_host_refs),
        Some(ctx.descriptor_params),
    )
}
18863
/// Lower an expression to a ValueId.
///
/// Convenience wrapper over [`lower_expr_full`] with no optional
/// lowering context (no type layouts, internal funcs, host refs, or
/// descriptor parameter tables).
fn lower_expr(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    expr: &crate::ast::expr::SpannedExpr,
    st: &SymbolTable,
) -> ValueId {
    lower_expr_full(b, locals, expr, st, None, None, None, None)
}
18873
/// Lower an expression using the full lowering context carried by
/// `ctx`. Thin wrapper over [`lower_expr_full`].
fn lower_expr_ctx(
    b: &mut FuncBuilder,
    ctx: &LowerCtx,
    expr: &crate::ast::expr::SpannedExpr,
) -> ValueId {
    lower_expr_full(
        b,
        &ctx.locals,
        expr,
        ctx.st,
        Some(ctx.type_layouts),
        Some(ctx.internal_funcs),
        Some(ctx.contained_host_refs),
        Some(ctx.descriptor_params),
    )
}
18890
/// Lower an expression with type layouts available but no other
/// optional context. Thin wrapper over [`lower_expr_full`].
fn lower_expr_tl(
    b: &mut FuncBuilder,
    locals: &HashMap<String, LocalInfo>,
    expr: &crate::ast::expr::SpannedExpr,
    st: &SymbolTable,
    tl: &crate::sema::type_layout::TypeLayoutRegistry,
) -> ValueId {
    lower_expr_full(b, locals, expr, st, Some(tl), None, None, None)
}
18900
18901 fn lower_expr_ctx_tl(
18902 b: &mut FuncBuilder,
18903 ctx: &LowerCtx,
18904 expr: &crate::ast::expr::SpannedExpr,
18905 ) -> ValueId {
18906 lower_expr_full(
18907 b,
18908 &ctx.locals,
18909 expr,
18910 ctx.st,
18911 Some(ctx.type_layouts),
18912 Some(ctx.internal_funcs),
18913 Some(ctx.contained_host_refs),
18914 Some(ctx.descriptor_params),
18915 )
18916 }
18917
18918 fn lower_expr_full(
18919 b: &mut FuncBuilder,
18920 locals: &HashMap<String, LocalInfo>,
18921 expr: &crate::ast::expr::SpannedExpr,
18922 st: &SymbolTable,
18923 type_layouts: Option<&crate::sema::type_layout::TypeLayoutRegistry>,
18924 internal_funcs: Option<&HashMap<String, u32>>,
18925 contained_host_refs: Option<&HashMap<String, Vec<String>>>,
18926 descriptor_params: Option<&HashMap<String, Vec<bool>>>,
18927 ) -> ValueId {
18928 match &expr.node {
18929 Expr::IntegerLiteral { text, kind, .. } => {
18930 let kind = kind.as_deref();
18931 if kind == Some("16") {
18932 b.const_i128(text.parse::<i128>().unwrap_or(0))
18933 } else {
18934 let val: i64 = text.parse().unwrap_or(0);
18935 if kind == Some("8") || val > i32::MAX as i64 || val < i32::MIN as i64 {
18936 b.const_i64(val)
18937 } else {
18938 b.const_i32(val as i32)
18939 }
18940 }
18941 }
18942 Expr::RealLiteral { text, kind } => {
18943 let val: f64 = text
18944 .replace('d', "e")
18945 .replace('D', "E")
18946 .parse()
18947 .unwrap_or(0.0);
18948 // Determine width from kind suffix (_dp, _8), 'd' exponent, or default.
18949 let is_f64 = if let Some(kind_str) = kind {
18950 real_kind_to_width(kind_str, st) == 8
18951 } else {
18952 text.to_lowercase().contains('d')
18953 };
18954 if is_f64 {
18955 b.const_f64(val)
18956 } else {
18957 b.const_f32(val as f32)
18958 }
18959 }
18960 Expr::LogicalLiteral { value, .. } => b.const_bool(*value),
18961 Expr::StringLiteral { value, .. } => b.const_string(value.as_bytes()),
18962 Expr::BozLiteral { text, base } => {
18963 // BOZ literals: strip prefix letter and quotes, parse digit string.
18964 let radix = match base {
18965 crate::ast::expr::BozBase::Binary => 2,
18966 crate::ast::expr::BozBase::Octal => 8,
18967 crate::ast::expr::BozBase::Hex => 16,
18968 };
18969 // Token text is like Z'FF' or B'1010' — extract the digits between quotes.
18970 let digits: String = text
18971 .chars()
18972 .skip_while(|c| !matches!(c, '\'' | '"'))
18973 .skip(1) // skip opening quote
18974 .take_while(|c| !matches!(c, '\'' | '"'))
18975 .collect();
18976 let val = i64::from_str_radix(&digits, radix).unwrap_or(0);
18977 if val > i32::MAX as i64 || val < i32::MIN as i64 {
18978 b.const_i64(val)
18979 } else {
18980 b.const_i32(val as i32)
18981 }
18982 }
18983
18984 Expr::Name { name } => {
18985 let key = name.to_lowercase();
18986 if let Some(info) = locals.get(&key) {
18987 // Audit MAJOR-4: PARAMETER-attributed locals with
18988 // a folded value get inlined directly. The const
18989 // is materialized via the appropriate b.const_*
18990 // helper, matching the local's declared type.
18991 if let Some(c) = info.inline_const {
18992 return materialize_const_scalar(b, c, &info.ty);
18993 }
18994 if !info.dims.is_empty() {
18995 // Array name without subscripts — return the base address.
18996 info.addr
18997 } else if info.is_pointer && is_complex_ty(&info.ty) {
18998 // Complex POINTER: slot holds ptr<[f32/f64 x 2]>.
18999 // Consumers of complex values want the *address* of
19000 // the 2-element buffer (same ABI as an ordinary
19001 // complex variable), so load once to get the
19002 // associated buffer and return that.
19003 b.load_typed(info.addr, IrType::Ptr(Box::new(info.ty.clone())))
19004 } else if info.is_pointer && info.derived_type.is_none() {
19005 // Scalar Fortran POINTER: `info.addr` is an alloca
19006 // ptr<T>. Reading the pointer as a value
19007 // dereferences it: load the target address out of
19008 // the slot, then load the value through it.
19009 let tgt = b.load_typed(info.addr, IrType::Ptr(Box::new(info.ty.clone())));
19010 b.load_typed(tgt, info.ty.clone())
19011 } else if info.is_pointer && info.derived_type.is_some() {
19012 // Derived-type POINTER used as a bare Name (e.g.
19013 // passed to a subroutine expecting type(t)). The
19014 // consumer wants the struct address, which is
19015 // what's stored in the pointer slot.
19016 b.load_typed(info.addr, IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))))
19017 } else if info.derived_type.is_some() {
19018 // Derived type variable: storage is `alloca [i8 x size]`.
19019 // Consumers of the value treat it as a pointer to the
19020 // struct (memcpy for whole-struct assignment, GEP for
19021 // component access). Without this case we fell through
19022 // to load_typed(info.ty) which yanked the first 8 bytes
19023 // of the struct as if they were a pointer, turning
19024 // `b = a` into a memcpy from the garbage address held
19025 // by a's first field slot.
19026 if info.allocatable {
19027 array_base_addr(b, info)
19028 } else if info.by_ref {
19029 b.load(info.addr)
19030 } else {
19031 info.addr
19032 }
19033 } else if is_complex_ty(&info.ty) {
19034 if info.by_ref {
19035 // by-ref complex: info.addr holds ptr-to-ptr-to-buffer.
19036 // Load once to get ptr-to-buffer; caller treats as address.
19037 b.load(info.addr)
19038 } else {
19039 // Complex variable: return the stack-buffer address.
19040 // Complex is stored as [f32/f64 x 2] — callers use the address
19041 // directly (memcpy for assignment, ptr for I/O, GEP for components).
19042 info.addr
19043 }
19044 } else if info.by_ref {
19045 // Pass-by-reference param: load the pointer, then load through it.
19046 let ptr = b.load(info.addr);
19047 b.load_typed(ptr, info.ty.clone())
19048 } else {
19049 // Use load_typed with the local's declared type to handle cases
19050 // where the address pointer type doesn't exactly match (e.g.,
19051 // WHERE substitution using byte-level GEP).
19052 b.load_typed(info.addr, info.ty.clone())
19053 }
19054 } else {
19055 b.const_i32(0)
19056 }
19057 }
19058
19059 Expr::BinaryOp { op, left, right } => {
19060 if matches!(
19061 op,
19062 BinaryOp::Eq
19063 | BinaryOp::Ne
19064 | BinaryOp::Lt
19065 | BinaryOp::Le
19066 | BinaryOp::Gt
19067 | BinaryOp::Ge
19068 ) && (expr_is_character_expr(b, locals, left, st, type_layouts)
19069 || expr_is_character_expr(b, locals, right, st, type_layouts))
19070 {
19071 let (lhs_ptr, lhs_len) =
19072 lower_string_expr_with_layouts(b, locals, left, st, type_layouts);
19073 let (rhs_ptr, rhs_len) =
19074 lower_string_expr_with_layouts(b, locals, right, st, type_layouts);
19075 let cmp = b.call(
19076 FuncRef::External("afs_compare_char".into()),
19077 vec![lhs_ptr, lhs_len, rhs_ptr, rhs_len],
19078 IrType::Int(IntWidth::I32),
19079 );
19080 let zero = b.const_i32(0);
19081 return match op {
19082 BinaryOp::Eq => b.icmp(CmpOp::Eq, cmp, zero),
19083 BinaryOp::Ne => b.icmp(CmpOp::Ne, cmp, zero),
19084 BinaryOp::Lt => b.icmp(CmpOp::Lt, cmp, zero),
19085 BinaryOp::Le => b.icmp(CmpOp::Le, cmp, zero),
19086 BinaryOp::Gt => b.icmp(CmpOp::Gt, cmp, zero),
19087 BinaryOp::Ge => b.icmp(CmpOp::Ge, cmp, zero),
19088 _ => unreachable!(),
19089 };
19090 }
19091 let mut lhs = lower_expr_full(
19092 b,
19093 locals,
19094 left,
19095 st,
19096 type_layouts,
19097 internal_funcs,
19098 contained_host_refs,
19099 descriptor_params,
19100 );
19101 let mut rhs = lower_expr_full(
19102 b,
19103 locals,
19104 right,
19105 st,
19106 type_layouts,
19107 internal_funcs,
19108 contained_host_refs,
19109 descriptor_params,
19110 );
19111 let lty = b
19112 .func()
19113 .value_type(lhs)
19114 .unwrap_or(IrType::Int(IntWidth::I32));
19115 let rty = b
19116 .func()
19117 .value_type(rhs)
19118 .unwrap_or(IrType::Int(IntWidth::I32));
19119
19120 // Defined operator dispatch (INTERFACE OPERATOR(...)): if a
19121 // generic interface for this operator exists and a specific
19122 // matches the actual operand types, emit a call instead of
19123 // arithmetic. Needed for e.g. `type(vec) + type(vec)` — the
19124 // default arithmetic would ICE trying to `iadd` pointers.
19125 if let Some(resolved) = resolve_operator_overload(st, b, op, &lty, &rty, lhs, rhs) {
19126 let (specific, arg_vals) = resolved;
19127 let ret_ty =
19128 callee_return_ir_type(st, &specific).unwrap_or(IrType::Int(IntWidth::I32));
19129 let specific_key = specific.to_lowercase();
19130 let (call_name, _) = resolved_symbol_call_target(st, &specific_key, &specific);
19131 let func_ref = internal_funcs
19132 .and_then(|m| m.get(&specific_key).copied())
19133 .map(FuncRef::Internal)
19134 .unwrap_or_else(|| FuncRef::External(call_name));
19135 return b.call(func_ref, arg_vals, ret_ty);
19136 }
19137
19138 // Complex arithmetic: both operands are ptr<[f32/f64 x 2]>.
19139 // Add/Sub operate component-wise; Mul uses (ac-bd, ad+bc).
19140 if is_complex_ty(&lty) || is_complex_ty(&rty) {
19141 let fw = if complex_float_width(&lty) == FloatWidth::F64
19142 || complex_float_width(&rty) == FloatWidth::F64
19143 {
19144 FloatWidth::F64
19145 } else {
19146 FloatWidth::F32
19147 };
19148 let elem = IrType::Float(fw);
19149 let esz = b.const_i64(if fw == FloatWidth::F64 { 8 } else { 4 });
19150 let zero = b.const_i64(0);
19151 // Load components from lhs (re_l, im_l).
19152 let re_l_ptr = b.gep(lhs, vec![zero], IrType::Int(IntWidth::I8));
19153 let im_l_ptr = b.gep(lhs, vec![esz], IrType::Int(IntWidth::I8));
19154 let re_l = b.load_typed(re_l_ptr, elem.clone());
19155 let im_l = b.load_typed(im_l_ptr, elem.clone());
19156 // Load components from rhs (re_r, im_r).
19157 let re_r_ptr = b.gep(rhs, vec![zero], IrType::Int(IntWidth::I8));
19158 let im_r_ptr = b.gep(rhs, vec![esz], IrType::Int(IntWidth::I8));
19159 let re_r = b.load_typed(re_r_ptr, elem.clone());
19160 let im_r = b.load_typed(im_r_ptr, elem.clone());
19161 let arr_ty = IrType::Array(Box::new(elem.clone()), 2);
19162 let buf = b.alloca(arr_ty);
19163 let (re_res, im_res) = match op {
19164 BinaryOp::Add => (b.fadd(re_l, re_r), b.fadd(im_l, im_r)),
19165 BinaryOp::Sub => (b.fsub(re_l, re_r), b.fsub(im_l, im_r)),
19166 BinaryOp::Mul => {
19167 // (ac-bd, ad+bc)
19168 let ac = b.fmul(re_l, re_r);
19169 let bd = b.fmul(im_l, im_r);
19170 let ad = b.fmul(re_l, im_r);
19171 let bc = b.fmul(im_l, re_r);
19172 (b.fsub(ac, bd), b.fadd(ad, bc))
19173 }
19174 _ => (re_l, im_l), // unsupported: return lhs unchanged
19175 };
19176 let dst_re = b.gep(buf, vec![zero], IrType::Int(IntWidth::I8));
19177 let dst_im = b.gep(buf, vec![esz], IrType::Int(IntWidth::I8));
19178 b.store(re_res, dst_re);
19179 b.store(im_res, dst_im);
19180 return buf;
19181 }
19182
19183 // Implicit type promotion: if one side is int and the other float,
19184 // convert the int to float (Fortran mixed-mode arithmetic).
19185 let result_ty = if lty.is_float() || rty.is_float() {
19186 let fw = match (&lty, &rty) {
19187 (IrType::Float(FloatWidth::F64), _) | (_, IrType::Float(FloatWidth::F64)) => {
19188 FloatWidth::F64
19189 }
19190 _ => FloatWidth::F32,
19191 };
19192 if lty.is_int() {
19193 lhs = b.int_to_float(lhs, fw);
19194 }
19195 if rty.is_int() {
19196 rhs = b.int_to_float(rhs, fw);
19197 }
19198 // Promote f32 to f64 if other is f64.
19199 if matches!(lty, IrType::Float(FloatWidth::F32)) && fw == FloatWidth::F64 {
19200 lhs = b.float_extend(lhs, FloatWidth::F64);
19201 }
19202 if matches!(rty, IrType::Float(FloatWidth::F32)) && fw == FloatWidth::F64 {
19203 rhs = b.float_extend(rhs, FloatWidth::F64);
19204 }
19205 IrType::Float(fw)
19206 } else {
19207 // Integer width promotion: widen the narrower operand to
19208 // match the wider one. Without this, integer(int64) + 1
19209 // produces an IR width mismatch (i64 + i32).
19210 let lw = lty.int_width().unwrap_or(IntWidth::I32);
19211 let rw = rty.int_width().unwrap_or(IntWidth::I32);
19212 let target_w = if lw.bits() >= rw.bits() { lw } else { rw };
19213 if lw != target_w {
19214 lhs = b.int_extend(lhs, target_w, true);
19215 }
19216 if rw != target_w {
19217 rhs = b.int_extend(rhs, target_w, true);
19218 }
19219 IrType::Int(target_w)
19220 };
19221
19222 match (op, &result_ty) {
19223 (BinaryOp::Add, IrType::Int(_)) => b.iadd(lhs, rhs),
19224 (BinaryOp::Add, IrType::Float(_)) => b.fadd(lhs, rhs),
19225 (BinaryOp::Sub, IrType::Int(_)) => b.isub(lhs, rhs),
19226 (BinaryOp::Sub, IrType::Float(_)) => b.fsub(lhs, rhs),
19227 (BinaryOp::Mul, IrType::Int(_)) => b.imul(lhs, rhs),
19228 (BinaryOp::Mul, IrType::Float(_)) => b.fmul(lhs, rhs),
19229 (BinaryOp::Div, IrType::Int(_)) => b.idiv(lhs, rhs),
19230 (BinaryOp::Div, IrType::Float(_)) => b.fdiv(lhs, rhs),
19231 (BinaryOp::Pow, IrType::Float(_)) => b.fpow(lhs, rhs),
19232 (BinaryOp::Pow, IrType::Int(_)) => {
19233 let fl = b.int_to_float(lhs, FloatWidth::F64);
19234 let fr = b.int_to_float(rhs, FloatWidth::F64);
19235 let result = b.fpow(fl, fr);
19236 b.float_to_int(result, IntWidth::I32)
19237 }
19238 (BinaryOp::Eq, IrType::Int(_)) => b.icmp(CmpOp::Eq, lhs, rhs),
19239 (BinaryOp::Eq, IrType::Float(_)) => b.fcmp(CmpOp::Eq, lhs, rhs),
19240 (BinaryOp::Ne, IrType::Int(_)) => b.icmp(CmpOp::Ne, lhs, rhs),
19241 (BinaryOp::Ne, IrType::Float(_)) => b.fcmp(CmpOp::Ne, lhs, rhs),
19242 (BinaryOp::Lt, IrType::Int(_)) => b.icmp(CmpOp::Lt, lhs, rhs),
19243 (BinaryOp::Lt, IrType::Float(_)) => b.fcmp(CmpOp::Lt, lhs, rhs),
19244 (BinaryOp::Le, IrType::Int(_)) => b.icmp(CmpOp::Le, lhs, rhs),
19245 (BinaryOp::Le, IrType::Float(_)) => b.fcmp(CmpOp::Le, lhs, rhs),
19246 (BinaryOp::Gt, IrType::Int(_)) => b.icmp(CmpOp::Gt, lhs, rhs),
19247 (BinaryOp::Gt, IrType::Float(_)) => b.fcmp(CmpOp::Gt, lhs, rhs),
19248 (BinaryOp::Ge, IrType::Int(_)) => b.icmp(CmpOp::Ge, lhs, rhs),
19249 (BinaryOp::Ge, IrType::Float(_)) => b.fcmp(CmpOp::Ge, lhs, rhs),
19250 (BinaryOp::And, _) => {
19251 // Coerce to Bool if not already (Fortran .AND. on integers).
19252 let lbool = coerce_to_type(b, lhs, &IrType::Bool);
19253 let rbool = coerce_to_type(b, rhs, &IrType::Bool);
19254 b.and(lbool, rbool)
19255 }
19256 (BinaryOp::Or, _) => {
19257 let lbool = coerce_to_type(b, lhs, &IrType::Bool);
19258 let rbool = coerce_to_type(b, rhs, &IrType::Bool);
19259 b.or(lbool, rbool)
19260 }
19261 (BinaryOp::Eqv, _) => {
19262 // a .eqv. b = .not. (a .xor. b)
19263 let lbool = coerce_to_type(b, lhs, &IrType::Bool);
19264 let rbool = coerce_to_type(b, rhs, &IrType::Bool);
19265 let both = b.and(lbool, rbool);
19266 let either = b.or(lbool, rbool);
19267 let not_both = b.not(both);
19268 let xor = b.and(either, not_both);
19269 b.not(xor)
19270 }
19271 (BinaryOp::Neqv, _) => {
19272 // a .neqv. b = a .xor. b
19273 let lbool = coerce_to_type(b, lhs, &IrType::Bool);
19274 let rbool = coerce_to_type(b, rhs, &IrType::Bool);
19275 let both = b.and(lbool, rbool);
19276 let either = b.or(lbool, rbool);
19277 let not_both = b.not(both);
19278 b.and(either, not_both)
19279 }
19280 (BinaryOp::Concat, _) => b.runtime_call(
19281 RuntimeFunc::StringConcat,
19282 vec![lhs, rhs],
19283 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
19284 ),
19285 _ => b.iadd(lhs, rhs), // fallback for defined ops
19286 }
19287 }
19288
19289 Expr::UnaryOp { op, operand } => {
19290 let val = lower_expr_full(
19291 b,
19292 locals,
19293 operand,
19294 st,
19295 type_layouts,
19296 internal_funcs,
19297 contained_host_refs,
19298 descriptor_params,
19299 );
19300 let ty = b
19301 .func()
19302 .value_type(val)
19303 .unwrap_or(IrType::Int(IntWidth::I32));
19304 match (op, &ty) {
19305 (UnaryOp::Minus, IrType::Int(_)) => b.ineg(val),
19306 (UnaryOp::Minus, IrType::Float(_)) => b.fneg(val),
19307 (UnaryOp::Plus, _) => val,
19308 (UnaryOp::Not, _) => b.not(val),
19309 _ => val,
19310 }
19311 }
19312
19313 Expr::ParenExpr { inner } => lower_expr_full(
19314 b,
19315 locals,
19316 inner,
19317 st,
19318 type_layouts,
19319 internal_funcs,
19320 contained_host_refs,
19321 descriptor_params,
19322 ),
19323
19324 Expr::FunctionCall { callee, args } => {
19325 if args.len() == 1
19326 && expr_is_character_expr(b, locals, callee, st, type_layouts)
19327 && !expr_is_array_designator(b, locals, callee, st, type_layouts)
19328 {
19329 match &args[0].value {
19330 crate::ast::expr::SectionSubscript::Range { start, end, .. } => {
19331 let (base_ptr, base_len) =
19332 lower_string_expr_with_layouts(b, locals, callee, st, type_layouts);
19333 let (ptr, _len) = lower_substring(
19334 b,
19335 locals,
19336 st,
19337 base_ptr,
19338 base_len,
19339 start.as_ref(),
19340 end.as_ref(),
19341 );
19342 return ptr;
19343 }
19344 crate::ast::expr::SectionSubscript::Element(idx_expr) => {
19345 let (base_ptr, base_len) =
19346 lower_string_expr_with_layouts(b, locals, callee, st, type_layouts);
19347 let (ptr, _len) = lower_substring(
19348 b,
19349 locals,
19350 st,
19351 base_ptr,
19352 base_len,
19353 Some(idx_expr),
19354 Some(idx_expr),
19355 );
19356 return ptr;
19357 }
19358 }
19359 }
19360 if let Expr::Name { name } = &callee.node {
19361 let key = name.to_lowercase();
19362 let procptr_target = procedure_pointer_call_target(b, locals, st, &key);
19363 let signature_key = procptr_target
19364 .as_ref()
19365 .map(|(_, sig_key)| sig_key.clone())
19366 .unwrap_or_else(|| key.clone());
19367
19368 // Check if this is an array element or section access.
19369 if let Some(info) = locals.get(&key) {
19370 if local_is_array_like(info) {
19371 let has_range = args.iter().any(|a| {
19372 matches!(a.value, crate::ast::expr::SectionSubscript::Range { .. })
19373 });
19374 if has_range {
19375 return lower_array_section(b, locals, info, args, st);
19376 }
19377 return lower_array_element(b, locals, info, args, st);
19378 }
19379 }
19380
19381 // Check for pointer intrinsics (ASSOCIATED) first —
19382 // these work on every pointer shape and don't care
19383 // about the array-intrinsic filter.
19384 if let Some(result) =
19385 lower_pointer_intrinsic(b, locals, &key, args, st, type_layouts)
19386 {
19387 return result;
19388 }
19389
19390 if let Some(result) =
19391 lower_scalar_allocated_intrinsic(b, locals, &key, args, st, type_layouts)
19392 {
19393 return result;
19394 }
19395
19396 if let Some(result) = lower_any_intrinsic_ast(
19397 b,
19398 &key,
19399 args,
19400 locals,
19401 st,
19402 type_layouts,
19403 internal_funcs,
19404 contained_host_refs,
19405 descriptor_params,
19406 ) {
19407 return result;
19408 }
19409
19410 // Check for array intrinsics (SIZE, SUM, etc.) that need descriptor addresses.
19411 if let Some(result) = lower_array_intrinsic(b, locals, &key, args, st, type_layouts)
19412 {
19413 return result;
19414 }
19415
19416 // Check if this is a structure constructor: type_name(val1, val2, ...).
19417 if let Some(tl) = type_layouts {
19418 if let Some(layout) = tl.get(&key) {
19419 // Allocate a temporary struct on the stack and zero-initialize.
19420 let struct_ty =
19421 IrType::Array(Box::new(IrType::Int(IntWidth::I8)), layout.size as u64);
19422 let tmp = b.alloca(struct_ty);
19423 let zero = b.const_i32(0);
19424 let sz = b.const_i64(layout.size as i64);
19425 b.call(
19426 FuncRef::External("memset".into()),
19427 vec![tmp, zero, sz],
19428 IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
19429 );
19430
19431 if args.len() != layout.fields.len() {
19432 eprintln!("warning: structure constructor for '{}' has {} args but type has {} fields",
19433 key, args.len(), layout.fields.len());
19434 }
19435
19436 // Store each argument into the corresponding field.
19437 for (i, arg) in args.iter().enumerate() {
19438 if i < layout.fields.len() {
19439 if let crate::ast::expr::SectionSubscript::Element(e) = &arg.value {
19440 let val = lower_expr_full(
19441 b,
19442 locals,
19443 e,
19444 st,
19445 type_layouts,
19446 internal_funcs,
19447 contained_host_refs,
19448 descriptor_params,
19449 );
19450 let coerced = coerce_to_type(
19451 b,
19452 val,
19453 &type_info_to_ir_type(&layout.fields[i].type_info),
19454 );
19455 let offset = b.const_i64(layout.fields[i].offset as i64);
19456 let field_ptr =
19457 b.gep(tmp, vec![offset], IrType::Int(IntWidth::I8));
19458 b.store(coerced, field_ptr);
19459 }
19460 }
19461 }
19462 return tmp;
19463 }
19464 }
19465
19466 // Try character intrinsics (need access to locals for CharKind).
19467 if let Some(result) = lower_char_intrinsic(b, &key, args, locals, st, type_layouts)
19468 {
19469 return result;
19470 }
19471
19472 // PRESENT(x): check if optional dummy argument x was passed.
19473 // By-ref params are stored as `alloca Ptr<T>` in locals; when the
19474 // caller omits an optional arg it passes null (0). Load the stored
19475 // pointer and compare to zero → non-zero means present.
19476 if key == "present" {
19477 if let Some(arg0) = args.first() {
19478 if let crate::ast::expr::SectionSubscript::Element(e) = &arg0.value {
19479 if let Expr::Name { name: arg_name } = &e.node {
19480 let akey = arg_name.to_lowercase();
19481 if let Some(info) = locals.get(&akey) {
19482 if info.by_ref {
19483 // Load the incoming pointer stored in the by-ref slot.
19484 // If absent, caller passes 0; if present, non-zero address.
19485 let ptr_val = b.load(info.addr);
19486 let zero = b.const_i64(0);
19487 return b.icmp(CmpOp::Ne, ptr_val, zero);
19488 }
19489 }
19490 }
19491 }
19492 }
19493 // If we can't resolve it (non-standard usage), assume present.
19494 return b.const_bool(true);
19495 }
19496
19497 // c_loc(x): return the address of the target itself as an
19498 // i64 integer (matching type(c_ptr)). This must bypass the
19499 // normal by-ref argument path because character arguments use
19500 // temporary pointer slots there, while c_loc needs the real
19501 // underlying element/storage address.
19502 if key == "c_loc" {
19503 if let Some(arg0) = args.first() {
19504 if let crate::ast::expr::SectionSubscript::Element(e) = &arg0.value {
19505 if let Expr::Name { name } = &e.node {
19506 if let Some(info) = locals.get(&name.to_lowercase()) {
19507 let addr = if info.by_ref {
19508 if info.descriptor_arg {
19509 array_data_ptr_for_call(b, info)
19510 } else if info.char_kind != CharKind::None
19511 && info.dims.is_empty()
19512 {
19513 if let Some((ptr, _)) =
19514 char_addr_and_runtime_len(b, e, locals)
19515 {
19516 ptr
19517 } else {
19518 b.load(info.addr)
19519 }
19520 } else {
19521 b.load(info.addr)
19522 }
19523 } else if !info.dims.is_empty()
19524 || local_uses_array_descriptor(info)
19525 {
19526 array_data_ptr_for_call(b, info)
19527 } else if info.char_kind != CharKind::None {
19528 if let Some((ptr, _)) =
19529 char_addr_and_runtime_len(b, e, locals)
19530 {
19531 ptr
19532 } else {
19533 info.addr
19534 }
19535 } else {
19536 info.addr
19537 };
19538 return b.ptr_to_int(addr);
19539 }
19540 }
19541
19542 if let Expr::FunctionCall {
19543 callee,
19544 args: subscripts,
19545 } = &e.node
19546 {
19547 if let Expr::Name { name } = &callee.node {
19548 if let Some(info) = locals.get(&name.to_lowercase()) {
19549 if info.char_kind != CharKind::None {
19550 let addr = if local_uses_array_descriptor(info)
19551 || matches!(info.ty, IrType::Int(IntWidth::I8))
19552 {
19553 char_array_element_ptr_and_len(
19554 b, locals, info, subscripts, st,
19555 )
19556 .map(|(ptr, _)| ptr)
19557 .unwrap_or_else(|| {
19558 lower_array_element_addr(
19559 b, locals, info, subscripts, st,
19560 )
19561 })
19562 } else {
19563 lower_array_element(b, locals, info, subscripts, st)
19564 };
19565 return b.ptr_to_int(addr);
19566 }
19567 if !info.dims.is_empty()
19568 || local_uses_array_descriptor(info)
19569 {
19570 let addr = lower_array_element_addr(
19571 b, locals, info, subscripts, st,
19572 );
19573 return b.ptr_to_int(addr);
19574 }
19575 }
19576 }
19577 }
19578
19579 let addr = lower_arg_by_ref(b, locals, e, st);
19580 return b.ptr_to_int(addr);
19581 }
19582 }
19583 }
19584
19585 // c_funloc(f): return the entry address of the procedure.
19586 if key == "c_funloc" {
19587 if let Some(arg0) = args.first() {
19588 if let crate::ast::expr::SectionSubscript::Element(e) = &arg0.value {
19589 let addr = lower_arg_by_ref(b, locals, e, st);
19590 return b.ptr_to_int(addr);
19591 }
19592 }
19593 }
19594
19595 // abs(z) for complex: sqrt(re² + im²).
19596 // Must be handled before generic intrinsic lowering because
19597 // complex values are pointers to [f32/f64 x 2] buffers.
19598 if (key == "abs" || key == "cabs" || key == "cdabs" || key == "zabs")
19599 && args.len() == 1
19600 {
19601 if let Some(arg0) = args.first() {
19602 if let crate::ast::expr::SectionSubscript::Element(e) = &arg0.value {
19603 let val = lower_expr_full(
19604 b,
19605 locals,
19606 e,
19607 st,
19608 type_layouts,
19609 internal_funcs,
19610 contained_host_refs,
19611 descriptor_params,
19612 );
19613 let ty = b
19614 .func()
19615 .value_type(val)
19616 .unwrap_or(IrType::Int(IntWidth::I32));
19617 if is_complex_ty(&ty) {
19618 let fw = complex_float_width(&ty);
19619 let elem = IrType::Float(fw);
19620 let esz = b.const_i64(if fw == FloatWidth::F64 { 8 } else { 4 });
19621 let zero = b.const_i64(0);
19622 let re_ptr = b.gep(val, vec![zero], IrType::Int(IntWidth::I8));
19623 let im_ptr = b.gep(val, vec![esz], IrType::Int(IntWidth::I8));
19624 let re = b.load_typed(re_ptr, elem.clone());
19625 let im = b.load_typed(im_ptr, elem);
19626 let re2 = b.fmul(re, re);
19627 let im2 = b.fmul(im, im);
19628 let sum = b.fadd(re2, im2);
19629 return b.fsqrt(sum);
19630 }
19631 }
19632 }
19633 }
19634
19635 // Keyword-argument reordering for function calls
19636 // (symmetric with the Stmt::Call path). Binds by name
19637 // when the callee's arg_order is resolvable.
19638 let reordered_fn = reorder_args_by_keyword(args, &signature_key, st);
19639 let args: &[crate::ast::expr::Argument] = &reordered_fn;
19640
19641 // Try intrinsic lowering first (intrinsics use values, not references).
19642 let intrinsic_arg_vals: Vec<ValueId> = args
19643 .iter()
19644 .map(|a| match &a.value {
19645 crate::ast::expr::SectionSubscript::Element(e) => lower_expr_full(
19646 b,
19647 locals,
19648 e,
19649 st,
19650 type_layouts,
19651 internal_funcs,
19652 contained_host_refs,
19653 descriptor_params,
19654 ),
19655 _ => b.const_i32(0),
19656 })
19657 .collect();
19658
19659 if let Some(result) = lower_intrinsic(b, &key, &intrinsic_arg_vals) {
19660 return result;
19661 }
19662
19663 // Check if the callee has VALUE args (BIND(C) interface).
19664 let callee_value_args = callee_value_arg_mask(st, &signature_key)
19665 .or_else(|| callee_value_arg_mask(st, &key));
19666
19667 // Check which params use the descriptor ABI
19668 // (assumed-shape / deferred / assumed-size arrays).
19669 // The Stmt::Call path consults ctx.descriptor_params
19670 // directly; here we reach into it through the
19671 // descriptor_params plumbed as an optional arg so
19672 // the function-call path doesn't need full ctx.
19673 // Without this, a function with `integer :: xs(:)`
19674 // would receive a raw element pointer and size(xs)
19675 // would read garbage (audit31 Finding 6).
19676 let callee_descriptor_args = descriptor_params.and_then(|m| {
19677 m.get(&signature_key)
19678 .cloned()
19679 .or_else(|| m.get(&key).cloned())
19680 });
19681 let callee_string_descriptor_args =
19682 callee_string_descriptor_arg_mask(st, &signature_key)
19683 .or_else(|| callee_string_descriptor_arg_mask(st, &key));
19684
19685 // Pass args: by value for VALUE, descriptor for
19686 // assumed-shape, by reference otherwise.
19687 let mut ref_arg_vals: Vec<ValueId> = args
19688 .iter()
19689 .enumerate()
19690 .map(|(i, a)| {
19691 let is_value = callee_value_args
19692 .as_ref()
19693 .map(|mask| i < mask.len() && mask[i])
19694 .unwrap_or(false);
19695 let wants_descriptor = callee_descriptor_args
19696 .as_ref()
19697 .map(|mask| i < mask.len() && mask[i])
19698 .unwrap_or(false);
19699 let wants_string_descriptor = callee_string_descriptor_args
19700 .as_ref()
19701 .map(|mask| i < mask.len() && mask[i])
19702 .unwrap_or(false);
19703 match &a.value {
19704 crate::ast::expr::SectionSubscript::Element(e) => {
19705 if is_value {
19706 lower_expr_full(
19707 b,
19708 locals,
19709 e,
19710 st,
19711 type_layouts,
19712 internal_funcs,
19713 contained_host_refs,
19714 descriptor_params,
19715 )
19716 } else if wants_string_descriptor {
19717 lower_arg_string_descriptor(b, locals, e, st, type_layouts)
19718 } else if wants_descriptor {
19719 lower_arg_descriptor(b, locals, e, st)
19720 } else {
19721 lower_arg_by_ref_full(
19722 b,
19723 locals,
19724 e,
19725 st,
19726 type_layouts,
19727 internal_funcs,
19728 contained_host_refs,
19729 descriptor_params,
19730 )
19731 }
19732 }
19733 _ => b.const_i32(0),
19734 }
19735 })
19736 .collect();
19737
19738 // Resolve generic interface names to specific procedures.
19739 // For a NamedInterface callee, failing to resolve means
19740 // the call is ill-typed (wrong arity, wrong kind, or no
19741 // matching specific). Emit a compile-time diagnostic
19742 // instead of silently falling back to the generic name,
19743 // which would either mismatch the callee ABI or produce
19744 // an unresolved link-time symbol.
19745 let (call_name, callee_key) = if procptr_target.is_some() {
19746 (String::new(), signature_key.clone())
19747 } else {
19748 let resolved_name = match resolve_generic_call(st, b, &key, &intrinsic_arg_vals)
19749 {
19750 Some(n) => n,
19751 None => {
19752 if let Some(sym) = st.find_symbol_any_scope(&key) {
19753 if sym.kind == crate::sema::symtab::SymbolKind::NamedInterface {
19754 let specifics = sym.arg_names.join(", ");
19755 eprintln!(
19756 "armfortas: error: {}:{}: no specific procedure of generic '{}' matches the actual arguments; candidates: [{}]",
19757 expr.span.start.line,
19758 expr.span.start.col,
19759 name,
19760 specifics,
19761 );
19762 let _ = std::io::stderr().flush();
19763 std::process::exit(1);
19764 }
19765 }
19766 name.clone()
19767 }
19768 };
19769 let resolved_key = resolved_name.to_lowercase();
19770 resolved_symbol_call_target(st, &resolved_key, &resolved_name)
19771 };
19772 if let Some(string_desc_flags) = callee_string_descriptor_arg_mask(st, &callee_key)
19773 .or_else(|| callee_string_descriptor_arg_mask(st, &signature_key))
19774 .or_else(|| callee_string_descriptor_arg_mask(st, &key))
19775 {
19776 for (i, flag) in string_desc_flags.iter().enumerate() {
19777 if callee_descriptor_args
19778 .as_ref()
19779 .map(|mask| mask.get(i).copied().unwrap_or(false))
19780 .unwrap_or(false)
19781 {
19782 continue;
19783 }
19784 if !*flag || i >= args.len() {
19785 continue;
19786 }
19787 ref_arg_vals[i] = match &args[i].value {
19788 crate::ast::expr::SectionSubscript::Element(e) => {
19789 lower_arg_string_descriptor(b, locals, e, st, type_layouts)
19790 }
19791 _ => b.const_i64(0),
19792 };
19793 }
19794 }
19795 let callee_char_len_star_args = callee_char_len_star_mask(st, &callee_key)
19796 .or_else(|| callee_char_len_star_mask(st, &signature_key))
19797 .or_else(|| callee_char_len_star_mask(st, &key));
19798
19799 if let Some(cls_flags) = &callee_char_len_star_args {
19800 for (i, flag) in cls_flags.iter().enumerate() {
19801 if !*flag || i >= args.len() {
19802 continue;
19803 }
19804 if let crate::ast::expr::SectionSubscript::Element(e) = &args[i].value {
19805 ref_arg_vals.push(
19806 actual_char_arg_runtime_len(b, locals, e, st, type_layouts)
19807 .unwrap_or_else(|| b.const_i64(0)),
19808 );
19809 } else {
19810 ref_arg_vals.push(b.const_i64(0));
19811 }
19812 }
19813 }
19814
19815 // Host-association closure-passing ABI: append trailing
19816 // pointer args for each host-local the callee references.
19817 // Prefer the generic-resolved name's map entry; fall back
19818 // to the unresolved key so calls that don't go through
19819 // generic dispatch still thread host vars.
19820 let closure_key = if contained_host_refs
19821 .map(|m| m.contains_key(&callee_key))
19822 .unwrap_or(false)
19823 {
19824 &callee_key
19825 } else {
19826 &key
19827 };
19828 if procptr_target.is_none() {
19829 append_host_closure_args_raw(
19830 b,
19831 locals,
19832 contained_host_refs,
19833 closure_key,
19834 &mut ref_arg_vals,
19835 );
19836 }
19837
19838 // Look up callee return type from symbol table.
19839 let ret_ty = callee_return_ir_type(st, &callee_key)
19840 .or_else(|| callee_return_ir_type(st, &signature_key))
19841 .or_else(|| callee_return_ir_type(st, &key))
19842 .unwrap_or(IrType::Int(IntWidth::I32));
19843 let func_ref = if let Some((target, _)) = procptr_target {
19844 FuncRef::Indirect(target)
19845 } else {
19846 internal_funcs
19847 .and_then(|map| {
19848 map.get(&callee_key)
19849 .or_else(|| map.get(&signature_key))
19850 .or_else(|| map.get(&key))
19851 .copied()
19852 })
19853 .map(FuncRef::Internal)
19854 .unwrap_or_else(|| FuncRef::External(call_name))
19855 };
19856 b.call(func_ref, ref_arg_vals, ret_ty)
19857 } else if let Expr::ComponentAccess { .. } = &callee.node {
19858 if let Some(tl) = type_layouts {
19859 if let Some(info) = component_array_local_info(b, locals, callee, st, tl) {
19860 let has_range = args.iter().any(|a| {
19861 matches!(a.value, crate::ast::expr::SectionSubscript::Range { .. })
19862 });
19863 if has_range {
19864 return lower_array_section(b, locals, &info, args, st);
19865 }
19866 return lower_array_element(b, locals, &info, args, st);
19867 }
19868 }
19869 b.const_i32(0)
19870 } else {
19871 b.const_i32(0)
19872 }
19873 }
19874
19875 Expr::ComponentAccess { base, component } => {
19876 if let Some(tl) = type_layouts {
19877 // Common case: base is a Name or chained ComponentAccess.
19878 let resolved = resolve_component_base(b, locals, base, st, tl);
19879 // Inline case: base is a call that returns a derived type,
19880 // e.g. `add_t(a, b)%x`. resolve_component_base doesn't
19881 // know how to lower a FunctionCall, so handle it here.
19882 // The call itself evaluates to a ptr<i8> pointing at the
19883 // result struct, so we can reuse it as the component
19884 // base address — but we still need the callee's return
19885 // type name to look up the layout.
19886 let resolved = resolved.or_else(|| {
19887 if let Expr::FunctionCall { callee, .. } = &base.node {
19888 if let Expr::Name { name } = &callee.node {
19889 if let Some(ret_type_name) = callee_return_derived_type_name(st, name) {
19890 let base_ptr = lower_expr_full(
19891 b,
19892 locals,
19893 base,
19894 st,
19895 type_layouts,
19896 internal_funcs,
19897 contained_host_refs,
19898 descriptor_params,
19899 );
19900 return Some((base_ptr, ret_type_name));
19901 }
19902 }
19903 }
19904 None
19905 });
19906 if let Some((base_addr, type_name)) = resolved {
19907 if let Some(layout) = tl.get(&type_name) {
19908 if let Some(field) = layout.field(component) {
19909 let offset = b.const_i64(field.offset as i64);
19910 let field_ptr =
19911 b.gep(base_addr, vec![offset], IrType::Int(IntWidth::I8));
19912
19913 // If the field is a derived type, DON'T load — return the pointer
19914 // (for chained access like x%inner%field).
19915 if let crate::sema::symtab::TypeInfo::Derived(_) = &field.type_info {
19916 return field_ptr;
19917 }
19918
19919 if is_deferred_char_component_field(field) {
19920 let (ptr, _len) = load_string_descriptor_view(b, field_ptr);
19921 return ptr;
19922 }
19923
19924 // Character fields: return the pointer to the inline
19925 // character data, not a load. The data is stored
19926 // inline in the struct, not behind a pointer.
19927 if let crate::sema::symtab::TypeInfo::Character { .. } =
19928 &field.type_info
19929 {
19930 return field_ptr;
19931 }
19932
19933 let ir_ty = type_info_to_ir_type(&field.type_info);
19934 return b.load_typed(field_ptr, ir_ty);
19935 }
19936 } else {
19937 eprintln!("warning: no field '{}' in type '{}'", component, type_name);
19938 }
19939 }
19940 }
19941 b.const_i32(0) // fallback for unresolved component access
19942 }
19943
19944 Expr::ArrayConstructor { values, .. } => {
19945 // Allocate a temporary stack array, store each literal
19946 // element into it, return the base pointer. Element
19947 // type is inferred from the first element's IR type
19948 // (or defaults to i32 for an empty constructor — rare
19949 // but legal). Implied-do values are slot-skipped by
19950 // store_ac_values_into; see the helper for details.
19951 //
19952 // The expression form is needed when an array literal
19953 // appears as a function argument or print item; the
19954 // assignment form (`a = [1,2,3]`) bypasses this and
19955 // routes through lower_array_assign for direct stores.
19956 let elem_ty = values
19957 .iter()
19958 .find_map(|v| match v {
19959 crate::ast::expr::AcValue::Expr(e) => {
19960 // Peek at the first element's type by
19961 // lowering it on a scratch path. Rather
19962 // than actually lower (and have to undo),
19963 // approximate from the AST: integer
19964 // literals → i32, real → f64, etc.
19965 Some(infer_const_expr_ty(&e.node))
19966 }
19967 _ => None,
19968 })
19969 .unwrap_or(IrType::Int(IntWidth::I32));
19970 let n = values.len() as u64;
19971 let arr_ty = IrType::Array(Box::new(elem_ty.clone()), n.max(1));
19972 let buf = b.alloca(arr_ty);
19973 store_ac_values_into(b, locals, buf, &elem_ty, values, st);
19974 buf
19975 }
19976
19977 Expr::ComplexLiteral { real, imag } => {
19978 // Complex numbers are stored as a 2-element float array on the stack.
19979 // Determine float width from the literal parts: if either uses a 'd'/'D'
19980 // exponent it's double precision (f64), otherwise single (f32).
19981 let is_double = |e: &crate::ast::expr::SpannedExpr| -> bool {
19982 if let Expr::RealLiteral { text, .. } = &e.node {
19983 text.to_lowercase().contains('d')
19984 } else {
19985 false
19986 }
19987 };
19988 let fw = if is_double(real) || is_double(imag) {
19989 FloatWidth::F64
19990 } else {
19991 FloatWidth::F32
19992 };
19993 let elem_ty = IrType::Float(fw);
19994 let elem_bytes = b.const_i64(if fw == FloatWidth::F64 { 8 } else { 4 });
19995 let arr_ty = IrType::Array(Box::new(elem_ty.clone()), 2);
19996 let buf = b.alloca(arr_ty);
19997
19998 let real_raw = lower_expr_full(
19999 b,
20000 locals,
20001 real,
20002 st,
20003 type_layouts,
20004 internal_funcs,
20005 contained_host_refs,
20006 descriptor_params,
20007 );
20008 let imag_raw = lower_expr_full(
20009 b,
20010 locals,
20011 imag,
20012 st,
20013 type_layouts,
20014 internal_funcs,
20015 contained_host_refs,
20016 descriptor_params,
20017 );
20018 let real_val = coerce_to_type(b, real_raw, &elem_ty);
20019 let imag_val = coerce_to_type(b, imag_raw, &elem_ty);
20020
20021 // Store real at byte offset 0, imag at byte offset elem_bytes.
20022 let zero = b.const_i64(0);
20023 let real_ptr = b.gep(buf, vec![zero], IrType::Int(IntWidth::I8));
20024 b.store(real_val, real_ptr);
20025 let imag_ptr = b.gep(buf, vec![elem_bytes], IrType::Int(IntWidth::I8));
20026 b.store(imag_val, imag_ptr);
20027
20028 buf
20029 }
20030 }
20031 }
20032
20033 /// Approximate the IR type of a constant-or-near-constant
20034 /// expression by inspecting the AST. Used by ArrayConstructor
20035 /// lowering to pick an element type without actually emitting IR.
20036 /// Conservative — falls back to i32 for anything it can't
20037 /// classify.
20038 fn infer_const_expr_ty(e: &Expr) -> IrType {
20039 match e {
20040 Expr::IntegerLiteral { kind, .. } => {
20041 if kind.as_deref() == Some("16") {
20042 IrType::Int(IntWidth::I128)
20043 } else if kind.as_deref() == Some("8") {
20044 IrType::Int(IntWidth::I64)
20045 } else {
20046 IrType::Int(IntWidth::I32)
20047 }
20048 }
20049 Expr::RealLiteral { text, .. } => {
20050 if text.to_lowercase().contains('d') {
20051 IrType::Float(FloatWidth::F64)
20052 } else {
20053 IrType::Float(FloatWidth::F32)
20054 }
20055 }
20056 Expr::LogicalLiteral { .. } => IrType::Bool,
20057 Expr::UnaryOp { operand, .. } => infer_const_expr_ty(&operand.node),
20058 Expr::ParenExpr { inner } => infer_const_expr_ty(&inner.node),
20059 _ => IrType::Int(IntWidth::I32),
20060 }
20061 }
20062
20063 #[cfg(test)]
20064 mod tests {
20065 use super::super::printer;
20066 use super::super::verify;
20067 use super::*;
20068 use crate::lexer::Lexer;
20069 use crate::parser::Parser;
20070 use crate::sema::resolve;
20071
20072 fn lower_source(src: &str) -> Module {
20073 let tokens = Lexer::tokenize(src, 0).unwrap();
20074 let mut parser = Parser::new(&tokens);
20075 let units = parser.parse_file().unwrap();
20076 let (st, layouts) = {
20077 let rr = resolve::resolve_file(&units, &[]).unwrap();
20078 (rr.st, rr.type_layouts)
20079 };
20080 lower_file(
20081 &units,
20082 &st,
20083 &layouts,
20084 HashMap::new(),
20085 HashMap::new(),
20086 HashMap::new(),
20087 )
20088 .0
20089 }
20090
20091 fn lower_and_verify(src: &str) -> (Module, String) {
20092 let module = lower_source(src);
20093 let errs = verify::verify_module(&module);
20094 assert!(
20095 errs.is_empty(),
20096 "IR verification failed:\n{}\nIR:\n{}",
20097 errs.iter()
20098 .map(|e| e.to_string())
20099 .collect::<Vec<_>>()
20100 .join("\n"),
20101 printer::print_module(&module)
20102 );
20103 let ir_text = printer::print_module(&module);
20104 (module, ir_text)
20105 }
20106
20107 #[test]
20108 fn lower_integer_arithmetic() {
20109 let (module, ir) = lower_and_verify(
20110 "\
20111 program test
20112 implicit none
20113 integer :: x, y, z
20114 x = 10
20115 y = 20
20116 z = x + y
20117 end program
20118 ",
20119 );
20120 assert_eq!(module.functions.len(), 1);
20121 assert!(ir.contains("const_int 10"));
20122 assert!(ir.contains("const_int 20"));
20123 assert!(ir.contains("iadd"));
20124 }
20125
20126 #[test]
20127 fn lower_real_arithmetic() {
20128 let (_, ir) = lower_and_verify(
20129 "\
20130 program test
20131 implicit none
20132 real :: a, b, c
20133 a = 3.14
20134 b = 2.0
20135 c = a * b
20136 end program
20137 ",
20138 );
20139 assert!(ir.contains("const_float"));
20140 assert!(ir.contains("fmul"));
20141 }
20142
20143 #[test]
20144 fn lower_print() {
20145 let (_, ir) = lower_and_verify(
20146 "\
20147 program test
20148 implicit none
20149 integer :: x
20150 x = 42
20151 print *, x
20152 end program
20153 ",
20154 );
20155 assert!(ir.contains("afs_write_int"));
20156 assert!(ir.contains("afs_write_newline"));
20157 }
20158
20159 #[test]
20160 fn lower_print_integer16_uses_wide_writer() {
20161 let (_, ir) = lower_and_verify(
20162 "\
20163 program test
20164 implicit none
20165 integer(16) :: x
20166 x = 170141183460469231731687303715884105727_16
20167 print *, x
20168 end program
20169 ",
20170 );
20171 assert!(ir.contains("afs_write_int128"));
20172 }
20173
20174 #[test]
20175 fn lower_read_integer16_uses_wide_reader() {
20176 let (_, ir) = lower_and_verify(
20177 "\
20178 program test
20179 implicit none
20180 integer(16) :: x
20181 read(*, *) x
20182 end program
20183 ",
20184 );
20185 assert!(ir.contains("afs_read_int128"));
20186 }
20187
20188 #[test]
20189 fn lower_internal_write_integer16_uses_wide_buffer_writer() {
20190 let (_, ir) = lower_and_verify(
20191 "\
20192 program test
20193 implicit none
20194 character(len=96) :: buf
20195 integer(16) :: x
20196 x = 170141183460469231731687303715884105727_16
20197 write(buf, *) x
20198 end program
20199 ",
20200 );
20201 assert!(ir.contains("afs_write_internal_int128"));
20202 }
20203
#[test]
fn lower_internal_read_integer16_uses_wide_buffer_reader() {
    // An internal (character-buffer) READ into an integer(16) must call
    // the 128-bit internal-buffer reader.
    let src = "\
program test
implicit none
character(len=96) :: buf
integer(16) :: x
read(buf, *) x
end program
";
    let (_module, ir) = lower_and_verify(src);
    assert!(ir.contains("afs_read_internal_int128"));
}
20218
#[test]
fn lower_formatted_internal_write_integer16_uses_internal_format_sink() {
    // A formatted WRITE into a character buffer must open an internal
    // format sink and push the value via the 128-bit variant.
    let src = "\
program test
implicit none
character(len=96) :: buf
integer(16) :: x
x = 170141183460469231731687303715884105727_16
write(buf, '(I40)') x
end program
";
    let (_module, ir) = lower_and_verify(src);
    for needle in ["afs_fmt_begin_internal", "afs_fmt_push_int128"] {
        assert!(ir.contains(needle));
    }
}
20235
#[test]
fn lower_formatted_internal_read_integer16_uses_internal_format_reader() {
    // A formatted READ from a character buffer into integer(16) must
    // use the internal 128-bit format reader.
    let src = "\
program test
implicit none
character(len=64) :: buf
integer(16) :: x
read(buf, '(I40)') x
end program
";
    let (_module, ir) = lower_and_verify(src);
    assert!(ir.contains("afs_fmt_read_int128_internal"));
}
20250
#[test]
fn lower_formatted_read_integer16_uses_wide_format_reader() {
    // A formatted READ from a unit number into integer(16) must use the
    // external 128-bit format reader.
    let src = "\
program test
implicit none
integer(16) :: x
read(10, '(I40)') x
end program
";
    let (_module, ir) = lower_and_verify(src);
    assert!(ir.contains("afs_fmt_read_int128"));
}
20264
#[test]
fn lower_formatted_write_integer16_uses_wide_push() {
    // A formatted WRITE of an integer(16) must push the value through
    // the 128-bit format push helper.
    let src = "\
program test
implicit none
integer(16) :: x
x = 170141183460469231731687303715884105727_16
write(*, '(I40)') x
end program
";
    let (_module, ir) = lower_and_verify(src);
    assert!(ir.contains("afs_fmt_push_int128"));
}
20279
#[test]
fn lower_unary_minus() {
    // `y = -x` on integers must lower to an ineg instruction.
    let src = "\
program test
implicit none
integer :: x, y
x = 5
y = -x
end program
";
    let (_module, ir) = lower_and_verify(src);
    assert!(ir.contains("ineg"));
}
20294
#[test]
fn lower_multiple_vars() {
    // Chained addition `a + b + c` must lower left-associatively into
    // exactly two iadd instructions, all inside a single function.
    let (module, ir) = lower_and_verify(
        "\
program test
implicit none
integer :: a, b, c, d
a = 1
b = 2
c = 3
d = a + b + c
end program
",
    );
    assert_eq!(module.functions.len(), 1);
    // Should have two iadd operations (a+b, then result+c).
    let iadd_count = ir.matches("iadd").count();
    assert_eq!(iadd_count, 2);
}
20314
#[test]
fn lower_stop() {
    // STOP must call the runtime stop hook and terminate the block
    // with an unreachable terminator.
    let src = "\
program test
implicit none
stop
end program
";
    let (_module, ir) = lower_and_verify(src);
    for needle in ["rt_call @__afs_stop", "unreachable"] {
        assert!(ir.contains(needle));
    }
}
20328
#[test]
fn ir_passes_verifier() {
    // Each snippet must lower and pass the IR verifier without panicking.
    for src in [
        "program p\n implicit none\n integer :: x\n x = 1\nend program\n",
        "program p\n implicit none\n real :: x\n x = 1.0\nend program\n",
        "program p\n implicit none\n integer :: x, y\n x = 1\n y = x + 2\n print *, y\nend program\n",
    ] {
        lower_and_verify(src);
    }
}
20335
20336 // ---- Control flow ----
20337
#[test]
fn lower_if_then_else() {
    // Simple diamond `if (cond) y = a; else y = b` lowers to Select.
    // Both arms are single assignments to the same variable, so the
    // lowering is expected to avoid branches entirely.
    let (_, ir) = lower_and_verify(
        "\
program test
implicit none
integer :: x, y
x = 5
if (x > 0) then
y = 1
else
y = -1
end if
end program
",
    );
    assert!(
        ir.contains("select"),
        "simple diamond should lower to select: {}",
        ir
    );
}
20361
#[test]
fn lower_if_then_else_branching() {
    // Non-diamond IF/ELSE (multi-statement body) must still use branches.
    // Expects a cond_br plus the then/else/end block labels.
    let (_, ir) = lower_and_verify(
        "\
program test
implicit none
integer :: x, y, z
x = 5
if (x > 0) then
y = 1
z = 2
else
y = -1
z = -2
end if
end program
",
    );
    assert!(
        ir.contains("cond_br"),
        "multi-stmt if/else should use branches: {}",
        ir
    );
    assert!(ir.contains("if_then"));
    assert!(ir.contains("if_else"));
    assert!(ir.contains("if_end"));
}
20390
#[test]
fn lower_if_elseif() {
    // An ELSE IF arm must synthesize its own numbered block label
    // (`elseif_0_then` for the first else-if).
    let (_, ir) = lower_and_verify(
        "\
program test
implicit none
integer :: x, y
x = 5
if (x > 10) then
y = 1
else if (x > 0) then
y = 2
else
y = 3
end if
end program
",
    );
    assert!(ir.contains("elseif_0_then"));
}
20411
#[test]
fn lower_if_stmt() {
    // A one-line logical IF statement still produces the then/end
    // block labels of a full IF construct.
    let src = "\
program test
implicit none
integer :: x
x = 5
if (x > 0) x = 0
end program
";
    let (_module, ir) = lower_and_verify(src);
    for needle in ["if_then", "if_end"] {
        assert!(ir.contains(needle));
    }
}
20427
#[test]
fn lower_do_loop() {
    // A counted DO loop lowers to check/body/incr/exit blocks with a
    // signed `le` comparison for the default positive step.
    let src = "\
program test
implicit none
integer :: i, s
s = 0
do i = 1, 10
s = s + i
end do
end program
";
    let (_module, ir) = lower_and_verify(src);
    for needle in ["do_check", "do_body", "do_incr", "do_exit", "icmp le"] {
        assert!(ir.contains(needle));
    }
}
20448
#[test]
fn lower_do_loop_with_step() {
    // An explicit step of 2 must materialize `const_int 2` and still
    // use the increment block.
    let src = "\
program test
implicit none
integer :: i, s
s = 0
do i = 1, 10, 2
s = s + i
end do
end program
";
    let (_module, ir) = lower_and_verify(src);
    for needle in ["const_int 2", "do_incr"] {
        assert!(ir.contains(needle));
    }
}
20466
#[test]
fn lower_do_concurrent_uses_distinct_blocks() {
    // DO CONCURRENT gets its own `doconc_*` block naming (distinct from
    // plain DO's `do_*`) while keeping the same check/body/incr/exit
    // shape and `le` bound comparison.
    let (_, ir) = lower_and_verify(
        "\
program test
implicit none
integer :: i, arr(10)
do concurrent (i = 1:10)
arr(i) = i * 2
end do
end program
",
    );
    assert!(ir.contains("doconc_check"));
    assert!(ir.contains("doconc_body"));
    assert!(ir.contains("doconc_incr"));
    assert!(ir.contains("doconc_exit"));
    assert!(ir.contains("icmp le"));
}
20486
#[test]
fn lower_do_concurrent_mask_emits_guard() {
    // A mask expression on DO CONCURRENT must wrap the body in an IF
    // guard (if_then/if_end) inside the loop.
    let (_, ir) = lower_and_verify(
        "\
program test
implicit none
integer :: i, arr(6)
arr = 0
do concurrent (i = 1:6, mod(i, 2) == 0)
arr(i) = i
end do
end program
",
    );
    assert!(ir.contains("doconc_check"));
    assert!(ir.contains("if_then"));
    assert!(ir.contains("if_end"));
}
20505
#[test]
fn mod_intrinsic_coerces_mixed_width_integer_args() {
    // mod() on int64 operands must lower to a plain imod.
    // NOTE(review): the negative assert assumes a width-mismatch
    // diagnostic would appear in the printed IR text — confirm that
    // lower_and_verify embeds such errors rather than panicking.
    let (_, ir) = lower_and_verify(
        "\
program test
use iso_fortran_env, only: int64
implicit none
integer(int64) :: total_commands
logical :: should_report
total_commands = 51_int64
should_report = mod(total_commands, 50_int64) == 0_int64
end program
",
    );
    assert!(ir.contains("imod"));
    assert!(!ir.contains("operand width mismatch"));
}
20523
#[test]
fn lower_do_concurrent_multiple_controls_nests_loops() {
    // Two index controls in one DO CONCURRENT must produce (at least)
    // two nested loops, i.e. two `doconc_check` headers.
    let (_, ir) = lower_and_verify(
        "\
program test
implicit none
integer :: i, j, arr(3, 2)
do concurrent (i = 1:3, j = 1:2)
arr(i, j) = i * 10 + j
end do
end program
",
    );
    assert!(ir.matches("doconc_check").count() >= 2);
}
20539
#[test]
fn lower_do_concurrent_full_array_map_uses_bulk_kernel() {
    // A DO CONCURRENT that maps a whole array element-wise
    // (c(i) = a(i) + b(i) over the full extent) must be recognized and
    // replaced by a single bulk runtime kernel call — no loop blocks
    // should remain.
    let (_, ir) = lower_and_verify(
        "\
program test
implicit none
integer :: i, a(8), b(8), c(8)
do i = 1, 8
a(i) = i
b(i) = i * 10
end do
do concurrent (i = 1:8)
c(i) = a(i) + b(i)
end do
end program
",
    );
    assert!(ir.contains("call @afs_array_add_i32("));
    assert!(!ir.contains("doconc_check"));
}
20560
#[test]
fn lower_whole_array_elemental_assign_uses_do_concurrent_shape() {
    // A whole-array assignment from an elemental internal function must
    // lower to a DO CONCURRENT-shaped loop (`doconc_check`) that calls
    // the mangled internal procedure per element.
    let (_, ir) = lower_and_verify(
        "\
program test
implicit none
integer :: a(4), b(4), i
do i = 1, 4
a(i) = i * 2
end do
b = shift_scale(a, 5)
contains
elemental function shift_scale(x, y) result(r)
integer, intent(in) :: x, y
integer :: r
r = x * 2 + y
end function
end program
",
    );
    assert!(ir.contains("doconc_check"));
    assert!(ir.contains("call @afs_internal___prog_test_1("));
}
20584
#[test]
fn lower_do_while() {
    // DO WHILE lowers to its own header/body/exit block naming.
    let src = "\
program test
implicit none
integer :: x
x = 10
do while (x > 0)
x = x - 1
end do
end program
";
    let (_module, ir) = lower_and_verify(src);
    for needle in ["do_while_header", "do_while_body", "do_while_exit"] {
        assert!(ir.contains(needle));
    }
}
20603
#[test]
fn lower_exit_cycle() {
    // EXIT should branch to do_exit, CYCLE to do_incr; both block
    // labels must therefore be present.
    let src = "\
program test
implicit none
integer :: i, s
s = 0
do i = 1, 100
if (i > 10) exit
if (i == 5) cycle
s = s + i
end do
end program
";
    let (_module, ir) = lower_and_verify(src);
    for needle in ["do_exit", "do_incr"] {
        assert!(ir.contains(needle));
    }
}
20624
#[test]
fn lower_select_case() {
    // SELECT CASE must synthesize one numbered body block per CASE arm
    // plus a common `select_end` join block.
    let (_, ir) = lower_and_verify(
        "\
program test
implicit none
integer :: x, y
x = 2
select case (x)
case (1)
y = 10
case (2)
y = 20
case default
y = 0
end select
end program
",
    );
    assert!(ir.contains("case_0_body"));
    assert!(ir.contains("case_1_body"));
    assert!(ir.contains("select_end"));
}
20648
#[test]
fn lower_nested_loops() {
    // Nested DO loops must each get their own header block.
    let (_, ir) = lower_and_verify(
        "\
program test
implicit none
integer :: i, j, s
s = 0
do i = 1, 10
do j = 1, 10
s = s + i * j
end do
end do
end program
",
    );
    // Two loops means 2 blocks named "do_check_N":
    let label_count = ir.matches("do_check_").count();
    assert!(
        label_count >= 2,
        "expected at least 2 loop headers, got {} in:\n{}",
        label_count,
        ir
    );
}
20674
#[test]
fn lower_do_negative_step() {
    // A DO loop counting down with step -1 must flip the bound test.
    let (_, ir) = lower_and_verify(
        "\
program test
implicit none
integer :: i, s
s = 0
do i = 10, 1, -1
s = s + i
end do
end program
",
    );
    // Negative step should use >= comparison, not <=.
    assert!(
        ir.contains("icmp ge"),
        "expected 'icmp ge' for negative step in:\n{}",
        ir
    );
}
20696
#[test]
fn lower_function_return() {
    // RETURN inside a function must yield the result variable's value,
    // never a void return.
    let (_, ir) = lower_and_verify(
        "\
function square(x) result(y)
integer, intent(in) :: x
integer :: y
y = x * x
return
end function
",
    );
    // RETURN should load the result variable and ret it, not ret void.
    assert!(ir.contains("ret %"), "expected 'ret %value' in:\n{}", ir);
    assert!(
        !ir.contains("ret void"),
        "function should not ret void in:\n{}",
        ir
    );
}
20717
#[test]
fn lower_return() {
    // RETURN in a subroutine (no result value) lowers to `ret void`.
    let src = "\
subroutine foo()
implicit none
return
end subroutine
";
    let (_module, ir) = lower_and_verify(src);
    assert!(ir.contains("ret void"));
}
20730
#[test]
fn lower_associate() {
    // Printing through an ASSOCIATE alias must still reach the runtime
    // integer writer.
    let src = "\
program test
implicit none
integer :: x
x = 42
associate (n => x)
print *, n
end associate
end program
";
    let (_module, ir) = lower_and_verify(src);
    assert!(ir.contains("afs_write_int"));
}
20747
20748 // ---- Allocatable / strings ----
20749
#[test]
fn lower_allocate_deallocate() {
    // ALLOCATE/DEALLOCATE of an allocatable array must lower to the
    // paired runtime descriptor calls.
    let (_, ir) = lower_and_verify(
        "\
program test
implicit none
real, allocatable :: a(:)
allocate(a(100))
deallocate(a)
end program
",
    );
    assert!(
        ir.contains("call @afs_allocate_array"),
        "expected allocate call in:\n{}",
        ir
    );
    assert!(
        ir.contains("call @afs_deallocate_array"),
        "expected deallocate call in:\n{}",
        ir
    );
}
20773
#[test]
fn lower_implicit_dealloc_at_scope_exit() {
    // An allocatable local that is never explicitly deallocated must
    // still be freed when the subroutine returns.
    let (_, ir) = lower_and_verify(
        "\
subroutine foo()
implicit none
real, allocatable :: temp(:)
allocate(temp(10))
end subroutine
",
    );
    // Should have implicit deallocation before ret.
    let dealloc_count = ir.matches("call @afs_deallocate_array").count();
    assert!(
        dealloc_count >= 1,
        "expected implicit deallocation, got {} in:\n{}",
        dealloc_count,
        ir
    );
}
20794
#[test]
fn lower_allocatable_c_char_array_element_assignment() {
    // An allocatable character(kind=c_char) array with element-wise
    // assignment from substrings must still be allocated through the
    // runtime array descriptor path (regression for c_char handling).
    let (_, ir) = lower_and_verify(
        "\
subroutine foo(str, n)
use iso_c_binding, only: c_char
implicit none
character(len=*), intent(in) :: str
integer, intent(in) :: n
character(kind=c_char), target, allocatable :: c_str(:)
integer :: i
allocate(c_str(n + 1))
do i = 1, n
c_str(i) = str(i:i)
end do
c_str(n + 1) = char(10)
end subroutine
",
    );
    assert!(
        ir.contains("call @afs_allocate_array"),
        "expected descriptor-backed allocation in:\n{}",
        ir
    );
}
20820
#[test]
fn lower_string_literal() {
    // A character literal printed via list-directed I/O must become a
    // string constant routed through the runtime string writer.
    let src = "\
program test
implicit none
print *, 'hello'
end program
";
    let (_module, ir) = lower_and_verify(src);
    assert!(
        ir.contains("const_string"),
        "expected string constant in:\n{}",
        ir
    );
    assert!(ir.contains("afs_write_string"));
}
20838
20839 // ---- Calls ----
20840
#[test]
fn lower_call_passes_addresses() {
    // CALL with a variable actual argument must pass by reference.
    let (_, ir) = lower_and_verify(
        "\
program test
implicit none
integer :: x
x = 42
call foo(x)
end program
",
    );
    // x should be passed by reference — the alloca address, not a loaded value.
    // The call should reference the alloca directly.
    assert!(ir.contains("call @foo("));
}
20857
#[test]
fn lower_call_expression_arg() {
    // CALL with an expression actual argument must materialize the
    // value into a temporary and pass that temporary's address.
    let (_, ir) = lower_and_verify(
        "\
program test
implicit none
integer :: x
x = 5
call foo(x + 1)
end program
",
    );
    // Expression arg: x+1 evaluated, stored to temp, temp address passed.
    assert!(ir.contains("iadd"));
    assert!(ir.contains("alloca")); // temp for expression result
    assert!(ir.contains("call @foo("));
}
20875
20876 // ---- Arrays ----
20877
#[test]
fn lower_array_declaration() {
    // A fixed-size local array must become a sized stack allocation,
    // with element stores going through GEP.
    let (_, ir) = lower_and_verify(
        "\
program test
implicit none
integer :: a(10)
a(1) = 42
end program
",
    );
    // Should alloca an array of 10 i32, then GEP + store.
    assert!(
        ir.contains("[i32 x 10]"),
        "expected array alloca in:\n{}",
        ir
    );
    assert!(ir.contains("gep"), "expected GEP in:\n{}", ir);
}
20897
#[test]
fn lower_array_read() {
    // Both the element store and the element load must compute the
    // address via GEP — at least two GEPs overall.
    let (_, ir) = lower_and_verify(
        "\
program test
implicit none
integer :: a(10), x
a(3) = 99
x = a(3)
end program
",
    );
    // Reading array element: GEP + load.
    let gep_count = ir.matches("gep").count();
    assert!(
        gep_count >= 2,
        "expected at least 2 GEPs (write + read), got {} in:\n{}",
        gep_count,
        ir
    );
}
20919
#[test]
fn lower_2d_array() {
    // A 2D array is flattened to a single linear allocation of
    // rows*cols elements; indexing is done with a computed GEP.
    let (_, ir) = lower_and_verify(
        "\
program test
implicit none
integer :: mat(3, 4)
mat(2, 3) = 42
end program
",
    );
    // 2D array: alloca [i32 x 12], column-major offset.
    assert!(
        ir.contains("[i32 x 12]"),
        "expected 3*4=12 element array in:\n{}",
        ir
    );
    assert!(ir.contains("gep"), "expected GEP in:\n{}", ir);
}
20939
#[test]
fn lower_array_in_loop() {
    // Indexing with the loop variable still requires a GEP address
    // computation, and `i * 2` an imul.
    let src = "\
program test
implicit none
integer :: a(10), i
do i = 1, 10
a(i) = i * 2
end do
end program
";
    let (_module, ir) = lower_and_verify(src);
    for needle in ["gep", "imul"] {
        assert!(ir.contains(needle));
    }
}
20956
#[test]
fn lower_module_globals() {
    // Module-level variables must lower to IR globals whose (mangled)
    // names still contain the Fortran variable names.
    let module = lower_source(
        "\
module mymod
implicit none
integer :: counter
real :: threshold
end module
",
    );
    assert_eq!(module.globals.len(), 2);
    assert!(module.globals.iter().any(|g| g.name.contains("counter")));
    assert!(module.globals.iter().any(|g| g.name.contains("threshold")));
}
20972
#[test]
fn lower_block_construct() {
    // Statements inside a BLOCK construct must be lowered like any
    // other statements (the `x + 1` produces an iadd).
    let (_, ir) = lower_and_verify(
        "\
program test
implicit none
integer :: x
x = 1
block
x = x + 1
end block
end program
",
    );
    assert!(ir.contains("iadd"));
}
20989 }
20990