// (extraction artifact: "Rust · 25111 bytes Raw Blame History" — GitHub blame header, not source)
1 //! Lowering of Fortran intrinsic subroutines (CALL system_clock,
2 //! CALL date_and_time, CALL c_f_pointer, ...).
3 //!
4 //! Extracted from `core.rs` in Sprint 11 Stage B.2. Pure mechanical
5 //! move — behavior unchanged. Helpers consulted via `core::*`.
6
7 use crate::ir::builder::FuncBuilder;
8 use crate::ir::inst::*;
9 use crate::ir::types::*;
10
11 use super::core::*;
12 use super::ctx::{CharKind, LowerCtx};
13 use super::helpers::coerce_to_type;
14 use crate::ast::expr::Expr;
15
16 /// Lower an intrinsic subroutine call (CALL system_clock, CALL date_and_time, etc.).
17 /// Returns true if the name was recognized and lowered, false otherwise.
/// Lower an intrinsic subroutine call (CALL system_clock, CALL date_and_time, etc.).
/// Returns true if the name was recognized and lowered, false otherwise.
///
/// Keyword/positional actuals are first normalized into fixed positional
/// slots via `reorder_args_by_keyword_slots`, so every match arm below
/// indexes arguments by their standard Fortran argument position. Missing
/// optional arguments lower to a null pointer (const i64 0) or a default
/// scalar, which the `afs_*` runtime entry points are expected to accept.
pub(crate) fn lower_intrinsic_subroutine(
    b: &mut FuncBuilder,
    ctx: &mut LowerCtx,
    name: &str,
    args: &[crate::ast::expr::Argument],
) -> bool {
    // Deferred write-back record for runtime ABIs that write results through
    // an i64 slot: the runtime stores into `tmp_ptr`; after the call we load
    // the raw i64, coerce it to `dest_ty`, and store through `dest_ptr`.
    #[derive(Clone)]
    struct RuntimeOutWriteback {
        dest_ptr: ValueId, // pointer to the caller's actual destination variable
        dest_ty: IrType,   // pointee type of `dest_ptr` (known to be non-i64)
        tmp_ptr: ValueId,  // temporary i64 alloca the runtime writes into
    }

    /// Helper: get the nth positional arg as a by-ref pointer, or null if absent.
    /// Non-`Element` subscripts (sections) and missing slots both fall through
    /// to the null-pointer case.
    fn nth_arg_ref(
        b: &mut FuncBuilder,
        ctx: &LowerCtx,
        args: &[Option<crate::ast::expr::Argument>],
        n: usize,
    ) -> ValueId {
        if let Some(Some(arg)) = args.get(n) {
            if let crate::ast::expr::SectionSubscript::Element(e) = &arg.value {
                return lower_arg_by_ref_ctx(b, ctx, e);
            }
        }
        b.const_i64(0) // null pointer for missing optional arg
    }

    /// Helper: get the nth positional arg as a by-value expression, or a
    /// caller-supplied i32 `default` when the slot is absent.
    fn nth_arg_val(
        b: &mut FuncBuilder,
        ctx: &LowerCtx,
        args: &[Option<crate::ast::expr::Argument>],
        n: usize,
        default: i32,
    ) -> ValueId {
        if let Some(Some(arg)) = args.get(n) {
            if let crate::ast::expr::SectionSubscript::Element(e) = &arg.value {
                return super::expr::lower_expr_ctx(b, ctx, e);
            }
        }
        b.const_i32(default)
    }

    /// Helper: get the nth positional arg as a (ptr, len) string pair, or (null, 0).
    /// Non-character expressions are passed by reference with a zero length so
    /// the runtime can distinguish "no string" from a real buffer.
    fn nth_arg_str(
        b: &mut FuncBuilder,
        ctx: &LowerCtx,
        args: &[Option<crate::ast::expr::Argument>],
        n: usize,
    ) -> (ValueId, ValueId) {
        if let Some(Some(arg)) = args.get(n) {
            if let crate::ast::expr::SectionSubscript::Element(e) = &arg.value {
                if expr_is_character_expr(b, &ctx.locals, e, ctx.st, Some(ctx.type_layouts)) {
                    return lower_string_expr_ctx(b, ctx, e);
                }
                // Otherwise pass as ref + zero length.
                let ptr = lower_arg_by_ref_ctx(b, ctx, e);
                let zero = b.const_i64(0);
                return (ptr, zero);
            }
        }
        let z = b.const_i64(0);
        (z, z)
    }

    /// Helper: adapt an intrinsic out-arg to a runtime ABI that writes
    /// through an i64 slot. Non-i64 destinations get a temporary i64
    /// alloca followed by an explicit writeback after the runtime call.
    ///
    /// Returns the pointer to hand to the runtime, plus an optional
    /// `RuntimeOutWriteback` the caller must apply after the call.
    fn nth_arg_i64_out(
        b: &mut FuncBuilder,
        ctx: &LowerCtx,
        args: &[Option<crate::ast::expr::Argument>],
        n: usize,
    ) -> (ValueId, Option<RuntimeOutWriteback>) {
        if let Some(Some(arg)) = args.get(n) {
            if let crate::ast::expr::SectionSubscript::Element(e) = &arg.value {
                let dest_ptr = lower_arg_by_ref_ctx(b, ctx, e);
                if let Some(IrType::Ptr(inner)) = b.func().value_type(dest_ptr) {
                    let dest_ty = (*inner).clone();
                    if dest_ty == IrType::Int(IntWidth::I64) {
                        // Destination is already i64 — runtime can write directly.
                        return (dest_ptr, None);
                    }
                    // Narrower/other destination: stage through an i64 temp.
                    let tmp_ptr = b.alloca(IrType::Int(IntWidth::I64));
                    return (
                        tmp_ptr,
                        Some(RuntimeOutWriteback {
                            dest_ptr,
                            dest_ty,
                            tmp_ptr,
                        }),
                    );
                }
                // Pointee type unknown: pass the pointer through unchanged.
                return (dest_ptr, None);
            }
        }
        // Missing optional out-arg: null pointer, no writeback.
        (b.const_i64(0), None)
    }

    // Normalize keyword arguments into positional slots for this intrinsic;
    // all arms below index `args` positionally.
    let arg_slots = reorder_args_by_keyword_slots(args, name, ctx.st);
    let args = arg_slots.as_slice();

    /// Resolve a MOVE_ALLOC actual to its descriptor address.
    /// Returns `(descriptor_addr, is_string_descriptor)` or None when the
    /// expression is not a descriptor-backed allocatable.
    fn move_alloc_target(
        b: &mut FuncBuilder,
        ctx: &LowerCtx,
        expr: &crate::ast::expr::SpannedExpr,
    ) -> Option<(ValueId, bool)> {
        match &expr.node {
            // Parentheses are transparent for this purpose.
            Expr::ParenExpr { inner } => move_alloc_target(b, ctx, inner),
            Expr::Name { name } => {
                let info = ctx.locals.get(&name.to_lowercase())?;
                if matches!(info.char_kind, CharKind::Deferred) {
                    // Deferred-length character: string descriptor.
                    Some((string_descriptor_addr(b, info), true))
                } else if local_uses_array_descriptor(info) {
                    // Descriptor-backed array allocatable.
                    Some((array_descriptor_addr(b, info), false))
                } else {
                    None
                }
            }
            Expr::ComponentAccess { .. } => {
                // Derived-type component: prefer a component-local array
                // descriptor when one can be materialized.
                if let Some(info) =
                    component_array_local_info(b, &ctx.locals, expr, ctx.st, ctx.type_layouts)
                {
                    return Some((array_descriptor_addr(b, &info), false));
                }
                // Otherwise discriminate on the field's layout size.
                // NOTE(review): 32 and 384 look like the byte sizes of the
                // string and array descriptors respectively — confirm against
                // the type-layout definitions.
                resolve_component_field_access(b, &ctx.locals, expr, ctx.st, ctx.type_layouts)
                    .and_then(|(field_ptr, field)| {
                        if matches!(field_char_kind(&field), CharKind::Deferred) && field.size == 32
                        {
                            Some((field_ptr, true))
                        } else if field.allocatable && field.size == 384 {
                            Some((field_ptr, false))
                        } else {
                            None
                        }
                    })
            }
            _ => None,
        }
    }

    /// Borrow the nth positional actual's expression, if it is a plain
    /// `Element` subscript (not a section) and the slot is present.
    fn nth_arg_expr(
        args: &[Option<crate::ast::expr::Argument>],
        idx: usize,
    ) -> Option<&crate::ast::expr::SpannedExpr> {
        let arg = args.get(idx)?.as_ref()?;
        if let crate::ast::expr::SectionSubscript::Element(expr) = &arg.value {
            Some(expr)
        } else {
            None
        }
    }

    match name {
        "move_alloc" => {
            // call move_alloc(from, to): transfer the allocation by swapping
            // descriptors in the runtime; both sides must be descriptor-backed
            // and of the same descriptor kind (string vs array).
            let from_expr = args.first().and_then(|arg| {
                let arg = arg.as_ref()?;
                if let crate::ast::expr::SectionSubscript::Element(e) = &arg.value {
                    Some(e)
                } else {
                    None
                }
            });
            let to_expr = args.get(1).and_then(|arg| {
                let arg = arg.as_ref()?;
                if let crate::ast::expr::SectionSubscript::Element(e) = &arg.value {
                    Some(e)
                } else {
                    None
                }
            });
            // Unsupported targets are hard compile errors (exit, not panic),
            // matching the compiler's user-facing diagnostic style.
            let Some((from_desc, from_is_string)) =
                from_expr.and_then(|e| move_alloc_target(b, ctx, e))
            else {
                eprintln!(
                    "armfortas: error: MOVE_ALLOC source must be a descriptor-backed allocatable variable"
                );
                std::process::exit(1);
            };
            let Some((to_desc, to_is_string)) = to_expr.and_then(|e| move_alloc_target(b, ctx, e))
            else {
                eprintln!(
                    "armfortas: error: MOVE_ALLOC destination must be a descriptor-backed allocatable variable"
                );
                std::process::exit(1);
            };
            if from_is_string != to_is_string {
                eprintln!(
                    "armfortas: error: MOVE_ALLOC source and destination must use matching descriptor kinds"
                );
                std::process::exit(1);
            }
            let runtime = if from_is_string {
                "afs_move_alloc_string"
            } else {
                "afs_move_alloc"
            };
            b.call(
                FuncRef::External(runtime.into()),
                vec![from_desc, to_desc],
                IrType::Void,
            );
            true
        }
        "system_clock" => {
            // call system_clock(count, count_rate, count_max) — all optional
            // The runtime writes i64 through each slot; non-i64 actuals are
            // staged through temporaries and written back below.
            let (count, count_writeback) = nth_arg_i64_out(b, ctx, args, 0);
            let (rate, rate_writeback) = nth_arg_i64_out(b, ctx, args, 1);
            let (max, max_writeback) = nth_arg_i64_out(b, ctx, args, 2);
            b.call(
                FuncRef::External("afs_system_clock".into()),
                vec![count, rate, max],
                IrType::Void,
            );
            // Apply staged writebacks: load raw i64, coerce, store to dest.
            for writeback in [count_writeback, rate_writeback, max_writeback]
                .into_iter()
                .flatten()
            {
                let raw = b.load(writeback.tmp_ptr);
                let coerced = coerce_to_type(b, raw, &writeback.dest_ty);
                b.store(coerced, writeback.dest_ptr);
            }
            true
        }
        "cpu_time" => {
            // call cpu_time(time): runtime writes elapsed CPU time by ref.
            let time = nth_arg_ref(b, ctx, args, 0);
            b.call(
                FuncRef::External("afs_cpu_time".into()),
                vec![time],
                IrType::Void,
            );
            true
        }
        "date_and_time" => {
            // call date_and_time(date, time, zone, values) — all optional strings/array
            // Runtime: afs_date_and_time(date_buf, date_len, time_buf, time_len, zone_buf, zone_len, values)
            let (date_ptr, date_len) = nth_arg_str(b, ctx, args, 0);
            let (time_ptr, time_len) = nth_arg_str(b, ctx, args, 1);
            let (zone_ptr, zone_len) = nth_arg_str(b, ctx, args, 2);
            let values = nth_arg_ref(b, ctx, args, 3);
            b.call(
                FuncRef::External("afs_date_and_time".into()),
                vec![
                    date_ptr, date_len, time_ptr, time_len, zone_ptr, zone_len, values,
                ],
                IrType::Void,
            );
            true
        }
        "get_command_argument" => {
            // call get_command_argument(number, value, length, status)
            // Runtime: afs_get_command_argument(number, value, value_len, length, status)
            let number = nth_arg_val(b, ctx, args, 0, 0);
            let (val_ptr, val_len) = nth_arg_str(b, ctx, args, 1);
            let length = nth_arg_ref(b, ctx, args, 2);
            let status = nth_arg_ref(b, ctx, args, 3);
            b.call(
                FuncRef::External("afs_get_command_argument".into()),
                vec![number, val_ptr, val_len, length, status],
                IrType::Void,
            );
            true
        }
        "command_argument_count" => {
            // This is a function, not a subroutine — handled in lower_intrinsic.
            false
        }
        "get_command" => {
            // call get_command(command, length, status)
            let (cmd_ptr, cmd_len) = nth_arg_str(b, ctx, args, 0);
            let length = nth_arg_ref(b, ctx, args, 1);
            let status = nth_arg_ref(b, ctx, args, 2);
            b.call(
                FuncRef::External("afs_get_command".into()),
                vec![cmd_ptr, cmd_len, length, status],
                IrType::Void,
            );
            true
        }
        "get_environment_variable" => {
            // call get_environment_variable(name, value, length, status)
            // Runtime: afs_get_environment_variable(name, name_len, value, value_len, length, status)
            let (name_ptr, name_len) = nth_arg_str(b, ctx, args, 0);
            let (val_ptr, val_len) = nth_arg_str(b, ctx, args, 1);
            let length = nth_arg_ref(b, ctx, args, 2);
            let status = nth_arg_ref(b, ctx, args, 3);
            b.call(
                FuncRef::External("afs_get_environment_variable".into()),
                vec![name_ptr, name_len, val_ptr, val_len, length, status],
                IrType::Void,
            );
            true
        }
        "random_number" => {
            // F2018 §16.9.171: RANDOM_NUMBER(harvest) accepts both
            // scalar and array harvest. The scalar runtime fills only
            // one element; routing array actuals to it left N-1 slots
            // as stack garbage, which surfaced as denormal/NaN values
            // throughout stdlib examples (e.g. sparse_spmv: count() on
            // the resulting matrix returned 1 instead of m*n, malloc
            // sized COO%index(2,1), and the next assign ran past dim 0).
            let harvest = nth_arg_ref(b, ctx, args, 0);
            let harvest_expr = nth_arg_expr(args, 0);
            // Select f32 vs f64 runtime entry point from the declared kind
            // of the harvest expression (REAL kind <= 4 → f32).
            let kind_is_f32 = harvest_expr
                .and_then(|expr| {
                    generic_actual_expr_type_info(
                        expr,
                        &ctx.locals,
                        ctx.st,
                        Some(ctx.type_layouts),
                    )
                })
                .map(|ty| matches!(
                    ty,
                    crate::sema::symtab::TypeInfo::Real { kind: Some(k) } if k <= 4
                ))
                .unwrap_or(false);
            let is_array = harvest_expr
                .map(|e| expr_returns_array(e, &ctx.locals, ctx.st))
                .unwrap_or(false);
            if is_array {
                if let Some(expr) = harvest_expr {
                    // Array harvest: compute the total element count from the
                    // descriptor and call the array-filling runtime variant.
                    if let Some((desc, _elem_ty)) = lower_array_expr_descriptor(
                        b,
                        &ctx.locals,
                        expr,
                        ctx.st,
                        Some(ctx.type_layouts),
                        Some(ctx.internal_funcs),
                        Some(ctx.contained_host_refs),
                        Some(ctx.descriptor_params),
                    ) {
                        let n = b.call(
                            FuncRef::External("afs_array_size".into()),
                            vec![desc],
                            IrType::Int(IntWidth::I64),
                        );
                        let runtime = if kind_is_f32 {
                            "afs_random_number_array_f32"
                        } else {
                            "afs_random_number_array_f64"
                        };
                        b.call(
                            FuncRef::External(runtime.into()),
                            vec![harvest, n],
                            IrType::Void,
                        );
                        return true;
                    }
                }
            }
            // Scalar harvest (or array whose descriptor could not be
            // lowered): fall back to the single-element runtime.
            let runtime = if kind_is_f32 {
                "afs_random_number_f32"
            } else {
                "afs_random_number_f64"
            };
            b.call(
                FuncRef::External(runtime.into()),
                vec![harvest],
                IrType::Void,
            );
            true
        }
        "random_seed" => {
            // call random_seed(seed): widen the seed to i64 for the runtime.
            // NOTE(review): int_extend assumes the lowered seed is narrower
            // than i64; confirm behavior when the actual is already i64.
            let seed = nth_arg_val(b, ctx, args, 0, 0);
            let widened = b.int_extend(seed, IntWidth::I64, true);
            b.call(
                FuncRef::External("afs_random_seed".into()),
                vec![widened],
                IrType::Void,
            );
            true
        }
        "execute_command_line" => {
            // call execute_command_line(command, wait, exitstat, cmdstat);
            // WAIT defaults to 1 (true) when omitted.
            let (cmd_ptr, cmd_len) = nth_arg_str(b, ctx, args, 0);
            let wait = nth_arg_val(b, ctx, args, 1, 1);
            let exitstat = nth_arg_ref(b, ctx, args, 2);
            let cmdstat = nth_arg_ref(b, ctx, args, 3);
            b.call(
                FuncRef::External("afs_execute_command_line".into()),
                vec![cmd_ptr, cmd_len, wait, exitstat, cmdstat],
                IrType::Void,
            );
            true
        }
        "flush" => {
            // call flush(unit): unit defaults to 6 (standard output unit).
            let unit_raw = nth_arg_val(b, ctx, args, 0, 6);
            let unit = coerce_to_type(b, unit_raw, &IrType::Int(IntWidth::I32));
            let null = b.const_i64(0);
            b.call(
                FuncRef::External("afs_flush".into()),
                vec![unit, null],
                IrType::Void,
            );
            true
        }

        // ---- iso_c_binding subroutines ----
        "c_f_pointer" => {
            // call c_f_pointer(cptr, fptr [, shape])
            //
            // Scalar pointers store the raw address directly into the
            // pointer slot. Array pointers are descriptor-backed in
            // armfortas, so we must populate the 384-byte descriptor
            // with base_addr/elem_size/rank/bounds instead of
            // treating the second argument like a plain Ptr<T>.
            let raw_cptr = nth_arg_val(b, ctx, args, 0, 0);
            // Normalize the C address to i64 (zero-extend narrower ints).
            let cptr = match b.func().value_type(raw_cptr) {
                Some(IrType::Int(IntWidth::I64)) => raw_cptr,
                _ => b.int_extend(raw_cptr, IntWidth::I64, false),
            };

            let target_expr = args.get(1).and_then(|arg| {
                let arg = arg.as_ref()?;
                if let crate::ast::expr::SectionSubscript::Element(expr) = &arg.value {
                    Some(expr)
                } else {
                    None
                }
            });
            if let Some(expr) = target_expr {
                if let Some((target_addr, elem_ty, descriptor_backed)) =
                    c_f_pointer_target(b, ctx, expr)
                {
                    if descriptor_backed {
                        // Zero the whole 384-byte descriptor first so any
                        // fields we do not explicitly set are well-defined.
                        let zero32 = b.const_i32(0);
                        let sz384 = b.const_i64(384);
                        b.call(
                            FuncRef::External("memset".into()),
                            vec![target_addr, zero32, sz384],
                            IrType::Ptr(Box::new(IrType::Int(IntWidth::I8))),
                        );

                        // Descriptor byte offsets used below:
                        //   0 = base pointer, 8 = element size, 16 = rank,
                        //   20 = flags, 24.. = per-dimension 24-byte triples.
                        // NOTE(review): layout inferred from these stores —
                        // confirm against the runtime descriptor definition.
                        let base_ptr = b.int_to_ptr(cptr, elem_ty.clone());
                        store_byte_aggregate_field(
                            b,
                            target_addr,
                            0,
                            IrType::Ptr(Box::new(elem_ty.clone())),
                            base_ptr,
                        );
                        let elem_size = b.const_i64(ir_scalar_byte_size(&elem_ty));
                        store_byte_aggregate_field(
                            b,
                            target_addr,
                            8,
                            IrType::Int(IntWidth::I64),
                            elem_size,
                        );

                        // Rank comes from the SHAPE argument's element count
                        // (0 when SHAPE is absent).
                        let shape_vals = c_f_pointer_shape_values(args);
                        let rank = shape_vals.map_or(0, |vals| vals.len());
                        let rank_val = b.const_i32(rank as i32);
                        store_byte_aggregate_field(
                            b,
                            target_addr,
                            16,
                            IrType::Int(IntWidth::I32),
                            rank_val,
                        );

                        // Flags: 2 when the C pointer is non-null (pointer
                        // associated), 0 otherwise.
                        // NOTE(review): flag value meanings inferred — verify
                        // against the descriptor flag constants.
                        let null_cptr = b.const_i64(0);
                        let is_associated = b.icmp(CmpOp::Ne, cptr, null_cptr);
                        let assoc_flag = b.const_i32(2);
                        let disassoc_flag = b.const_i32(0);
                        let flags = b.select(is_associated, assoc_flag, disassoc_flag);
                        store_byte_aggregate_field(
                            b,
                            target_addr,
                            20,
                            IrType::Int(IntWidth::I32),
                            flags,
                        );

                        if let Some(values) = shape_vals {
                            // Per-dimension triple at 24 + i*24:
                            //   +0 lower bound (always 1), +8 extent,
                            //   +16 stride (always 1 — contiguous C data).
                            for (i, value) in values.iter().enumerate() {
                                let crate::ast::expr::AcValue::Expr(extent_expr) = value else {
                                    continue;
                                };
                                let raw_extent = super::expr::lower_expr_ctx(b, ctx, extent_expr);
                                let extent = match b.func().value_type(raw_extent) {
                                    Some(IrType::Int(IntWidth::I64)) => raw_extent,
                                    _ => b.int_extend(raw_extent, IntWidth::I64, true),
                                };
                                let base = 24 + (i as i64) * 24;
                                let one64 = b.const_i64(1);
                                store_byte_aggregate_field(
                                    b,
                                    target_addr,
                                    base,
                                    IrType::Int(IntWidth::I64),
                                    one64,
                                );
                                store_byte_aggregate_field(
                                    b,
                                    target_addr,
                                    base + 8,
                                    IrType::Int(IntWidth::I64),
                                    extent,
                                );
                                let stride64 = b.const_i64(1);
                                store_byte_aggregate_field(
                                    b,
                                    target_addr,
                                    base + 16,
                                    IrType::Int(IntWidth::I64),
                                    stride64,
                                );
                            }
                        }
                        return true;
                    }

                    // Scalar pointer target: store the raw address directly.
                    let ptr_val = b.int_to_ptr(cptr, elem_ty);
                    b.store(ptr_val, target_addr);
                    return true;
                }
            }

            // Fallback path: FPTR expression was not resolvable as a typed
            // target; infer the pointee type from the lowered slot's IR type
            // (unwrapping one level of Ptr<Ptr<T>>), defaulting to i8.
            let fptr = nth_arg_ref(b, ctx, args, 1);
            let inner_pointee = b
                .func()
                .value_type(fptr)
                .and_then(|ty| {
                    if let IrType::Ptr(inner) = ty {
                        if let IrType::Ptr(elem) = inner.as_ref() {
                            Some(elem.as_ref().clone())
                        } else {
                            Some(inner.as_ref().clone())
                        }
                    } else {
                        None
                    }
                })
                .unwrap_or(IrType::Int(IntWidth::I8));
            let ptr_val = b.int_to_ptr(cptr, inner_pointee);
            b.store(ptr_val, fptr);
            true
        }

        "mvbits" => {
            // F2018 §16.9.155: call mvbits(from, frompos, len, to, topos)
            // Copies len bits starting at bit `frompos` of `from` into
            // `to` starting at bit `topos`. Other bits of `to` are
            // unchanged. Both `from` and `to` must be the same integer
            // kind; we pick the destination's width as authoritative
            // since we have to write back through that pointer.
            //
            // Missing/malformed TO silently lowers to a no-op (returns
            // true so the call is still considered handled).
            let to_arg = match args.get(3) {
                Some(Some(arg)) => arg,
                _ => return true,
            };
            let crate::ast::expr::SectionSubscript::Element(to_expr) = &to_arg.value else {
                return true;
            };
            let to_ptr = lower_arg_by_ref_ctx(b, ctx, to_expr);
            // Destination integer width; default to I32 when undeterminable.
            let to_width = match b.func().value_type(to_ptr) {
                Some(IrType::Ptr(inner)) => match inner.as_ref() {
                    IrType::Int(w) => *w,
                    _ => IntWidth::I32,
                },
                _ => IntWidth::I32,
            };
            // Coerce all scalar inputs to the destination width.
            let from_val = nth_arg_val(b, ctx, args, 0, 0);
            let from = coerce_int_like_to_width(b, from_val, to_width);
            let frompos_val = nth_arg_val(b, ctx, args, 1, 0);
            let frompos = coerce_int_like_to_width(b, frompos_val, to_width);
            let len_val = nth_arg_val(b, ctx, args, 2, 0);
            let len = coerce_int_like_to_width(b, len_val, to_width);
            let topos_val = nth_arg_val(b, ctx, args, 4, 0);
            let topos = coerce_int_like_to_width(b, topos_val, to_width);

            let one = int_const_for_width(b, to_width, 1);
            // (1 << len) - 1
            // NOTE(review): when len equals the integer's bit width the
            // shift amount equals the width — confirm the IR `shl`
            // semantics produce a full mask in that edge case.
            let one_shl = b.shl(one, len);
            let one_again = int_const_for_width(b, to_width, 1);
            let len_mask = b.isub(one_shl, one_again);
            // extracted = (from >> frompos) & len_mask  (logical shift)
            let shifted = b.lshr(from, frompos);
            let extracted = b.bit_and(shifted, len_mask);
            // shifted_dest = extracted << topos
            let shifted_dest = b.shl(extracted, topos);
            // dest_mask = len_mask << topos
            let dest_mask = b.shl(len_mask, topos);
            let inv_mask = b.bit_not(dest_mask);
            // to = (to & ~dest_mask) | shifted_dest — untouched bits preserved.
            let to_loaded = b.load_typed(to_ptr, IrType::Int(to_width));
            let cleared = b.bit_and(to_loaded, inv_mask);
            let updated = b.bit_or(cleared, shifted_dest);
            b.store(updated, to_ptr);
            true
        }

        // Unrecognized name: not an intrinsic subroutine we lower here.
        _ => false,
    }
}
612