| 1 | //! IR instructions, terminators, and values. |
| 2 | //! |
| 3 | //! Every instruction produces a value (ValueId) in SSA form. |
| 4 | //! Basic blocks end with exactly one Terminator. |
| 5 | |
| 6 | use super::types::{FloatWidth, IntWidth, IrType}; |
| 7 | use crate::lexer::Span; |
| 8 | use std::collections::HashMap; |
| 9 | |
/// A value identifier — unique within a function.
// Allocated sequentially by `Function::next_value_id`; parameter ids are
// claimed first, and `Function::new` starts the counter above them.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ValueId(pub u32);
| 13 | |
/// A basic block identifier — unique within a function.
// `BlockId(0)` is always the entry block (see `Function::new`); later
// ids are handed out by `Function::create_block`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct BlockId(pub u32);
| 17 | |
/// A function reference — index into Module::functions or external.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum FuncRef {
    /// Index into Module::functions.
    Internal(u32),
    /// External function by name (runtime calls, etc.).
    External(String),
    /// Indirect call through a pointer-typed SSA value.
    /// At codegen all three variants share the argument-passing path
    /// (see `direct_call_i128_backend_o0_supported`); only how the
    /// callee address is materialised differs.
    Indirect(ValueId),
}
| 28 | |
/// A runtime library function.
//
// NOTE(review): of these, only `PrintInt` is currently admitted by the
// staged i128 O0 backend gate (`runtime_call_i128_backend_o0_supported`).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum RuntimeFunc {
    PrintInt,
    PrintReal,
    PrintString,
    PrintLogical,
    PrintNewline,
    Allocate,
    Deallocate,
    StringConcat,
    StringCopy,
    StringCompare,
    Stop,
    ErrorStop,
    /// Bounds check: `afs_check_bounds(index, lower_bound, upper_bound)`.
    /// Aborts if index < lower or index > upper. Inserted at array access
    /// sites; eliminated at O2+ when provably safe.
    CheckBounds,
}
| 49 | |
/// Comparison operations.
///
/// Shared by the scalar (`ICmp`/`FCmp`) and vector (`VICmp`/`VFCmp`)
/// comparison instructions.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum CmpOp {
    Eq,
    Ne,
    Lt,
    Le,
    Gt,
    Ge,
}
| 60 | |
/// An SSA instruction.
#[derive(Debug, Clone)]
pub struct Inst {
    /// The value this instruction defines (every instruction produces one).
    pub id: ValueId,
    /// The operation and its operands.
    pub kind: InstKind,
    /// Type of the produced value.
    pub ty: IrType,
    /// Source location carried over from the front end (lexer span).
    pub span: Span,
}
| 69 | |
/// Instruction kinds.
///
/// Operand-order conventions worth remembering: `Store`/`VStore` take
/// (value, addr) — value first — and `Select`/`VSelect` take the
/// condition/mask as the first operand (see the inline notes below).
#[derive(Debug, Clone)]
pub enum InstKind {
    // ---- Constants ----
    ConstInt(i128, IntWidth),
    ConstFloat(f64, FloatWidth),
    ConstBool(bool),
    /// Raw bytes (not assumed to be UTF-8).
    ConstString(Vec<u8>),
    /// An undefined value of the given type.
    Undef(IrType),

    // ---- Integer arithmetic ----
    IAdd(ValueId, ValueId),
    ISub(ValueId, ValueId),
    IMul(ValueId, ValueId),
    IDiv(ValueId, ValueId),
    IMod(ValueId, ValueId),
    INeg(ValueId),

    // ---- Float arithmetic ----
    FAdd(ValueId, ValueId),
    FSub(ValueId, ValueId),
    FMul(ValueId, ValueId),
    FDiv(ValueId, ValueId),
    FNeg(ValueId),
    FAbs(ValueId),
    FSqrt(ValueId),
    FPow(ValueId, ValueId),

    // ---- Comparison ----
    ICmp(CmpOp, ValueId, ValueId),
    FCmp(CmpOp, ValueId, ValueId),

    // ---- Logic ----
    And(ValueId, ValueId),
    Or(ValueId, ValueId),
    Not(ValueId),

    // ---- Select (conditional) ----
    /// Select(cond, true_val, false_val) → cond ? true_val : false_val
    Select(ValueId, ValueId, ValueId),

    // ---- Bitwise ----
    BitAnd(ValueId, ValueId),
    BitOr(ValueId, ValueId),
    BitXor(ValueId, ValueId),
    BitNot(ValueId),
    Shl(ValueId, ValueId),
    LShr(ValueId, ValueId),
    AShr(ValueId, ValueId),
    CountLeadingZeros(ValueId),
    CountTrailingZeros(ValueId),
    PopCount(ValueId),

    // ---- Conversions ----
    IntToFloat(ValueId, FloatWidth),
    FloatToInt(ValueId, IntWidth),
    FloatExtend(ValueId, FloatWidth),
    FloatTrunc(ValueId, FloatWidth),
    IntExtend(ValueId, IntWidth, bool), // bool = signed
    IntTrunc(ValueId, IntWidth),
    /// Convert a pointer to an integer (address as i64).
    PtrToInt(ValueId),
    /// Convert an integer (i64 address) to a pointer.
    IntToPtr(ValueId, IrType),

    // ---- Memory ----
    Alloca(IrType),
    Load(ValueId),
    Store(ValueId, ValueId), // store(value, addr)
    GetElementPtr(ValueId, Vec<ValueId>), // base, indices
    /// Address of a module-level global. Returns `Ptr<T>` where T
    /// is the global's declared type. Used by SAVE'd locals (those
    /// with initializers in subprograms) and module variables —
    /// any storage that must persist across function calls.
    GlobalAddr(String),

    // ---- Calls ----
    Call(FuncRef, Vec<ValueId>),
    RuntimeCall(RuntimeFunc, Vec<ValueId>),

    // ---- Aggregates ----
    ExtractField(ValueId, u32),
    InsertField(ValueId, u32, ValueId),

    // ---- SIMD vector ops (Sprint 12 Stage 1) ----
    //
    // All vector ops operate on `IrType::Vector` values (128-bit NEON).
    // Element-wise binary / unary arithmetic and bitwise ops follow the
    // same lane shape as their operands. `VLoad`/`VStore` move 128 bits
    // to / from a `Ptr<elem>`. `VBroadcast` splats a scalar across every
    // lane. `VExtract` / `VInsert` access a single lane by constant
    // index (lane index is `u8`, not a `ValueId`, so SCCP and
    // const-fold can reason about it). Reductions take a vector and
    // return a scalar.
    VAdd(ValueId, ValueId),
    VSub(ValueId, ValueId),
    VMul(ValueId, ValueId),
    VDiv(ValueId, ValueId),
    VNeg(ValueId),
    VAbs(ValueId),
    VSqrt(ValueId),
    VFma(ValueId, ValueId, ValueId), // a*b + c
    /// Vector bit-select: per lane, return `t` where mask bit is 1,
    /// else `f`. Lowers to NEON `bsl.16b`. Produced by the WHERE-block
    /// vectorizer to express conditional element-wise updates.
    VSelect(ValueId, ValueId, ValueId), // (mask, t, f)
    VMin(ValueId, ValueId),
    VMax(ValueId, ValueId),
    VICmp(CmpOp, ValueId, ValueId),
    VFCmp(CmpOp, ValueId, ValueId),
    VLoad(ValueId), // ptr → vector
    VStore(ValueId, ValueId), // (vector, ptr)
    VBitcast(ValueId, IrType), // reinterpret element layout
    VExtract(ValueId, u8), // extract lane `n`
    VInsert(ValueId, u8, ValueId), // (vector, lane, scalar) → vector
    VBroadcast(ValueId), // scalar → vector (every lane)
    VReduceSum(ValueId), // vector → scalar (cross-lane sum)
    VReduceMin(ValueId), // vector → scalar (cross-lane min)
    VReduceMax(ValueId), // vector → scalar (cross-lane max)
}
| 190 | |
/// Block terminator — exactly one per basic block.
///
/// Branch-style terminators carry block arguments; paired with
/// `BlockParam` on the target, these replace classic phi nodes.
#[derive(Debug, Clone)]
pub enum Terminator {
    /// Return from function.
    Return(Option<ValueId>),
    /// Unconditional branch with block arguments.
    Branch(BlockId, Vec<ValueId>),
    /// Conditional branch: condition, true target + args, false target + args.
    CondBranch {
        cond: ValueId,
        true_dest: BlockId,
        true_args: Vec<ValueId>,
        false_dest: BlockId,
        false_args: Vec<ValueId>,
    },
    /// Multi-way branch (SELECT CASE).
    /// Note: switch targets carry no block arguments.
    Switch {
        selector: ValueId,
        cases: Vec<(i64, BlockId)>,
        default: BlockId,
    },
    /// Unreachable — after a STOP or ERROR STOP.
    Unreachable,
}
| 215 | |
/// A block parameter (replaces phi nodes).
///
/// Bound at block entry from the argument list of the incoming
/// branch (`Terminator::Branch` / `CondBranch` args).
#[derive(Debug, Clone)]
pub struct BlockParam {
    pub id: ValueId,
    pub ty: IrType,
}
| 222 | |
/// A basic block in a function.
#[derive(Debug, Clone)]
pub struct BasicBlock {
    pub id: BlockId,
    /// Label name; `Function::create_block` suffixes the id for uniqueness.
    pub name: String,
    pub params: Vec<BlockParam>,
    pub insts: Vec<Inst>,
    /// `None` only while the block is under construction — a complete
    /// block has exactly one terminator (see `Terminator`).
    pub terminator: Option<Terminator>,
}
| 232 | |
| 233 | impl BasicBlock { |
| 234 | pub fn new(id: BlockId, name: String) -> Self { |
| 235 | Self { |
| 236 | id, |
| 237 | name, |
| 238 | params: Vec::new(), |
| 239 | insts: Vec::new(), |
| 240 | terminator: None, |
| 241 | } |
| 242 | } |
| 243 | } |
| 244 | |
/// A function parameter.
#[derive(Debug, Clone)]
pub struct Param {
    pub name: String,
    pub ty: IrType,
    /// SSA value for this parameter; its type is pre-registered in the
    /// function's type cache by `Function::new`.
    pub id: ValueId,
    /// Fortran dummy arguments are non-aliasing by default unless
    /// lowered from POINTER/TARGET-like declarations.
    pub fortran_noalias: bool,
}
| 255 | |
/// A function in the IR module.
#[derive(Debug, Clone)]
pub struct Function {
    pub name: String,
    pub params: Vec<Param>,
    pub return_type: IrType,
    pub blocks: Vec<BasicBlock>,
    /// Entry block — always `BlockId(0)` (see `Function::new`).
    pub entry: BlockId,
    // Next fresh ValueId; starts above the highest parameter id.
    next_value: u32,
    // Next fresh BlockId; 0 is taken by the entry block.
    next_block: u32,
    /// O(1) type lookup cache. Populated during construction; call
    /// `rebuild_type_cache()` after optimizer passes mutate the IR.
    type_cache: HashMap<ValueId, IrType>,
    /// Fortran PURE attribute — function has no side effects.
    pub is_pure: bool,
    /// Fortran ELEMENTAL attribute — operates element-wise on arrays.
    pub is_elemental: bool,
    /// True for contained procedures that never participate in the
    /// external object ABI and can be rewritten by IPO passes.
    pub internal_only: bool,
}
| 277 | |
| 278 | impl Function { |
| 279 | pub fn new(name: String, params: Vec<Param>, return_type: IrType) -> Self { |
| 280 | let entry = BlockId(0); |
| 281 | let entry_block = BasicBlock::new(entry, "entry".into()); |
| 282 | let next_value = params.iter().map(|p| p.id.0 + 1).max().unwrap_or(0); |
| 283 | let mut type_cache = HashMap::new(); |
| 284 | for p in ¶ms { |
| 285 | type_cache.insert(p.id, p.ty.clone()); |
| 286 | } |
| 287 | Self { |
| 288 | name, |
| 289 | params, |
| 290 | return_type, |
| 291 | blocks: vec![entry_block], |
| 292 | entry, |
| 293 | next_value, |
| 294 | next_block: 1, |
| 295 | type_cache, |
| 296 | is_pure: false, |
| 297 | is_elemental: false, |
| 298 | internal_only: false, |
| 299 | } |
| 300 | } |
| 301 | |
| 302 | /// Allocate a fresh ValueId. |
| 303 | pub fn next_value_id(&mut self) -> ValueId { |
| 304 | let id = ValueId(self.next_value); |
| 305 | self.next_value += 1; |
| 306 | id |
| 307 | } |
| 308 | |
| 309 | /// Allocate a fresh BlockId and create the block. |
| 310 | /// Appends the block ID to ensure unique label names. |
| 311 | pub fn create_block(&mut self, name: &str) -> BlockId { |
| 312 | let id = BlockId(self.next_block); |
| 313 | self.next_block += 1; |
| 314 | let unique_name = format!("{}_{}", name, id.0); |
| 315 | self.blocks.push(BasicBlock::new(id, unique_name)); |
| 316 | id |
| 317 | } |
| 318 | |
| 319 | /// Get a block by ID. Panics if the ID is not present — use |
| 320 | /// `try_block` for graceful degradation. |
| 321 | pub fn block(&self, id: BlockId) -> &BasicBlock { |
| 322 | self.blocks |
| 323 | .iter() |
| 324 | .find(|b| b.id == id) |
| 325 | .expect("block not found") |
| 326 | } |
| 327 | |
| 328 | /// Get a mutable block by ID. Panics if the ID is not present. |
| 329 | pub fn block_mut(&mut self, id: BlockId) -> &mut BasicBlock { |
| 330 | self.blocks |
| 331 | .iter_mut() |
| 332 | .find(|b| b.id == id) |
| 333 | .expect("block not found") |
| 334 | } |
| 335 | |
| 336 | /// Get a block by ID, returning `None` if the ID is not |
| 337 | /// present. Audit N-10: used by CFG walks that may follow a |
| 338 | /// terminator to a target that was just pruned mid-pass. The |
| 339 | /// verifier rejects dangling targets, so on valid IR this |
| 340 | /// behaves like `block`, but optimizer passes that intentionally |
| 341 | /// run before block pruning (or that mutate the CFG) can use |
| 342 | /// this to degrade gracefully instead of panicking. |
| 343 | pub fn try_block(&self, id: BlockId) -> Option<&BasicBlock> { |
| 344 | self.blocks.iter().find(|b| b.id == id) |
| 345 | } |
| 346 | |
| 347 | /// Register a value's type in the O(1) lookup cache. |
| 348 | /// Called by FuncBuilder during construction. |
| 349 | pub fn register_type(&mut self, id: ValueId, ty: IrType) { |
| 350 | self.type_cache.insert(id, ty); |
| 351 | } |
| 352 | |
| 353 | /// Rebuild the type cache from scratch. Call after optimizer passes |
| 354 | /// that add/remove/rewrite instructions. |
| 355 | pub fn rebuild_type_cache(&mut self) { |
| 356 | self.type_cache.clear(); |
| 357 | for p in &self.params { |
| 358 | self.type_cache.insert(p.id, p.ty.clone()); |
| 359 | } |
| 360 | for block in &self.blocks { |
| 361 | for bp in &block.params { |
| 362 | self.type_cache.insert(bp.id, bp.ty.clone()); |
| 363 | } |
| 364 | for inst in &block.insts { |
| 365 | self.type_cache.insert(inst.id, inst.ty.clone()); |
| 366 | } |
| 367 | } |
| 368 | } |
| 369 | |
| 370 | /// Get the type of a value by ID. O(1) via cache. |
| 371 | pub fn value_type(&self, id: ValueId) -> Option<IrType> { |
| 372 | if let Some(ty) = self.type_cache.get(&id) { |
| 373 | return Some(ty.clone()); |
| 374 | } |
| 375 | // Cache miss — the authoritative source is the instruction or |
| 376 | // parameter that defines the value. An earlier version of the |
| 377 | // verifier silently skipped checks whenever the cache lagged |
| 378 | // behind optimiser passes, which hid width mismatches and |
| 379 | // pointer-type bugs for entire compilation units. Recompute |
| 380 | // on-demand so consumers always get a consistent answer. |
| 381 | for p in &self.params { |
| 382 | if p.id == id { |
| 383 | return Some(p.ty.clone()); |
| 384 | } |
| 385 | } |
| 386 | for block in &self.blocks { |
| 387 | for bp in &block.params { |
| 388 | if bp.id == id { |
| 389 | return Some(bp.ty.clone()); |
| 390 | } |
| 391 | } |
| 392 | for inst in &block.insts { |
| 393 | if inst.id == id { |
| 394 | return Some(inst.ty.clone()); |
| 395 | } |
| 396 | } |
| 397 | } |
| 398 | None |
| 399 | } |
| 400 | |
| 401 | /// Find the instruction that defines a value, searching all blocks. |
| 402 | pub fn find_defining_inst(&self, id: ValueId) -> Option<&Inst> { |
| 403 | for block in &self.blocks { |
| 404 | for inst in &block.insts { |
| 405 | if inst.id == id { |
| 406 | return Some(inst); |
| 407 | } |
| 408 | } |
| 409 | } |
| 410 | None |
| 411 | } |
| 412 | } |
| 413 | |
/// A global variable.
///
/// Addressed from function bodies via `InstKind::GlobalAddr(name)`,
/// which yields a pointer to this storage.
#[derive(Debug, Clone)]
pub struct Global {
    pub name: String,
    pub ty: IrType,
    /// `None` means no explicit initializer; the i128 data gate accepts
    /// it alongside `Zero` (see `global_i128_backend_data_supported`).
    pub initializer: Option<GlobalInit>,
}
| 421 | |
/// Global variable initializer.
#[derive(Debug, Clone)]
pub enum GlobalInit {
    /// Zero-fill the whole storage.
    Zero,
    /// Scalar integer constant (wide enough for any `IntWidth`).
    Int(i128),
    /// Scalar floating-point constant.
    Float(f64),
    /// Raw byte contents (not assumed to be UTF-8).
    String(Vec<u8>),
    /// Array literal: a sequence of per-element initializers of
    /// the same homogeneous element type. Used by module array
    /// variables with `[v0, v1, ...]` or `(/ v0, v1, ... /)`
    /// constructors. The Vec's length is the array's total element
    /// count (product of dims); shorter initializers are padded
    /// with Zero at lowering time.
    IntArray(Vec<i128>),
    FloatArray(Vec<f64>),
}
| 438 | |
/// The top-level IR module.
#[derive(Debug, Clone)]
pub struct Module {
    pub name: String,
    pub globals: Vec<Global>,
    /// Function bodies; `FuncRef::Internal(i)` indexes this Vec.
    pub functions: Vec<Function>,
    /// Struct layouts; `IrType::Struct(id)` indexes this Vec.
    pub struct_defs: Vec<super::types::StructDef>,
    pub extern_funcs: Vec<ExternFunc>,
}
| 448 | |
/// An external function declaration.
#[derive(Debug, Clone)]
pub struct ExternFunc {
    pub name: String,
    /// Full signature — inspected by the i128 support gates via
    /// `sig_contains_i128`.
    pub sig: super::types::FuncSig,
}
| 455 | |
| 456 | impl Module { |
| 457 | pub fn new(name: String) -> Self { |
| 458 | Self { |
| 459 | name, |
| 460 | globals: Vec::new(), |
| 461 | functions: Vec::new(), |
| 462 | struct_defs: Vec::new(), |
| 463 | extern_funcs: Vec::new(), |
| 464 | } |
| 465 | } |
| 466 | |
| 467 | /// Add a function and return its index. |
| 468 | pub fn add_function(&mut self, func: Function) -> u32 { |
| 469 | let idx = self.functions.len() as u32; |
| 470 | self.functions.push(func); |
| 471 | idx |
| 472 | } |
| 473 | |
| 474 | /// Add a global variable. |
| 475 | pub fn add_global(&mut self, global: Global) { |
| 476 | self.globals.push(global); |
| 477 | } |
| 478 | |
| 479 | /// Add a struct definition and return its ID. |
| 480 | pub fn add_struct(&mut self, def: super::types::StructDef) -> super::types::StructId { |
| 481 | let id = self.struct_defs.len() as u32; |
| 482 | self.struct_defs.push(def); |
| 483 | id |
| 484 | } |
| 485 | |
| 486 | /// True when any live IR surface in the module uses `i128`. |
| 487 | pub fn contains_i128(&self) -> bool { |
| 488 | self.globals |
| 489 | .iter() |
| 490 | .any(|global| type_contains_i128(self, &global.ty)) |
| 491 | || self |
| 492 | .extern_funcs |
| 493 | .iter() |
| 494 | .any(|func| sig_contains_i128(self, &func.sig)) |
| 495 | || self.functions.iter().any(|func| { |
| 496 | type_contains_i128(self, &func.return_type) |
| 497 | || func |
| 498 | .params |
| 499 | .iter() |
| 500 | .any(|param| type_contains_i128(self, ¶m.ty)) |
| 501 | || func.blocks.iter().any(|block| { |
| 502 | block |
| 503 | .params |
| 504 | .iter() |
| 505 | .any(|param| type_contains_i128(self, ¶m.ty)) |
| 506 | || block |
| 507 | .insts |
| 508 | .iter() |
| 509 | .any(|inst| type_contains_i128(self, &inst.ty)) |
| 510 | }) |
| 511 | }) |
| 512 | } |
| 513 | |
| 514 | /// True when `i128` appears anywhere outside module-global storage. |
| 515 | /// |
| 516 | /// Globals-only `i128` is the first staged backend surface we support: |
| 517 | /// the optimizer may ignore it and the emitter can lay it out as raw data. |
| 518 | /// Parameters, returns, instruction results, block params, and extern |
| 519 | /// signatures still imply unsupported ABI or codegen work. |
| 520 | pub fn contains_i128_outside_globals(&self) -> bool { |
| 521 | self.extern_funcs |
| 522 | .iter() |
| 523 | .any(|func| sig_contains_i128(self, &func.sig)) |
| 524 | || self.functions.iter().any(|func| { |
| 525 | type_contains_i128(self, &func.return_type) |
| 526 | || func |
| 527 | .params |
| 528 | .iter() |
| 529 | .any(|param| type_contains_i128(self, ¶m.ty)) |
| 530 | || func.blocks.iter().any(|block| { |
| 531 | block |
| 532 | .params |
| 533 | .iter() |
| 534 | .any(|param| type_contains_i128(self, ¶m.ty)) |
| 535 | || block |
| 536 | .insts |
| 537 | .iter() |
| 538 | .any(|inst| type_contains_i128(self, &inst.ty)) |
| 539 | }) |
| 540 | }) |
| 541 | } |
| 542 | |
| 543 | /// True when every `i128` use in the module is a global data shape that |
| 544 | /// the current backend emitter can lay out directly. |
| 545 | pub fn i128_backend_data_only_supported(&self) -> bool { |
| 546 | !self.contains_i128_outside_globals() |
| 547 | && self |
| 548 | .globals |
| 549 | .iter() |
| 550 | .all(|global| global_i128_backend_data_supported(self, global)) |
| 551 | } |
| 552 | |
| 553 | /// True when the current O0 backend can lower every live `i128` surface. |
| 554 | /// |
| 555 | /// Today that means: |
| 556 | /// - module-global `i128` data |
| 557 | /// - local `i128` allocas and plain memory traffic around them |
| 558 | /// - `i128` constants that only flow through those memory ops |
| 559 | /// - stack-backed `i128` SSA block params and edge copies introduced |
| 560 | /// by mem2reg-style promotion |
| 561 | /// - local `i128` selects that stay entirely within stack-backed values |
| 562 | /// - direct `i128` params/returns and stack/register direct calls |
| 563 | /// |
| 564 | /// It still excludes runtime-call `i128`, broad optimized-pipeline |
| 565 | /// support beyond the staged O1 lane, and any integer operation |
| 566 | /// whose result or operands are `i128` beyond the currently lowered |
| 567 | /// stack-backed surface. |
| 568 | pub fn i128_backend_o0_supported(&self) -> bool { |
| 569 | self.globals |
| 570 | .iter() |
| 571 | .all(|global| global_i128_backend_data_supported(self, global)) |
| 572 | && self.extern_funcs.iter().all(|func| { |
| 573 | abi_type_i128_backend_o0_supported(self, &func.sig.ret, true) |
| 574 | && func |
| 575 | .sig |
| 576 | .params |
| 577 | .iter() |
| 578 | .all(|param| abi_type_i128_backend_o0_supported(self, param, true)) |
| 579 | }) |
| 580 | && self |
| 581 | .functions |
| 582 | .iter() |
| 583 | .all(|func| function_i128_backend_o0_supported(self, func)) |
| 584 | } |
| 585 | } |
| 586 | |
| 587 | fn sig_contains_i128(module: &Module, sig: &super::types::FuncSig) -> bool { |
| 588 | type_contains_i128(module, &sig.ret) |
| 589 | || sig |
| 590 | .params |
| 591 | .iter() |
| 592 | .any(|param| type_contains_i128(module, param)) |
| 593 | } |
| 594 | |
| 595 | fn type_contains_i128(module: &Module, ty: &IrType) -> bool { |
| 596 | match ty { |
| 597 | IrType::Int(IntWidth::I128) => true, |
| 598 | IrType::Ptr(inner) | IrType::Array(inner, _) => type_contains_i128(module, inner), |
| 599 | IrType::Struct(id) => module.struct_defs.get(*id as usize).is_some_and(|def| { |
| 600 | def.fields |
| 601 | .iter() |
| 602 | .any(|(_, field_ty)| type_contains_i128(module, field_ty)) |
| 603 | }), |
| 604 | IrType::FuncPtr(sig) => sig_contains_i128(module, sig), |
| 605 | _ => false, |
| 606 | } |
| 607 | } |
| 608 | |
| 609 | fn global_i128_backend_data_supported(module: &Module, global: &Global) -> bool { |
| 610 | match &global.ty { |
| 611 | IrType::Int(IntWidth::I128) => matches!( |
| 612 | global.initializer, |
| 613 | Some(GlobalInit::Int(_)) | Some(GlobalInit::Zero) | None |
| 614 | ), |
| 615 | IrType::Array(elem_ty, _) if matches!(elem_ty.as_ref(), IrType::Int(IntWidth::I128)) => { |
| 616 | matches!( |
| 617 | global.initializer, |
| 618 | Some(GlobalInit::IntArray(_)) | Some(GlobalInit::Zero) | None |
| 619 | ) |
| 620 | } |
| 621 | _ => !type_contains_i128(module, &global.ty), |
| 622 | } |
| 623 | } |
| 624 | |
| 625 | fn abi_type_i128_backend_o0_supported( |
| 626 | module: &Module, |
| 627 | ty: &IrType, |
| 628 | allow_direct_i128_scalar: bool, |
| 629 | ) -> bool { |
| 630 | match ty { |
| 631 | IrType::Int(IntWidth::I128) => allow_direct_i128_scalar, |
| 632 | IrType::Ptr(_) => true, |
| 633 | _ => !type_contains_i128(module, ty), |
| 634 | } |
| 635 | } |
| 636 | |
| 637 | fn ssa_type_i128_backend_o0_supported(module: &Module, ty: &IrType) -> bool { |
| 638 | matches!(ty, IrType::Int(IntWidth::I128)) |
| 639 | || abi_type_i128_backend_o0_supported(module, ty, false) |
| 640 | } |
| 641 | |
| 642 | fn direct_call_i128_backend_o0_supported( |
| 643 | module: &Module, |
| 644 | func: &Function, |
| 645 | callee: &FuncRef, |
| 646 | args: &[ValueId], |
| 647 | result_ty: &IrType, |
| 648 | ) -> bool { |
| 649 | // Indirect calls share the same arg-passing path as direct calls at |
| 650 | // the codegen level — the only difference is how the callee address |
| 651 | // is materialised. Accept all three variants for the i128 stack-slot |
| 652 | // surface so an i128-typed actual passed through a procedure-pointer |
| 653 | // component (e.g. process_type's `payload : class(*), pointer` slot |
| 654 | // in stdlib_system_subprocess) doesn't trip the support gate. |
| 655 | matches!( |
| 656 | callee, |
| 657 | FuncRef::Internal(_) | FuncRef::External(_) | FuncRef::Indirect(_) |
| 658 | ) && abi_type_i128_backend_o0_supported(module, result_ty, true) |
| 659 | && args |
| 660 | .iter() |
| 661 | .filter_map(|arg| func.value_type(*arg)) |
| 662 | .all(|ty| abi_type_i128_backend_o0_supported(module, &ty, true)) |
| 663 | } |
| 664 | |
| 665 | fn runtime_call_i128_backend_o0_supported( |
| 666 | module: &Module, |
| 667 | func: &Function, |
| 668 | rf: &RuntimeFunc, |
| 669 | args: &[ValueId], |
| 670 | result_ty: &IrType, |
| 671 | ) -> bool { |
| 672 | match rf { |
| 673 | RuntimeFunc::PrintInt => { |
| 674 | matches!(result_ty, IrType::Void) |
| 675 | && args.len() == 1 |
| 676 | && args |
| 677 | .iter() |
| 678 | .filter_map(|arg| func.value_type(*arg)) |
| 679 | .all(|ty| abi_type_i128_backend_o0_supported(module, &ty, true)) |
| 680 | } |
| 681 | _ => false, |
| 682 | } |
| 683 | } |
| 684 | |
| 685 | fn function_i128_backend_o0_supported(module: &Module, func: &Function) -> bool { |
| 686 | if !abi_type_i128_backend_o0_supported(module, &func.return_type, true) |
| 687 | || func |
| 688 | .params |
| 689 | .iter() |
| 690 | .any(|param| !abi_type_i128_backend_o0_supported(module, ¶m.ty, true)) |
| 691 | { |
| 692 | return false; |
| 693 | } |
| 694 | |
| 695 | func.blocks.iter().all(|block| { |
| 696 | block |
| 697 | .params |
| 698 | .iter() |
| 699 | .all(|param| ssa_type_i128_backend_o0_supported(module, ¶m.ty)) |
| 700 | && block |
| 701 | .insts |
| 702 | .iter() |
| 703 | .all(|inst| inst_i128_backend_o0_supported(module, func, inst)) |
| 704 | && block |
| 705 | .terminator |
| 706 | .as_ref() |
| 707 | .is_none_or(|term| terminator_i128_backend_o0_supported(module, func, term)) |
| 708 | }) |
| 709 | } |
| 710 | |
/// Per-instruction gate for the staged O0 `i128` backend surface.
///
/// Arm order is load-bearing: the `Load` arms must run before the
/// catch-all so that a pointer-typed load is not rejected merely
/// because `type_contains_i128` walks through `Ptr<>`.
fn inst_i128_backend_o0_supported(module: &Module, func: &Function, inst: &Inst) -> bool {
    // Does the result type carry i128 anywhere, directly or nested?
    let inst_ty_has_i128 = type_contains_i128(module, &inst.ty);
    // Does any operand carry i128?
    let uses_i128 = crate::ir::walk::inst_uses(&inst.kind)
        .into_iter()
        .filter_map(|vid| func.value_type(vid))
        .any(|ty| type_contains_i128(module, &ty));

    match &inst.kind {
        InstKind::ConstInt(_, IntWidth::I128) => true,
        InstKind::Undef(_) if matches!(inst.ty, IrType::Int(IntWidth::I128)) => true,
        InstKind::Load(_) if matches!(inst.ty, IrType::Int(IntWidth::I128)) => true,
        // Loading a *pointer* whose pointee happens to be i128 is just
        // an 8-byte vreg load — the wide-slot machinery isn't involved.
        // type_contains_i128 walks through Ptr<>, so without this arm
        // the catch-all would falsely reject `load ptr<i128>` when
        // dereferencing an sret-style hidden result-buffer pointer.
        InstKind::Load(_) if matches!(inst.ty, IrType::Ptr(_)) => true,
        InstKind::IAdd(..) | InstKind::ISub(..) | InstKind::INeg(_)
            if matches!(inst.ty, IrType::Int(IntWidth::I128)) =>
        {
            true
        }
        InstKind::ICmp(..) if uses_i128 => true,
        InstKind::Select(..) if matches!(inst.ty, IrType::Int(IntWidth::I128)) => true,
        InstKind::Call(callee, args) if inst_ty_has_i128 || uses_i128 => {
            direct_call_i128_backend_o0_supported(module, func, callee, args, &inst.ty)
        }
        InstKind::RuntimeCall(rf, args) if inst_ty_has_i128 || uses_i128 => {
            runtime_call_i128_backend_o0_supported(module, func, rf, args, &inst.ty)
        }
        InstKind::Store(..) => true,
        // Address-producing ops are safe even when they walk storage that
        // contains i128. The widened backend already knows how to carry the
        // actual loads/stores/calls that touch the value; rejecting the byte
        // cursor itself falsely blocks legal array-constructor and component
        // lvalue paths.
        InstKind::Alloca(_) | InstKind::GlobalAddr(_) | InstKind::GetElementPtr(..) => true,
        // Everything else: supported only when i128 is nowhere involved.
        _ => !inst_ty_has_i128 && !uses_i128,
    }
}
| 751 | |
| 752 | fn terminator_i128_backend_o0_supported( |
| 753 | module: &Module, |
| 754 | func: &Function, |
| 755 | term: &Terminator, |
| 756 | ) -> bool { |
| 757 | let term_uses_i128 = crate::ir::walk::terminator_uses(term) |
| 758 | .into_iter() |
| 759 | .filter_map(|vid| func.value_type(vid)) |
| 760 | .any(|ty| type_contains_i128(module, &ty)); |
| 761 | |
| 762 | match term { |
| 763 | Terminator::Return(Some(val)) => func |
| 764 | .value_type(*val) |
| 765 | .is_none_or(|ty| abi_type_i128_backend_o0_supported(module, &ty, true)), |
| 766 | Terminator::Branch(_, args) => args |
| 767 | .iter() |
| 768 | .filter_map(|arg| func.value_type(*arg)) |
| 769 | .all(|ty| ssa_type_i128_backend_o0_supported(module, &ty)), |
| 770 | Terminator::CondBranch { |
| 771 | true_args, |
| 772 | false_args, |
| 773 | .. |
| 774 | } => true_args |
| 775 | .iter() |
| 776 | .chain(false_args.iter()) |
| 777 | .filter_map(|arg| func.value_type(*arg)) |
| 778 | .all(|ty| ssa_type_i128_backend_o0_supported(module, &ty)), |
| 779 | _ => !term_uses_i128, |
| 780 | } |
| 781 | } |
| 782 | |
#[cfg(test)]
mod tests {
    use super::*;
    use crate::ir::builder::FuncBuilder;

    /// The O0 gate must accept an i128 constant flowing into a runtime
    /// `PrintInt` call — the exact shape (Void result, one argument)
    /// whitelisted by `runtime_call_i128_backend_o0_supported`.
    #[test]
    fn runtime_print_i128_is_supported_by_backend_gate() {
        let mut module = Module::new("test".into());
        let mut func = Function::new("main".into(), vec![], IrType::Void);
        {
            let mut b = FuncBuilder::new(&mut func);
            // i128::MAX — forces the wide-constant path.
            let wide = b.const_i128(170141183460469231731687303715884105727i128);
            b.runtime_call(RuntimeFunc::PrintInt, vec![wide], IrType::Void);
            b.ret_void();
        }
        module.add_function(func);

        assert!(
            module.i128_backend_o0_supported(),
            "runtime PrintInt with integer(16) should stay inside the supported O0 backend surface"
        );
    }

    /// A byte-offset GEP into an i128 array plus a store through it must
    /// pass the gate: address-producing ops are whitelisted even when
    /// the underlying storage contains i128 (see the Alloca/GlobalAddr/
    /// GetElementPtr arm of `inst_i128_backend_o0_supported`).
    #[test]
    fn byte_cursor_gep_into_i128_storage_stays_supported() {
        let mut module = Module::new("test".into());
        let mut func = Function::new("main".into(), vec![], IrType::Void);
        {
            let mut b = FuncBuilder::new(&mut func);
            let arr = b.alloca(IrType::Array(Box::new(IrType::Int(IntWidth::I128)), 3));
            // Byte offset 16 = start of element 1.
            let off = b.const_i64(16);
            let cursor = b.gep(arr, vec![off], IrType::Int(IntWidth::I8));
            let wide = b.const_i128(170141183460469231731687303715884105727i128);
            b.store(wide, cursor);
            b.ret_void();
        }
        module.add_function(func);

        assert!(
            module.i128_backend_o0_supported(),
            "byte-cursor GEPs into i128 storage should stay inside the supported backend surface"
        );
    }
}
| 827 |