# Source code for femorph_solver.mapdl_api.io.full_schema

"""Schema + reader for MAPDL ``.full`` files (assembled K/M/C matrices).

A FULL file stores, for each column of the upper triangle, two framed
records: one holding the 1-based row indices of that column's nonzero
entries, and one holding the matching double-precision values. The
record pointers ``ptrSTF`` / ``ptrMAS`` / ``ptrDMP`` address those
column blocks; ``ptrDOF`` addresses a ``(ndof, const)`` pair of records
that describes which equations are constrained.

Layout sketch (for one column c of an ntermK-nonzero stiffness
matrix, upper triangle only)::

    [ size=nitems | flags | rows(nitems int32, 1-based) | pad4 ]
    [ size=2·nitems | flags | vals(nitems doubles) | pad4 ]

The constrained-DOF mask (``const[i] < 0``) is applied at read time to
drop fixed rows and columns, matching pymapdl-reader's behavior.

References
----------
* Ansys MAPDL ``.full`` file layout (compatibility source):
  Ansys, Inc., *Ansys Mechanical APDL Theory Reference*, Release
  2022R2 — binary-file chapter, section on ``.full`` (assembled
  global K / M / C matrices).
* Open-source cross-reference: ``pymapdl-reader`` (MIT,
  https://github.com/ansys/pymapdl-reader) ``full_file.py`` —
  used here only as a key-map yardstick; the underlying CSC-like
  column-indexed layout comes from MAPDL's own Fortran code.
"""

from __future__ import annotations

from pathlib import Path

import numpy as np
from scipy.sparse import coo_matrix, csc_matrix

from femorph_solver.mapdl_api.io.binary import _open_binary, read_record
from femorph_solver.mapdl_api.io.rst_schema import parse_header

# Keys from pymapdl-reader (ansys/mapdl/reader/full.py).
# Mirrors the MAPDL Fortran ``fdfull.inc`` layout.
# NOTE: names ending in ``l``/``h`` are the low/high 32-bit words of 64-bit
# quantities; ``parse_header`` is expected to fold each pair into one merged
# key (``ptrSTF``, ``ntermK``, ...) — ``read_full_km`` below indexes the
# merged names.  Confirm against ``parse_header`` in rst_schema.
# NOTE(review): "nmrow" appears twice (indices 2 and 11), mirroring the
# upstream key list; presumably the later slot wins if the header becomes a
# plain dict — verify against parse_header's handling of duplicates.
full_header_keys: list[str] = [
    "fun04",
    "neqn",  # number of equations (global assembled-matrix dimension)
    "nmrow",
    "nmatrx",
    "kan",
    "wfmax",
    "lenbac",
    "numdof",
    "ntermKl",  # with ntermKh: nonzero count of the stiffness matrix K
    "ntermKh",
    "lumpm",
    "nmrow",  # duplicate of index 2 — see NOTE(review) above
    "ntermK_",
    "keyuns",
    "extopt",
    "keyse",
    "sclstf",
    "nxrows",
    "ptrSTFl",  # with ptrSTFh: pointer to the K column blocks
    "ptrSTFh",
    "ncefull",
    "ntermMh",
    "ptrENDl",
    "ptrENDh",
    "ptrIRHSl",
    "ptrIRHSh",
    "ptrMASl",  # with ptrMASh: pointer to the M column blocks
    "ptrMASh",
    "ptrDMPl",  # with ptrDMPh: pointer to the C (damping) column blocks
    "ptrDMPh",
    "ptrCEl",
    "ptrCEh",
    "nNodes",
    "ntermMl",
    "ntermDl",
    "ptrDOFl",  # with ptrDOFh: pointer to the (ndof, const) record pair
    "ptrDOFh",
    "ptrRHSl",
    "ptrRHSh",
    "ntermDh",
    "ngMaxNZ",
    "ptrNGPHl",
    "ptrNGPHh",
    "minKdiag",
    "maxKdiag",
    "minMdiag",
    "maxMdiag",
    "minDdiag",
    "maxDdiag",
    "ngTerml",
    "ngTermh",
    "ngTermCl",
    "ngTermCh",
    "ptrDIAGKl",
    "ptrDIAGKh",
    "ptrDIAGMl",
    "ptrDIAGMh",
    "ptrDIAGCl",
    "ptrDIAGCh",
    "ptrSCLKl",
    "ptrSCLKh",
    "Glbneqn",
    "distKey",
    "ngTermFl",
    "ngTermFh",
    "GlbnNodes",
    "GlbnVars",
    "GlbfAcCE",
    "lcAcLen",
    "GlbfCE",
    "ptrGmtl",
    "ptrGmth",
    "nceGprime",
    "numA12A11",
    "strctChg",
    "ntermGl",
    "ntermGh",
    "ptrDensel",
    "ptrDenseh",
    "nVirtBCs",
    "ptrVrtBCl",
    "ptrVrtBCh",
    "ptrMRKl",
    "ptrMRKh",
]


def read_full_header(path: str | Path) -> dict:
    """Read the FULL file header (record index 1) as a named dict.

    Opens *path*, skips the standard binary-file header record, reads
    the FULL header record, and maps it onto :data:`full_header_keys`
    via ``parse_header``.
    """
    handle = _open_binary(path)
    try:
        # Record 0 is the generic standard header — discard it.
        read_record(handle, dtype="<i4")
        raw_header, _ = read_record(handle, dtype="<i4")
    finally:
        handle.close()
    return parse_header(raw_header, full_header_keys)
def _read_column_block(fp, n_cols: int) -> tuple[np.ndarray, np.ndarray, np.ndarray]: """Read ``n_cols`` (rows_record, vals_record) pairs starting at fp.tell(). Returns flat ``(rows, cols, vals)`` arrays in 0-based indices, upper triangle (``row <= col``). The row indices in the file are 1-based and may be either above or below the column index; we swap when needed so the output is strictly upper-triangular. """ rows_out: list[np.ndarray] = [] cols_out: list[np.ndarray] = [] vals_out: list[np.ndarray] = [] for c in range(n_cols): row_arr, _ = read_record(fp, dtype="<i4") val_arr, _ = read_record(fp, dtype="<f8") if row_arr.size == 0: continue rows0 = row_arr.astype(np.int64) - 1 cols0 = np.full_like(rows0, c) # enforce upper triangle (row <= col) swap = rows0 > cols0 if swap.any(): lo = np.where(swap, cols0, rows0) hi = np.where(swap, rows0, cols0) rows0, cols0 = lo, hi rows_out.append(rows0) cols_out.append(cols0) vals_out.append(val_arr.astype(np.float64)) if not rows_out: # pragma: no cover # defensive return ( np.empty(0, np.int64), np.empty(0, np.int64), np.empty(0, np.float64), ) return ( np.concatenate(rows_out), np.concatenate(cols_out), np.concatenate(vals_out), ) def _assemble_symmetric(rows: np.ndarray, cols: np.ndarray, vals: np.ndarray, n: int) -> csc_matrix: """Build a symmetric CSC matrix from upper-triangle (rows, cols, vals).""" off = rows != cols all_rows = np.concatenate([rows, cols[off]]) all_cols = np.concatenate([cols, rows[off]]) all_vals = np.concatenate([vals, vals[off]]) return coo_matrix((all_vals, (all_rows, all_cols)), shape=(n, n)).tocsc() def _drop_constrained( k: csc_matrix | None, m: csc_matrix | None, const: np.ndarray ) -> tuple[csc_matrix | None, csc_matrix | None, np.ndarray]: """Remove rows+cols where const<0; return (k, m, keep_index).""" keep = np.where(const >= 0)[0] if k is not None: k = k[keep][:, keep] if m is not None: m = m[keep][:, keep] return k, m, keep
def read_full_km(
    path: str | Path, reduce: bool = True
) -> tuple[csc_matrix | None, csc_matrix | None, np.ndarray]:
    """Read stiffness + mass matrices from a MAPDL FULL file.

    Parameters
    ----------
    path
        Path to the ``.full`` file.
    reduce
        If True (the default), drop rows and columns whose ``const[i] < 0``
        (i.e. the constrained DOFs MAPDL still includes in the assembled
        matrix but which the user almost always wants eliminated).  This
        matches the behavior of ``pymapdl_reader.FullFile.load_km``.

    Returns
    -------
    k, m, dof_ref
        ``k`` and ``m`` are scipy CSC matrices (or ``None`` if the file
        doesn't store that matrix).  ``dof_ref`` is an ``(neqn_out, 2)``
        int32 array of ``(node, dof_id)`` pairs aligned with the matrix
        rows; ``dof_id`` uses the 1-based MAPDL DOF numbering (see
        :data:`femorph_solver.mapdl_api.io.rst_schema.DOF_REF`).
    """
    header = read_full_header(path)
    n_eqn = header["neqn"]

    fp = _open_binary(path)
    try:
        # Skip standard + full headers.
        read_record(fp, dtype="<i4")
        read_record(fp, dtype="<i4")
        # Next two records: DOF identifier list, then nodal equivalence.
        dof_ids, _ = read_record(fp, dtype="<i4")
        node_equiv, _ = read_record(fp, dtype="<i4")

        # (ndof, const) pair at ptrDOF: per-node DOF counts followed by the
        # constrained-equation mask (const < 0 marks a fixed DOF).
        fp.seek(header["ptrDOF"] * 4)  # pointers count 4-byte words
        ndof_per_node, _ = read_record(fp, dtype="<i4")
        const_mask, _ = read_record(fp, dtype="<i4")

        k = None
        if header["ntermK"] and header["ptrSTF"]:
            fp.seek(header["ptrSTF"] * 4)
            k = _assemble_symmetric(*_read_column_block(fp, n_eqn), n_eqn)

        m = None
        if header["ntermM"] and header["ptrMAS"]:
            fp.seek(header["ptrMAS"] * 4)
            m = _assemble_symmetric(*_read_column_block(fp, n_eqn), n_eqn)
    finally:
        fp.close()

    # Build the (node, dof_id) reference array aligned with the raw rows.
    # NOTE(review): assumes each node takes its DOF ids from the shared
    # ``dof_ids`` list in order — confirm for models mixing element types
    # with differing DOF sets.
    pairs: list[tuple[int, int]] = []
    for node_idx, node_id in enumerate(node_equiv.tolist()):
        for d in range(int(ndof_per_node[node_idx])):
            pairs.append((int(node_id), int(dof_ids[d])))
    dof_ref = np.asarray(pairs, dtype=np.int32)

    if reduce:
        k, m, keep = _drop_constrained(k, m, const_mask)
        dof_ref = dof_ref[keep]
    return k, m, dof_ref