Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -178,6 +178,7 @@ set(BOUT_SOURCES
./include/bout/paralleltransform.hxx
./include/bout/petsc_interface.hxx
./include/bout/petsc_operators.hxx
./include/bout/petsc_jacobian.hxx
./include/bout/petsclib.hxx
./include/bout/physicsmodel.hxx
./include/bout/rajalib.hxx
Expand Down Expand Up @@ -310,6 +311,7 @@ set(BOUT_SOURCES
./src/mesh/parallel_boundary_op.cxx
./src/mesh/parallel_boundary_region.cxx
./src/mesh/petsc_operators.cxx
./src/mesh/petsc_jacobian.cxx
./src/mesh/surfaceiter.cxx
./src/physics/gyro_average.cxx
./src/physics/physicsmodel.cxx
Expand Down
63 changes: 32 additions & 31 deletions examples/fci-operators/fci_operators_example.cxx
Original file line number Diff line number Diff line change
Expand Up @@ -10,52 +10,53 @@
int main(int argc, char** argv) {
BoutInitialise(argc, argv);

const PetscOperators ops(bout::globals::mesh);
{
const PetscOperators ops(bout::globals::mesh);

// Parallel operators
const auto parallel = ops.getParallel();
// Parallel operators
const auto parallel = ops.getParallel();

// Test field
Field3D f = FieldFactory::get()->create3D("sin(y) + cos(z)");
bout::globals::mesh->communicate(f);
f.applyParallelBoundary("parallel_dirichlet_o2");
// Test field
Field3D f = FieldFactory::get()->create3D("sin(y) + cos(z)");
bout::globals::mesh->communicate(f);
f.applyParallelBoundary("parallel_dirichlet_o2");

Field3D f_neumann = FieldFactory::get()->create3D("sin(y) + cos(z)");
bout::globals::mesh->communicate(f_neumann);
f_neumann.applyParallelBoundary("parallel_neumann_o1");
Field3D f_neumann = FieldFactory::get()->create3D("sin(y) + cos(z)");
bout::globals::mesh->communicate(f_neumann);
f_neumann.applyParallelBoundary("parallel_neumann_o1");

auto forward_op = parallel.Restrict_minus * parallel.Forward;
auto forward_op = parallel.Restrict_minus * parallel.Forward;

Options dump;
dump["f"] = f;
dump["dV"] = parallel.dV;
Options dump;
dump["f"] = f;
dump["dV"] = parallel.dV;

// Test1 : (parallel.Restrict_minus * parallel.Inject_plus) is the identity in the interior (not X boundaries)
// Test1 : (parallel.Restrict_minus * parallel.Inject_plus) is the identity in the interior (not X boundaries)

dump["grad_par_op"] = parallel.Grad_par(f);
Field3D forward{0.0};
BOUT_FOR(i, forward.getRegion("RGN_NOBNDRY")) { forward[i] = f.yup()[i.yp()]; }
dump["grad_par_op"] = parallel.Grad_par(f);
Field3D forward{0.0};
BOUT_FOR(i, forward.getRegion("RGN_NOBNDRY")) { forward[i] = f.yup()[i.yp()]; }
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

warning: no header providing "BOUT_FOR" is directly included [misc-include-cleaner]

examples/fci-operators/fci_operators_example.cxx:8:

+ #include "bout/region.hxx"


dump["forward"] = forward;
dump["forward"] = forward;

dump["forward_op"] = forward_op(f);
dump["forward_op"] = forward_op(f);

dump["grad_par_yud"] = Grad_par(f);
dump["grad_par_yud"] = Grad_par(f);

// Test2 : Sum of dV * Div_par over domain is zero
// Test2 : Sum of dV * Div_par over domain is zero

dump["div_par_op"] = parallel.Div_par(f);
dump["div_par_op"] = parallel.Div_par(f);

auto* coords = bout::globals::mesh->getCoordinates();
bout::globals::mesh->communicate(coords->Bxy);
coords->Bxy.applyParallelBoundary("parallel_neumann_o1");
dump["div_par_yud"] = Div_par(f);
auto* coords = bout::globals::mesh->getCoordinates();
bout::globals::mesh->communicate(coords->Bxy);
coords->Bxy.applyParallelBoundary("parallel_neumann_o1");
dump["div_par_yud"] = Div_par(f);

dump["div_par_grad_par_op"] = parallel.Div_par_Grad_par(f_neumann);
dump["div_par_grad_par_yud"] = Div_par_K_Grad_par(1.0, f_neumann);

bout::writeDefaultOutputFile(dump);
dump["div_par_grad_par_op"] = parallel.Div_par_Grad_par(f_neumann);
dump["div_par_grad_par_yud"] = Div_par_K_Grad_par(1.0, f_neumann);

bout::writeDefaultOutputFile(dump);
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

warning: no header providing "bout::writeDefaultOutputFile" is directly included [misc-include-cleaner]

examples/fci-operators/fci_operators_example.cxx:7:

- #include "bout/petsc_operators.hxx"
+ #include "bout/options_io.hxx"
+ #include "bout/petsc_operators.hxx"

}
BoutFinalise();
return 0;
}
43 changes: 43 additions & 0 deletions include/bout/petsc_jacobian.hxx
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
#pragma once

#ifndef BOUT_PETSC_JACOBIAN_H
#define BOUT_PETSC_JACOBIAN_H

#include "bout/build_defines.hxx"

#if BOUT_HAS_PETSC

#include <petscmat.h>

/// @file petsc_jacobian.hxx
/// Helpers for stamping the sparsity pattern of per-cell operators into an
/// interleaved (variable-major within cell) finite-difference Jacobian.
/// Only available when BOUT++ is built with PETSc support.

/// Insert the nonzero pattern of @p sub into the variable block
/// (@p out_var, @p in_var) of the Jacobian @p Jfd.
///
/// @p Jfd is a square matrix of size (n_evolving * nvars) where nvars is
/// inferred as Jfd_global_size / sub_global_size. Each nonzero (r, c) in
/// @p sub produces an entry at (r * nvars + out_var, c * nvars + in_var)
/// in @p Jfd.
///
/// @p Jfd must already be preallocated. Entries are inserted with
/// INSERT_VALUES; MatAssemblyBegin/End must be called by the caller after
/// all insertions are complete.
///
/// @param Jfd The Jacobian matrix to populate. Must be preallocated.
/// @param sub Evolving-cell submatrix providing the nonzero pattern.
/// @param out_var Row variable index in [0, nvars).
/// @param in_var Column variable index in [0, nvars).
void addOperatorSparsity(Mat Jfd, Mat sub, int out_var, int in_var);

/// @brief Insert the nonzero pattern of @p sub into every variable block of
/// @p Jfd.
///
/// Equivalent to calling addOperatorSparsity(Jfd, sub, out_var, in_var) for
/// every combination of @p out_var and @p in_var in [0, nvars), where
/// @c nvars is inferred as @c Jfd_global / @c sub_global.
///
/// @param Jfd The Jacobian matrix to populate. Must be preallocated.
/// @param sub Evolving-cell submatrix providing the nonzero pattern.
void addOperatorSparsity(Mat Jfd, Mat sub);

#endif // BOUT_HAS_PETSC

#endif // BOUT_PETSC_JACOBIAN_H
51 changes: 46 additions & 5 deletions include/bout/petsc_operators.hxx
Original file line number Diff line number Diff line change
@@ -1,3 +1,8 @@
#pragma once

#ifndef BOUT_PETSC_OPERATORS_H
#define BOUT_PETSC_OPERATORS_H

#include "bout/build_defines.hxx"

#if BOUT_HAS_PETSC
Expand All @@ -8,7 +13,6 @@
#include "bout/boutexception.hxx"
#include "bout/field3d.hxx"
#include "bout/mesh.hxx"
#include "bout/output.hxx"
#include "bout/output_bout_types.hxx"
#include "bout/petsc_interface.hxx"
#include "bout/petsclib.hxx"
Expand Down Expand Up @@ -45,6 +49,10 @@ struct ForwardLegSpaceTag {};
/// leg space. See @ref ForwardLegSpaceTag and @ref CellSpaceTag.
struct BackwardLegSpaceTag {};

// Forward declare
template <typename OutSpaceTag, typename InSpaceTag>
class PetscOperator;

/// @brief Bidirectional index mapping between mesh-file stored numbering and PETSc
/// row ownership.
///
Expand Down Expand Up @@ -275,6 +283,37 @@ public:
}
}

/// @brief Create a PETSc IS containing the global PETSc indices of all
/// locally owned evolving interior cells, in the order that
/// mapOwnedInteriorCells visits them.
///
/// The returned IS selects the evolving subset of the full cell space C,
/// excluding inner/outer X-boundary cells and forward/backward parallel
/// boundary virtual cells. It is the correct IS to pass to
/// MatCreateSubMatrix when restricting a PetscCellOperator to the degrees
/// of freedom that the SNES solver actually integrates.
///
/// The caller owns the returned IS and must call ISDestroy when finished.
///
/// @returns A PETSc IS of local size equal to the number of locally owned
/// evolving cells, holding global PETSc row indices.
IS makeEvolvingIS() const;

/// @brief Extract the evolving-cell submatrix from a cell-to-cell operator.
///
/// Restricts @p op to the rows and columns that correspond to evolving
/// interior cells, discarding any rows or columns that belong to inner/outer
/// X-boundary cells or forward/backward parallel boundary virtual cells.
///
/// The returned Mat is an independent copy (MAT_INITIAL_MATRIX): subsequent
/// modifications to @p op do not affect it. The caller owns the returned
/// Mat and must call MatDestroy when finished.
///
/// @param op A cell-to-cell operator whose row and column space is the full
/// cell space C managed by this mapping.
/// @returns A square Mat of global size n_evolving × n_evolving.
Mat extractEvolvingSubmatrix(const PetscOperator<CellSpaceTag, CellSpaceTag>& op) const;

private:
Field3D cell_number; ///< Stored cell numbers for interior/X-boundary cells.
Field3D
Expand Down Expand Up @@ -865,8 +904,8 @@ public:
///
/// Naming convention: the subscript on Grad/Div/Restrict refers to which side of
/// the cell face the operator acts on, not the leg space it lives in.
/// - @c Grad_plus / @c Div_plus: forward-side half-step operators (in L-).
/// - @c Grad_minus / @c Div_minus: backward-side half-step operators (in L+).
/// - @c Grad_plus / @c Div_plus: forward-side half-step operators (C to L+).
/// - @c Grad_minus / @c Div_minus: backward-side half-step operators (C to L-).
/// - @c Restrict_minus = I+^T * W+: weighted restriction from L+ back to C,
/// paired with the minus-side gradient.
/// - @c Restrict_plus = I-^T * W-: weighted restriction from L- back to C,
Expand Down Expand Up @@ -914,7 +953,7 @@ public:
///
/// Evaluates the half-step fluxes separately on each side, multiplies by the
/// interpolated diffusion coefficient K, and averages the two divergence
/// contributions. This is the primary user-facing SOM operator.
/// contributions.
///
/// @param K Diffusion coefficient field; must have parallel slices allocated.
/// @param f Field to differentiate; must have parallel slices allocated.
Expand Down Expand Up @@ -1021,4 +1060,6 @@ private:

#warning PETSc not available. No PetscOperators.

#endif
#endif // BOUT_HAS_PETSC

#endif //BOUT_PETSC_OPERATORS_H
65 changes: 65 additions & 0 deletions src/mesh/petsc_jacobian.cxx
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
#include "bout/build_defines.hxx"

#if BOUT_HAS_PETSC

#include "bout/assert.hxx"
#include "bout/petsc_jacobian.hxx"
#include "bout/petsclib.hxx"

#include <petscmat.h>

/// Stamp the nonzero pattern of @p sub into variable block (@p out_var,
/// @p in_var) of the interleaved Jacobian @p Jfd.
///
/// Each local nonzero (r, c) of @p sub becomes an entry at
/// (r * nvars + out_var, c * nvars + in_var), where nvars is inferred from
/// the global size ratio. @p Jfd must be preallocated; the caller is
/// responsible for MatAssemblyBegin/End after all insertions.
void addOperatorSparsity(Mat Jfd, Mat sub, int out_var, int in_var) {
  // Infer nvars from global sizes. One declaration per statement
  // (readability-isolate-declaration).
  PetscInt jfd_global{0};
  PetscInt sub_global{0};
  BOUT_DO_PETSC(MatGetSize(Jfd, &jfd_global, nullptr));
  BOUT_DO_PETSC(MatGetSize(sub, &sub_global, nullptr));

  ASSERT1(sub_global > 0);
  ASSERT1(jfd_global % sub_global == 0);

  const PetscInt nvars = jfd_global / sub_global;

  // Variable indices must address a valid block of the interleaved matrix.
  ASSERT1(out_var >= 0 && out_var < static_cast<int>(nvars));
  ASSERT1(in_var >= 0 && in_var < static_cast<int>(nvars));

  // Iterate over locally owned rows of sub and insert into Jfd
  PetscInt rstart{0};
  PetscInt rend{0};
  BOUT_DO_PETSC(MatGetOwnershipRange(sub, &rstart, &rend));

  // Only the pattern matters; a placeholder value marks each nonzero.
  const PetscScalar one = 1.0;

  for (PetscInt sub_row = rstart; sub_row < rend; ++sub_row) {
    PetscInt ncols{0};
    const PetscInt* sub_cols{nullptr};
    const PetscScalar* vals{nullptr};
    BOUT_DO_PETSC(MatGetRow(sub, sub_row, &ncols, &sub_cols, &vals));

    const PetscInt jfd_row = (sub_row * nvars) + out_var;

    for (PetscInt k = 0; k < ncols; ++k) {
      const PetscInt jfd_col = (sub_cols[k] * nvars) + in_var;
      BOUT_DO_PETSC(MatSetValues(Jfd, 1, &jfd_row, 1, &jfd_col, &one, INSERT_VALUES));
    }

    // Every MatGetRow must be paired with MatRestoreRow before the next row.
    BOUT_DO_PETSC(MatRestoreRow(sub, sub_row, &ncols, &sub_cols, &vals));
  }
}

/// Stamp the nonzero pattern of @p sub into every (out_var, in_var) block
/// of @p Jfd, with nvars inferred from the global size ratio.
void addOperatorSparsity(Mat Jfd, Mat sub) {
  // One declaration per statement (readability-isolate-declaration).
  PetscInt jfd_global{0};
  PetscInt sub_global{0};
  // Check PETSc return codes, consistent with the four-argument overload.
  BOUT_DO_PETSC(MatGetSize(Jfd, &jfd_global, nullptr));
  BOUT_DO_PETSC(MatGetSize(sub, &sub_global, nullptr));

  ASSERT1(sub_global > 0);
  ASSERT1(jfd_global % sub_global == 0);

  const int nvars = static_cast<int>(jfd_global / sub_global);

  // Dense coupling assumption: every variable may depend on every other,
  // so insert the pattern into all nvars * nvars blocks.
  for (int out_var = 0; out_var < nvars; ++out_var) {
    for (int in_var = 0; in_var < nvars; ++in_var) {
      addOperatorSparsity(Jfd, sub, out_var, in_var);
    }
  }
}

#endif // BOUT_HAS_PETSC
29 changes: 28 additions & 1 deletion src/mesh/petsc_operators.cxx
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
#include "bout/bout_types.hxx"
#include "bout/boutexception.hxx"
#include "bout/field3d.hxx"
#include "bout/output.hxx"
#include "bout/output_bout_types.hxx"
#include "bout/petsc_operators.hxx"
#include "bout/region.hxx"
Expand Down Expand Up @@ -61,6 +60,7 @@ void PetscIndexMapping::buildPermutation(PetscInt nlocal, PetscInt nglobal,
const PetscInt petsc_row = row_start + i;
const PetscInt stored_col = stored_indices[i];
ASSERT1(stored_col >= 0);
output.write("{}: {}, {} | {} {}\n", i, petsc_row, stored_col, nlocal, nglobal);
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

warning: no header providing "output" is directly included [misc-include-cleaner]

src/mesh/petsc_operators.cxx:1:

+ #include "bout/output.hxx"

local_stored_to_petsc[static_cast<int>(stored_col)] = petsc_row;
BOUT_DO_PETSC(MatSetValues(mat_stored_to_petsc, 1, &petsc_row, 1, &stored_col, &one,
INSERT_VALUES));
Expand Down Expand Up @@ -127,6 +127,33 @@ PetscCellMapping::PetscCellMapping(const Field3D& cell_number,
local_indices);
}

/// Build a PETSc IS holding the global PETSc row indices of all locally
/// owned evolving interior cells, in mapOwnedInteriorCells order.
/// The caller owns the returned IS and must call ISDestroy when finished.
IS PetscCellMapping::makeEvolvingIS() const {
  // Collect global PETSc indices in mapOwnedInteriorCells order.
  // Reserve the known count up front to avoid reallocation.
  std::vector<PetscInt> indices;
  indices.reserve(static_cast<std::size_t>(evolving_region.size()));

  mapOwnedInteriorCells(
      [&](PetscInt row, const Ind3D& /*i*/, int /*stored*/) { indices.push_back(row); });

  // Initialize the handle (cppcoreguidelines-init-variables); PETSc fills it.
  IS is = nullptr;
  // PETSC_COPY_VALUES: the IS takes its own copy, so `indices` may go out
  // of scope safely.
  BOUT_DO_PETSC(ISCreateGeneral(BoutComm::get(), static_cast<PetscInt>(indices.size()),
                                indices.data(), PETSC_COPY_VALUES, &is));
  return is;
}

/// Restrict @p op to the rows and columns of evolving interior cells,
/// discarding X-boundary and parallel-boundary virtual cells. Returns an
/// independent copy (MAT_INITIAL_MATRIX); the caller must call MatDestroy.
Mat PetscCellMapping::extractEvolvingSubmatrix(
    const PetscOperator<CellSpaceTag, CellSpaceTag>& op) const {
  // Same IS is used for rows and columns: the result is square.
  IS is = makeEvolvingIS();

  // Initialize the handle (cppcoreguidelines-init-variables); PETSc fills it.
  Mat sub = nullptr;
  BOUT_DO_PETSC(MatCreateSubMatrix(op.raw(), is, is, MAT_INITIAL_MATRIX, &sub));

  // The submatrix holds its own copy of the index data, so the IS can be
  // destroyed immediately.
  BOUT_DO_PETSC(ISDestroy(&is));

  return sub;
}

PetscLegMapping::PetscLegMapping(int total_legs, std::vector<int> local_leg_indices) {
std::sort(local_leg_indices.begin(), local_leg_indices.end());
local_leg_indices.erase(std::unique(local_leg_indices.begin(), local_leg_indices.end()),
Expand Down
1 change: 1 addition & 0 deletions tests/integrated/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ add_subdirectory(test-naulin-laplace)
add_subdirectory(test-options-netcdf)
add_subdirectory(test-petsc_laplace)
add_subdirectory(test-petsc_laplace_MAST-grid)
add_subdirectory(test-petsc-ordering)
add_subdirectory(test-restart-io)
add_subdirectory(test-restarting)
add_subdirectory(test-slepc-solver)
Expand Down
7 changes: 7 additions & 0 deletions tests/integrated/test-petsc-ordering/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
bout_add_integrated_test(
test-petsc-ordering
SOURCES test_petsc_ordering.cxx
REQUIRES BOUT_HAS_PETSC
USE_RUNTEST USE_DATA_BOUT_INP
PROCESSORS 4
)
14 changes: 14 additions & 0 deletions tests/integrated/test-petsc-ordering/data/BOUT.inp
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Input file for test_petsc_ordering integrated test.
# Uses a small grid large enough to exercise MPI decomposition on 4 ranks
# (nx=10 gives 2 interior x-points per rank when NXPE=4; ny and nz similarly).

[mesh]
nx = 10 # 8 interior + 2 guard (mxg=1 each side)
ny = 8
nz = 4

ixseps1 = nx
ixseps2 = nx

[output]
enabled = false # Suppress data file output; we only need stdout
Loading
Loading