deal.II version 9.7.0
\(\newcommand{\dealvcentcolon}{\mathrel{\mathop{:}}}\) \(\newcommand{\dealcoloneq}{\dealvcentcolon\mathrel{\mkern-1.2mu}=}\) \(\newcommand{\jump}[1]{\left[\!\left[ #1 \right]\!\right]}\) \(\newcommand{\average}[1]{\left\{\!\left\{ #1 \right\}\!\right\}}\)
Loading...
Searching...
No Matches
petsc_parallel_block_sparse_matrix.cc
Go to the documentation of this file.
1// ------------------------------------------------------------------------
2//
3// SPDX-License-Identifier: LGPL-2.1-or-later
4// Copyright (C) 2004 - 2025 by the deal.II authors
5//
6// This file is part of the deal.II library.
7//
8// Part of the source code is dual licensed under Apache-2.0 WITH
9// LLVM-exception OR LGPL-2.1-or-later. Detailed license information
10// governing the source code and code contributions can be found in
11// LICENSE.md and CONTRIBUTING.md at the top level directory of deal.II.
12//
13// ------------------------------------------------------------------------
14
16
19
20
21#ifdef DEAL_II_WITH_PETSC
22
23# include <petscmat.h>
24
25
27
28namespace
29{
30 // A dummy utility routine to create an empty matrix in case we import
31 // a MATNEST with NULL blocks
32 static Mat
33 create_dummy_mat(MPI_Comm comm,
34 PetscInt lr,
35 PetscInt gr,
36 PetscInt lc,
37 PetscInt gc)
38 {
39 Mat dummy;
40 PetscErrorCode ierr;
41
42 ierr = MatCreate(comm, &dummy);
43 AssertThrow(ierr == 0, ::ExcPETScError(ierr));
44 ierr = MatSetSizes(dummy, lr, lc, gr, gc);
45 AssertThrow(ierr == 0, ::ExcPETScError(ierr));
46 ierr = MatSetType(dummy, MATAIJ);
47 AssertThrow(ierr == 0, ::ExcPETScError(ierr));
48 ierr = MatSeqAIJSetPreallocation(dummy, 0, nullptr);
49 AssertThrow(ierr == 0, ::ExcPETScError(ierr));
50 ierr = MatMPIAIJSetPreallocation(dummy, 0, nullptr, 0, nullptr);
51 AssertThrow(ierr == 0, ::ExcPETScError(ierr));
52 ierr = MatSetUp(dummy);
53 AssertThrow(ierr == 0, ::ExcPETScError(ierr));
54 ierr = MatSetOption(dummy, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE);
55 AssertThrow(ierr == 0, ::ExcPETScError(ierr));
56 ierr = MatAssemblyBegin(dummy, MAT_FINAL_ASSEMBLY);
57 AssertThrow(ierr == 0, ::ExcPETScError(ierr));
58 ierr = MatAssemblyEnd(dummy, MAT_FINAL_ASSEMBLY);
59 AssertThrow(ierr == 0, ::ExcPETScError(ierr));
60 return dummy;
61 }
62} // namespace
63
64
65namespace PETScWrappers
66{
67 namespace MPI
68 {
71 {
73
74 return *this;
75 }
76
77
78
80 {
81 PetscErrorCode ierr = MatDestroy(&petsc_nest_matrix);
82 AssertNothrow(ierr == 0, ExcPETScError(ierr));
83 }
84
85
86
87# ifndef DOXYGEN
88 void
89 BlockSparseMatrix::reinit(const size_type n_block_rows,
90 const size_type n_block_columns)
91 {
92 // first delete previous content of
93 // the subobjects array
94 clear();
95
96 // then resize. set sizes of blocks to
97 // zero. user will later have to call
98 // collect_sizes for this
99 this->sub_objects.reinit(n_block_rows, n_block_columns);
100 this->row_block_indices.reinit(n_block_rows, 0);
101 this->column_block_indices.reinit(n_block_columns, 0);
102
103 // and reinitialize the blocks
104 for (size_type r = 0; r < this->n_block_rows(); ++r)
105 for (size_type c = 0; c < this->n_block_cols(); ++c)
106 {
107 BlockType *p = new BlockType();
108 this->sub_objects[r][c] = p;
109 }
110 }
111# endif
112
113
114
115 void
116 BlockSparseMatrix::reinit(const std::vector<IndexSet> &rows,
117 const std::vector<IndexSet> &cols,
118 const BlockDynamicSparsityPattern &bdsp,
119 const MPI_Comm com)
120 {
121 Assert(rows.size() == bdsp.n_block_rows(), ExcMessage("invalid size"));
122 Assert(cols.size() == bdsp.n_block_cols(), ExcMessage("invalid size"));
123
124
125 clear();
126 this->sub_objects.reinit(bdsp.n_block_rows(), bdsp.n_block_cols());
127
128 std::vector<types::global_dof_index> row_sizes;
129 for (unsigned int r = 0; r < bdsp.n_block_rows(); ++r)
130 row_sizes.push_back(bdsp.block(r, 0).n_rows());
131 this->row_block_indices.reinit(row_sizes);
132
133 std::vector<types::global_dof_index> col_sizes;
134 for (unsigned int c = 0; c < bdsp.n_block_cols(); ++c)
135 col_sizes.push_back(bdsp.block(0, c).n_cols());
136 this->column_block_indices.reinit(col_sizes);
137
138 for (unsigned int r = 0; r < this->n_block_rows(); ++r)
139 for (unsigned int c = 0; c < this->n_block_cols(); ++c)
140 {
141 Assert(rows[r].size() == bdsp.block(r, c).n_rows(),
142 ExcMessage("invalid size"));
143 Assert(cols[c].size() == bdsp.block(r, c).n_cols(),
144 ExcMessage("invalid size"));
145
146 BlockType *p = new BlockType();
147 p->reinit(rows[r], cols[c], bdsp.block(r, c), com);
148 this->sub_objects[r][c] = p;
149 }
150
151 this->collect_sizes();
152 }
153
    // Convenience overload: use the same index-set partition for both the
    // block rows and the block columns, then delegate to the general
    // two-partition reinit() above.
 154 void
 155 BlockSparseMatrix::reinit(const std::vector<IndexSet> &sizes,
 156 const BlockDynamicSparsityPattern &bdsp,
 157 const MPI_Comm com)
 158 {
 159 reinit(sizes, sizes, bdsp, com);
 160 }
161
162
163
164 void
166 {
167 auto m = this->n_block_rows();
168 auto n = this->n_block_cols();
169 PetscErrorCode ierr;
170
171 // Create empty matrices if needed
172 // This is needed by the base class
173 // not by MATNEST
174 std::vector<size_type> row_sizes(m, size_type(-1));
175 std::vector<size_type> col_sizes(n, size_type(-1));
176 std::vector<size_type> row_local_sizes(m, size_type(-1));
177 std::vector<size_type> col_local_sizes(n, size_type(-1));
178 MPI_Comm comm = MPI_COMM_NULL;
179 for (size_type r = 0; r < m; r++)
180 {
181 for (size_type c = 0; c < n; c++)
182 {
183 if (this->sub_objects[r][c])
184 {
185 comm = this->sub_objects[r][c]->get_mpi_communicator();
186 row_sizes[r] = this->sub_objects[r][c]->m();
187 col_sizes[c] = this->sub_objects[r][c]->n();
188 row_local_sizes[r] = this->sub_objects[r][c]->local_size();
189 col_local_sizes[c] =
190 this->sub_objects[r][c]->local_domain_size();
191 }
192 }
193 }
194 for (size_type r = 0; r < m; r++)
195 {
196 for (size_type c = 0; c < n; c++)
197 {
198 if (!this->sub_objects[r][c])
199 {
200 Assert(
201 row_sizes[r] != size_type(-1),
203 "When passing empty sub-blocks of a block matrix, you need to make "
204 "sure that at least one block in each block row and block column is "
205 "non-empty. However, block row " +
206 std::to_string(r) +
207 " is completely empty "
208 "and so it is not possible to determine how many rows it should have."));
209 Assert(
210 col_sizes[c] != size_type(-1),
212 "When passing empty sub-blocks of a block matrix, you need to make "
213 "sure that at least one block in each block row and block column is "
214 "non-empty. However, block column " +
215 std::to_string(c) +
216 " is completely empty "
217 "and so it is not possible to determine how many columns it should have."));
218 Mat dummy =
219 create_dummy_mat(comm,
220 static_cast<PetscInt>(row_local_sizes[r]),
221 static_cast<PetscInt>(row_sizes[r]),
222 static_cast<PetscInt>(col_local_sizes[c]),
223 static_cast<PetscInt>(col_sizes[c]));
224 this->sub_objects[r][c] = new BlockType(dummy);
225
226 // the new object got a reference on dummy, we can safely
227 // call destroy here
228 ierr = MatDestroy(&dummy);
229 AssertThrow(ierr == 0, ExcPETScError(ierr));
230 }
231 }
232 }
233 }
234
235
236 void
243
244 void
246 {
247 auto m = this->n_block_rows();
248 auto n = this->n_block_cols();
249 PetscErrorCode ierr;
250
251 MPI_Comm comm = PETSC_COMM_SELF;
252
253 ierr = MatDestroy(&petsc_nest_matrix);
254 AssertThrow(ierr == 0, ExcPETScError(ierr));
255 std::vector<Mat> psub_objects(m * n);
256 for (unsigned int r = 0; r < m; r++)
257 for (unsigned int c = 0; c < n; c++)
258 {
259 comm = this->sub_objects[r][c]->get_mpi_communicator();
260 psub_objects[r * n + c] = this->sub_objects[r][c]->petsc_matrix();
261 }
262 ierr = MatCreateNest(
263 comm, m, nullptr, n, nullptr, psub_objects.data(), &petsc_nest_matrix);
264 AssertThrow(ierr == 0, ExcPETScError(ierr));
265
266 ierr = MatNestSetVecType(petsc_nest_matrix, VECNEST);
267 AssertThrow(ierr == 0, ExcPETScError(ierr));
268 }
269
270
271
272 void
278
279
280
281 std::vector<IndexSet>
283 {
284 std::vector<IndexSet> index_sets;
285
286 for (unsigned int i = 0; i < this->n_block_cols(); ++i)
287 index_sets.push_back(this->block(0, i).locally_owned_domain_indices());
288
289 return index_sets;
290 }
291
292
293
294 std::vector<IndexSet>
296 {
297 std::vector<IndexSet> index_sets;
298
299 for (unsigned int i = 0; i < this->n_block_rows(); ++i)
300 index_sets.push_back(this->block(i, 0).locally_owned_range_indices());
301
302 return index_sets;
303 }
304
305
306
307 std::uint64_t
309 {
310 std::uint64_t n_nonzero = 0;
311 for (size_type rows = 0; rows < this->n_block_rows(); ++rows)
312 for (size_type cols = 0; cols < this->n_block_cols(); ++cols)
313 n_nonzero += this->block(rows, cols).n_nonzero_elements();
314
315 return n_nonzero;
316 }
317
318
319
322 {
323 return PetscObjectComm(reinterpret_cast<PetscObject>(petsc_nest_matrix));
324 }
325
    // Conversion operator granting read-only access to the underlying
    // native PETSc matrix handle (the MATNEST object maintained by this
    // class).
 326 BlockSparseMatrix::operator const Mat &() const
 327 {
 328 return petsc_nest_matrix;
 329 }
330
331
332
333 Mat &
338
339 void
341 {
342 clear();
343
344 PetscBool isnest;
345 PetscInt nr = 1, nc = 1;
346
347 PetscErrorCode ierr =
348 PetscObjectTypeCompare(reinterpret_cast<PetscObject>(A),
349 MATNEST,
350 &isnest);
351 AssertThrow(ierr == 0, ExcPETScError(ierr));
352 std::vector<Mat> mats;
353 bool need_empty_matrices = false;
354 if (isnest)
355 {
356 ierr = MatNestGetSize(A, &nr, &nc);
357 AssertThrow(ierr == 0, ExcPETScError(ierr));
358 for (PetscInt i = 0; i < nr; ++i)
359 {
360 for (PetscInt j = 0; j < nc; ++j)
361 {
362 Mat sA;
363 ierr = MatNestGetSubMat(A, i, j, &sA);
364 mats.push_back(sA);
365 if (!sA)
366 need_empty_matrices = true;
367 }
368 }
369 }
370 else
371 {
372 mats.push_back(A);
373 }
374
375 std::vector<size_type> r_block_sizes(nr, 0);
376 std::vector<size_type> c_block_sizes(nc, 0);
377 this->row_block_indices.reinit(r_block_sizes);
378 this->column_block_indices.reinit(c_block_sizes);
379 this->sub_objects.reinit(nr, nc);
380 for (PetscInt i = 0; i < nr; ++i)
381 {
382 for (PetscInt j = 0; j < nc; ++j)
383 {
384 if (mats[i * nc + j])
385 this->sub_objects[i][j] = new BlockType(mats[i * nc + j]);
386 else
387 this->sub_objects[i][j] = nullptr;
388 }
389 }
390 if (need_empty_matrices)
392
394 if (need_empty_matrices || !isnest)
395 {
397 }
398 else
399 {
400 ierr = PetscObjectReference(reinterpret_cast<PetscObject>(A));
401 AssertThrow(ierr == 0, ExcPETScError(ierr));
402 PetscErrorCode ierr = MatDestroy(&petsc_nest_matrix);
403 AssertThrow(ierr == 0, ExcPETScError(ierr));
405 }
406 }
407
408 } // namespace MPI
409} // namespace PETScWrappers
410
411
412
414
415#endif
void reinit(const unsigned int n_blocks, const size_type n_elements_per_block)
unsigned int n_block_rows() const
void compress(VectorOperation::values operation)
unsigned int n_block_cols() const
BlockType & block(const unsigned int row, const unsigned int column)
Table< 2, ObserverPointer< BlockType, BlockMatrixBase< SparseMatrix > > > sub_objects
SparsityPatternType & block(const size_type row, const size_type column)
EnableObserverPointer & operator=(const EnableObserverPointer &)
void reinit(const size_type n_block_rows, const size_type n_block_columns)
BlockSparseMatrix & operator=(const BlockSparseMatrix &)
void compress(VectorOperation::values operation)
std::size_t n_nonzero_elements() const
virtual void reinit(const SparsityPattern &sparsity)
size_type n_rows() const
size_type n_cols() const
#define DEAL_II_NAMESPACE_OPEN
Definition config.h:40
#define DEAL_II_NAMESPACE_CLOSE
Definition config.h:41
#define Assert(cond, exc)
#define AssertNothrow(cond, exc)
static ::ExceptionBase & ExcMessage(std::string arg1)
#define AssertThrow(cond, exc)
types::global_dof_index size_type
void petsc_increment_state_counter(Vec v)
*braid_SplitCommworld & comm