Actual source code: matrix.c
1: /*
2: This is where the abstract matrix operations are defined
3: Portions of this code are under:
4: Copyright (c) 2022 Advanced Micro Devices, Inc. All rights reserved.
5: */
7: #include <petsc/private/matimpl.h>
8: #include <petsc/private/isimpl.h>
9: #include <petsc/private/vecimpl.h>
11: /* Logging support */
12: PetscClassId MAT_CLASSID;
13: PetscClassId MAT_COLORING_CLASSID;
14: PetscClassId MAT_FDCOLORING_CLASSID;
15: PetscClassId MAT_TRANSPOSECOLORING_CLASSID;
17: PetscLogEvent MAT_Mult, MAT_MultAdd, MAT_MultTranspose;
18: PetscLogEvent MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve, MAT_MatTrSolve;
19: PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
20: PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
21: PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
22: PetscLogEvent MAT_QRFactorNumeric, MAT_QRFactorSymbolic, MAT_QRFactor;
23: PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
24: PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
25: PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply, MAT_Transpose, MAT_FDColoringFunction, MAT_CreateSubMat;
26: PetscLogEvent MAT_TransposeColoringCreate;
27: PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
28: PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric, MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
29: PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
30: PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
31: PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
32: PetscLogEvent MAT_MultHermitianTranspose, MAT_MultHermitianTransposeAdd;
33: PetscLogEvent MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
34: PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
35: PetscLogEvent MAT_GetMultiProcBlock;
36: PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_CUSPARSECopyFromGPU, MAT_CUSPARSEGenerateTranspose, MAT_CUSPARSESolveAnalysis;
37: PetscLogEvent MAT_HIPSPARSECopyToGPU, MAT_HIPSPARSECopyFromGPU, MAT_HIPSPARSEGenerateTranspose, MAT_HIPSPARSESolveAnalysis;
38: PetscLogEvent MAT_PreallCOO, MAT_SetVCOO;
39: PetscLogEvent MAT_CreateGraph;
40: PetscLogEvent MAT_SetValuesBatch;
41: PetscLogEvent MAT_ViennaCLCopyToGPU;
42: PetscLogEvent MAT_CUDACopyToGPU, MAT_HIPCopyToGPU;
43: PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
44: PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom;
45: PetscLogEvent MAT_FactorFactS, MAT_FactorInvS;
46: PetscLogEvent MATCOLORING_Apply, MATCOLORING_Comm, MATCOLORING_Local, MATCOLORING_ISCreate, MATCOLORING_SetUp, MATCOLORING_Weights;
47: PetscLogEvent MAT_H2Opus_Build, MAT_H2Opus_Compress, MAT_H2Opus_Orthog, MAT_H2Opus_LR;
49: const char *const MatFactorTypes[] = {"NONE", "LU", "CHOLESKY", "ILU", "ICC", "ILUDT", "QR", "MatFactorType", "MAT_FACTOR_", NULL};
51: /*@
52: MatSetRandom - Sets all components of a matrix to random numbers.
54: Logically Collective
56: Input Parameters:
57: + x - the matrix
58: - rctx - the `PetscRandom` object, formed by `PetscRandomCreate()`, or `NULL`, in which case one
59: will be created internally.
61: Example:
62: .vb
63: PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
64: MatSetRandom(x,rctx);
65: PetscRandomDestroy(&rctx);
66: .ve
68: Level: intermediate
70: Notes:
71: For sparse matrices that have been preallocated but not yet assembled, it randomly selects appropriate locations;
73: for sparse matrices that already have nonzero locations, it fills those locations with random numbers.
75: It generates an error if used on unassembled sparse matrices that have not been preallocated.
77: .seealso: [](ch_matrices), `Mat`, `PetscRandom`, `PetscRandomCreate()`, `MatZeroEntries()`, `MatSetValues()`, `PetscRandomDestroy()`
78: @*/
79: PetscErrorCode MatSetRandom(Mat x, PetscRandom rctx)
80: {
81: PetscRandom randObj = NULL;
83: PetscFunctionBegin;
87: MatCheckPreallocated(x, 1);
89: if (!rctx) {
90: MPI_Comm comm;
91: PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
92: PetscCall(PetscRandomCreate(comm, &randObj));
93: PetscCall(PetscRandomSetType(randObj, x->defaultrandtype));
94: PetscCall(PetscRandomSetFromOptions(randObj));
95: rctx = randObj;
96: }
97: PetscCall(PetscLogEventBegin(MAT_SetRandom, x, rctx, 0, 0));
98: PetscUseTypeMethod(x, setrandom, rctx);
99: PetscCall(PetscLogEventEnd(MAT_SetRandom, x, rctx, 0, 0));
101: PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY));
102: PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY));
103: PetscCall(PetscRandomDestroy(&randObj));
104: PetscFunctionReturn(PETSC_SUCCESS);
105: }
107: /*@
108: MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero and the row it occurred in
110: Logically Collective
112: Input Parameter:
113: . mat - the factored matrix
115: Output Parameters:
116: + pivot - the pivot value computed
117: - row - the row in which the zero pivot occurred. This row value must be interpreted carefully due to row reorderings and which processes
118: share the matrix
120: Level: advanced
122: Notes:
123: This routine does not work for factorizations done with external packages.
125: This routine should only be called if `MatFactorGetError()` returns a value of `MAT_FACTOR_NUMERIC_ZEROPIVOT`
127: This can also be called on non-factored matrices, for example matrices used in SOR.
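Example Usage:
A minimal sketch (hypothetical variable names; assumes `F` holds a completed numeric factorization):
.vb
  MatFactorError err;
  PetscReal      pivot;
  PetscInt       row;

  MatFactorGetError(F, &err);
  if (err == MAT_FACTOR_NUMERIC_ZEROPIVOT) {
    MatFactorGetErrorZeroPivot(F, &pivot, &row);
    PetscPrintf(PETSC_COMM_SELF, "zero pivot %g in row %" PetscInt_FMT "\n", (double)pivot, row);
  }
.ve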
129: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`,
130: `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`,
131: `MAT_FACTOR_NUMERIC_ZEROPIVOT`
132: @*/
133: PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat, PetscReal *pivot, PetscInt *row)
134: {
135: PetscFunctionBegin;
137: PetscAssertPointer(pivot, 2);
138: PetscAssertPointer(row, 3);
139: *pivot = mat->factorerror_zeropivot_value;
140: *row = mat->factorerror_zeropivot_row;
141: PetscFunctionReturn(PETSC_SUCCESS);
142: }
144: /*@
145: MatFactorGetError - gets the error code from a factorization
147: Logically Collective
149: Input Parameter:
150: . mat - the factored matrix
152: Output Parameter:
153: . err - the error code
155: Level: advanced
157: Note:
158: This can also be called on non-factored matrices, for example matrices used in SOR.
160: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`,
161: `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`, `MatFactorError`
162: @*/
163: PetscErrorCode MatFactorGetError(Mat mat, MatFactorError *err)
164: {
165: PetscFunctionBegin;
167: PetscAssertPointer(err, 2);
168: *err = mat->factorerrortype;
169: PetscFunctionReturn(PETSC_SUCCESS);
170: }
172: /*@
173: MatFactorClearError - clears the error code in a factorization
175: Logically Collective
177: Input Parameter:
178: . mat - the factored matrix
180: Level: developer
182: Note:
183: This can also be called on non-factored matrices, for example matrices used in SOR.
185: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorGetError()`, `MatFactorGetErrorZeroPivot()`,
186: `MatGetErrorCode()`, `MatFactorError`
187: @*/
188: PetscErrorCode MatFactorClearError(Mat mat)
189: {
190: PetscFunctionBegin;
192: mat->factorerrortype = MAT_FACTOR_NOERROR;
193: mat->factorerror_zeropivot_value = 0.0;
194: mat->factorerror_zeropivot_row = 0;
195: PetscFunctionReturn(PETSC_SUCCESS);
196: }
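/*
  MatFindNonzeroRowsOrCols_Basic - default implementation used when a matrix type does not provide its own
  findnonzerorows/findnonzerocols method. It applies the matrix (or its transpose) to a random vector and keeps
  the indices of the result entries whose magnitude exceeds tol (or is simply nonzero when tol <= 0), so it is a
  randomized check of the numerical values rather than an inspection of the stored nonzero pattern. If every row
  (or column) is detected as nonzero, *nonzero is set to NULL.
*/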
198: PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat, PetscBool cols, PetscReal tol, IS *nonzero)
199: {
200: Vec r, l;
201: const PetscScalar *al;
202: PetscInt i, nz, gnz, N, n, st;
204: PetscFunctionBegin;
205: PetscCall(MatCreateVecs(mat, &r, &l));
206: if (!cols) { /* nonzero rows */
207: PetscCall(MatGetOwnershipRange(mat, &st, NULL));
208: PetscCall(MatGetSize(mat, &N, NULL));
209: PetscCall(MatGetLocalSize(mat, &n, NULL));
210: PetscCall(VecSet(l, 0.0));
211: PetscCall(VecSetRandom(r, NULL));
212: PetscCall(MatMult(mat, r, l));
213: PetscCall(VecGetArrayRead(l, &al));
214: } else { /* nonzero columns */
215: PetscCall(MatGetOwnershipRangeColumn(mat, &st, NULL));
216: PetscCall(MatGetSize(mat, NULL, &N));
217: PetscCall(MatGetLocalSize(mat, NULL, &n));
218: PetscCall(VecSet(r, 0.0));
219: PetscCall(VecSetRandom(l, NULL));
220: PetscCall(MatMultTranspose(mat, l, r));
221: PetscCall(VecGetArrayRead(r, &al));
222: }
223: if (tol <= 0.0) {
224: for (i = 0, nz = 0; i < n; i++)
225: if (al[i] != 0.0) nz++;
226: } else {
227: for (i = 0, nz = 0; i < n; i++)
228: if (PetscAbsScalar(al[i]) > tol) nz++;
229: }
230: PetscCallMPI(MPIU_Allreduce(&nz, &gnz, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
231: if (gnz != N) {
232: PetscInt *nzr;
233: PetscCall(PetscMalloc1(nz, &nzr));
234: if (nz) {
235: if (tol < 0) {
236: for (i = 0, nz = 0; i < n; i++)
237: if (al[i] != 0.0) nzr[nz++] = i + st;
238: } else {
239: for (i = 0, nz = 0; i < n; i++)
240: if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i + st;
241: }
242: }
243: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nz, nzr, PETSC_OWN_POINTER, nonzero));
244: } else *nonzero = NULL;
245: if (!cols) { /* nonzero rows */
246: PetscCall(VecRestoreArrayRead(l, &al));
247: } else {
248: PetscCall(VecRestoreArrayRead(r, &al));
249: }
250: PetscCall(VecDestroy(&l));
251: PetscCall(VecDestroy(&r));
252: PetscFunctionReturn(PETSC_SUCCESS);
253: }
255: /*@
256: MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix
258: Input Parameter:
259: . mat - the matrix
261: Output Parameter:
262: . keptrows - the rows that are not completely zero
264: Level: intermediate
266: Note:
267: `keptrows` is set to `NULL` if all rows are nonzero.
269: Developer Note:
270: If `keptrows` is not `NULL`, it must be sorted.
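Example Usage:
A minimal sketch (assumes `mat` is assembled; the returned `IS`, when not `NULL`, is destroyed by the caller):
.vb
  IS keptrows;

  MatFindNonzeroRows(mat, &keptrows);
  if (keptrows) { // NULL means every row has a nonzero
    ISView(keptrows, PETSC_VIEWER_STDOUT_WORLD);
    ISDestroy(&keptrows);
  }
.ve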
272: .seealso: [](ch_matrices), `Mat`, `MatFindZeroRows()`
273: @*/
274: PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows)
275: {
276: PetscFunctionBegin;
279: PetscAssertPointer(keptrows, 2);
280: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
281: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
282: if (mat->ops->findnonzerorows) PetscUseTypeMethod(mat, findnonzerorows, keptrows);
283: else PetscCall(MatFindNonzeroRowsOrCols_Basic(mat, PETSC_FALSE, 0.0, keptrows));
284: if (keptrows && *keptrows) PetscCall(ISSetInfo(*keptrows, IS_SORTED, IS_GLOBAL, PETSC_FALSE, PETSC_TRUE));
285: PetscFunctionReturn(PETSC_SUCCESS);
286: }
288: /*@
289: MatFindZeroRows - Locate all rows that are completely zero in the matrix
291: Input Parameter:
292: . mat - the matrix
294: Output Parameter:
295: . zerorows - the rows that are completely zero
297: Level: intermediate
299: Note:
300: `zerorows` is set to `NULL` if no rows are zero.
302: .seealso: [](ch_matrices), `Mat`, `MatFindNonzeroRows()`
303: @*/
304: PetscErrorCode MatFindZeroRows(Mat mat, IS *zerorows)
305: {
306: IS keptrows;
307: PetscInt m, n;
309: PetscFunctionBegin;
312: PetscAssertPointer(zerorows, 2);
313: PetscCall(MatFindNonzeroRows(mat, &keptrows));
314: /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
315: In keeping with this convention, we set zerorows to NULL if there are no zero
316: rows. */
317: if (keptrows == NULL) {
318: *zerorows = NULL;
319: } else {
320: PetscCall(MatGetOwnershipRange(mat, &m, &n));
321: PetscCall(ISComplement(keptrows, m, n, zerorows));
322: PetscCall(ISDestroy(&keptrows));
323: }
324: PetscFunctionReturn(PETSC_SUCCESS);
325: }
327: /*@
328: MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling
330: Not Collective
332: Input Parameter:
333: . A - the matrix
335: Output Parameter:
336: . a - the diagonal part (which is a SEQUENTIAL matrix)
338: Level: advanced
340: Notes:
341: See `MatCreateAIJ()` for more information on the "diagonal part" of the matrix.
343: Use caution, as the reference count on the returned matrix is not incremented and it is used as part of `A`'s normal operation.
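Example Usage:
A minimal sketch (assumes `A` is an assembled parallel `MATAIJ` matrix):
.vb
  Mat Ad;

  MatGetDiagonalBlock(A, &Ad);
  // Ad is the sequential matrix holding the on-process (diagonal) block of A; do not destroy it
.ve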
345: .seealso: [](ch_matrices), `Mat`, `MatCreateAIJ()`, `MATAIJ`, `MATBAIJ`, `MATSBAIJ`
346: @*/
347: PetscErrorCode MatGetDiagonalBlock(Mat A, Mat *a)
348: {
349: PetscFunctionBegin;
352: PetscAssertPointer(a, 2);
353: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
354: if (A->ops->getdiagonalblock) PetscUseTypeMethod(A, getdiagonalblock, a);
355: else {
356: PetscMPIInt size;
358: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
359: PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for parallel matrix type %s", ((PetscObject)A)->type_name);
360: *a = A;
361: }
362: PetscFunctionReturn(PETSC_SUCCESS);
363: }
365: /*@
366: MatGetTrace - Gets the trace of a matrix, that is, the sum of its diagonal entries.
368: Collective
370: Input Parameter:
371: . mat - the matrix
373: Output Parameter:
374: . trace - the sum of the diagonal entries
376: Level: advanced
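Example Usage:
A minimal sketch (assumes `mat` is assembled and square):
.vb
  PetscScalar tr;

  MatGetTrace(mat, &tr);
  PetscPrintf(PETSC_COMM_WORLD, "trace = %g\n", (double)PetscRealPart(tr));
.ve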
378: .seealso: [](ch_matrices), `Mat`
379: @*/
380: PetscErrorCode MatGetTrace(Mat mat, PetscScalar *trace)
381: {
382: Vec diag;
384: PetscFunctionBegin;
386: PetscAssertPointer(trace, 2);
387: PetscCall(MatCreateVecs(mat, &diag, NULL));
388: PetscCall(MatGetDiagonal(mat, diag));
389: PetscCall(VecSum(diag, trace));
390: PetscCall(VecDestroy(&diag));
391: PetscFunctionReturn(PETSC_SUCCESS);
392: }
394: /*@
395: MatRealPart - Zeros out the imaginary part of the matrix
397: Logically Collective
399: Input Parameter:
400: . mat - the matrix
402: Level: advanced
404: .seealso: [](ch_matrices), `Mat`, `MatImaginaryPart()`
405: @*/
406: PetscErrorCode MatRealPart(Mat mat)
407: {
408: PetscFunctionBegin;
411: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
412: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
413: MatCheckPreallocated(mat, 1);
414: PetscUseTypeMethod(mat, realpart);
415: PetscFunctionReturn(PETSC_SUCCESS);
416: }
418: /*@C
419: MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix
421: Collective
423: Input Parameter:
424: . mat - the matrix
426: Output Parameters:
427: + nghosts - number of ghosts (for `MATBAIJ` and `MATSBAIJ` matrices there is one ghost for each matrix block)
428: - ghosts - the global indices of the ghost points
430: Level: advanced
432: Note:
433: `nghosts` and `ghosts` are suitable to pass into `VecCreateGhost()` or `VecCreateGhostBlock()`
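Example Usage:
A minimal sketch of creating a compatible ghosted vector (assumes `mat` is an assembled parallel `MATAIJ` matrix):
.vb
  const PetscInt *ghosts;
  PetscInt        nghosts, n;
  Vec             x;

  MatGetLocalSize(mat, NULL, &n);
  MatGetGhosts(mat, &nghosts, &ghosts);
  VecCreateGhost(PetscObjectComm((PetscObject)mat), n, PETSC_DETERMINE, nghosts, ghosts, &x);
.ve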
435: .seealso: [](ch_matrices), `Mat`, `VecCreateGhost()`, `VecCreateGhostBlock()`
436: @*/
437: PetscErrorCode MatGetGhosts(Mat mat, PetscInt *nghosts, const PetscInt *ghosts[])
438: {
439: PetscFunctionBegin;
442: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
443: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
444: if (mat->ops->getghosts) PetscUseTypeMethod(mat, getghosts, nghosts, ghosts);
445: else {
446: if (nghosts) *nghosts = 0;
447: if (ghosts) *ghosts = NULL;
448: }
449: PetscFunctionReturn(PETSC_SUCCESS);
450: }
452: /*@
453: MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part
455: Logically Collective
457: Input Parameter:
458: . mat - the matrix
460: Level: advanced
462: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`
463: @*/
464: PetscErrorCode MatImaginaryPart(Mat mat)
465: {
466: PetscFunctionBegin;
469: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
470: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
471: MatCheckPreallocated(mat, 1);
472: PetscUseTypeMethod(mat, imaginarypart);
473: PetscFunctionReturn(PETSC_SUCCESS);
474: }
476: /*@
477: MatMissingDiagonal - Determine if sparse matrix is missing a diagonal entry (or block entry for `MATBAIJ` and `MATSBAIJ` matrices) in the nonzero structure
479: Not Collective
481: Input Parameter:
482: . mat - the matrix
484: Output Parameters:
485: + missing - is any diagonal entry missing
486: - dd - first diagonal entry on this process that is missing (optional)
488: Level: advanced
490: Note:
491: This does not return diagonal entries that are in the nonzero structure but happen to have a zero numerical value
493: .seealso: [](ch_matrices), `Mat`
494: @*/
495: PetscErrorCode MatMissingDiagonal(Mat mat, PetscBool *missing, PetscInt *dd)
496: {
497: PetscFunctionBegin;
500: PetscAssertPointer(missing, 2);
501: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix %s", ((PetscObject)mat)->type_name);
502: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
503: PetscUseTypeMethod(mat, missingdiagonal, missing, dd);
504: PetscFunctionReturn(PETSC_SUCCESS);
505: }
507: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
508: /*@C
509: MatGetRow - Gets a row of a matrix. You MUST call `MatRestoreRow()`
510: for each row that you get to ensure that your application does
511: not bleed memory.
513: Not Collective
515: Input Parameters:
516: + mat - the matrix
517: - row - the row to get
519: Output Parameters:
520: + ncols - if not `NULL`, the number of nonzeros in `row`
521: . cols - if not `NULL`, the column numbers
522: - vals - if not `NULL`, the numerical values
524: Level: advanced
526: Notes:
527: This routine is provided for people who need to have direct access
528: to the structure of a matrix. We hope that we provide enough
529: high-level matrix routines that few users will need it.
531: `MatGetRow()` always returns 0-based column indices, regardless of
532: whether the internal representation is 0-based (default) or 1-based.
534: For better efficiency, set `cols` and/or `vals` to `NULL` if you do
535: not wish to extract these quantities.
537: The user can only examine the values extracted with `MatGetRow()`;
538: the values CANNOT be altered. To change the matrix entries, one
539: must use `MatSetValues()`.
541: You can only have one call to `MatGetRow()` outstanding for a particular
542: matrix at a time, per processor. `MatGetRow()` can only obtain rows
543: associated with the given processor, it cannot get rows from the
544: other processors; for that we suggest using `MatCreateSubMatrices()`, then
545: `MatGetRow()` on the submatrix. The row index passed to `MatGetRow()`
546: is in the global numbering of the rows of the matrix.
548: Use `MatGetRowIJ()` and `MatRestoreRowIJ()` to access all the local indices of the sparse matrix.
550: Use `MatSeqAIJGetArray()` and similar functions to access the numerical values for certain matrix types directly.
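Example Usage:
A minimal sketch of looping over the locally owned rows (assumes `mat` is assembled):
.vb
  PetscInt           row, rstart, rend, ncols;
  const PetscInt    *cols;
  const PetscScalar *vals;

  MatGetOwnershipRange(mat, &rstart, &rend);
  for (row = rstart; row < rend; row++) {
    MatGetRow(mat, row, &ncols, &cols, &vals);
    // examine cols[] and vals[] here; they may not be modified
    MatRestoreRow(mat, row, &ncols, &cols, &vals);
  }
.ve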
552: Fortran Note:
553: The calling sequence is
554: .vb
555: MatGetRow(matrix,row,ncols,cols,values,ierr)
556: Mat matrix (input)
557: PetscInt row (input)
558: PetscInt ncols (output)
559: PetscInt cols(maxcols) (output)
560: PetscScalar values(maxcols) (output)
561: .ve
562: where maxcols >= maximum nonzeros in any row of the matrix.
564: .seealso: [](ch_matrices), `Mat`, `MatRestoreRow()`, `MatSetValues()`, `MatGetValues()`, `MatCreateSubMatrices()`, `MatGetDiagonal()`, `MatGetRowIJ()`, `MatRestoreRowIJ()`
565: @*/
566: PetscErrorCode MatGetRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
567: {
568: PetscInt incols;
570: PetscFunctionBegin;
573: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
574: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
575: MatCheckPreallocated(mat, 1);
576: PetscCheck(row >= mat->rmap->rstart && row < mat->rmap->rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Only for local rows, %" PetscInt_FMT " not in [%" PetscInt_FMT ",%" PetscInt_FMT ")", row, mat->rmap->rstart, mat->rmap->rend);
577: PetscCall(PetscLogEventBegin(MAT_GetRow, mat, 0, 0, 0));
578: PetscUseTypeMethod(mat, getrow, row, &incols, (PetscInt **)cols, (PetscScalar **)vals);
579: if (ncols) *ncols = incols;
580: PetscCall(PetscLogEventEnd(MAT_GetRow, mat, 0, 0, 0));
581: PetscFunctionReturn(PETSC_SUCCESS);
582: }
584: /*@
585: MatConjugate - replaces the matrix values with their complex conjugates
587: Logically Collective
589: Input Parameter:
590: . mat - the matrix
592: Level: advanced
594: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`, `MatImaginaryPart()`, `VecConjugate()`, `MatTranspose()`
595: @*/
596: PetscErrorCode MatConjugate(Mat mat)
597: {
598: PetscFunctionBegin;
600: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
601: if (PetscDefined(USE_COMPLEX) && mat->hermitian != PETSC_BOOL3_TRUE) {
602: PetscUseTypeMethod(mat, conjugate);
603: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
604: }
605: PetscFunctionReturn(PETSC_SUCCESS);
606: }
608: /*@C
609: MatRestoreRow - Frees any temporary space allocated by `MatGetRow()`.
611: Not Collective
613: Input Parameters:
614: + mat - the matrix
615: . row - the row to get
616: . ncols - the number of nonzeros
617: . cols - the columns of the nonzeros
618: - vals - if not `NULL`, the values of the nonzeros
620: Level: advanced
622: Notes:
623: This routine should be called after you have finished examining the entries.
625: This routine zeros out `ncols`, `cols`, and `vals`. This is to prevent accidental
626: use of the array after it has been restored. If you pass `NULL`, it will
627: not zero the pointers. Use of `cols` or `vals` after `MatRestoreRow()` is invalid.
629: Fortran Note:
630: `MatRestoreRow()` MUST be called after `MatGetRow()`
631: before another call to `MatGetRow()` can be made.
633: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`
634: @*/
635: PetscErrorCode MatRestoreRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
636: {
637: PetscFunctionBegin;
639: if (ncols) PetscAssertPointer(ncols, 3);
640: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
641: if (!mat->ops->restorerow) PetscFunctionReturn(PETSC_SUCCESS);
642: PetscUseTypeMethod(mat, restorerow, row, ncols, (PetscInt **)cols, (PetscScalar **)vals);
643: if (ncols) *ncols = 0;
644: if (cols) *cols = NULL;
645: if (vals) *vals = NULL;
646: PetscFunctionReturn(PETSC_SUCCESS);
647: }
649: /*@
650: MatGetRowUpperTriangular - Sets a flag to enable calls to `MatGetRow()` for matrices in `MATSBAIJ` format.
651: You should call `MatRestoreRowUpperTriangular()` after calling `MatGetRow()` and `MatRestoreRow()` to disable the flag.
653: Not Collective
655: Input Parameter:
656: . mat - the matrix
658: Level: advanced
660: Note:
661: The flag is to ensure that users are aware that `MatGetRow()` only provides the upper triangular part of the row for the matrices in `MATSBAIJ` format.
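Example Usage:
A minimal sketch for a `MATSBAIJ` matrix (assumes `sbaij` is assembled; only the upper triangular part of each row is returned):
.vb
  PetscInt           row, rstart, rend, ncols;
  const PetscInt    *cols;
  const PetscScalar *vals;

  MatGetOwnershipRange(sbaij, &rstart, &rend);
  MatGetRowUpperTriangular(sbaij);
  for (row = rstart; row < rend; row++) {
    MatGetRow(sbaij, row, &ncols, &cols, &vals);
    // only the upper triangular entries of the row are provided
    MatRestoreRow(sbaij, row, &ncols, &cols, &vals);
  }
  MatRestoreRowUpperTriangular(sbaij);
.ve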
663: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatRestoreRowUpperTriangular()`
664: @*/
665: PetscErrorCode MatGetRowUpperTriangular(Mat mat)
666: {
667: PetscFunctionBegin;
670: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
671: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
672: MatCheckPreallocated(mat, 1);
673: if (!mat->ops->getrowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
674: PetscUseTypeMethod(mat, getrowuppertriangular);
675: PetscFunctionReturn(PETSC_SUCCESS);
676: }
678: /*@
679: MatRestoreRowUpperTriangular - Disables calls to `MatGetRow()` for matrices in `MATSBAIJ` format.
681: Not Collective
683: Input Parameter:
684: . mat - the matrix
686: Level: advanced
688: Note:
689: This routine should be called after you have finished calls to `MatGetRow()` and `MatRestoreRow()`.
691: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatGetRowUpperTriangular()`
692: @*/
693: PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
694: {
695: PetscFunctionBegin;
698: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
699: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
700: MatCheckPreallocated(mat, 1);
701: if (!mat->ops->restorerowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
702: PetscUseTypeMethod(mat, restorerowuppertriangular);
703: PetscFunctionReturn(PETSC_SUCCESS);
704: }
706: /*@
707: MatSetOptionsPrefix - Sets the prefix used for searching for all
708: `Mat` options in the database.
710: Logically Collective
712: Input Parameters:
713: + A - the matrix
714: - prefix - the prefix to prepend to all option names
716: Level: advanced
718: Notes:
719: A hyphen (-) must NOT be given at the beginning of the prefix name.
720: The first character of all runtime options is AUTOMATICALLY the hyphen.
722: This is NOT used for options for the factorization of the matrix. Normally the
723: prefix is automatically passed in from the `PC` calling the factorization. To set
724: it directly use `MatSetOptionsPrefixFactor()`.
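Example Usage:
A minimal sketch (the prefix name is arbitrary; assumes `A` was created with `MatCreate()`):
.vb
  MatSetOptionsPrefix(A, "sys1_");
  MatSetFromOptions(A);
.ve
Options such as -sys1_mat_type and -sys1_mat_view now apply to this matrix.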
726: .seealso: [](ch_matrices), `Mat`, `MatSetFromOptions()`, `MatSetOptionsPrefixFactor()`
727: @*/
728: PetscErrorCode MatSetOptionsPrefix(Mat A, const char prefix[])
729: {
730: PetscFunctionBegin;
732: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)A, prefix));
733: PetscFunctionReturn(PETSC_SUCCESS);
734: }
736: /*@
737: MatSetOptionsPrefixFactor - Sets the prefix used for searching for all matrix factor options in the database for
738: matrices created with `MatGetFactor()`
740: Logically Collective
742: Input Parameters:
743: + A - the matrix
744: - prefix - the prefix to prepend to all option names for the factored matrix
746: Level: developer
748: Notes:
749: A hyphen (-) must NOT be given at the beginning of the prefix name.
750: The first character of all runtime options is AUTOMATICALLY the hyphen.
752: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
753: it directly when not using `KSP`/`PC` use `MatSetOptionsPrefixFactor()`
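Example Usage:
A minimal sketch (the prefix name is arbitrary):
.vb
  MatSetOptionsPrefixFactor(A, "myfact_");
  // factored matrices of A created with MatGetFactor() now look up their options with the prefix "myfact_"
.ve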
755: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSetFromOptions()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`
756: @*/
757: PetscErrorCode MatSetOptionsPrefixFactor(Mat A, const char prefix[])
758: {
759: PetscFunctionBegin;
761: if (prefix) {
762: PetscAssertPointer(prefix, 2);
763: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
764: if (prefix != A->factorprefix) {
765: PetscCall(PetscFree(A->factorprefix));
766: PetscCall(PetscStrallocpy(prefix, &A->factorprefix));
767: }
768: } else PetscCall(PetscFree(A->factorprefix));
769: PetscFunctionReturn(PETSC_SUCCESS);
770: }
772: /*@
773: MatAppendOptionsPrefixFactor - Appends to the prefix used for searching for all matrix factor options in the database for
774: matrices created with `MatGetFactor()`
776: Logically Collective
778: Input Parameters:
779: + A - the matrix
780: - prefix - the prefix to prepend to all option names for the factored matrix
782: Level: developer
784: Notes:
785: A hyphen (-) must NOT be given at the beginning of the prefix name.
786: The first character of all runtime options is AUTOMATICALLY the hyphen.
788: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
789: it directly when not using `KSP`/`PC` use `MatAppendOptionsPrefixFactor()`
791: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `PetscOptionsCreate()`, `PetscOptionsDestroy()`, `PetscObjectSetOptionsPrefix()`, `PetscObjectPrependOptionsPrefix()`,
792: `PetscObjectGetOptionsPrefix()`, `TSAppendOptionsPrefix()`, `SNESAppendOptionsPrefix()`, `KSPAppendOptionsPrefix()`, `MatSetOptionsPrefixFactor()`,
793: `MatSetOptionsPrefix()`
794: @*/
795: PetscErrorCode MatAppendOptionsPrefixFactor(Mat A, const char prefix[])
796: {
797: size_t len1, len2, new_len;
799: PetscFunctionBegin;
801: if (!prefix) PetscFunctionReturn(PETSC_SUCCESS);
802: if (!A->factorprefix) {
803: PetscCall(MatSetOptionsPrefixFactor(A, prefix));
804: PetscFunctionReturn(PETSC_SUCCESS);
805: }
806: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
808: PetscCall(PetscStrlen(A->factorprefix, &len1));
809: PetscCall(PetscStrlen(prefix, &len2));
810: new_len = len1 + len2 + 1;
811: PetscCall(PetscRealloc(new_len * sizeof(*A->factorprefix), &A->factorprefix));
812: PetscCall(PetscStrncpy(A->factorprefix + len1, prefix, len2 + 1));
813: PetscFunctionReturn(PETSC_SUCCESS);
814: }
816: /*@
817: MatAppendOptionsPrefix - Appends to the prefix used for searching for all
818: matrix options in the database.
820: Logically Collective
822: Input Parameters:
823: + A - the matrix
824: - prefix - the prefix to prepend to all option names
826: Level: advanced
828: Note:
829: A hyphen (-) must NOT be given at the beginning of the prefix name.
830: The first character of all runtime options is AUTOMATICALLY the hyphen.
832: .seealso: [](ch_matrices), `Mat`, `MatGetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefix()`
833: @*/
834: PetscErrorCode MatAppendOptionsPrefix(Mat A, const char prefix[])
835: {
836: PetscFunctionBegin;
838: PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)A, prefix));
839: PetscFunctionReturn(PETSC_SUCCESS);
840: }
842: /*@
843: MatGetOptionsPrefix - Gets the prefix used for searching for all
844: matrix options in the database.
846: Not Collective
848: Input Parameter:
849: . A - the matrix
851: Output Parameter:
852: . prefix - pointer to the prefix string used
854: Level: advanced
856: Fortran Note:
857: The user should pass in a string `prefix` of
858: sufficient length to hold the prefix.
860: .seealso: [](ch_matrices), `Mat`, `MatAppendOptionsPrefix()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefixFactor()`
861: @*/
862: PetscErrorCode MatGetOptionsPrefix(Mat A, const char *prefix[])
863: {
864: PetscFunctionBegin;
866: PetscAssertPointer(prefix, 2);
867: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)A, prefix));
868: PetscFunctionReturn(PETSC_SUCCESS);
869: }
871: /*@
872: MatGetState - Gets the state of a `Mat`. Same value as returned by `PetscObjectStateGet()`
874: Not Collective
876: Input Parameter:
877: . A - the matrix
879: Output Parameter:
880: . state - the object state
882: Level: advanced
884: Note:
885: Object state is an integer which gets increased every time
886: the object is changed. By saving and later querying the object state
887: one can determine whether information about the object is still current.
889: See `MatGetNonzeroState()` to determine if the nonzero structure of the matrix has changed.
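Example Usage:
A minimal sketch of detecting whether a matrix was modified between two points in a code:
.vb
  PetscObjectState s0, s1;

  MatGetState(A, &s0);
  // ... code that may modify A ...
  MatGetState(A, &s1);
  if (s1 != s0) {
    // A was changed
  }
.ve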
891: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PetscObjectStateGet()`, `MatGetNonzeroState()`
892: @*/
893: PetscErrorCode MatGetState(Mat A, PetscObjectState *state)
894: {
895: PetscFunctionBegin;
897: PetscAssertPointer(state, 2);
898: PetscCall(PetscObjectStateGet((PetscObject)A, state));
899: PetscFunctionReturn(PETSC_SUCCESS);
900: }
902: /*@
903: MatResetPreallocation - Resets the matrix to use the original preallocation values provided by the user, for example with `MatXAIJSetPreallocation()`
905: Collective
907: Input Parameter:
908: . A - the matrix
910: Level: beginner
912: Notes:
913: After calling `MatAssemblyBegin()` and `MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`, the matrix data structures represent the nonzeros assigned to the
914: matrix. If that space is less than the preallocated space, the extra preallocated space is no longer available to take on new values. `MatResetPreallocation()`
915: makes all of the preallocated space available again.
917: Current values in the matrix are lost in this call.
919: Currently only supported for `MATAIJ` matrices.
921: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatXAIJSetPreallocation()`
922: @*/
923: PetscErrorCode MatResetPreallocation(Mat A)
924: {
925: PetscFunctionBegin;
928: PetscUseMethod(A, "MatResetPreallocation_C", (Mat), (A));
929: PetscFunctionReturn(PETSC_SUCCESS);
930: }
932: /*@
933: MatSetUp - Sets up the internal matrix data structures for later use by the matrix
935: Collective
937: Input Parameter:
938: . A - the matrix
940: Level: advanced
942: Notes:
943: If the user has not set preallocation for this matrix then an efficient algorithm will be used for the first round of
944: setting values in the matrix.
946: This routine is called internally by other `Mat` functions when needed, so it rarely needs to be called by users.
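Example Usage:
A minimal sketch of creating a matrix without explicit preallocation (sizes are arbitrary):
.vb
  Mat A;

  MatCreate(PETSC_COMM_WORLD, &A);
  MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 100, 100);
  MatSetFromOptions(A);
  MatSetUp(A);
  // MatSetValues() may now be called
.ve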
948: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatCreate()`, `MatDestroy()`, `MatXAIJSetPreallocation()`
949: @*/
950: PetscErrorCode MatSetUp(Mat A)
951: {
952: PetscFunctionBegin;
954: if (!((PetscObject)A)->type_name) {
955: PetscMPIInt size;
957: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
958: PetscCall(MatSetType(A, size == 1 ? MATSEQAIJ : MATMPIAIJ));
959: }
960: if (!A->preallocated) PetscTryTypeMethod(A, setup);
961: PetscCall(PetscLayoutSetUp(A->rmap));
962: PetscCall(PetscLayoutSetUp(A->cmap));
963: A->preallocated = PETSC_TRUE;
964: PetscFunctionReturn(PETSC_SUCCESS);
965: }
967: #if defined(PETSC_HAVE_SAWS)
968: #include <petscviewersaws.h>
969: #endif
971: /*
972: If thread safety is on, extraneous matrices may be printed.
974: This flag cannot be stored in the matrix because the original matrix in MatView() may assemble a new matrix, which is passed into MatViewFromOptions().
975: */
976: #if !defined(PETSC_HAVE_THREADSAFETY)
977: static PetscInt insidematview = 0;
978: #endif
980: /*@
981: MatViewFromOptions - Views properties of the matrix based on options set in the options database
983: Collective
985: Input Parameters:
986: + A - the matrix
987: . obj - optional additional object that provides the options prefix to use
988: - name - command line option
990: Options Database Key:
991: . -mat_view [viewertype]:... - the viewer and its options
993: Level: intermediate
995: Note:
996: .vb
997: If no value is provided ascii:stdout is used
998: ascii[:[filename][:[format][:append]]] defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
999: for example ascii::ascii_info prints just the information about the object not all details
1000: unless :append is given filename opens in write mode, overwriting what was already there
1001: binary[:[filename][:[format][:append]]] defaults to the file binaryoutput
1002: draw[:drawtype[:filename]] for example, draw:tikz, draw:tikz:figure.tex or draw:x
1003: socket[:port] defaults to the standard output port
1004: saws[:communicatorname] publishes object to the Scientific Application Webserver (SAWs)
1005: .ve
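Example Usage:
A minimal sketch (the option name is arbitrary; assumes `A` has no options prefix):
.vb
  MatViewFromOptions(A, NULL, "-my_mat_view");
.ve
Running the program with -my_mat_view ::ascii_info then prints basic information about the matrix.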
1007: .seealso: [](ch_matrices), `Mat`, `MatView()`, `PetscObjectViewFromOptions()`, `MatCreate()`
1008: @*/
1009: PetscErrorCode MatViewFromOptions(Mat A, PetscObject obj, const char name[])
1010: {
1011: PetscFunctionBegin;
1013: #if !defined(PETSC_HAVE_THREADSAFETY)
1014: if (insidematview) PetscFunctionReturn(PETSC_SUCCESS);
1015: #endif
1016: PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
1017: PetscFunctionReturn(PETSC_SUCCESS);
1018: }
1020: /*@
1021: MatView - displays information about a matrix in a variety of ways
1023: Collective on viewer
1025: Input Parameters:
1026: + mat - the matrix
1027: - viewer - visualization context
1029: Options Database Keys:
1030: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
1031: . -mat_view ::ascii_info_detail - Prints more detailed info
1032: . -mat_view - Prints matrix in ASCII format
1033: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
1034: . -mat_view draw - PetscDraws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
1035: . -display <name> - Sets display name (default is host)
1036: . -draw_pause <sec> - Sets number of seconds to pause after display
1037: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (see Users-Manual: ch_matlab for details)
1038: . -viewer_socket_machine <machine> - -
1039: . -viewer_socket_port <port> - -
1040: . -mat_view binary - save matrix to file in binary format
1041: - -viewer_binary_filename <name> - -
1043: Level: beginner
1045: Notes:
1046: The available visualization contexts include
1047: + `PETSC_VIEWER_STDOUT_SELF` - for sequential matrices
1048: . `PETSC_VIEWER_STDOUT_WORLD` - for parallel matrices created on `PETSC_COMM_WORLD`
1049: . `PETSC_VIEWER_STDOUT_`(comm) - for matrices created on MPI communicator comm
1050: - `PETSC_VIEWER_DRAW_WORLD` - graphical display of nonzero structure
1052: The user can open alternative visualization contexts with
1053: + `PetscViewerASCIIOpen()` - Outputs matrix to a specified file
1054: . `PetscViewerBinaryOpen()` - Outputs matrix in binary to a specified file; corresponding input uses `MatLoad()`
1055: . `PetscViewerDrawOpen()` - Outputs nonzero matrix nonzero structure to an X window display
1056: - `PetscViewerSocketOpen()` - Outputs matrix to Socket viewer, `PETSCVIEWERSOCKET`. Only the `MATSEQDENSE` and `MATAIJ` types support this viewer.
1058: The user can call `PetscViewerPushFormat()` to specify the output
1059: format of ASCII printed objects (when using `PETSC_VIEWER_STDOUT_SELF`,
1060: `PETSC_VIEWER_STDOUT_WORLD` and `PetscViewerASCIIOpen()`). Available formats include
1061: + `PETSC_VIEWER_DEFAULT` - default, prints matrix contents
1062: . `PETSC_VIEWER_ASCII_MATLAB` - prints matrix contents in MATLAB format
1063: . `PETSC_VIEWER_ASCII_DENSE` - prints entire matrix including zeros
1064: . `PETSC_VIEWER_ASCII_COMMON` - prints matrix contents, using a sparse format common among all matrix types
1065: . `PETSC_VIEWER_ASCII_IMPL` - prints matrix contents, using an implementation-specific format (which is in many cases the same as the default)
1066: . `PETSC_VIEWER_ASCII_INFO` - prints basic information about the matrix size and structure (not the matrix entries)
1067: - `PETSC_VIEWER_ASCII_INFO_DETAIL` - prints more detailed information about the matrix nonzero structure (still not vector or matrix entries)
1069: The ASCII viewers are only recommended for small matrices on at most a moderate number of processes;
1070: for larger matrices the program will seemingly hang and take hours, so one should use the binary format instead.
1072: In the debugger you can do "call MatView(mat,0)" to display the matrix. (The same holds for any PETSc object viewer).
1074: See the manual page for `MatLoad()` for the exact format of the binary file when the binary
1075: viewer is used.
1077: See share/petsc/matlab/PetscBinaryRead.m for a MATLAB code that can read in the binary file when the binary
1078: viewer is used and lib/petsc/bin/PetscBinaryIO.py for loading them into Python.
1080: One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
1081: and then use the following mouse functions.
1082: .vb
1083: left mouse: zoom in
1084: middle mouse: zoom out
1085: right mouse: continue with the simulation
1086: .ve
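Example Usage:
A minimal sketch of writing an assembled matrix to an ASCII file in MATLAB format (the file name is arbitrary):
.vb
  PetscViewer viewer;

  PetscViewerASCIIOpen(PETSC_COMM_WORLD, "A.m", &viewer);
  PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);
  MatView(A, viewer);
  PetscViewerPopFormat(viewer);
  PetscViewerDestroy(&viewer);
.ve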
1088: .seealso: [](ch_matrices), `Mat`, `PetscViewerPushFormat()`, `PetscViewerASCIIOpen()`, `PetscViewerDrawOpen()`, `PetscViewer`,
1089: `PetscViewerSocketOpen()`, `PetscViewerBinaryOpen()`, `MatLoad()`, `MatViewFromOptions()`
1090: @*/
1091: PetscErrorCode MatView(Mat mat, PetscViewer viewer)
1092: {
1093: PetscInt rows, cols, rbs, cbs;
1094: PetscBool isascii, isstring, issaws;
1095: PetscViewerFormat format;
1096: PetscMPIInt size;
1098: PetscFunctionBegin;
1101: if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat), &viewer));
1104: PetscCall(PetscViewerGetFormat(viewer, &format));
1105: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
1106: if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(PETSC_SUCCESS);
1108: #if !defined(PETSC_HAVE_THREADSAFETY)
1109: insidematview++;
1110: #endif
1111: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
1112: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1113: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
1114: PetscCheck((isascii && (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) || !mat->factortype, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONGSTATE, "No viewers for factored matrix except ASCII, info, or info_detail");
1116: PetscCall(PetscLogEventBegin(MAT_View, mat, viewer, 0, 0));
1117: if (isascii) {
1118: if (!mat->preallocated) {
1119: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been preallocated yet\n"));
1120: #if !defined(PETSC_HAVE_THREADSAFETY)
1121: insidematview--;
1122: #endif
1123: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1124: PetscFunctionReturn(PETSC_SUCCESS);
1125: }
1126: if (!mat->assembled) {
1127: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been assembled yet\n"));
1128: #if !defined(PETSC_HAVE_THREADSAFETY)
1129: insidematview--;
1130: #endif
1131: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1132: PetscFunctionReturn(PETSC_SUCCESS);
1133: }
1134: PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mat, viewer));
1135: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1136: MatNullSpace nullsp, transnullsp;
1138: PetscCall(PetscViewerASCIIPushTab(viewer));
1139: PetscCall(MatGetSize(mat, &rows, &cols));
1140: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
1141: if (rbs != 1 || cbs != 1) {
1142: if (rbs != cbs) PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", rbs=%" PetscInt_FMT ", cbs=%" PetscInt_FMT "%s\n", rows, cols, rbs, cbs, mat->bsizes ? " variable blocks set" : ""));
1143: else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", bs=%" PetscInt_FMT "%s\n", rows, cols, rbs, mat->bsizes ? " variable blocks set" : ""));
1144: } else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", rows, cols));
1145: if (mat->factortype) {
1146: MatSolverType solver;
1147: PetscCall(MatFactorGetSolverType(mat, &solver));
1148: PetscCall(PetscViewerASCIIPrintf(viewer, "package used to perform factorization: %s\n", solver));
1149: }
1150: if (mat->ops->getinfo) {
1151: MatInfo info;
1152: PetscCall(MatGetInfo(mat, MAT_GLOBAL_SUM, &info));
1153: PetscCall(PetscViewerASCIIPrintf(viewer, "total: nonzeros=%.f, allocated nonzeros=%.f\n", info.nz_used, info.nz_allocated));
1154: if (!mat->factortype) PetscCall(PetscViewerASCIIPrintf(viewer, "total number of mallocs used during MatSetValues calls=%" PetscInt_FMT "\n", (PetscInt)info.mallocs));
1155: }
1156: PetscCall(MatGetNullSpace(mat, &nullsp));
1157: PetscCall(MatGetTransposeNullSpace(mat, &transnullsp));
1158: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached null space\n"));
1159: if (transnullsp && transnullsp != nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached transposed null space\n"));
1160: PetscCall(MatGetNearNullSpace(mat, &nullsp));
1161: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached near null space\n"));
1162: PetscCall(PetscViewerASCIIPushTab(viewer));
1163: PetscCall(MatProductView(mat, viewer));
1164: PetscCall(PetscViewerASCIIPopTab(viewer));
1165: if (mat->bsizes && format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1166: IS tmp;
1168: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)viewer), mat->nblocks, mat->bsizes, PETSC_USE_POINTER, &tmp));
1169: PetscCall(PetscObjectSetName((PetscObject)tmp, "Block Sizes"));
1170: PetscCall(PetscViewerASCIIPushTab(viewer));
1171: PetscCall(ISView(tmp, viewer));
1172: PetscCall(PetscViewerASCIIPopTab(viewer));
1173: PetscCall(ISDestroy(&tmp));
1174: }
1175: }
1176: } else if (issaws) {
1177: #if defined(PETSC_HAVE_SAWS)
1178: PetscMPIInt rank;
1180: PetscCall(PetscObjectName((PetscObject)mat));
1181: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
1182: if (!((PetscObject)mat)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)mat, viewer));
1183: #endif
1184: } else if (isstring) {
1185: const char *type;
1186: PetscCall(MatGetType(mat, &type));
1187: PetscCall(PetscViewerStringSPrintf(viewer, " MatType: %-7.7s", type));
1188: PetscTryTypeMethod(mat, view, viewer);
1189: }
1190: if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
1191: PetscCall(PetscViewerASCIIPushTab(viewer));
1192: PetscUseTypeMethod(mat, viewnative, viewer);
1193: PetscCall(PetscViewerASCIIPopTab(viewer));
1194: } else if (mat->ops->view) {
1195: PetscCall(PetscViewerASCIIPushTab(viewer));
1196: PetscUseTypeMethod(mat, view, viewer);
1197: PetscCall(PetscViewerASCIIPopTab(viewer));
1198: }
1199: if (isascii) {
1200: PetscCall(PetscViewerGetFormat(viewer, &format));
1201: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscCall(PetscViewerASCIIPopTab(viewer));
1202: }
1203: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1204: #if !defined(PETSC_HAVE_THREADSAFETY)
1205: insidematview--;
1206: #endif
1207: PetscFunctionReturn(PETSC_SUCCESS);
1208: }
1210: #if defined(PETSC_USE_DEBUG)
1211: #include <../src/sys/totalview/tv_data_display.h>
1212: PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
1213: {
1214: TV_add_row("Local rows", "int", &mat->rmap->n);
1215: TV_add_row("Local columns", "int", &mat->cmap->n);
1216: TV_add_row("Global rows", "int", &mat->rmap->N);
1217: TV_add_row("Global columns", "int", &mat->cmap->N);
1218: TV_add_row("Typename", TV_ascii_string_type, ((PetscObject)mat)->type_name);
1219: return TV_format_OK;
1220: }
1221: #endif
1223: /*@
1224: MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1225: with `MatView()`. The matrix format is determined from the options database.
1226: Generates a parallel MPI matrix if the communicator has more than one
1227: processor. The default matrix type is `MATAIJ`.
1229: Collective
1231: Input Parameters:
1232: + mat - the newly loaded matrix, this needs to have been created with `MatCreate()`
1233: or some related function before a call to `MatLoad()`
1234: - viewer - `PETSCVIEWERBINARY`/`PETSCVIEWERHDF5` file viewer
1236: Options Database Key:
1237: . -matload_block_size <bs> - set block size
1239: Level: beginner
1241: Notes:
1242: If the `Mat` type has not yet been given then `MATAIJ` is used, call `MatSetFromOptions()` on the
1243: `Mat` before calling this routine if you wish to set it from the options database.
1245: `MatLoad()` automatically loads into the options database any options
1246: given in the file filename.info where filename is the name of the file
1247: that was passed to the `PetscViewerBinaryOpen()`. The options in the info
1248: file will be ignored if you use the -viewer_binary_skip_info option.
1250: If the type or size of mat is not set before a call to `MatLoad()`, PETSc
1251: sets the default matrix type AIJ and sets the local and global sizes.
1252: If type and/or size is already set, then the same are used.
1254: In parallel, each processor can load a subset of rows (or the
1255: entire matrix). This routine is especially useful when a large
1256: matrix is stored on disk and only part of it is desired on each
1257: processor. For example, a parallel solver may access only some of
1258: the rows from each processor. The algorithm used here reads
1259: relatively small blocks of data rather than reading the entire
1260: matrix and then subsetting it.
1262: Viewer's `PetscViewerType` must be either `PETSCVIEWERBINARY` or `PETSCVIEWERHDF5`.
1263: Such viewer can be created using `PetscViewerBinaryOpen()` or `PetscViewerHDF5Open()`,
1264: or the sequence like
1265: .vb
1266: `PetscViewer` v;
1267: `PetscViewerCreate`(`PETSC_COMM_WORLD`,&v);
1268: `PetscViewerSetType`(v,`PETSCVIEWERBINARY`);
1269: `PetscViewerSetFromOptions`(v);
1270: `PetscViewerFileSetMode`(v,`FILE_MODE_READ`);
1271: `PetscViewerFileSetName`(v,"datafile");
1272: .ve
1273: The optional `PetscViewerSetFromOptions()` call allows overriding `PetscViewerSetType()` using the option
1274: $ -viewer_type {binary, hdf5}
1276: See the example src/ksp/ksp/tutorials/ex27.c with the first approach,
1277: and src/mat/tutorials/ex10.c with the second approach.
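Example Usage:
A minimal sketch of loading a matrix from a PETSc binary file (the file name is arbitrary):
.vb
  Mat         A;
  PetscViewer v;

  PetscViewerBinaryOpen(PETSC_COMM_WORLD, "datafile", FILE_MODE_READ, &v);
  MatCreate(PETSC_COMM_WORLD, &A);
  MatSetFromOptions(A);
  MatLoad(A, v);
  PetscViewerDestroy(&v);
.ve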
1279: In case of `PETSCVIEWERBINARY`, a native PETSc binary format is used. Each of the blocks
1280: is read onto MPI rank 0 and then shipped to its destination MPI rank, one after another.
1281: Multiple objects, both matrices and vectors, can be stored within the same file.
1282: Their `PetscObject` name is ignored; they are loaded in the order of their storage.
1284: Most users should not need to know the details of the binary storage
1285: format, since `MatLoad()` and `MatView()` completely hide these details.
1286: But for anyone who is interested, the standard binary matrix storage
1287: format is
1289: .vb
1290: PetscInt MAT_FILE_CLASSID
1291: PetscInt number of rows
1292: PetscInt number of columns
1293: PetscInt total number of nonzeros
1294: PetscInt *number nonzeros in each row
1295: PetscInt *column indices of all nonzeros (starting index is zero)
1296: PetscScalar *values of all nonzeros
1297: .ve
1298: If PETSc was not configured with `--with-64-bit-indices` then only `MATMPIAIJ` matrices with more than `PETSC_INT_MAX` non-zeros can be
1299: stored or loaded (each MPI process part of the matrix must have less than `PETSC_INT_MAX` nonzeros). Since the total nonzero count in this
1300: case will not fit in a (32-bit) `PetscInt` the value `PETSC_INT_MAX` is used for the header entry `total number of nonzeros`.
1302: PETSc automatically does the byte swapping for
1303: machines that store the bytes reversed. Thus if you write your own binary
1304: read/write routines you have to swap the bytes; see `PetscBinaryRead()`
1305: and `PetscBinaryWrite()` to see how this may be done.
1307: In case of `PETSCVIEWERHDF5`, a parallel HDF5 reader is used.
1308: Each processor's chunk is loaded independently by its owning MPI process.
1309: Multiple objects, both matrices and vectors, can be stored within the same file.
1310: They are looked up by their PetscObject name.
1312: As the MATLAB MAT-File Version 7.3 format is also an HDF5 flavor, we decided to use
1313: by default the same structure and naming of the AIJ arrays and column count
1314: within the HDF5 file. This means that a MAT file saved with -v7.3 flag, e.g.
1315: $ save example.mat A b -v7.3
1316: can be directly read by this routine (see Reference 1 for details).
1318: Depending on your MATLAB version, this format might be a default,
1319: otherwise you can set it as default in Preferences.
1321: Unless the -nocompression flag is used to save the file in MATLAB,
1322: PETSc must be configured with the ZLIB package.
1324: See also examples src/mat/tutorials/ex10.c and src/ksp/ksp/tutorials/ex27.c
1326: This reader currently supports only real `MATSEQAIJ`, `MATMPIAIJ`, `MATSEQDENSE` and `MATMPIDENSE` matrices for `PETSCVIEWERHDF5`
1328: Corresponding `MatView()` is not yet implemented.
1330: The loaded matrix is actually a transpose of the original one in MATLAB,
1331: unless you push `PETSC_VIEWER_HDF5_MAT` format (see examples above).
1332: With this format, the matrix is automatically transposed by PETSc,
1333: unless the matrix is marked as SPD or symmetric
1334: (see `MatSetOption()`, `MAT_SPD`, `MAT_SYMMETRIC`).
1336: See MATLAB Documentation on `save()`, <https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version>
1338: .seealso: [](ch_matrices), `Mat`, `PetscViewerBinaryOpen()`, `PetscViewerSetType()`, `MatView()`, `VecLoad()`
1339: @*/
1340: PetscErrorCode MatLoad(Mat mat, PetscViewer viewer)
1341: {
1342: PetscBool flg;
1344: PetscFunctionBegin;
1348: if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ));
1350: flg = PETSC_FALSE;
1351: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_symmetric", &flg, NULL));
1352: if (flg) {
1353: PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));
1354: PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
1355: }
1356: flg = PETSC_FALSE;
1357: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_spd", &flg, NULL));
1358: if (flg) PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));
1360: PetscCall(PetscLogEventBegin(MAT_Load, mat, viewer, 0, 0));
1361: PetscUseTypeMethod(mat, load, viewer);
1362: PetscCall(PetscLogEventEnd(MAT_Load, mat, viewer, 0, 0));
1363: PetscFunctionReturn(PETSC_SUCCESS);
1364: }
1366: static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1367: {
1368: Mat_Redundant *redund = *redundant;
1370: PetscFunctionBegin;
1371: if (redund) {
1372: if (redund->matseq) { /* via MatCreateSubMatrices() */
1373: PetscCall(ISDestroy(&redund->isrow));
1374: PetscCall(ISDestroy(&redund->iscol));
1375: PetscCall(MatDestroySubMatrices(1, &redund->matseq));
1376: } else {
1377: PetscCall(PetscFree2(redund->send_rank, redund->recv_rank));
1378: PetscCall(PetscFree(redund->sbuf_j));
1379: PetscCall(PetscFree(redund->sbuf_a));
1380: for (PetscInt i = 0; i < redund->nrecvs; i++) {
1381: PetscCall(PetscFree(redund->rbuf_j[i]));
1382: PetscCall(PetscFree(redund->rbuf_a[i]));
1383: }
1384: PetscCall(PetscFree4(redund->sbuf_nz, redund->rbuf_nz, redund->rbuf_j, redund->rbuf_a));
1385: }
1387: if (redund->subcomm) PetscCall(PetscCommDestroy(&redund->subcomm));
1388: PetscCall(PetscFree(redund));
1389: }
1390: PetscFunctionReturn(PETSC_SUCCESS);
1391: }
1393: /*@
1394: MatDestroy - Frees space taken by a matrix.
1396: Collective
1398: Input Parameter:
1399: . A - the matrix
1401: Level: beginner
1403: Developer Note:
1404: Some special arrays of matrices are not destroyed in this routine but instead by the routines called by
1405: `MatDestroySubMatrices()`. Thus one must be sure that any changes here must also be made in those routines.
1406: `MatHeaderMerge()` and `MatHeaderReplace()` also manipulate the data in the `Mat` object and likely need changes
1407: if changes are needed here.
1409: .seealso: [](ch_matrices), `Mat`, `MatCreate()`
1410: @*/
1411: PetscErrorCode MatDestroy(Mat *A)
1412: {
1413: PetscFunctionBegin;
1414: if (!*A) PetscFunctionReturn(PETSC_SUCCESS);
1416: if (--((PetscObject)*A)->refct > 0) {
1417: *A = NULL;
1418: PetscFunctionReturn(PETSC_SUCCESS);
1419: }
1421: /* if memory was published with SAWs then destroy it */
1422: PetscCall(PetscObjectSAWsViewOff((PetscObject)*A));
1423: PetscTryTypeMethod(*A, destroy);
1425: PetscCall(PetscFree((*A)->factorprefix));
1426: PetscCall(PetscFree((*A)->defaultvectype));
1427: PetscCall(PetscFree((*A)->defaultrandtype));
1428: PetscCall(PetscFree((*A)->bsizes));
1429: PetscCall(PetscFree((*A)->solvertype));
1430: for (PetscInt i = 0; i < MAT_FACTOR_NUM_TYPES; i++) PetscCall(PetscFree((*A)->preferredordering[i]));
1431: if ((*A)->redundant && (*A)->redundant->matseq[0] == *A) (*A)->redundant->matseq[0] = NULL;
1432: PetscCall(MatDestroy_Redundant(&(*A)->redundant));
1433: PetscCall(MatProductClear(*A));
1434: PetscCall(MatNullSpaceDestroy(&(*A)->nullsp));
1435: PetscCall(MatNullSpaceDestroy(&(*A)->transnullsp));
1436: PetscCall(MatNullSpaceDestroy(&(*A)->nearnullsp));
1437: PetscCall(MatDestroy(&(*A)->schur));
1438: PetscCall(PetscLayoutDestroy(&(*A)->rmap));
1439: PetscCall(PetscLayoutDestroy(&(*A)->cmap));
1440: PetscCall(PetscHeaderDestroy(A));
1441: PetscFunctionReturn(PETSC_SUCCESS);
1442: }
1444: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1445: /*@
1446: MatSetValues - Inserts or adds a block of values into a matrix.
1447: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1448: MUST be called after all calls to `MatSetValues()` have been completed.
1450: Not Collective
1452: Input Parameters:
1453: + mat - the matrix
1454: . v - a logically two-dimensional array of values
1455: . m - the number of rows
1456: . idxm - the global indices of the rows
1457: . n - the number of columns
1458: . idxn - the global indices of the columns
1459: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1461: Level: beginner
1463: Notes:
1464: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1466: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1467: options cannot be mixed without intervening calls to the assembly
1468: routines.
1470: `MatSetValues()` uses 0-based row and column numbers in Fortran
1471: as well as in C.
1473: Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1474: simply ignored. This allows easily inserting element stiffness matrices
1475: with homogeneous Dirichlet boundary conditions that you don't want represented
1476: in the matrix.
1478: Efficiency Alert:
1479: The routine `MatSetValuesBlocked()` may offer much better efficiency
1480: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1482: Fortran Notes:
1483: If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
1484: .vb
1485: MatSetValues(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES)
1486: .ve
1488: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
1490: Developer Note:
1491: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1492: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1494: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1495: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1496: @*/
1497: PetscErrorCode MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1498: {
1499: PetscFunctionBeginHot;
1502: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1503: PetscAssertPointer(idxm, 3);
1504: PetscAssertPointer(idxn, 5);
1505: MatCheckPreallocated(mat, 1);
1507: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
1508: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
1510: if (PetscDefined(USE_DEBUG)) {
1511: PetscInt i, j;
1513: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1514: if (v) {
1515: for (i = 0; i < m; i++) {
1516: for (j = 0; j < n; j++) {
1517: if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i * n + j]))
1518: #if defined(PETSC_USE_COMPLEX)
1519: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g+i%g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)PetscRealPart(v[i * n + j]), (double)PetscImaginaryPart(v[i * n + j]), idxm[i], idxn[j]);
1520: #else
1521: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)v[i * n + j], idxm[i], idxn[j]);
1522: #endif
1523: }
1524: }
1525: }
1526: for (i = 0; i < m; i++) PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in row %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
1527: for (i = 0; i < n; i++) PetscCheck(idxn[i] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in column %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxn[i], mat->cmap->N - 1);
1528: }
1530: if (mat->assembled) {
1531: mat->was_assembled = PETSC_TRUE;
1532: mat->assembled = PETSC_FALSE;
1533: }
1534: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1535: PetscUseTypeMethod(mat, setvalues, m, idxm, n, idxn, v, addv);
1536: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1537: PetscFunctionReturn(PETSC_SUCCESS);
1538: }
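/*
  A minimal usage sketch for MatSetValues() (names such as `A` are illustrative, not part of this
  file): add a 2x2 element "stiffness" block with row-oriented values, then assemble. It assumes
  `A` has already been created and preallocated (for example with MatCreate(), MatSetSizes(),
  MatSetFromOptions(), and MatSetUp()).

    PetscInt    rows[2] = {0, 1}, cols[2] = {0, 1};
    PetscScalar v[4]    = {4.0, -1.0, -1.0, 4.0}; // row-oriented: v[i*2 + j] goes to (rows[i], cols[j])

    PetscCall(MatSetValues(A, 2, rows, 2, cols, v, ADD_VALUES));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
*/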
1540: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1541: /*@
1542: MatSetValuesIS - Inserts or adds a block of values into a matrix using an `IS` to indicate the rows and columns
1543: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1544: MUST be called after all calls to `MatSetValues()` have been completed.
1546: Not Collective
1548: Input Parameters:
1549: + mat - the matrix
1550: . v - a logically two-dimensional array of values
1551: . ism - the rows to provide
1552: . isn - the columns to provide
1553: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1555: Level: beginner
1557: Notes:
1558: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1560: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1561: options cannot be mixed without intervening calls to the assembly
1562: routines.
1564: `MatSetValues()` uses 0-based row and column numbers in Fortran
1565: as well as in C.
1567: Negative indices may be passed in `ism` and `isn`; these rows and columns are
1568: simply ignored. This allows easily inserting element stiffness matrices
1569: with homogeneous Dirichlet boundary conditions that you don't want represented
1570: in the matrix.
1572: Efficiency Alert:
1573: The routine `MatSetValuesBlocked()` may offer much better efficiency
1574: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1576: This is currently not optimized for any particular `ISType`
1578: Developer Note:
1579: This is labeled with C so does not automatically generate Fortran stubs and interfaces
1580: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1582: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatSetValues()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1583: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1584: @*/
1585: PetscErrorCode MatSetValuesIS(Mat mat, IS ism, IS isn, const PetscScalar v[], InsertMode addv)
1586: {
1587: PetscInt m, n;
1588: const PetscInt *rows, *cols;
1590: PetscFunctionBeginHot;
1592: PetscCall(ISGetIndices(ism, &rows));
1593: PetscCall(ISGetIndices(isn, &cols));
1594: PetscCall(ISGetLocalSize(ism, &m));
1595: PetscCall(ISGetLocalSize(isn, &n));
1596: PetscCall(MatSetValues(mat, m, rows, n, cols, v, addv));
1597: PetscCall(ISRestoreIndices(ism, &rows));
1598: PetscCall(ISRestoreIndices(isn, &cols));
1599: PetscFunctionReturn(PETSC_SUCCESS);
1600: }
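/*
  A usage sketch for MatSetValuesIS() under the same assumptions as the MatSetValues() sketch above
  (`A` is an illustrative, preallocated matrix): the row and column indices are supplied as index
  sets instead of raw integer arrays, and the values remain row-oriented.

    IS          ism, isn;
    PetscInt    rows[2] = {0, 1}, cols[2] = {0, 1};
    PetscScalar v[4]    = {1.0, 2.0, 3.0, 4.0};

    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, rows, PETSC_COPY_VALUES, &ism));
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, cols, PETSC_COPY_VALUES, &isn));
    PetscCall(MatSetValuesIS(A, ism, isn, v, INSERT_VALUES));
    PetscCall(ISDestroy(&ism));
    PetscCall(ISDestroy(&isn));
*/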
1602: /*@
1603: MatSetValuesRowLocal - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1604: values into a matrix
1606: Not Collective
1608: Input Parameters:
1609: + mat - the matrix
1610: . row - the (block) row to set
1611: - v - a logically two-dimensional array of values
1613: Level: intermediate
1615: Notes:
1616: The values, `v`, are column-oriented (for the block version) and sorted
1618: All the nonzero values in `row` must be provided
1620: The matrix must have previously had its column indices set, likely by having been assembled.
1622: `row` must belong to this MPI process
1624: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1625: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetValuesRow()`, `MatSetLocalToGlobalMapping()`
1626: @*/
1627: PetscErrorCode MatSetValuesRowLocal(Mat mat, PetscInt row, const PetscScalar v[])
1628: {
1629: PetscInt globalrow;
1631: PetscFunctionBegin;
1634: PetscAssertPointer(v, 3);
1635: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, 1, &row, &globalrow));
1636: PetscCall(MatSetValuesRow(mat, globalrow, v));
1637: PetscFunctionReturn(PETSC_SUCCESS);
1638: }
1640: /*@
1641: MatSetValuesRow - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1642: values into a matrix
1644: Not Collective
1646: Input Parameters:
1647: + mat - the matrix
1648: . row - the (block) row to set
1649: - v - a logically two-dimensional (column major) array of values for block matrices with blocksize larger than one, otherwise a one dimensional array of values
1651: Level: advanced
1653: Notes:
1654: The values, `v`, are column-oriented for the block version.
1656: All the nonzeros in `row` must be provided
1658: The matrix must have previously had its column indices set. It is rare that this routine is used; usually `MatSetValues()` is used instead.
1660: `row` must belong to this process
1662: .seealso: [](ch_matrices), `Mat`, `MatSetValues()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1663: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1664: @*/
1665: PetscErrorCode MatSetValuesRow(Mat mat, PetscInt row, const PetscScalar v[])
1666: {
1667: PetscFunctionBeginHot;
1670: MatCheckPreallocated(mat, 1);
1671: PetscAssertPointer(v, 3);
1672: PetscCheck(mat->insertmode != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add and insert values");
1673: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1674: mat->insertmode = INSERT_VALUES;
1676: if (mat->assembled) {
1677: mat->was_assembled = PETSC_TRUE;
1678: mat->assembled = PETSC_FALSE;
1679: }
1680: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1681: PetscUseTypeMethod(mat, setvaluesrow, row, v);
1682: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1683: PetscFunctionReturn(PETSC_SUCCESS);
1684: }
1686: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1687: /*@
1688: MatSetValuesStencil - Inserts or adds a block of values into a matrix.
1689: Using structured grid indexing
1691: Not Collective
1693: Input Parameters:
1694: + mat - the matrix
1695: . m - number of rows being entered
1696: . idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1697: . n - number of columns being entered
1698: . idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1699: . v - a logically two-dimensional array of values
1700: - addv - either `ADD_VALUES` to add to existing entries at that location or `INSERT_VALUES` to replace existing entries with new values
1702: Level: beginner
1704: Notes:
1705: By default the values, `v`, are row-oriented. See `MatSetOption()` for other options.
1707: Calls to `MatSetValuesStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1708: options cannot be mixed without intervening calls to the assembly
1709: routines.
1711: The grid coordinates are across the entire grid, not just the local portion
1713: `MatSetValuesStencil()` uses 0-based row and column numbers in Fortran
1714: as well as in C.
1716: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1718: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1719: or call `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1721: The columns and rows in the stencil passed in MUST be contained within the
1722: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1723: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1724: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1725: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1727: For periodic boundary conditions use negative indices for values to the left (below 0); these are
1728: obtained by wrapping values from the right edge. For values to the right of the last entry use that index plus one,
1729: etc., to obtain values wrapped around from the left edge. This does not work for anything but the
1730: `DM_BOUNDARY_PERIODIC` boundary type.
1732: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
1733: a single value per point) you can skip filling those indices.
1735: Inspired by the structured grid interface to the HYPRE package
1736: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1738: Efficiency Alert:
1739: The routine `MatSetValuesBlockedStencil()` may offer much better efficiency
1740: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1742: Fortran Note:
1743: `idxm` and `idxn` should be declared as
1744: $ MatStencil idxm(4,m),idxn(4,n)
1745: and the values inserted using
1746: .vb
1747: idxm(MatStencil_i,1) = i
1748: idxm(MatStencil_j,1) = j
1749: idxm(MatStencil_k,1) = k
1750: idxm(MatStencil_c,1) = c
1751: etc
1752: .ve
1754: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1755: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`
1756: @*/
1757: PetscErrorCode MatSetValuesStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1758: {
1759: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1760: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1761: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1763: PetscFunctionBegin;
1764: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1767: PetscAssertPointer(idxm, 3);
1768: PetscAssertPointer(idxn, 5);
1770: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1771: jdxm = buf;
1772: jdxn = buf + m;
1773: } else {
1774: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1775: jdxm = bufm;
1776: jdxn = bufn;
1777: }
1778: for (i = 0; i < m; i++) {
1779: for (j = 0; j < 3 - sdim; j++) dxm++;
1780: tmp = *dxm++ - starts[0];
1781: for (j = 0; j < dim - 1; j++) {
1782: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1783: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1784: }
1785: if (mat->stencil.noc) dxm++;
1786: jdxm[i] = tmp;
1787: }
1788: for (i = 0; i < n; i++) {
1789: for (j = 0; j < 3 - sdim; j++) dxn++;
1790: tmp = *dxn++ - starts[0];
1791: for (j = 0; j < dim - 1; j++) {
1792: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1793: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1794: }
1795: if (mat->stencil.noc) dxn++;
1796: jdxn[i] = tmp;
1797: }
1798: PetscCall(MatSetValuesLocal(mat, m, jdxm, n, jdxn, v, addv));
1799: PetscCall(PetscFree2(bufm, bufn));
1800: PetscFunctionReturn(PETSC_SUCCESS);
1801: }
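/*
  A usage sketch for MatSetValuesStencil(): one row of a 2d 5-point Laplacian. It assumes `A` was
  obtained with DMCreateMatrix() from a 2d DMDA with a single degree of freedom, and that the
  illustrative loop indices `i`, `j` address a grid point owned by (or in the ghost region of)
  this process; the unused .k and .c stencil entries are ignored for this grid.

    MatStencil  row, col[5];
    PetscScalar v[5] = {4.0, -1.0, -1.0, -1.0, -1.0};

    row.i = i;        row.j = j;
    col[0].i = i;     col[0].j = j;
    col[1].i = i - 1; col[1].j = j;
    col[2].i = i + 1; col[2].j = j;
    col[3].i = i;     col[3].j = j - 1;
    col[4].i = i;     col[4].j = j + 1;
    PetscCall(MatSetValuesStencil(A, 1, &row, 5, col, v, INSERT_VALUES));
*/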
1803: /*@
1804: MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix.
1805: Using structured grid indexing
1807: Not Collective
1809: Input Parameters:
1810: + mat - the matrix
1811: . m - number of rows being entered
1812: . idxm - grid coordinates for matrix rows being entered
1813: . n - number of columns being entered
1814: . idxn - grid coordinates for matrix columns being entered
1815: . v - a logically two-dimensional array of values
1816: - addv - either `ADD_VALUES` to add to existing entries or `INSERT_VALUES` to replace existing entries with new values
1818: Level: beginner
1820: Notes:
1821: By default the values, `v`, are row-oriented and unsorted.
1822: See `MatSetOption()` for other options.
1824: Calls to `MatSetValuesBlockedStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1825: options cannot be mixed without intervening calls to the assembly
1826: routines.
1828: The grid coordinates are across the entire grid, not just the local portion
1830: `MatSetValuesBlockedStencil()` uses 0-based row and column numbers in Fortran
1831: as well as in C.
1833: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1835: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1836: or call `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1838: The columns and rows in the stencil passed in MUST be contained within the
1839: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1840: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1841: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1842: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1844: Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1845: simply ignored. This allows easily inserting element stiffness matrices
1846: with homogeneous Dirichlet boundary conditions that you don't want represented
1847: in the matrix.
1849: Inspired by the structured grid interface to the HYPRE package
1850: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1852: Fortran Note:
1853: `idxm` and `idxn` should be declared as
1854: $ MatStencil idxm(4,m),idxn(4,n)
1855: and the values inserted using
1856: .vb
1857: idxm(MatStencil_i,1) = i
1858: idxm(MatStencil_j,1) = j
1859: idxm(MatStencil_k,1) = k
1860: etc
1861: .ve
1863: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1864: `MatSetValues()`, `MatSetValuesStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`,
1865: `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`
1866: @*/
1867: PetscErrorCode MatSetValuesBlockedStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1868: {
1869: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1870: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1871: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1873: PetscFunctionBegin;
1874: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1877: PetscAssertPointer(idxm, 3);
1878: PetscAssertPointer(idxn, 5);
1879: PetscAssertPointer(v, 6);
1881: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1882: jdxm = buf;
1883: jdxn = buf + m;
1884: } else {
1885: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1886: jdxm = bufm;
1887: jdxn = bufn;
1888: }
1889: for (i = 0; i < m; i++) {
1890: for (j = 0; j < 3 - sdim; j++) dxm++;
1891: tmp = *dxm++ - starts[0];
1892: for (j = 0; j < sdim - 1; j++) {
1893: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1894: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1895: }
1896: dxm++;
1897: jdxm[i] = tmp;
1898: }
1899: for (i = 0; i < n; i++) {
1900: for (j = 0; j < 3 - sdim; j++) dxn++;
1901: tmp = *dxn++ - starts[0];
1902: for (j = 0; j < sdim - 1; j++) {
1903: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1904: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1905: }
1906: dxn++;
1907: jdxn[i] = tmp;
1908: }
1909: PetscCall(MatSetValuesBlockedLocal(mat, m, jdxm, n, jdxn, v, addv));
1910: PetscCall(PetscFree2(bufm, bufn));
1911: PetscFunctionReturn(PETSC_SUCCESS);
1912: }
1914: /*@
1915: MatSetStencil - Sets the grid information for setting values into a matrix via
1916: `MatSetValuesStencil()`
1918: Not Collective
1920: Input Parameters:
1921: + mat - the matrix
1922: . dim - dimension of the grid (1, 2, or 3)
1923: . dims - number of grid points in x, y, and z direction, including ghost points on your processor
1924: . starts - starting point of ghost nodes on your processor in x, y, and z direction
1925: - dof - number of degrees of freedom per node
1927: Level: beginner
1929: Notes:
1930: Inspired by the structured grid interface to the HYPRE package
1931: (www.llnl.gov/CASC/hyper)
1933: For matrices generated with `DMCreateMatrix()` this routine is automatically called and so not needed by the
1934: user.
1936: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1937: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetValuesStencil()`
1938: @*/
1939: PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const PetscInt starts[], PetscInt dof)
1940: {
1941: PetscFunctionBegin;
1943: PetscAssertPointer(dims, 3);
1944: PetscAssertPointer(starts, 4);
1946: mat->stencil.dim = dim + (dof > 1);
1947: for (PetscInt i = 0; i < dim; i++) {
1948: mat->stencil.dims[i] = dims[dim - i - 1]; /* copy the values in backwards */
1949: mat->stencil.starts[i] = starts[dim - i - 1];
1950: }
1951: mat->stencil.dims[dim] = dof;
1952: mat->stencil.starts[dim] = 0;
1953: mat->stencil.noc = (PetscBool)(dof == 1);
1954: PetscFunctionReturn(PETSC_SUCCESS);
1955: }
1957: /*@
1958: MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
1960: Not Collective
1962: Input Parameters:
1963: + mat - the matrix
1964: . v - a logically two-dimensional array of values
1965: . m - the number of block rows
1966: . idxm - the global block indices
1967: . n - the number of block columns
1968: . idxn - the global block indices
1969: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` replaces existing entries with new values
1971: Level: intermediate
1973: Notes:
1974: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call
1975: MatXXXXSetPreallocation() or `MatSetUp()` before using this routine.
1977: The `m` and `n` count the NUMBER of blocks in the row direction and column direction,
1978: NOT the total number of rows/columns; for example, if the block size is 2 and
1979: you are passing in values for rows 2,3,4,5 then `m` would be 2 (not 4).
1980: The values in `idxm` would be 1 2; that is the first index for each block divided by
1981: the block size.
1983: You must call `MatSetBlockSize()` when constructing this matrix (before
1984: preallocating it).
1986: By default the values, `v`, are row-oriented, so the layout of
1987: `v` is the same as for `MatSetValues()`. See `MatSetOption()` for other options.
1989: Calls to `MatSetValuesBlocked()` with the `INSERT_VALUES` and `ADD_VALUES`
1990: options cannot be mixed without intervening calls to the assembly
1991: routines.
1993: `MatSetValuesBlocked()` uses 0-based row and column numbers in Fortran
1994: as well as in C.
1996: Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1997: simply ignored. This allows easily inserting element stiffness matrices
1998: with homogeneous Dirichlet boundary conditions that you don't want represented
1999: in the matrix.
2001: Each time an entry is set within a sparse matrix via `MatSetValues()`,
2002: internal searching must be done to determine where to place the
2003: data in the matrix storage space. By instead inserting blocks of
2004: entries via `MatSetValuesBlocked()`, the overhead of matrix assembly is
2005: reduced.
2007: Example:
2008: .vb
2009: Suppose m=n=2 and block size (bs) = 2. The array is
2011: 1 2 | 3 4
2012: 5 6 | 7 8
2013: - - - | - - -
2014: 9 10 | 11 12
2015: 13 14 | 15 16
2017: v[] should be passed in like
2018: v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
2020: If you are not using row-oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
2021: v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
2022: .ve
2024: Fortran Notes:
2025: If any of `idxm`, `idxn`, and `v` are scalars pass them using, for example,
2026: .vb
2027: MatSetValuesBlocked(mat, one, [idxm], one, [idxn], [v], INSERT_VALUES)
2028: .ve
2030: If `v` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2032: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()`
2033: @*/
2034: PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
2035: {
2036: PetscFunctionBeginHot;
2039: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2040: PetscAssertPointer(idxm, 3);
2041: PetscAssertPointer(idxn, 5);
2042: MatCheckPreallocated(mat, 1);
2043: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2044: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2045: if (PetscDefined(USE_DEBUG)) {
2046: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2047: PetscCheck(mat->ops->setvaluesblocked || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2048: }
2049: if (PetscDefined(USE_DEBUG)) {
2050: PetscInt rbs, cbs, M, N, i;
2051: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
2052: PetscCall(MatGetSize(mat, &M, &N));
2053: for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than row length %" PetscInt_FMT, i, idxm[i], rbs, M);
2054: for (i = 0; i < n; i++)
2055: PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block %" PetscInt_FMT " contains an index %" PetscInt_FMT "*%" PetscInt_FMT " greater than column length %" PetscInt_FMT, i, idxn[i], cbs, N);
2056: }
2057: if (mat->assembled) {
2058: mat->was_assembled = PETSC_TRUE;
2059: mat->assembled = PETSC_FALSE;
2060: }
2061: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2062: if (mat->ops->setvaluesblocked) {
2063: PetscUseTypeMethod(mat, setvaluesblocked, m, idxm, n, idxn, v, addv);
2064: } else {
2065: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *iidxm, *iidxn;
2066: PetscInt i, j, bs, cbs;
2068: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
2069: if ((m * bs + n * cbs) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2070: iidxm = buf;
2071: iidxn = buf + m * bs;
2072: } else {
2073: PetscCall(PetscMalloc2(m * bs, &bufr, n * cbs, &bufc));
2074: iidxm = bufr;
2075: iidxn = bufc;
2076: }
2077: for (i = 0; i < m; i++) {
2078: for (j = 0; j < bs; j++) iidxm[i * bs + j] = bs * idxm[i] + j;
2079: }
2080: if (m != n || bs != cbs || idxm != idxn) {
2081: for (i = 0; i < n; i++) {
2082: for (j = 0; j < cbs; j++) iidxn[i * cbs + j] = cbs * idxn[i] + j;
2083: }
2084: } else iidxn = iidxm;
2085: PetscCall(MatSetValues(mat, m * bs, iidxm, n * cbs, iidxn, v, addv));
2086: PetscCall(PetscFree2(bufr, bufc));
2087: }
2088: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2089: PetscFunctionReturn(PETSC_SUCCESS);
2090: }
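/*
  A usage sketch for MatSetValuesBlocked() matching the m=n=2, bs=2 example in the manual page
  above. It assumes `A` is an illustrative matrix with block size 2 (set with MatSetBlockSize()
  before preallocation) and global size at least 4x4; block indices 0 and 1 address point
  rows/columns 0-1 and 2-3.

    PetscInt    idxm[2] = {0, 1}, idxn[2] = {0, 1};
    PetscScalar v[16]   = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; // row-oriented

    PetscCall(MatSetValuesBlocked(A, 2, idxm, 2, idxn, v, INSERT_VALUES));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
*/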
2092: /*@
2093: MatGetValues - Gets a block of local values from a matrix.
2095: Not Collective; can only return values that are owned by the given process
2097: Input Parameters:
2098: + mat - the matrix
2099: . v - a logically two-dimensional array for storing the values
2100: . m - the number of rows
2101: . idxm - the global indices of the rows
2102: . n - the number of columns
2103: - idxn - the global indices of the columns
2105: Level: advanced
2107: Notes:
2108: The user must allocate space (m*n `PetscScalar`s) for the values, `v`.
2109: The values, `v`, are then returned in a row-oriented format,
2110: analogous to that used by default in `MatSetValues()`.
2112: `MatGetValues()` uses 0-based row and column numbers in
2113: Fortran as well as in C.
2115: `MatGetValues()` requires that the matrix has been assembled
2116: with `MatAssemblyBegin()`/`MatAssemblyEnd()`. Thus, calls to
2117: `MatSetValues()` and `MatGetValues()` CANNOT be made in succession
2118: without intermediate matrix assembly.
2120: Negative row or column indices will be ignored and those locations in `v` will be
2121: left unchanged.
2123: For the standard row-based matrix formats, `idxm` can only contain rows owned by the requesting MPI process.
2124: That is, rows with global index greater than or equal to rstart and less than rend where rstart and rend are obtainable
2125: from `MatGetOwnershipRange`(mat,&rstart,&rend).
2127: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetValuesLocal()`, `MatGetValue()`
2128: @*/
2129: PetscErrorCode MatGetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2130: {
2131: PetscFunctionBegin;
2134: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2135: PetscAssertPointer(idxm, 3);
2136: PetscAssertPointer(idxn, 5);
2137: PetscAssertPointer(v, 6);
2138: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2139: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2140: MatCheckPreallocated(mat, 1);
2142: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2143: PetscUseTypeMethod(mat, getvalues, m, idxm, n, idxn, v);
2144: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2145: PetscFunctionReturn(PETSC_SUCCESS);
2146: }
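/*
  A usage sketch for MatGetValues(): read back a 2x2 block of locally owned rows from an assembled
  matrix. It assumes the illustrative matrix `A` is assembled, owns at least two rows on this
  process, and has at least two global columns; the rows are taken from MatGetOwnershipRange() so
  they are guaranteed to be local.

    PetscInt    rstart, rend, rows[2], cols[2] = {0, 1};
    PetscScalar vals[4];

    PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
    rows[0] = rstart;
    rows[1] = rstart + 1;
    PetscCall(MatGetValues(A, 2, rows, 2, cols, vals)); // vals is row-oriented: vals[i*2 + j]
*/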
2148: /*@
2149: MatGetValuesLocal - retrieves values from certain locations in a matrix using the local numbering of the indices
2150: defined previously by `MatSetLocalToGlobalMapping()`
2152: Not Collective
2154: Input Parameters:
2155: + mat - the matrix
2156: . nrow - number of rows
2157: . irow - the row local indices
2158: . ncol - number of columns
2159: - icol - the column local indices
2161: Output Parameter:
2162: . y - a logically two-dimensional array of values
2164: Level: advanced
2166: Notes:
2167: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine.
2169: This routine can only return values that are owned by the requesting MPI process. That is, for standard matrix formats, rows that, in the global numbering,
2170: are greater than or equal to rstart and less than rend where rstart and rend are obtainable from `MatGetOwnershipRange`(mat,&rstart,&rend). One can
2171: determine if the resulting global row associated with the local row r is owned by the requesting MPI process by applying the `ISLocalToGlobalMapping` set
2172: with `MatSetLocalToGlobalMapping()`.
2174: Developer Note:
2175: This is labelled with C so does not automatically generate Fortran stubs and interfaces
2176: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2178: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2179: `MatSetValuesLocal()`, `MatGetValues()`
2180: @*/
2181: PetscErrorCode MatGetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], PetscScalar y[])
2182: {
2183: PetscFunctionBeginHot;
2186: MatCheckPreallocated(mat, 1);
2187: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to retrieve */
2188: PetscAssertPointer(irow, 3);
2189: PetscAssertPointer(icol, 5);
2190: if (PetscDefined(USE_DEBUG)) {
2191: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2192: PetscCheck(mat->ops->getvalueslocal || mat->ops->getvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2193: }
2194: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2195: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2196: if (mat->ops->getvalueslocal) PetscUseTypeMethod(mat, getvalueslocal, nrow, irow, ncol, icol, y);
2197: else {
2198: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *irowm, *icolm;
2199: if ((nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2200: irowm = buf;
2201: icolm = buf + nrow;
2202: } else {
2203: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2204: irowm = bufr;
2205: icolm = bufc;
2206: }
2207: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global row mapping (See MatSetLocalToGlobalMapping()).");
2208: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global column mapping (See MatSetLocalToGlobalMapping()).");
2209: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, irowm));
2210: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, icolm));
2211: PetscCall(MatGetValues(mat, nrow, irowm, ncol, icolm, y));
2212: PetscCall(PetscFree2(bufr, bufc));
2213: }
2214: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2215: PetscFunctionReturn(PETSC_SUCCESS);
2216: }
2218: /*@
2219: MatSetValuesBatch - Adds (`ADD_VALUES`) many blocks of values into a matrix at once. The blocks must all be square and
2220: the same size. Currently, this can only be called once and creates the given matrix.
2222: Not Collective
2224: Input Parameters:
2225: + mat - the matrix
2226: . nb - the number of blocks
2227: . bs - the number of rows (and columns) in each block
2228: . rows - a concatenation of the rows for each block
2229: - v - a concatenation of logically two-dimensional arrays of values
2231: Level: advanced
2233: Notes:
2234: `MatSetPreallocationCOO()` and `MatSetValuesCOO()` may be a better way to provide the values
2236: In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
2238: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
2239: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetPreallocationCOO()`, `MatSetValuesCOO()`
2240: @*/
2241: PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
2242: {
2243: PetscFunctionBegin;
2246: PetscAssertPointer(rows, 4);
2247: PetscAssertPointer(v, 5);
2248: PetscAssert(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2250: PetscCall(PetscLogEventBegin(MAT_SetValuesBatch, mat, 0, 0, 0));
2251: if (mat->ops->setvaluesbatch) PetscUseTypeMethod(mat, setvaluesbatch, nb, bs, rows, v);
2252: else {
2253: for (PetscInt b = 0; b < nb; ++b) PetscCall(MatSetValues(mat, bs, &rows[b * bs], bs, &rows[b * bs], &v[b * bs * bs], ADD_VALUES));
2254: }
2255: PetscCall(PetscLogEventEnd(MAT_SetValuesBatch, mat, 0, 0, 0));
2256: PetscFunctionReturn(PETSC_SUCCESS);
2257: }
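/*
  A rough usage sketch for MatSetValuesBatch(): add two square 3x3 blocks in one call. The names
  are illustrative and the scalar entries (here just zeros) stand in for values computed by an
  element routine; it assumes `A` has been created with its type and sizes set so that values can
  be added to it.

    const PetscInt nb = 2, bs = 3;
    PetscInt       rows[6] = {0, 1, 2, 3, 4, 5}; // concatenated global row indices of the two blocks
    PetscScalar    v[18]   = {0};                // concatenated row-oriented entries of the two blocks

    PetscCall(MatSetValuesBatch(A, nb, bs, rows, v));
*/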
2259: /*@
2260: MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2261: the routine `MatSetValuesLocal()` to allow users to insert matrix entries
2262: using a local (per-processor) numbering.
2264: Not Collective
2266: Input Parameters:
2267: + x - the matrix
2268: . rmapping - row mapping created with `ISLocalToGlobalMappingCreate()` or `ISLocalToGlobalMappingCreateIS()`
2269: - cmapping - column mapping
2271: Level: intermediate
2273: Note:
2274: If the matrix is obtained with `DMCreateMatrix()` then this may already have been called on the matrix
2276: .seealso: [](ch_matrices), `Mat`, `DM`, `DMCreateMatrix()`, `MatGetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesLocal()`, `MatGetValuesLocal()`
2277: @*/
2278: PetscErrorCode MatSetLocalToGlobalMapping(Mat x, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2279: {
2280: PetscFunctionBegin;
2285: if (x->ops->setlocaltoglobalmapping) PetscUseTypeMethod(x, setlocaltoglobalmapping, rmapping, cmapping);
2286: else {
2287: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->rmap, rmapping));
2288: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->cmap, cmapping));
2289: }
2290: PetscFunctionReturn(PETSC_SUCCESS);
2291: }
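/*
  A usage sketch for MatSetLocalToGlobalMapping(): build the map from local indices 0..n-1 to the
  globally owned range and attach it, so that MatSetValuesLocal() can be used. It assumes the
  illustrative matrix `A` is square with identical row and column layouts.

    ISLocalToGlobalMapping l2g;
    PetscInt               n, rstart, rend, *idx;

    PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
    n = rend - rstart;
    PetscCall(PetscMalloc1(n, &idx));
    for (PetscInt i = 0; i < n; i++) idx[i] = rstart + i;
    PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 1, n, idx, PETSC_OWN_POINTER, &l2g));
    PetscCall(MatSetLocalToGlobalMapping(A, l2g, l2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&l2g)); // the matrix layouts keep their own reference
*/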
2293: /*@
2294: MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by `MatSetLocalToGlobalMapping()`
2296: Not Collective
2298: Input Parameter:
2299: . A - the matrix
2301: Output Parameters:
2302: + rmapping - row mapping
2303: - cmapping - column mapping
2305: Level: advanced
2307: .seealso: [](ch_matrices), `Mat`, `MatSetLocalToGlobalMapping()`, `MatSetValuesLocal()`
2308: @*/
2309: PetscErrorCode MatGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
2310: {
2311: PetscFunctionBegin;
2314: if (rmapping) {
2315: PetscAssertPointer(rmapping, 2);
2316: *rmapping = A->rmap->mapping;
2317: }
2318: if (cmapping) {
2319: PetscAssertPointer(cmapping, 3);
2320: *cmapping = A->cmap->mapping;
2321: }
2322: PetscFunctionReturn(PETSC_SUCCESS);
2323: }
2325: /*@
2326: MatSetLayouts - Sets the `PetscLayout` objects for rows and columns of a matrix
2328: Logically Collective
2330: Input Parameters:
2331: + A - the matrix
2332: . rmap - row layout
2333: - cmap - column layout
2335: Level: advanced
2337: Note:
2338: The `PetscLayout` objects are usually created automatically for the matrix so this routine rarely needs to be called.
2340: .seealso: [](ch_matrices), `Mat`, `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatGetLayouts()`
2341: @*/
2342: PetscErrorCode MatSetLayouts(Mat A, PetscLayout rmap, PetscLayout cmap)
2343: {
2344: PetscFunctionBegin;
2346: PetscCall(PetscLayoutReference(rmap, &A->rmap));
2347: PetscCall(PetscLayoutReference(cmap, &A->cmap));
2348: PetscFunctionReturn(PETSC_SUCCESS);
2349: }
2351: /*@
2352: MatGetLayouts - Gets the `PetscLayout` objects for rows and columns
2354: Not Collective
2356: Input Parameter:
2357: . A - the matrix
2359: Output Parameters:
2360: + rmap - row layout
2361: - cmap - column layout
2363: Level: advanced
2365: .seealso: [](ch_matrices), `Mat`, [Matrix Layouts](sec_matlayout), `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatSetLayouts()`
2366: @*/
2367: PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap)
2368: {
2369: PetscFunctionBegin;
2372: if (rmap) {
2373: PetscAssertPointer(rmap, 2);
2374: *rmap = A->rmap;
2375: }
2376: if (cmap) {
2377: PetscAssertPointer(cmap, 3);
2378: *cmap = A->cmap;
2379: }
2380: PetscFunctionReturn(PETSC_SUCCESS);
2381: }
2383: /*@
2384: MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2385: using a local numbering of the rows and columns.
2387: Not Collective
2389: Input Parameters:
2390: + mat - the matrix
2391: . nrow - number of rows
2392: . irow - the row local indices
2393: . ncol - number of columns
2394: . icol - the column local indices
2395: . y - a logically two-dimensional array of values
2396: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2398: Level: intermediate
2400: Notes:
2401: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine
2403: Calls to `MatSetValuesLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2404: options cannot be mixed without intervening calls to the assembly
2405: routines.
2407: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2408: MUST be called after all calls to `MatSetValuesLocal()` have been completed.
2410: Fortran Notes:
2411: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2412: .vb
2413: MatSetValuesLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES)
2414: .ve
2416: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2418: Developer Note:
2419: This is labeled with C so does not automatically generate Fortran stubs and interfaces
2420: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2422: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2423: `MatGetValuesLocal()`
2424: @*/
2425: PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2426: {
2427: PetscFunctionBeginHot;
2430: MatCheckPreallocated(mat, 1);
2431: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2432: PetscAssertPointer(irow, 3);
2433: PetscAssertPointer(icol, 5);
2434: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2435: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2436: if (PetscDefined(USE_DEBUG)) {
2437: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2438: PetscCheck(mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2439: }
2441: if (mat->assembled) {
2442: mat->was_assembled = PETSC_TRUE;
2443: mat->assembled = PETSC_FALSE;
2444: }
2445: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2446: if (mat->ops->setvalueslocal) PetscUseTypeMethod(mat, setvalueslocal, nrow, irow, ncol, icol, y, addv);
2447: else {
2448: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2449: const PetscInt *irowm, *icolm;
2451: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2452: bufr = buf;
2453: bufc = buf + nrow;
2454: irowm = bufr;
2455: icolm = bufc;
2456: } else {
2457: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2458: irowm = bufr;
2459: icolm = bufc;
2460: }
2461: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, bufr));
2462: else irowm = irow;
2463: if (mat->cmap->mapping) {
2464: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2465: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, bufc));
2466: } else icolm = irowm;
2467: } else icolm = icol;
2468: PetscCall(MatSetValues(mat, nrow, irowm, ncol, icolm, y, addv));
2469: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2470: }
2471: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2472: PetscFunctionReturn(PETSC_SUCCESS);
2473: }
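/*
  A usage sketch for MatSetValuesLocal(): insert with local indices once a local-to-global mapping
  has been attached (see the MatSetLocalToGlobalMapping() sketch above). It assumes local row and
  column indices 0 and 1 are valid on this process for the illustrative matrix `A`.

    PetscInt    lrows[2] = {0, 1}, lcols[2] = {0, 1};
    PetscScalar v[4]     = {2.0, -1.0, -1.0, 2.0};

    PetscCall(MatSetValuesLocal(A, 2, lrows, 2, lcols, v, ADD_VALUES));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
*/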
2475: /*@
2476: MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
2477: using a local ordering of the nodes a block at a time.
2479: Not Collective
2481: Input Parameters:
2482: + mat - the matrix
2483: . nrow - number of rows
2484: . irow - the row local indices
2485: . ncol - number of columns
2486: . icol - the column local indices
2487: . y - a logically two-dimensional array of values
2488: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2490: Level: intermediate
2492: Notes:
2493: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetBlockSize()` and `MatSetLocalToGlobalMapping()`
2494: before using this routine.
2496: Calls to `MatSetValuesBlockedLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2497: options cannot be mixed without intervening calls to the assembly
2498: routines.
2500: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2501: MUST be called after all calls to `MatSetValuesBlockedLocal()` have been completed.
2503: Fortran Notes:
2504: If any of `irow`, `icol`, and `y` are scalars pass them using, for example,
2505: .vb
2506: MatSetValuesBlockedLocal(mat, one, [irow], one, [icol], [y], INSERT_VALUES)
2507: .ve
2509: If `y` is a two-dimensional array use `reshape()` to pass it as a one dimensional array
2511: Developer Note:
2512: This is labeled with C so does not automatically generate Fortran stubs and interfaces
2513: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2515: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`,
2516: `MatSetValuesLocal()`, `MatSetValuesBlocked()`
2517: @*/
2518: PetscErrorCode MatSetValuesBlockedLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2519: {
2520: PetscFunctionBeginHot;
2523: MatCheckPreallocated(mat, 1);
2524: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2525: PetscAssertPointer(irow, 3);
2526: PetscAssertPointer(icol, 5);
2527: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2528: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2529: if (PetscDefined(USE_DEBUG)) {
2530: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2531: PetscCheck(mat->ops->setvaluesblockedlocal || mat->ops->setvaluesblocked || mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2532: }
2534: if (mat->assembled) {
2535: mat->was_assembled = PETSC_TRUE;
2536: mat->assembled = PETSC_FALSE;
2537: }
2538: if (PetscUnlikelyDebug(mat->rmap->mapping)) { /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
2539: PetscInt irbs, rbs;
2540: PetscCall(MatGetBlockSizes(mat, &rbs, NULL));
2541: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &irbs));
2542: PetscCheck(rbs == irbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different row block sizes! mat %" PetscInt_FMT ", row l2g map %" PetscInt_FMT, rbs, irbs);
2543: }
2544: if (PetscUnlikelyDebug(mat->cmap->mapping)) {
2545: PetscInt icbs, cbs;
2546: PetscCall(MatGetBlockSizes(mat, NULL, &cbs));
2547: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &icbs));
2548: PetscCheck(cbs == icbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different col block sizes! mat %" PetscInt_FMT ", col l2g map %" PetscInt_FMT, cbs, icbs);
2549: }
2550: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2551: if (mat->ops->setvaluesblockedlocal) PetscUseTypeMethod(mat, setvaluesblockedlocal, nrow, irow, ncol, icol, y, addv);
2552: else {
2553: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2554: const PetscInt *irowm, *icolm;
2556: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= ((PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf))) {
2557: bufr = buf;
2558: bufc = buf + nrow;
2559: irowm = bufr;
2560: icolm = bufc;
2561: } else {
2562: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2563: irowm = bufr;
2564: icolm = bufc;
2565: }
2566: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping, nrow, irow, bufr));
2567: else irowm = irow;
2568: if (mat->cmap->mapping) {
2569: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2570: PetscCall(ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping, ncol, icol, bufc));
2571: } else icolm = irowm;
2572: } else icolm = icol;
2573: PetscCall(MatSetValuesBlocked(mat, nrow, irowm, ncol, icolm, y, addv));
2574: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2575: }
2576: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2577: PetscFunctionReturn(PETSC_SUCCESS);
2578: }
2580: /*@
2581: MatMultDiagonalBlock - Computes the matrix-vector product, $y = Dx$, where `D` is defined by the inode or block structure of the diagonal
2583: Collective
2585: Input Parameters:
2586: + mat - the matrix
2587: - x - the vector to be multiplied
2589: Output Parameter:
2590: . y - the result
2592: Level: developer
2594: Note:
2595: The vectors `x` and `y` cannot be the same. I.e., one cannot
2596: call `MatMultDiagonalBlock`(A,y,y).
2598: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2599: @*/
2600: PetscErrorCode MatMultDiagonalBlock(Mat mat, Vec x, Vec y)
2601: {
2602: PetscFunctionBegin;
2608: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2609: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2610: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2611: MatCheckPreallocated(mat, 1);
2613: PetscUseTypeMethod(mat, multdiagonalblock, x, y);
2614: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2615: PetscFunctionReturn(PETSC_SUCCESS);
2616: }
2618: /*@
2619: MatMult - Computes the matrix-vector product, $y = Ax$.
2621: Neighbor-wise Collective
2623: Input Parameters:
2624: + mat - the matrix
2625: - x - the vector to be multiplied
2627: Output Parameter:
2628: . y - the result
2630: Level: beginner
2632: Note:
2633: The vectors `x` and `y` cannot be the same. I.e., one cannot
2634: call `MatMult`(A,y,y).
2636: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2637: @*/
2638: PetscErrorCode MatMult(Mat mat, Vec x, Vec y)
2639: {
2640: PetscFunctionBegin;
2644: VecCheckAssembled(x);
2646: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2647: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2648: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2649: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
2650: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
2651: PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
2652: PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
2653: PetscCall(VecSetErrorIfLocked(y, 3));
2654: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2655: MatCheckPreallocated(mat, 1);
2657: PetscCall(VecLockReadPush(x));
2658: PetscCall(PetscLogEventBegin(MAT_Mult, mat, x, y, 0));
2659: PetscUseTypeMethod(mat, mult, x, y);
2660: PetscCall(PetscLogEventEnd(MAT_Mult, mat, x, y, 0));
2661: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2662: PetscCall(VecLockReadPop(x));
2663: PetscFunctionReturn(PETSC_SUCCESS);
2664: }
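/*
  A usage sketch for MatMult(): create compatible vectors with MatCreateVecs() so their layouts
  automatically match the matrix, then apply y = A x. It assumes the illustrative matrix `A` is
  assembled.

    Vec x, y;

    PetscCall(MatCreateVecs(A, &x, &y)); // x is compatible with the columns of A, y with the rows
    PetscCall(VecSet(x, 1.0));
    PetscCall(MatMult(A, x, y));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&y));
*/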
2666: /*@
2667: MatMultTranspose - Computes matrix transpose times a vector $y = A^T * x$.
2669: Neighbor-wise Collective
2671: Input Parameters:
2672: + mat - the matrix
2673: - x - the vector to be multiplied
2675: Output Parameter:
2676: . y - the result
2678: Level: beginner
2680: Notes:
2681: The vectors `x` and `y` cannot be the same. I.e., one cannot
2682: call `MatMultTranspose`(A,y,y).
2684: For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiply;
2685: use `MatMultHermitianTranspose()` for that.
2687: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatMultHermitianTranspose()`, `MatTranspose()`
2688: @*/
2689: PetscErrorCode MatMultTranspose(Mat mat, Vec x, Vec y)
2690: {
2691: PetscErrorCode (*op)(Mat, Vec, Vec) = NULL;
2693: PetscFunctionBegin;
2697: VecCheckAssembled(x);
2700: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2701: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2702: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2703: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2704: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2705: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2706: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2707: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2708: MatCheckPreallocated(mat, 1);
2710: if (!mat->ops->multtranspose) {
2711: if (mat->symmetric == PETSC_BOOL3_TRUE && mat->ops->mult) op = mat->ops->mult;
2712: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s does not have a multiply transpose defined or is symmetric and does not have a multiply defined", ((PetscObject)mat)->type_name);
2713: } else op = mat->ops->multtranspose;
2714: PetscCall(PetscLogEventBegin(MAT_MultTranspose, mat, x, y, 0));
2715: PetscCall(VecLockReadPush(x));
2716: PetscCall((*op)(mat, x, y));
2717: PetscCall(VecLockReadPop(x));
2718: PetscCall(PetscLogEventEnd(MAT_MultTranspose, mat, x, y, 0));
2719: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2720: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2721: PetscFunctionReturn(PETSC_SUCCESS);
2722: }
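/*
  A usage sketch for MatMultTranspose(): compared with the MatMult() sketch above, the roles of the
  vectors from MatCreateVecs() are swapped, since y = A^T x needs x compatible with the rows of A
  and y compatible with its columns. It assumes the illustrative matrix `A` is assembled.

    Vec x, y;

    PetscCall(MatCreateVecs(A, &y, &x)); // first output is column-compatible (y), second is row-compatible (x)
    PetscCall(VecSet(x, 1.0));
    PetscCall(MatMultTranspose(A, x, y));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&y));
*/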
2724: /*@
2725: MatMultHermitianTranspose - Computes matrix Hermitian-transpose times a vector $y = A^H * x$.
2727: Neighbor-wise Collective
2729: Input Parameters:
2730: + mat - the matrix
2731: - x - the vector to be multiplied
2733: Output Parameter:
2734: . y - the result
2736: Level: beginner
2738: Notes:
2739: The vectors `x` and `y` cannot be the same. I.e., one cannot
2740: call `MatMultHermitianTranspose`(A,y,y).
2742: Also called the conjugate transpose, complex conjugate transpose, or adjoint.
2744: For real numbers `MatMultTranspose()` and `MatMultHermitianTranspose()` are identical.
2746: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultHermitianTransposeAdd()`, `MatMultTranspose()`
2747: @*/
2748: PetscErrorCode MatMultHermitianTranspose(Mat mat, Vec x, Vec y)
2749: {
2750: PetscFunctionBegin;
2756: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2757: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2758: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2759: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2760: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2761: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2762: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2763: MatCheckPreallocated(mat, 1);
2765: PetscCall(PetscLogEventBegin(MAT_MultHermitianTranspose, mat, x, y, 0));
2766: #if defined(PETSC_USE_COMPLEX)
2767: if (mat->ops->multhermitiantranspose || (mat->hermitian == PETSC_BOOL3_TRUE && mat->ops->mult)) {
2768: PetscCall(VecLockReadPush(x));
2769: if (mat->ops->multhermitiantranspose) PetscUseTypeMethod(mat, multhermitiantranspose, x, y);
2770: else PetscUseTypeMethod(mat, mult, x, y);
2771: PetscCall(VecLockReadPop(x));
2772: } else {
2773: Vec w;
2774: PetscCall(VecDuplicate(x, &w));
2775: PetscCall(VecCopy(x, w));
2776: PetscCall(VecConjugate(w));
2777: PetscCall(MatMultTranspose(mat, w, y));
2778: PetscCall(VecDestroy(&w));
2779: PetscCall(VecConjugate(y));
2780: }
2781: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2782: #else
2783: PetscCall(MatMultTranspose(mat, x, y));
2784: #endif
2785: PetscCall(PetscLogEventEnd(MAT_MultHermitianTranspose, mat, x, y, 0));
2786: PetscFunctionReturn(PETSC_SUCCESS);
2787: }
2789: /*@
2790: MatMultAdd - Computes $v3 = v2 + A * v1$.
2792: Neighbor-wise Collective
2794: Input Parameters:
2795: + mat - the matrix
2796: . v1 - the vector to be multiplied by `mat`
2797: - v2 - the vector to be added to the result
2799: Output Parameter:
2800: . v3 - the result
2802: Level: beginner
2804: Note:
2805: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2806: call `MatMultAdd`(A,v1,v2,v1).
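
   Example:
   A minimal sketch (illustrative names; `A`, `v1`, and `v2` are assumed to be created and filled elsewhere, with `v2` conforming to the rows of `A`):
.vb
  Mat A;
  Vec v1, v2, v3;

  PetscCall(VecDuplicate(v2, &v3));
  PetscCall(MatMultAdd(A, v1, v2, v3)); /* v3 = v2 + A*v1 */
  PetscCall(VecDestroy(&v3));
.ve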
2808: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMult()`, `MatMultTransposeAdd()`
2809: @*/
2810: PetscErrorCode MatMultAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2811: {
2812: PetscFunctionBegin;
2819: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2820: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2821: PetscCheck(mat->cmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v1->map->N);
2822: /* PetscCheck(mat->rmap->N == v2->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v2->map->N);
2823: PetscCheck(mat->rmap->N == v3->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v3->map->N); */
2824: PetscCheck(mat->rmap->n == v3->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v3->map->n);
2825: PetscCheck(mat->rmap->n == v2->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v2->map->n);
2826: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2827: MatCheckPreallocated(mat, 1);
2829: PetscCall(PetscLogEventBegin(MAT_MultAdd, mat, v1, v2, v3));
2830: PetscCall(VecLockReadPush(v1));
2831: PetscUseTypeMethod(mat, multadd, v1, v2, v3);
2832: PetscCall(VecLockReadPop(v1));
2833: PetscCall(PetscLogEventEnd(MAT_MultAdd, mat, v1, v2, v3));
2834: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2835: PetscFunctionReturn(PETSC_SUCCESS);
2836: }
2838: /*@
2839: MatMultTransposeAdd - Computes $v3 = v2 + A^T * v1$.
2841: Neighbor-wise Collective
2843: Input Parameters:
2844: + mat - the matrix
2845: . v1 - the vector to be multiplied by the transpose of the matrix
2846: - v2 - the vector to be added to the result
2848: Output Parameter:
2849: . v3 - the result
2851: Level: beginner
2853: Note:
2854: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2855: call `MatMultTransposeAdd`(A,v1,v2,v1).
2857: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2858: @*/
2859: PetscErrorCode MatMultTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2860: {
2861: PetscErrorCode (*op)(Mat, Vec, Vec, Vec) = (!mat->ops->multtransposeadd && mat->symmetric) ? mat->ops->multadd : mat->ops->multtransposeadd;
2863: PetscFunctionBegin;
2870: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2871: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2872: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2873: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2874: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2875: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2876: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2877: MatCheckPreallocated(mat, 1);
2879: PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd, mat, v1, v2, v3));
2880: PetscCall(VecLockReadPush(v1));
2881: PetscCall((*op)(mat, v1, v2, v3));
2882: PetscCall(VecLockReadPop(v1));
2883: PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd, mat, v1, v2, v3));
2884: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2885: PetscFunctionReturn(PETSC_SUCCESS);
2886: }
2888: /*@
2889: MatMultHermitianTransposeAdd - Computes $v3 = v2 + A^H * v1$.
2891: Neighbor-wise Collective
2893: Input Parameters:
2894: + mat - the matrix
2895: . v1 - the vector to be multiplied by the Hermitian transpose
2896: - v2 - the vector to be added to the result
2898: Output Parameter:
2899: . v3 - the result
2901: Level: beginner
2903: Note:
2904: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2905: call `MatMultHermitianTransposeAdd`(A,v1,v2,v1).
2907: .seealso: [](ch_matrices), `Mat`, `MatMultHermitianTranspose()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2908: @*/
2909: PetscErrorCode MatMultHermitianTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2910: {
2911: PetscFunctionBegin;
2918: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2919: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2920: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2921: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2922: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2923: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2924: MatCheckPreallocated(mat, 1);
2926: PetscCall(PetscLogEventBegin(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2927: PetscCall(VecLockReadPush(v1));
2928: if (mat->ops->multhermitiantransposeadd) PetscUseTypeMethod(mat, multhermitiantransposeadd, v1, v2, v3);
2929: else {
2930: Vec w, z;
2931: PetscCall(VecDuplicate(v1, &w));
2932: PetscCall(VecCopy(v1, w));
2933: PetscCall(VecConjugate(w));
2934: PetscCall(VecDuplicate(v3, &z));
2935: PetscCall(MatMultTranspose(mat, w, z));
2936: PetscCall(VecDestroy(&w));
2937: PetscCall(VecConjugate(z));
2938: if (v2 != v3) {
2939: PetscCall(VecWAXPY(v3, 1.0, v2, z));
2940: } else {
2941: PetscCall(VecAXPY(v3, 1.0, z));
2942: }
2943: PetscCall(VecDestroy(&z));
2944: }
2945: PetscCall(VecLockReadPop(v1));
2946: PetscCall(PetscLogEventEnd(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2947: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2948: PetscFunctionReturn(PETSC_SUCCESS);
2949: }
2951: /*@
2952: MatGetFactorType - gets the type of factorization that a matrix represents
2954: Not Collective
2956: Input Parameter:
2957: . mat - the matrix
2959: Output Parameter:
2960: . t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2962: Level: intermediate
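
   Example:
   A small sketch of guarding an operation that is only legal on unfactored matrices (`A` is illustrative):
.vb
  Mat           A;
  MatFactorType type;

  PetscCall(MatGetFactorType(A, &type));
  if (type == MAT_FACTOR_NONE) {
    /* safe to call routines, such as MatSetValues(), that require an unfactored matrix */
  }
.ve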
2964: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatSetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
2965: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2966: @*/
2967: PetscErrorCode MatGetFactorType(Mat mat, MatFactorType *t)
2968: {
2969: PetscFunctionBegin;
2972: PetscAssertPointer(t, 2);
2973: *t = mat->factortype;
2974: PetscFunctionReturn(PETSC_SUCCESS);
2975: }
2977: /*@
2978: MatSetFactorType - sets the type of factorization that a matrix represents
2980: Logically Collective
2982: Input Parameters:
2983: + mat - the matrix
2984: - t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2986: Level: intermediate
2988: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatGetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
2989: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2990: @*/
2991: PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
2992: {
2993: PetscFunctionBegin;
2996: mat->factortype = t;
2997: PetscFunctionReturn(PETSC_SUCCESS);
2998: }
3000: /*@
3001: MatGetInfo - Returns information about matrix storage (number of
3002: nonzeros, memory, etc.).
3004: Collective if `MAT_GLOBAL_MAX` or `MAT_GLOBAL_SUM` is used as the flag
3006: Input Parameters:
3007: + mat - the matrix
3008: - flag - flag indicating the type of parameters to be returned (`MAT_LOCAL` - local matrix, `MAT_GLOBAL_MAX` - maximum over all processors, `MAT_GLOBAL_SUM` - sum over all processors)
3010: Output Parameter:
3011: . info - matrix information context
3013: Options Database Key:
3014: . -mat_view ::ascii_info - print matrix info to `PETSC_STDOUT`
3016: Level: intermediate
3018: Notes:
3019: The `MatInfo` context contains a variety of matrix data, including
3020: number of nonzeros allocated and used, number of mallocs during
3021: matrix assembly, etc. Additional information for factored matrices
3022: is provided (such as the fill ratio, number of mallocs during
3023: factorization, etc.).
3025: Example:
3026: See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
3027: data within the `MatInfo` context. For example,
3028: .vb
3029: MatInfo info;
3030: Mat A;
3031: double mal, nz_a, nz_u;
3033: MatGetInfo(A, MAT_LOCAL, &info);
3034: mal = info.mallocs;
3035: nz_a = info.nz_allocated;
3036: .ve
3038: Fortran Note:
3039: Declare info as a `MatInfo` array of dimension `MAT_INFO_SIZE`, and then extract the parameters
3040: of interest. See the file ${PETSC_DIR}/include/petsc/finclude/petscmat.h for
3041: a complete list of parameter names.
3042: .vb
3043: MatInfo info(MAT_INFO_SIZE)
3044: double precision mal, nz_a
3045: Mat A
3046: integer ierr
3048: call MatGetInfo(A, MAT_LOCAL, info, ierr)
3049: mal = info(MAT_INFO_MALLOCS)
3050: nz_a = info(MAT_INFO_NZ_ALLOCATED)
3051: .ve
3053: .seealso: [](ch_matrices), `Mat`, `MatInfo`, `MatStashGetInfo()`
3054: @*/
3055: PetscErrorCode MatGetInfo(Mat mat, MatInfoType flag, MatInfo *info)
3056: {
3057: PetscFunctionBegin;
3060: PetscAssertPointer(info, 3);
3061: MatCheckPreallocated(mat, 1);
3062: PetscUseTypeMethod(mat, getinfo, flag, info);
3063: PetscFunctionReturn(PETSC_SUCCESS);
3064: }
3066: /*
3067: This is used by external packages where it is not easy to get the info from the actual
3068: matrix factorization.
3069: */
3070: PetscErrorCode MatGetInfo_External(Mat A, MatInfoType flag, MatInfo *info)
3071: {
3072: PetscFunctionBegin;
3073: PetscCall(PetscMemzero(info, sizeof(MatInfo)));
3074: PetscFunctionReturn(PETSC_SUCCESS);
3075: }
3077: /*@
3078: MatLUFactor - Performs in-place LU factorization of a matrix.
3080: Collective
3082: Input Parameters:
3083: + mat - the matrix
3084: . row - row permutation
3085: . col - column permutation
3086: - info - options for factorization, includes
3087: .vb
3088: fill - expected fill as ratio of original fill.
3089: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3090: Run with the option -info to determine an optimal value to use
3091: .ve
3093: Level: developer
3095: Notes:
3096: Most users should employ the `KSP` interface for linear solvers
3097: instead of working directly with matrix algebra routines such as this.
3098: See, e.g., `KSPCreate()`.
3100: This changes the state of the matrix to a factored matrix; it cannot be used
3101: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3103: This is really in-place only for dense matrices; the preferred approach is to use `MatGetFactor()`, `MatLUFactorSymbolic()`, and `MatLUFactorNumeric()`
3104: when not using `KSP`.
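
   Example:
   A sketch of the in-place path (illustrative names; passing `NULL` for the `MatFactorInfo` argument selects default options):
.vb
  Mat A;        /* square, assembled matrix */
  IS  row, col;
  Vec b, x;

  PetscCall(MatGetOrdering(A, MATORDERINGND, &row, &col));
  PetscCall(MatLUFactor(A, row, col, NULL));
  PetscCall(MatSolve(A, b, x)); /* A now holds its own LU factors */
  PetscCall(ISDestroy(&row));
  PetscCall(ISDestroy(&col));
.ve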
3106: Developer Note:
3107: The Fortran interface is not autogenerated as the
3108: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3110: .seealso: [](ch_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatFactorType`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
3111: `MatGetOrdering()`, `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3112: @*/
3113: PetscErrorCode MatLUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3114: {
3115: MatFactorInfo tinfo;
3117: PetscFunctionBegin;
3121: if (info) PetscAssertPointer(info, 4);
3123: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3124: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3125: MatCheckPreallocated(mat, 1);
3126: if (!info) {
3127: PetscCall(MatFactorInfoInitialize(&tinfo));
3128: info = &tinfo;
3129: }
3131: PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, row, col, 0));
3132: PetscUseTypeMethod(mat, lufactor, row, col, info);
3133: PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, row, col, 0));
3134: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3135: PetscFunctionReturn(PETSC_SUCCESS);
3136: }
3138: /*@
3139: MatILUFactor - Performs in-place ILU factorization of a matrix.
3141: Collective
3143: Input Parameters:
3144: + mat - the matrix
3145: . row - row permutation
3146: . col - column permutation
3147: - info - structure containing
3148: .vb
3149: levels - number of levels of fill.
3150: expected fill - as ratio of original fill.
3151: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
3152: missing diagonal entries)
3153: .ve
3155: Level: developer
3157: Notes:
3158: Most users should employ the `KSP` interface for linear solvers
3159: instead of working directly with matrix algebra routines such as this.
3160: See, e.g., `KSPCreate()`.
3162: This is really in-place only when the level of fill is zero; otherwise new space is allocated
3163: to store the factored matrix and the previous storage is freed. The preferred approach is to use `MatGetFactor()`, `MatILUFactorSymbolic()`, and `MatILUFactorNumeric()`
3164: when not using `KSP`.
3166: Developer Note:
3167: The Fortran interface is not autogenerated as the
3168: interface definition cannot be generated correctly [due to MatFactorInfo]
3170: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatILUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
3171: @*/
3172: PetscErrorCode MatILUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3173: {
3174: PetscFunctionBegin;
3178: PetscAssertPointer(info, 4);
3180: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
3181: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3182: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3183: MatCheckPreallocated(mat, 1);
3185: PetscCall(PetscLogEventBegin(MAT_ILUFactor, mat, row, col, 0));
3186: PetscUseTypeMethod(mat, ilufactor, row, col, info);
3187: PetscCall(PetscLogEventEnd(MAT_ILUFactor, mat, row, col, 0));
3188: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3189: PetscFunctionReturn(PETSC_SUCCESS);
3190: }
3192: /*@
3193: MatLUFactorSymbolic - Performs symbolic LU factorization of a matrix.
3194: Call this routine before calling `MatLUFactorNumeric()` and after `MatGetFactor()`.
3196: Collective
3198: Input Parameters:
3199: + fact - the factor matrix obtained with `MatGetFactor()`
3200: . mat - the matrix
3201: . row - the row permutation
3202: . col - the column permutation
3203: - info - options for factorization, includes
3204: .vb
3205: fill - expected fill as ratio of original fill. Run with the option -info to determine an optimal value to use
3206: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3207: .ve
3209: Level: developer
3211: Notes:
3212: See [Matrix Factorization](sec_matfactor) for additional information about factorizations
3214: Most users should employ the simplified `KSP` interface for linear solvers
3215: instead of working directly with matrix algebra routines such as this.
3216: See, e.g., `KSPCreate()`.
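
   Example:
   A sketch of the complete out-of-place factorization sequence (illustrative names; `NULL` for the `MatFactorInfo` argument selects default options):
.vb
  Mat A, F;
  IS  row, col;
  Vec b, x;

  PetscCall(MatGetOrdering(A, MATORDERINGND, &row, &col));
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
  PetscCall(MatLUFactorSymbolic(F, A, row, col, NULL));
  PetscCall(MatLUFactorNumeric(F, A, NULL));
  PetscCall(MatSolve(F, b, x));
  PetscCall(ISDestroy(&row));
  PetscCall(ISDestroy(&col));
  PetscCall(MatDestroy(&F));
.ve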
3218: Developer Note:
3219: The Fortran interface is not autogenerated as the
3220: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3222: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`
3223: @*/
3224: PetscErrorCode MatLUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
3225: {
3226: MatFactorInfo tinfo;
3228: PetscFunctionBegin;
3233: if (info) PetscAssertPointer(info, 5);
3236: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3237: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3238: MatCheckPreallocated(mat, 2);
3239: if (!info) {
3240: PetscCall(MatFactorInfoInitialize(&tinfo));
3241: info = &tinfo;
3242: }
3244: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorSymbolic, mat, row, col, 0));
3245: PetscUseTypeMethod(fact, lufactorsymbolic, mat, row, col, info);
3246: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorSymbolic, mat, row, col, 0));
3247: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3248: PetscFunctionReturn(PETSC_SUCCESS);
3249: }
3251: /*@
3252: MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3253: Call this routine after first calling `MatGetFactor()` and `MatLUFactorSymbolic()`.
3255: Collective
3257: Input Parameters:
3258: + fact - the factor matrix obtained with `MatGetFactor()`
3259: . mat - the matrix
3260: - info - options for factorization
3262: Level: developer
3264: Notes:
3265: See `MatLUFactor()` for in-place factorization. See
3266: `MatCholeskyFactorNumeric()` for the symmetric, positive definite case.
3268: Most users should employ the `KSP` interface for linear solvers
3269: instead of working directly with matrix algebra routines such as this.
3270: See, e.g., `KSPCreate()`.
3272: Developer Note:
3273: The Fortran interface is not autogenerated as the
3274: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3276: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactorSymbolic()`, `MatLUFactor()`, `MatCholeskyFactor()`
3277: @*/
3278: PetscErrorCode MatLUFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3279: {
3280: MatFactorInfo tinfo;
3282: PetscFunctionBegin;
3287: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3288: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3289: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3291: MatCheckPreallocated(mat, 2);
3292: if (!info) {
3293: PetscCall(MatFactorInfoInitialize(&tinfo));
3294: info = &tinfo;
3295: }
3297: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorNumeric, mat, fact, 0, 0));
3298: else PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, fact, 0, 0));
3299: PetscUseTypeMethod(fact, lufactornumeric, mat, info);
3300: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorNumeric, mat, fact, 0, 0));
3301: else PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, fact, 0, 0));
3302: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3303: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3304: PetscFunctionReturn(PETSC_SUCCESS);
3305: }
3307: /*@
3308: MatCholeskyFactor - Performs in-place Cholesky factorization of a
3309: symmetric matrix.
3311: Collective
3313: Input Parameters:
3314: + mat - the matrix
3315: . perm - row and column permutations
3316: - info - expected fill as ratio of original fill
3318: Level: developer
3320: Notes:
3321: See `MatLUFactor()` for the nonsymmetric case. See also `MatGetFactor()`,
3322: `MatCholeskyFactorSymbolic()`, and `MatCholeskyFactorNumeric()`.
3324: Most users should employ the `KSP` interface for linear solvers
3325: instead of working directly with matrix algebra routines such as this.
3326: See, e.g., `KSPCreate()`.
3328: Developer Note:
3329: The Fortran interface is not autogenerated as the
3330: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3332: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactorNumeric()`,
3333: `MatGetOrdering()`
3334: @*/
3335: PetscErrorCode MatCholeskyFactor(Mat mat, IS perm, const MatFactorInfo *info)
3336: {
3337: MatFactorInfo tinfo;
3339: PetscFunctionBegin;
3342: if (info) PetscAssertPointer(info, 3);
3344: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3345: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3346: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3347: MatCheckPreallocated(mat, 1);
3348: if (!info) {
3349: PetscCall(MatFactorInfoInitialize(&tinfo));
3350: info = &tinfo;
3351: }
3353: PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, perm, 0, 0));
3354: PetscUseTypeMethod(mat, choleskyfactor, perm, info);
3355: PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, perm, 0, 0));
3356: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3357: PetscFunctionReturn(PETSC_SUCCESS);
3358: }
3360: /*@
3361: MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3362: of a symmetric matrix.
3364: Collective
3366: Input Parameters:
3367: + fact - the factor matrix obtained with `MatGetFactor()`
3368: . mat - the matrix
3369: . perm - row and column permutations
3370: - info - options for factorization, includes
3371: .vb
3372: fill - expected fill as ratio of original fill.
3373: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3374: Run with the option -info to determine an optimal value to use
3375: .ve
3377: Level: developer
3379: Notes:
3380: See `MatLUFactorSymbolic()` for the nonsymmetric case. See also
3381: `MatCholeskyFactor()` and `MatCholeskyFactorNumeric()`.
3383: Most users should employ the `KSP` interface for linear solvers
3384: instead of working directly with matrix algebra routines such as this.
3385: See, e.g., `KSPCreate()`.
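
   Example:
   The analogous sequence for a symmetric (Hermitian) positive definite matrix (illustrative names; `NULL` for the `MatFactorInfo` argument selects default options):
.vb
  Mat A, F;
  IS  rperm, cperm;
  Vec b, x;

  PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &rperm, &cperm));
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_CHOLESKY, &F));
  PetscCall(MatCholeskyFactorSymbolic(F, A, rperm, NULL));
  PetscCall(MatCholeskyFactorNumeric(F, A, NULL));
  PetscCall(MatSolve(F, b, x));
  PetscCall(ISDestroy(&rperm));
  PetscCall(ISDestroy(&cperm));
  PetscCall(MatDestroy(&F));
.ve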
3387: Developer Note:
3388: The Fortran interface is not autogenerated as the
3389: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3391: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactor()`, `MatCholeskyFactorNumeric()`,
3392: `MatGetOrdering()`
3393: @*/
3394: PetscErrorCode MatCholeskyFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
3395: {
3396: MatFactorInfo tinfo;
3398: PetscFunctionBegin;
3402: if (info) PetscAssertPointer(info, 4);
3405: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3406: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3407: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3408: MatCheckPreallocated(mat, 2);
3409: if (!info) {
3410: PetscCall(MatFactorInfoInitialize(&tinfo));
3411: info = &tinfo;
3412: }
3414: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3415: PetscUseTypeMethod(fact, choleskyfactorsymbolic, mat, perm, info);
3416: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3417: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3418: PetscFunctionReturn(PETSC_SUCCESS);
3419: }
3421: /*@
3422: MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3423: of a symmetric matrix. Call this routine after first calling `MatGetFactor()` and
3424: `MatCholeskyFactorSymbolic()`.
3426: Collective
3428: Input Parameters:
3429: + fact - the factor matrix obtained with `MatGetFactor()`, where the factored values are stored
3430: . mat - the initial matrix that is to be factored
3431: - info - options for factorization
3433: Level: developer
3435: Note:
3436: Most users should employ the `KSP` interface for linear solvers
3437: instead of working directly with matrix algebra routines such as this.
3438: See, e.g., `KSPCreate()`.
3440: Developer Note:
3441: The Fortran interface is not autogenerated as the
3442: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3444: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactor()`, `MatLUFactorNumeric()`
3445: @*/
3446: PetscErrorCode MatCholeskyFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3447: {
3448: MatFactorInfo tinfo;
3450: PetscFunctionBegin;
3455: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3456: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dim %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3457: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3458: MatCheckPreallocated(mat, 2);
3459: if (!info) {
3460: PetscCall(MatFactorInfoInitialize(&tinfo));
3461: info = &tinfo;
3462: }
3464: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3465: else PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, fact, 0, 0));
3466: PetscUseTypeMethod(fact, choleskyfactornumeric, mat, info);
3467: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3468: else PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, fact, 0, 0));
3469: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3470: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3471: PetscFunctionReturn(PETSC_SUCCESS);
3472: }
3474: /*@
3475: MatQRFactor - Performs in-place QR factorization of a matrix.
3477: Collective
3479: Input Parameters:
3480: + mat - the matrix
3481: . col - column permutation
3482: - info - options for factorization, includes
3483: .vb
3484: fill - expected fill as ratio of original fill.
3485: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3486: Run with the option -info to determine an optimal value to use
3487: .ve
3489: Level: developer
3491: Notes:
3492: Most users should employ the `KSP` interface for linear solvers
3493: instead of working directly with matrix algebra routines such as this.
3494: See, e.g., `KSPCreate()`.
3496: This changes the state of the matrix to a factored matrix; it cannot be used
3497: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3499: Developer Note:
3500: The Fortran interface is not autogenerated as the
3501: interface definition cannot be generated correctly [due to MatFactorInfo]
3503: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactorSymbolic()`, `MatQRFactorNumeric()`, `MatLUFactor()`,
3504: `MatSetUnfactored()`
3505: @*/
3506: PetscErrorCode MatQRFactor(Mat mat, IS col, const MatFactorInfo *info)
3507: {
3508: PetscFunctionBegin;
3511: if (info) PetscAssertPointer(info, 3);
3513: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3514: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3515: MatCheckPreallocated(mat, 1);
3516: PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, col, 0, 0));
3517: PetscUseMethod(mat, "MatQRFactor_C", (Mat, IS, const MatFactorInfo *), (mat, col, info));
3518: PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, col, 0, 0));
3519: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3520: PetscFunctionReturn(PETSC_SUCCESS);
3521: }
3523: /*@
3524: MatQRFactorSymbolic - Performs symbolic QR factorization of a matrix.
3525: Call this routine after `MatGetFactor()` but before calling `MatQRFactorNumeric()`.
3527: Collective
3529: Input Parameters:
3530: + fact - the factor matrix obtained with `MatGetFactor()`
3531: . mat - the matrix
3532: . col - column permutation
3533: - info - options for factorization, includes
3534: .vb
3535: fill - expected fill as ratio of original fill.
3536: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3537: Run with the option -info to determine an optimal value to use
3538: .ve
3540: Level: developer
3542: Note:
3543: Most users should employ the `KSP` interface for linear solvers
3544: instead of working directly with matrix algebra routines such as this.
3545: See, e.g., `KSPCreate()`.
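
   Example:
   A sketch of the QR sequence (illustrative names; QR availability depends on the matrix type and solver package, and `NULL` is assumed to be acceptable here for the column permutation and the factorization options):
.vb
  Mat A, F;
  Vec b, x;

  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_QR, &F));
  PetscCall(MatQRFactorSymbolic(F, A, NULL, NULL));
  PetscCall(MatQRFactorNumeric(F, A, NULL));
  PetscCall(MatSolve(F, b, x)); /* for rectangular A this yields a least-squares solution */
  PetscCall(MatDestroy(&F));
.ve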
3547: Developer Note:
3548: The Fortran interface is not autogenerated as the
3549: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3551: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatQRFactor()`, `MatQRFactorNumeric()`, `MatLUFactor()`, `MatFactorInfoInitialize()`
3552: @*/
3553: PetscErrorCode MatQRFactorSymbolic(Mat fact, Mat mat, IS col, const MatFactorInfo *info)
3554: {
3555: MatFactorInfo tinfo;
3557: PetscFunctionBegin;
3561: if (info) PetscAssertPointer(info, 4);
3564: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3565: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3566: MatCheckPreallocated(mat, 2);
3567: if (!info) {
3568: PetscCall(MatFactorInfoInitialize(&tinfo));
3569: info = &tinfo;
3570: }
3572: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorSymbolic, fact, mat, col, 0));
3573: PetscUseMethod(fact, "MatQRFactorSymbolic_C", (Mat, Mat, IS, const MatFactorInfo *), (fact, mat, col, info));
3574: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorSymbolic, fact, mat, col, 0));
3575: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3576: PetscFunctionReturn(PETSC_SUCCESS);
3577: }
3579: /*@
3580: MatQRFactorNumeric - Performs numeric QR factorization of a matrix.
3581: Call this routine after first calling `MatGetFactor()` and `MatQRFactorSymbolic()`.
3583: Collective
3585: Input Parameters:
3586: + fact - the factor matrix obtained with `MatGetFactor()`
3587: . mat - the matrix
3588: - info - options for factorization
3590: Level: developer
3592: Notes:
3593: See `MatQRFactor()` for in-place factorization.
3595: Most users should employ the `KSP` interface for linear solvers
3596: instead of working directly with matrix algebra routines such as this.
3597: See, e.g., `KSPCreate()`.
3599: Developer Note:
3600: The Fortran interface is not autogenerated as the
3601: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3603: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactor()`, `MatQRFactorSymbolic()`, `MatLUFactor()`
3604: @*/
3605: PetscErrorCode MatQRFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3606: {
3607: MatFactorInfo tinfo;
3609: PetscFunctionBegin;
3614: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3615: PetscCheck(mat->rmap->N == fact->rmap->N && mat->cmap->N == fact->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3616: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3618: MatCheckPreallocated(mat, 2);
3619: if (!info) {
3620: PetscCall(MatFactorInfoInitialize(&tinfo));
3621: info = &tinfo;
3622: }
3624: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorNumeric, mat, fact, 0, 0));
3625: else PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, fact, 0, 0));
3626: PetscUseMethod(fact, "MatQRFactorNumeric_C", (Mat, Mat, const MatFactorInfo *), (fact, mat, info));
3627: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorNumeric, mat, fact, 0, 0));
3628: else PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, fact, 0, 0));
3629: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3630: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3631: PetscFunctionReturn(PETSC_SUCCESS);
3632: }
3634: /*@
3635: MatSolve - Solves $A x = b$, given a factored matrix.
3637: Neighbor-wise Collective
3639: Input Parameters:
3640: + mat - the factored matrix
3641: - b - the right-hand-side vector
3643: Output Parameter:
3644: . x - the result vector
3646: Level: developer
3648: Notes:
3649: The vectors `b` and `x` cannot be the same. I.e., one cannot
3650: call `MatSolve`(A,x,x).
3652: Most users should employ the `KSP` interface for linear solvers
3653: instead of working directly with matrix algebra routines such as this.
3654: See, e.g., `KSPCreate()`.
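
   Example:
   A minimal sketch (illustrative names; `A` is the original matrix and `F` its factor obtained with `MatGetFactor()` and one of the factorization routines):
.vb
  Mat A, F;
  Vec b, x;

  PetscCall(MatCreateVecs(A, &x, &b)); /* x conforms to the columns of A, b to the rows */
  /* ... fill b ... */
  PetscCall(MatSolve(F, b, x)); /* x = A^{-1} b */
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
.ve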
3656: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3657: @*/
3658: PetscErrorCode MatSolve(Mat mat, Vec b, Vec x)
3659: {
3660: PetscFunctionBegin;
3665: PetscCheckSameComm(mat, 1, b, 2);
3666: PetscCheckSameComm(mat, 1, x, 3);
3667: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3668: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3669: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3670: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3671: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3672: MatCheckPreallocated(mat, 1);
3674: PetscCall(PetscLogEventBegin(MAT_Solve, mat, b, x, 0));
3675: PetscCall(VecFlag(x, mat->factorerrortype));
3676: if (mat->factorerrortype) {
3677: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3678: } else PetscUseTypeMethod(mat, solve, b, x);
3679: PetscCall(PetscLogEventEnd(MAT_Solve, mat, b, x, 0));
3680: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3681: PetscFunctionReturn(PETSC_SUCCESS);
3682: }
3684: static PetscErrorCode MatMatSolve_Basic(Mat A, Mat B, Mat X, PetscBool trans)
3685: {
3686: Vec b, x;
3687: PetscInt N, i;
3688: PetscErrorCode (*f)(Mat, Vec, Vec);
3689: PetscBool Abound, Bneedconv = PETSC_FALSE, Xneedconv = PETSC_FALSE;
3691: PetscFunctionBegin;
3692: if (A->factorerrortype) {
3693: PetscCall(PetscInfo(A, "MatFactorError %d\n", A->factorerrortype));
3694: PetscCall(MatSetInf(X));
3695: PetscFunctionReturn(PETSC_SUCCESS);
3696: }
3697: f = (!trans || (!A->ops->solvetranspose && A->symmetric)) ? A->ops->solve : A->ops->solvetranspose;
3698: PetscCheck(f, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)A)->type_name);
3699: PetscCall(MatBoundToCPU(A, &Abound));
3700: if (!Abound) {
3701: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &Bneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3702: PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &Xneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3703: }
3704: #if PetscDefined(HAVE_CUDA)
3705: if (Bneedconv) PetscCall(MatConvert(B, MATDENSECUDA, MAT_INPLACE_MATRIX, &B));
3706: if (Xneedconv) PetscCall(MatConvert(X, MATDENSECUDA, MAT_INPLACE_MATRIX, &X));
3707: #elif PetscDefined(HAVE_HIP)
3708: if (Bneedconv) PetscCall(MatConvert(B, MATDENSEHIP, MAT_INPLACE_MATRIX, &B));
3709: if (Xneedconv) PetscCall(MatConvert(X, MATDENSEHIP, MAT_INPLACE_MATRIX, &X));
3710: #endif
3711: PetscCall(MatGetSize(B, NULL, &N));
3712: for (i = 0; i < N; i++) {
3713: PetscCall(MatDenseGetColumnVecRead(B, i, &b));
3714: PetscCall(MatDenseGetColumnVecWrite(X, i, &x));
3715: PetscCall((*f)(A, b, x));
3716: PetscCall(MatDenseRestoreColumnVecWrite(X, i, &x));
3717: PetscCall(MatDenseRestoreColumnVecRead(B, i, &b));
3718: }
3719: if (Bneedconv) PetscCall(MatConvert(B, MATDENSE, MAT_INPLACE_MATRIX, &B));
3720: if (Xneedconv) PetscCall(MatConvert(X, MATDENSE, MAT_INPLACE_MATRIX, &X));
3721: PetscFunctionReturn(PETSC_SUCCESS);
3722: }
3724: /*@
3725: MatMatSolve - Solves $A X = B$, given a factored matrix.
3727: Neighbor-wise Collective
3729: Input Parameters:
3730: + A - the factored matrix
3731: - B - the right-hand-side matrix `MATDENSE` (or sparse `MATAIJ`-- when using MUMPS)
3733: Output Parameter:
3734: . X - the result matrix (dense matrix)
3736: Level: developer
3738: Note:
3739: If `B` is a `MATDENSE` matrix then one can call `MatMatSolve`(A,B,B) except with `MATSOLVERMKL_CPARDISO`;
3740: otherwise, `B` and `X` cannot be the same.
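
   Example:
   A sketch for solving against several right-hand sides at once (illustrative names; `F` is a factored matrix and `B` a `MATDENSE` matrix whose columns are the right-hand sides):
.vb
  Mat F, B, X;

  PetscCall(MatDuplicate(B, MAT_DO_NOT_COPY_VALUES, &X));
  PetscCall(MatMatSolve(F, B, X)); /* X = A^{-1} B */
  PetscCall(MatDestroy(&X));
.ve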
3742: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3743: @*/
3744: PetscErrorCode MatMatSolve(Mat A, Mat B, Mat X)
3745: {
3746: PetscFunctionBegin;
3751: PetscCheckSameComm(A, 1, B, 2);
3752: PetscCheckSameComm(A, 1, X, 3);
3753: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3754: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3755: PetscCheck(X->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3756: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3757: MatCheckPreallocated(A, 1);
3759: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3760: if (!A->ops->matsolve) {
3761: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolve\n", ((PetscObject)A)->type_name));
3762: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_FALSE));
3763: } else PetscUseTypeMethod(A, matsolve, B, X);
3764: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3765: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3766: PetscFunctionReturn(PETSC_SUCCESS);
3767: }
3769: /*@
3770: MatMatSolveTranspose - Solves $A^T X = B $, given a factored matrix.
3772: Neighbor-wise Collective
3774: Input Parameters:
3775: + A - the factored matrix
3776: - B - the right-hand-side matrix (`MATDENSE` matrix)
3778: Output Parameter:
3779: . X - the result matrix (dense matrix)
3781: Level: developer
3783: Note:
3784: The matrices `B` and `X` cannot be the same. I.e., one cannot
3785: call `MatMatSolveTranspose`(A,X,X).
3787: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolveTranspose()`, `MatMatSolve()`, `MatLUFactor()`, `MatCholeskyFactor()`
3788: @*/
3789: PetscErrorCode MatMatSolveTranspose(Mat A, Mat B, Mat X)
3790: {
3791: PetscFunctionBegin;
3796: PetscCheckSameComm(A, 1, B, 2);
3797: PetscCheckSameComm(A, 1, X, 3);
3798: PetscCheck(X != B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3799: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3800: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3801: PetscCheck(A->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat A,Mat B: local dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->n, B->rmap->n);
3802: PetscCheck(X->cmap->N >= B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Solution matrix must have at least as many columns as the rhs matrix");
3803: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3804: MatCheckPreallocated(A, 1);
3806: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3807: if (!A->ops->matsolvetranspose) {
3808: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolveTranspose\n", ((PetscObject)A)->type_name));
3809: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_TRUE));
3810: } else PetscUseTypeMethod(A, matsolvetranspose, B, X);
3811: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3812: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3813: PetscFunctionReturn(PETSC_SUCCESS);
3814: }
3816: /*@
3817: MatMatTransposeSolve - Solves $A X = B^T$, given a factored matrix.
3819: Neighbor-wise Collective
3821: Input Parameters:
3822: + A - the factored matrix
3823: - Bt - the transpose of right-hand-side matrix as a `MATDENSE`
3825: Output Parameter:
3826: . X - the result matrix (dense matrix)
3828: Level: developer
3830: Note:
3831: For MUMPS, only a centralized sparse right-hand side stored on the host process is supported, and it must be supplied in compressed column format of the right-hand-side matrix. Since `Bt` holds the transpose of the right-hand side, creating `Bt` in sparse
3832: compressed row format on the host process and calling `MatMatTransposeSolve()` provides the effect of MUMPS' `MatMatSolve()`.
3834: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatMatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3835: @*/
3836: PetscErrorCode MatMatTransposeSolve(Mat A, Mat Bt, Mat X)
3837: {
3838: PetscFunctionBegin;
3843: PetscCheckSameComm(A, 1, Bt, 2);
3844: PetscCheckSameComm(A, 1, X, 3);
3846: PetscCheck(X != Bt, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3847: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3848: PetscCheck(A->rmap->N == Bt->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat Bt: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, Bt->cmap->N);
3849: PetscCheck(X->cmap->N >= Bt->rmap->N, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_SIZ, "Solution matrix must have at least as many columns as the rhs matrix has rows");
3850: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3851: PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
3852: MatCheckPreallocated(A, 1);
3854: PetscCall(PetscLogEventBegin(MAT_MatTrSolve, A, Bt, X, 0));
3855: PetscUseTypeMethod(A, mattransposesolve, Bt, X);
3856: PetscCall(PetscLogEventEnd(MAT_MatTrSolve, A, Bt, X, 0));
3857: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3858: PetscFunctionReturn(PETSC_SUCCESS);
3859: }
3861: /*@
3862: MatForwardSolve - Solves $L x = b$, given a factored matrix, $A = LU$, or
3863: $U^T D^{1/2} x = b$, given a factored symmetric matrix, $A = U^T D U$.
3865: Neighbor-wise Collective
3867: Input Parameters:
3868: + mat - the factored matrix
3869: - b - the right-hand-side vector
3871: Output Parameter:
3872: . x - the result vector
3874: Level: developer
3876: Notes:
3877: `MatSolve()` should be used for most applications, as it performs
3878: a forward solve followed by a backward solve.
3880: The vectors `b` and `x` cannot be the same, i.e., one cannot
3881: call `MatForwardSolve`(A,x,x).
3883: For matrices in `MATSEQBAIJ` format with block size larger than 1,
3884: the diagonal blocks are not implemented as $D = D^{1/2} D^{1/2}$ yet.
3885: `MatForwardSolve()` solves $U^T D y = b$, and
3886: `MatBackwardSolve()` solves $U x = y$.
3887: Thus they do not provide a symmetric preconditioner.
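
   Example:
   A sketch of splitting a solve into its two triangular stages (illustrative names; `F` is an LU-factored square matrix):
.vb
  Mat F;
  Vec b, y, x;

  PetscCall(VecDuplicate(b, &y));
  PetscCall(VecDuplicate(b, &x));
  PetscCall(MatForwardSolve(F, b, y));  /* y = L^{-1} b */
  PetscCall(MatBackwardSolve(F, y, x)); /* x = U^{-1} y, i.e. x = A^{-1} b */
  PetscCall(VecDestroy(&y));
  PetscCall(VecDestroy(&x));
.ve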
3889: .seealso: [](ch_matrices), `Mat`, `MatBackwardSolve()`, `MatGetFactor()`, `MatSolve()`
3890: @*/
3891: PetscErrorCode MatForwardSolve(Mat mat, Vec b, Vec x)
3892: {
3893: PetscFunctionBegin;
3898: PetscCheckSameComm(mat, 1, b, 2);
3899: PetscCheckSameComm(mat, 1, x, 3);
3900: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3901: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3902: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3903: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3904: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3905: MatCheckPreallocated(mat, 1);
3907: PetscCall(PetscLogEventBegin(MAT_ForwardSolve, mat, b, x, 0));
3908: PetscUseTypeMethod(mat, forwardsolve, b, x);
3909: PetscCall(PetscLogEventEnd(MAT_ForwardSolve, mat, b, x, 0));
3910: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3911: PetscFunctionReturn(PETSC_SUCCESS);
3912: }
3914: /*@
3915: MatBackwardSolve - Solves $U x = b$, given a factored matrix, $A = LU$, or
3916: $D^{1/2} U x = b$, given a factored symmetric matrix, $A = U^T D U$.
3918: Neighbor-wise Collective
3920: Input Parameters:
3921: + mat - the factored matrix
3922: - b - the right-hand-side vector
3924: Output Parameter:
3925: . x - the result vector
3927: Level: developer
3929: Notes:
3930: `MatSolve()` should be used for most applications, as it performs
3931: a forward solve followed by a backward solve.
3933: The vectors `b` and `x` cannot be the same. I.e., one cannot
3934: call `MatBackwardSolve`(A,x,x).
3936: For matrices in `MATSEQBAIJ` format with block size larger than 1,
3937: the diagonal blocks are not implemented as $D = D^{1/2} D^{1/2}$ yet.
3938: `MatForwardSolve()` solves $U^T D y = b$, and
3939: `MatBackwardSolve()` solves $U x = y$.
3940: Thus they do not provide a symmetric preconditioner.
3942: .seealso: [](ch_matrices), `Mat`, `MatForwardSolve()`, `MatGetFactor()`, `MatSolve()`
3943: @*/
3944: PetscErrorCode MatBackwardSolve(Mat mat, Vec b, Vec x)
3945: {
3946: PetscFunctionBegin;
3951: PetscCheckSameComm(mat, 1, b, 2);
3952: PetscCheckSameComm(mat, 1, x, 3);
3953: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3954: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3955: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3956: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3957: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3958: MatCheckPreallocated(mat, 1);
3960: PetscCall(PetscLogEventBegin(MAT_BackwardSolve, mat, b, x, 0));
3961: PetscUseTypeMethod(mat, backwardsolve, b, x);
3962: PetscCall(PetscLogEventEnd(MAT_BackwardSolve, mat, b, x, 0));
3963: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3964: PetscFunctionReturn(PETSC_SUCCESS);
3965: }
3967: /*@
3968: MatSolveAdd - Computes $x = y + A^{-1}*b$, given a factored matrix.
3970: Neighbor-wise Collective
3972: Input Parameters:
3973: + mat - the factored matrix
3974: . b - the right-hand-side vector
3975: - y - the vector to be added to
3977: Output Parameter:
3978: . x - the result vector
3980: Level: developer
3982: Note:
3983: The vectors `b` and `x` cannot be the same. I.e., one cannot
3984: call `MatSolveAdd`(A,x,y,x).
3986: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolve()`, `MatGetFactor()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3987: @*/
3988: PetscErrorCode MatSolveAdd(Mat mat, Vec b, Vec y, Vec x)
3989: {
3990: PetscScalar one = 1.0;
3991: Vec tmp;
3993: PetscFunctionBegin;
3999: PetscCheckSameComm(mat, 1, b, 2);
4000: PetscCheckSameComm(mat, 1, y, 3);
4001: PetscCheckSameComm(mat, 1, x, 4);
4002: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4003: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4004: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4005: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
4006: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4007: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4008: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4009: MatCheckPreallocated(mat, 1);
4011: PetscCall(PetscLogEventBegin(MAT_SolveAdd, mat, b, x, y));
4012: PetscCall(VecFlag(x, mat->factorerrortype));
4013: if (mat->factorerrortype) {
4014: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4015: } else if (mat->ops->solveadd) {
4016: PetscUseTypeMethod(mat, solveadd, b, y, x);
4017: } else {
4018: /* do the solve then the add manually */
4019: if (x != y) {
4020: PetscCall(MatSolve(mat, b, x));
4021: PetscCall(VecAXPY(x, one, y));
4022: } else {
4023: PetscCall(VecDuplicate(x, &tmp));
4024: PetscCall(VecCopy(x, tmp));
4025: PetscCall(MatSolve(mat, b, x));
4026: PetscCall(VecAXPY(x, one, tmp));
4027: PetscCall(VecDestroy(&tmp));
4028: }
4029: }
4030: PetscCall(PetscLogEventEnd(MAT_SolveAdd, mat, b, x, y));
4031: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4032: PetscFunctionReturn(PETSC_SUCCESS);
4033: }
4035: /*@
4036: MatSolveTranspose - Solves $A^T x = b$, given a factored matrix.
4038: Neighbor-wise Collective
4040: Input Parameters:
4041: + mat - the factored matrix
4042: - b - the right-hand-side vector
4044: Output Parameter:
4045: . x - the result vector
4047: Level: developer
4049: Notes:
4050: The vectors `b` and `x` cannot be the same. I.e., one cannot
4051: call `MatSolveTranspose`(A,x,x).
4053: Most users should employ the `KSP` interface for linear solvers
4054: instead of working directly with matrix algebra routines such as this.
4055: See, e.g., `KSPCreate()`.
4057: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `KSP`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTransposeAdd()`
4058: @*/
4059: PetscErrorCode MatSolveTranspose(Mat mat, Vec b, Vec x)
4060: {
4061: PetscErrorCode (*f)(Mat, Vec, Vec) = (!mat->ops->solvetranspose && mat->symmetric) ? mat->ops->solve : mat->ops->solvetranspose;
4063: PetscFunctionBegin;
4068: PetscCheckSameComm(mat, 1, b, 2);
4069: PetscCheckSameComm(mat, 1, x, 3);
4070: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4071: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4072: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4073: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4074: MatCheckPreallocated(mat, 1);
4075: PetscCall(PetscLogEventBegin(MAT_SolveTranspose, mat, b, x, 0));
4076: PetscCall(VecFlag(x, mat->factorerrortype));
4077: if (mat->factorerrortype) {
4078: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4079: } else {
4080: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s", ((PetscObject)mat)->type_name);
4081: PetscCall((*f)(mat, b, x));
4082: }
4083: PetscCall(PetscLogEventEnd(MAT_SolveTranspose, mat, b, x, 0));
4084: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4085: PetscFunctionReturn(PETSC_SUCCESS);
4086: }
4088: /*@
4089: MatSolveTransposeAdd - Computes $x = y + A^{-T} b$, given a
4090: factored matrix.
4092: Neighbor-wise Collective
4094: Input Parameters:
4095: + mat - the factored matrix
4096: . b - the right-hand-side vector
4097: - y - the vector to be added to
4099: Output Parameter:
4100: . x - the result vector
4102: Level: developer
4104: Note:
4105: The vectors `b` and `x` cannot be the same. I.e., one cannot
4106: call `MatSolveTransposeAdd`(A,x,y,x).
4108: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTranspose()`
4109: @*/
4110: PetscErrorCode MatSolveTransposeAdd(Mat mat, Vec b, Vec y, Vec x)
4111: {
4112: PetscScalar one = 1.0;
4113: Vec tmp;
4114: PetscErrorCode (*f)(Mat, Vec, Vec, Vec) = (!mat->ops->solvetransposeadd && mat->symmetric) ? mat->ops->solveadd : mat->ops->solvetransposeadd;
4116: PetscFunctionBegin;
4122: PetscCheckSameComm(mat, 1, b, 2);
4123: PetscCheckSameComm(mat, 1, y, 3);
4124: PetscCheckSameComm(mat, 1, x, 4);
4125: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4126: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4127: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4128: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
4129: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4130: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4131: MatCheckPreallocated(mat, 1);
4133: PetscCall(PetscLogEventBegin(MAT_SolveTransposeAdd, mat, b, x, y));
4134: PetscCall(VecFlag(x, mat->factorerrortype));
4135: if (mat->factorerrortype) {
4136: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4137: } else if (f) {
4138: PetscCall((*f)(mat, b, y, x));
4139: } else {
4140: /* do the solve then the add manually */
4141: if (x != y) {
4142: PetscCall(MatSolveTranspose(mat, b, x));
4143: PetscCall(VecAXPY(x, one, y));
4144: } else {
4145: PetscCall(VecDuplicate(x, &tmp));
4146: PetscCall(VecCopy(x, tmp));
4147: PetscCall(MatSolveTranspose(mat, b, x));
4148: PetscCall(VecAXPY(x, one, tmp));
4149: PetscCall(VecDestroy(&tmp));
4150: }
4151: }
4152: PetscCall(PetscLogEventEnd(MAT_SolveTransposeAdd, mat, b, x, y));
4153: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4154: PetscFunctionReturn(PETSC_SUCCESS);
4155: }
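/*
   Illustrative usage sketch (editorial addition, not part of the PETSc source): combining a
   factored matrix with MatSolveTransposeAdd(). The vectors b, y, x and the factored matrix F
   (obtained with MatGetFactor() followed by the symbolic/numeric factorization calls) are
   assumed to already exist.

     Vec b, y, x;   // right-hand side, vector to add, result
     Mat F;         // factored matrix
     // ... create the vectors and factor the matrix ...
     PetscCall(MatSolveTransposeAdd(F, b, y, x));   // x = y + inv(A^T) b
*/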
4157: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
4158: /*@
4159: MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.
4161: Neighbor-wise Collective
4163: Input Parameters:
4164: + mat - the matrix
4165: . b - the right-hand side
4166: . omega - the relaxation factor
4167: . flag - flag indicating the type of SOR (see below)
4168: . shift - diagonal shift
4169: . its - the number of iterations
4170: - lits - the number of local iterations
4172: Output Parameter:
4173: . x - the solution (can contain an initial guess, use option `SOR_ZERO_INITIAL_GUESS` to indicate no guess)
4175: SOR Flags:
4176: + `SOR_FORWARD_SWEEP` - forward SOR
4177: . `SOR_BACKWARD_SWEEP` - backward SOR
4178: . `SOR_SYMMETRIC_SWEEP` - SSOR (symmetric SOR)
4179: . `SOR_LOCAL_FORWARD_SWEEP` - local forward SOR
4180: . `SOR_LOCAL_BACKWARD_SWEEP` - local backward SOR
4181: . `SOR_LOCAL_SYMMETRIC_SWEEP` - local SSOR
4182: . `SOR_EISENSTAT` - SOR with Eisenstat trick
4183: . `SOR_APPLY_UPPER`, `SOR_APPLY_LOWER` - applies
4184: upper/lower triangular part of matrix to
4185: vector (with omega)
4186: - `SOR_ZERO_INITIAL_GUESS` - zero initial guess
4188: Level: developer
4190: Notes:
4191: `SOR_LOCAL_FORWARD_SWEEP`, `SOR_LOCAL_BACKWARD_SWEEP`, and
4192: `SOR_LOCAL_SYMMETRIC_SWEEP` perform separate independent smoothings
4193: on each processor.
4195: Application programmers will not generally use `MatSOR()` directly,
4196: but instead will employ the `KSP`/`PC` interface.
4198: For `MATBAIJ`, `MATSBAIJ`, and `MATAIJ` matrices with Inodes this does a block SOR smoothing; otherwise it does a pointwise smoothing.
4200: Most users should employ the `KSP` interface for linear solvers
4201: instead of working directly with matrix algebra routines such as this.
4202: See, e.g., `KSPCreate()`.
4204: Vectors `x` and `b` CANNOT be the same
4206: The flags are implemented as bitwise inclusive or operations.
4207: For example, use (`SOR_ZERO_INITIAL_GUESS` | `SOR_SYMMETRIC_SWEEP`)
4208: to specify a zero initial guess for SSOR.
4210: Developer Note:
4211: We should add block SOR support for `MATAIJ` matrices with block size greater than one and no inodes
4213: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `KSP`, `PC`, `MatGetFactor()`
4214: @*/
4215: PetscErrorCode MatSOR(Mat mat, Vec b, PetscReal omega, MatSORType flag, PetscReal shift, PetscInt its, PetscInt lits, Vec x)
4216: {
4217: PetscFunctionBegin;
4222: PetscCheckSameComm(mat, 1, b, 2);
4223: PetscCheckSameComm(mat, 1, x, 8);
4224: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4225: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4226: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4227: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4228: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4229: PetscCheck(its > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " positive", its);
4230: PetscCheck(lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires local its %" PetscInt_FMT " positive", lits);
4231: PetscCheck(b != x, PETSC_COMM_SELF, PETSC_ERR_ARG_IDN, "b and x vector cannot be the same");
4233: MatCheckPreallocated(mat, 1);
4234: PetscCall(PetscLogEventBegin(MAT_SOR, mat, b, x, 0));
4235: PetscUseTypeMethod(mat, sor, b, omega, flag, shift, its, lits, x);
4236: PetscCall(PetscLogEventEnd(MAT_SOR, mat, b, x, 0));
4237: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4238: PetscFunctionReturn(PETSC_SUCCESS);
4239: }
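/*
   Illustrative usage sketch (editorial addition): one symmetric SOR sweep with a zero initial
   guess applied directly through MatSOR(). Most users would reach this through the KSP/PC
   interface (e.g. -pc_type sor); the matrix A and vectors b, x are assumed to be set up already.

     PetscCall(MatSOR(A, b, 1.0, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_SYMMETRIC_SWEEP),
                      0.0, 1, 1, x));   // omega = 1.0, shift = 0.0, its = lits = 1
*/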
4241: /*
4242: Default matrix copy routine.
4243: */
4244: PetscErrorCode MatCopy_Basic(Mat A, Mat B, MatStructure str)
4245: {
4246: PetscInt i, rstart = 0, rend = 0, nz;
4247: const PetscInt *cwork;
4248: const PetscScalar *vwork;
4250: PetscFunctionBegin;
4251: if (B->assembled) PetscCall(MatZeroEntries(B));
4252: if (str == SAME_NONZERO_PATTERN) {
4253: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
4254: for (i = rstart; i < rend; i++) {
4255: PetscCall(MatGetRow(A, i, &nz, &cwork, &vwork));
4256: PetscCall(MatSetValues(B, 1, &i, nz, cwork, vwork, INSERT_VALUES));
4257: PetscCall(MatRestoreRow(A, i, &nz, &cwork, &vwork));
4258: }
4259: } else {
4260: PetscCall(MatAYPX(B, 0.0, A, str));
4261: }
4262: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
4263: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
4264: PetscFunctionReturn(PETSC_SUCCESS);
4265: }
4267: /*@
4268: MatCopy - Copies a matrix to another matrix.
4270: Collective
4272: Input Parameters:
4273: + A - the matrix
4274: - str - `SAME_NONZERO_PATTERN` or `DIFFERENT_NONZERO_PATTERN`
4276: Output Parameter:
4277: . B - where the copy is put
4279: Level: intermediate
4281: Notes:
4282: If you use `SAME_NONZERO_PATTERN`, then the two matrices must have the same nonzero pattern or the routine will crash.
4284: `MatCopy()` copies the matrix entries of a matrix to another existing
4285: matrix (after first zeroing the second matrix). A related routine is
4286: `MatConvert()`, which first creates a new matrix and then copies the data.
4288: .seealso: [](ch_matrices), `Mat`, `MatConvert()`, `MatDuplicate()`
4289: @*/
4290: PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str)
4291: {
4292: PetscInt i;
4294: PetscFunctionBegin;
4299: PetscCheckSameComm(A, 1, B, 2);
4300: MatCheckPreallocated(B, 2);
4301: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4302: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4303: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim (%" PetscInt_FMT ",%" PetscInt_FMT ") (%" PetscInt_FMT ",%" PetscInt_FMT ")", A->rmap->N, B->rmap->N,
4304: A->cmap->N, B->cmap->N);
4305: MatCheckPreallocated(A, 1);
4306: if (A == B) PetscFunctionReturn(PETSC_SUCCESS);
4308: PetscCall(PetscLogEventBegin(MAT_Copy, A, B, 0, 0));
4309: if (A->ops->copy) PetscUseTypeMethod(A, copy, B, str);
4310: else PetscCall(MatCopy_Basic(A, B, str));
4312: B->stencil.dim = A->stencil.dim;
4313: B->stencil.noc = A->stencil.noc;
4314: for (i = 0; i <= A->stencil.dim + (A->stencil.noc ? 0 : -1); i++) {
4315: B->stencil.dims[i] = A->stencil.dims[i];
4316: B->stencil.starts[i] = A->stencil.starts[i];
4317: }
4319: PetscCall(PetscLogEventEnd(MAT_Copy, A, B, 0, 0));
4320: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4321: PetscFunctionReturn(PETSC_SUCCESS);
4322: }
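/*
   Illustrative usage sketch (editorial addition): copying values into an existing matrix with
   the same nonzero pattern. Here B is created by duplicating A so that SAME_NONZERO_PATTERN is
   guaranteed to hold; A is assumed to be assembled.

     Mat B;
     PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &B));
     PetscCall(MatCopy(A, B, SAME_NONZERO_PATTERN));   // B now holds the entries of A
*/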
4324: /*@
4325: MatConvert - Converts a matrix to another matrix, either of the same
4326: or different type.
4328: Collective
4330: Input Parameters:
4331: + mat - the matrix
4332: . newtype - new matrix type. Use `MATSAME` to create a new matrix of the
4333: same type as the original matrix.
4334: - reuse - denotes whether the destination matrix is to be created or reused.
4335: Use `MAT_INPLACE_MATRIX` for in-place conversion (that is, when you want the input `Mat` to be changed to contain the matrix in the new format); otherwise use
4336: `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (the latter can only be used after a first call with `MAT_INITIAL_MATRIX` and causes the matrix space in M to be reused).
4338: Output Parameter:
4339: . M - pointer to place new matrix
4341: Level: intermediate
4343: Notes:
4344: `MatConvert()` first creates a new matrix and then copies the data from
4345: the first matrix. A related routine is `MatCopy()`, which copies the matrix
4346: entries of one matrix to another already existing matrix context.
4348: Cannot be used to convert a sequential matrix to parallel or parallel to sequential,
4349: the MPI communicator of the generated matrix is always the same as the communicator
4350: of the input matrix.
4352: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatDuplicate()`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
4353: @*/
4354: PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M)
4355: {
4356: PetscBool sametype, issame, flg;
4357: PetscBool3 issymmetric, ishermitian;
4358: char convname[256], mtype[256];
4359: Mat B;
4361: PetscFunctionBegin;
4364: PetscAssertPointer(M, 4);
4365: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4366: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4367: MatCheckPreallocated(mat, 1);
4369: PetscCall(PetscOptionsGetString(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matconvert_type", mtype, sizeof(mtype), &flg));
4370: if (flg) newtype = mtype;
4372: PetscCall(PetscObjectTypeCompare((PetscObject)mat, newtype, &sametype));
4373: PetscCall(PetscStrcmp(newtype, "same", &issame));
4374: PetscCheck(!(reuse == MAT_INPLACE_MATRIX) || !(mat != *M), PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires same input and output matrix");
4375: if (reuse == MAT_REUSE_MATRIX) {
4377: PetscCheck(mat != *M, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
4378: }
4380: if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) {
4381: PetscCall(PetscInfo(mat, "Early return for inplace %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4382: PetscFunctionReturn(PETSC_SUCCESS);
4383: }
4385: /* Cache Mat options because some converters use MatHeaderReplace */
4386: issymmetric = mat->symmetric;
4387: ishermitian = mat->hermitian;
4389: if ((sametype || issame) && (reuse == MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
4390: PetscCall(PetscInfo(mat, "Calling duplicate for initial matrix %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4391: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4392: } else {
4393: PetscErrorCode (*conv)(Mat, MatType, MatReuse, Mat *) = NULL;
4394: const char *prefix[3] = {"seq", "mpi", ""};
4395: PetscInt i;
4396: /*
4397: Order of precedence:
4398: 0) See if newtype is a superclass of the current matrix.
4399: 1) See if a specialized converter is known to the current matrix.
4400: 2) See if a specialized converter is known to the desired matrix class.
4401: 3) See if a good general converter is registered for the desired class
4402: (as of 6/27/03 only MATMPIADJ falls into this category).
4403: 4) See if a good general converter is known for the current matrix.
4404: 5) Use a really basic converter.
4405: */
4407: /* 0) See if newtype is a superclass of the current matrix.
4408: i.e., mat is mpiaij and newtype is aij */
4409: for (i = 0; i < 2; i++) {
4410: PetscCall(PetscStrncpy(convname, prefix[i], sizeof(convname)));
4411: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4412: PetscCall(PetscStrcmp(convname, ((PetscObject)mat)->type_name, &flg));
4413: PetscCall(PetscInfo(mat, "Check superclass %s %s -> %d\n", convname, ((PetscObject)mat)->type_name, flg));
4414: if (flg) {
4415: if (reuse == MAT_INPLACE_MATRIX) {
4416: PetscCall(PetscInfo(mat, "Early return\n"));
4417: PetscFunctionReturn(PETSC_SUCCESS);
4418: } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
4419: PetscCall(PetscInfo(mat, "Calling MatDuplicate\n"));
4420: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4421: PetscFunctionReturn(PETSC_SUCCESS);
4422: } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
4423: PetscCall(PetscInfo(mat, "Calling MatCopy\n"));
4424: PetscCall(MatCopy(mat, *M, SAME_NONZERO_PATTERN));
4425: PetscFunctionReturn(PETSC_SUCCESS);
4426: }
4427: }
4428: }
4429: /* 1) See if a specialized converter is known to the current matrix and the desired class */
4430: for (i = 0; i < 3; i++) {
4431: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4432: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4433: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4434: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4435: PetscCall(PetscStrlcat(convname, issame ? ((PetscObject)mat)->type_name : newtype, sizeof(convname)));
4436: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4437: PetscCall(PetscObjectQueryFunction((PetscObject)mat, convname, &conv));
4438: PetscCall(PetscInfo(mat, "Check specialized (1) %s (%s) -> %d\n", convname, ((PetscObject)mat)->type_name, !!conv));
4439: if (conv) goto foundconv;
4440: }
4442: /* 2) See if a specialized converter is known to the desired matrix class. */
4443: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
4444: PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
4445: PetscCall(MatSetType(B, newtype));
4446: for (i = 0; i < 3; i++) {
4447: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4448: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4449: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4450: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4451: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4452: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4453: PetscCall(PetscObjectQueryFunction((PetscObject)B, convname, &conv));
4454: PetscCall(PetscInfo(mat, "Check specialized (2) %s (%s) -> %d\n", convname, ((PetscObject)B)->type_name, !!conv));
4455: if (conv) {
4456: PetscCall(MatDestroy(&B));
4457: goto foundconv;
4458: }
4459: }
4461: /* 3) See if a good general converter is registered for the desired class */
4462: conv = B->ops->convertfrom;
4463: PetscCall(PetscInfo(mat, "Check convertfrom (%s) -> %d\n", ((PetscObject)B)->type_name, !!conv));
4464: PetscCall(MatDestroy(&B));
4465: if (conv) goto foundconv;
4467: /* 4) See if a good general converter is known for the current matrix */
4468: if (mat->ops->convert) conv = mat->ops->convert;
4469: PetscCall(PetscInfo(mat, "Check general convert (%s) -> %d\n", ((PetscObject)mat)->type_name, !!conv));
4470: if (conv) goto foundconv;
4472: /* 5) Use a really basic converter. */
4473: PetscCall(PetscInfo(mat, "Using MatConvert_Basic\n"));
4474: conv = MatConvert_Basic;
4476: foundconv:
4477: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4478: PetscCall((*conv)(mat, newtype, reuse, M));
4479: if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
4480: /* the block sizes must be same if the mappings are copied over */
4481: (*M)->rmap->bs = mat->rmap->bs;
4482: (*M)->cmap->bs = mat->cmap->bs;
4483: PetscCall(PetscObjectReference((PetscObject)mat->rmap->mapping));
4484: PetscCall(PetscObjectReference((PetscObject)mat->cmap->mapping));
4485: (*M)->rmap->mapping = mat->rmap->mapping;
4486: (*M)->cmap->mapping = mat->cmap->mapping;
4487: }
4488: (*M)->stencil.dim = mat->stencil.dim;
4489: (*M)->stencil.noc = mat->stencil.noc;
4490: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4491: (*M)->stencil.dims[i] = mat->stencil.dims[i];
4492: (*M)->stencil.starts[i] = mat->stencil.starts[i];
4493: }
4494: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4495: }
4496: PetscCall(PetscObjectStateIncrease((PetscObject)*M));
4498: /* Copy Mat options */
4499: if (issymmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_TRUE));
4500: else if (issymmetric == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_FALSE));
4501: if (ishermitian == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_TRUE));
4502: else if (ishermitian == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_FALSE));
4503: PetscFunctionReturn(PETSC_SUCCESS);
4504: }
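/*
   Illustrative usage sketch (editorial addition): converting an assembled matrix A to a dense
   matrix, and converting A itself in place to AIJ format. The target types are examples only;
   any registered MatType can be requested.

     Mat Adense;
     PetscCall(MatConvert(A, MATDENSE, MAT_INITIAL_MATRIX, &Adense));  // new dense copy of A
     PetscCall(MatConvert(A, MATAIJ, MAT_INPLACE_MATRIX, &A));         // A itself becomes AIJ
*/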
4506: /*@
4507: MatFactorGetSolverType - Returns name of the package providing the factorization routines
4509: Not Collective
4511: Input Parameter:
4512: . mat - the matrix, must be a factored matrix
4514: Output Parameter:
4515: . type - the string name of the package (do not free this string)
4517: Level: intermediate
4519: Fortran Note:
4520: Pass in an empty string that is long enough and the package name will be copied into it.
4522: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolverType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`
4523: @*/
4524: PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4525: {
4526: PetscErrorCode (*conv)(Mat, MatSolverType *);
4528: PetscFunctionBegin;
4531: PetscAssertPointer(type, 2);
4532: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
4533: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorGetSolverType_C", &conv));
4534: if (conv) PetscCall((*conv)(mat, type));
4535: else *type = MATSOLVERPETSC;
4536: PetscFunctionReturn(PETSC_SUCCESS);
4537: }
4539: typedef struct _MatSolverTypeForSpecifcType *MatSolverTypeForSpecifcType;
4540: struct _MatSolverTypeForSpecifcType {
4541: MatType mtype;
4542: /* no entry for MAT_FACTOR_NONE */
4543: PetscErrorCode (*createfactor[MAT_FACTOR_NUM_TYPES - 1])(Mat, MatFactorType, Mat *);
4544: MatSolverTypeForSpecifcType next;
4545: };
4547: typedef struct _MatSolverTypeHolder *MatSolverTypeHolder;
4548: struct _MatSolverTypeHolder {
4549: char *name;
4550: MatSolverTypeForSpecifcType handlers;
4551: MatSolverTypeHolder next;
4552: };
4554: static MatSolverTypeHolder MatSolverTypeHolders = NULL;
4556: /*@C
4557: MatSolverTypeRegister - Registers a `MatSolverType` that works for a particular matrix type
4559: Logically Collective, No Fortran Support
4561: Input Parameters:
4562: + package - name of the package, for example petsc or superlu
4563: . mtype - the matrix type that works with this package
4564: . ftype - the type of factorization supported by the package
4565: - createfactor - routine that will create the factored matrix ready to be used
4567: Level: developer
4569: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorGetSolverType()`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`,
4570: `MatGetFactor()`
4571: @*/
4572: PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFactorType ftype, PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *))
4573: {
4574: MatSolverTypeHolder next = MatSolverTypeHolders, prev = NULL;
4575: PetscBool flg;
4576: MatSolverTypeForSpecifcType inext, iprev = NULL;
4578: PetscFunctionBegin;
4579: PetscCall(MatInitializePackage());
4580: if (!next) {
4581: PetscCall(PetscNew(&MatSolverTypeHolders));
4582: PetscCall(PetscStrallocpy(package, &MatSolverTypeHolders->name));
4583: PetscCall(PetscNew(&MatSolverTypeHolders->handlers));
4584: PetscCall(PetscStrallocpy(mtype, (char **)&MatSolverTypeHolders->handlers->mtype));
4585: MatSolverTypeHolders->handlers->createfactor[(int)ftype - 1] = createfactor;
4586: PetscFunctionReturn(PETSC_SUCCESS);
4587: }
4588: while (next) {
4589: PetscCall(PetscStrcasecmp(package, next->name, &flg));
4590: if (flg) {
4591: PetscCheck(next->handlers, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatSolverTypeHolder is missing handlers");
4592: inext = next->handlers;
4593: while (inext) {
4594: PetscCall(PetscStrcasecmp(mtype, inext->mtype, &flg));
4595: if (flg) {
4596: inext->createfactor[(int)ftype - 1] = createfactor;
4597: PetscFunctionReturn(PETSC_SUCCESS);
4598: }
4599: iprev = inext;
4600: inext = inext->next;
4601: }
4602: PetscCall(PetscNew(&iprev->next));
4603: PetscCall(PetscStrallocpy(mtype, (char **)&iprev->next->mtype));
4604: iprev->next->createfactor[(int)ftype - 1] = createfactor;
4605: PetscFunctionReturn(PETSC_SUCCESS);
4606: }
4607: prev = next;
4608: next = next->next;
4609: }
4610: PetscCall(PetscNew(&prev->next));
4611: PetscCall(PetscStrallocpy(package, &prev->next->name));
4612: PetscCall(PetscNew(&prev->next->handlers));
4613: PetscCall(PetscStrallocpy(mtype, (char **)&prev->next->handlers->mtype));
4614: prev->next->handlers->createfactor[(int)ftype - 1] = createfactor;
4615: PetscFunctionReturn(PETSC_SUCCESS);
4616: }
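/*
   Illustrative usage sketch (editorial addition): how an external solver package would register
   its LU factorization for MATSEQAIJ matrices. The creation routine MatGetFactor_SeqAIJ_MySolver
   and the package name "mysolver" are hypothetical placeholders, not real PETSc symbols.

     extern PetscErrorCode MatGetFactor_SeqAIJ_MySolver(Mat, MatFactorType, Mat *);  // hypothetical
     PetscCall(MatSolverTypeRegister("mysolver", MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_SeqAIJ_MySolver));
*/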
4618: /*@C
4619: MatSolverTypeGet - Gets the function that creates the factor matrix if it exists
4621: Input Parameters:
4622: + type - name of the package, for example petsc or superlu; if this is `NULL`, then the first result that satisfies the other criteria is returned
4623: . ftype - the type of factorization supported by the type
4624: - mtype - the matrix type that works with this type
4626: Output Parameters:
4627: + foundtype - `PETSC_TRUE` if the type was registered
4628: . foundmtype - `PETSC_TRUE` if the type supports the requested mtype
4629: - createfactor - routine that will create the factored matrix ready to be used or `NULL` if not found
4631: Calling sequence of `createfactor`:
4632: + A - the matrix providing the factor matrix
4633: . ftype - the `MatFactorType` of the factor requested
4634: - B - the new factor matrix that responds to MatXXFactorSymbolic,Numeric() functions, such as `MatLUFactorSymbolic()`
4636: Level: developer
4638: Note:
4639: When `type` is `NULL` the available functions are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4640: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4641: For example if one configuration had `--download-mumps` while a different one had `--download-superlu_dist`.
4643: .seealso: [](ch_matrices), `Mat`, `MatFactorType`, `MatType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`,
4644: `MatInitializePackage()`
4645: @*/
4646: PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat A, MatFactorType ftype, Mat *B))
4647: {
4648: MatSolverTypeHolder next = MatSolverTypeHolders;
4649: PetscBool flg;
4650: MatSolverTypeForSpecifcType inext;
4652: PetscFunctionBegin;
4653: if (foundtype) *foundtype = PETSC_FALSE;
4654: if (foundmtype) *foundmtype = PETSC_FALSE;
4655: if (createfactor) *createfactor = NULL;
4657: if (type) {
4658: while (next) {
4659: PetscCall(PetscStrcasecmp(type, next->name, &flg));
4660: if (flg) {
4661: if (foundtype) *foundtype = PETSC_TRUE;
4662: inext = next->handlers;
4663: while (inext) {
4664: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4665: if (flg) {
4666: if (foundmtype) *foundmtype = PETSC_TRUE;
4667: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4668: PetscFunctionReturn(PETSC_SUCCESS);
4669: }
4670: inext = inext->next;
4671: }
4672: }
4673: next = next->next;
4674: }
4675: } else {
4676: while (next) {
4677: inext = next->handlers;
4678: while (inext) {
4679: PetscCall(PetscStrcmp(mtype, inext->mtype, &flg));
4680: if (flg && inext->createfactor[(int)ftype - 1]) {
4681: if (foundtype) *foundtype = PETSC_TRUE;
4682: if (foundmtype) *foundmtype = PETSC_TRUE;
4683: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4684: PetscFunctionReturn(PETSC_SUCCESS);
4685: }
4686: inext = inext->next;
4687: }
4688: next = next->next;
4689: }
4690: /* try with base classes inext->mtype */
4691: next = MatSolverTypeHolders;
4692: while (next) {
4693: inext = next->handlers;
4694: while (inext) {
4695: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4696: if (flg && inext->createfactor[(int)ftype - 1]) {
4697: if (foundtype) *foundtype = PETSC_TRUE;
4698: if (foundmtype) *foundmtype = PETSC_TRUE;
4699: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4700: PetscFunctionReturn(PETSC_SUCCESS);
4701: }
4702: inext = inext->next;
4703: }
4704: next = next->next;
4705: }
4706: }
4707: PetscFunctionReturn(PETSC_SUCCESS);
4708: }
4710: PetscErrorCode MatSolverTypeDestroy(void)
4711: {
4712: MatSolverTypeHolder next = MatSolverTypeHolders, prev;
4713: MatSolverTypeForSpecifcType inext, iprev;
4715: PetscFunctionBegin;
4716: while (next) {
4717: PetscCall(PetscFree(next->name));
4718: inext = next->handlers;
4719: while (inext) {
4720: PetscCall(PetscFree(inext->mtype));
4721: iprev = inext;
4722: inext = inext->next;
4723: PetscCall(PetscFree(iprev));
4724: }
4725: prev = next;
4726: next = next->next;
4727: PetscCall(PetscFree(prev));
4728: }
4729: MatSolverTypeHolders = NULL;
4730: PetscFunctionReturn(PETSC_SUCCESS);
4731: }
4733: /*@
4734: MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided in `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4736: Logically Collective
4738: Input Parameter:
4739: . mat - the matrix
4741: Output Parameter:
4742: . flg - `PETSC_TRUE` if the factorization uses the ordering
4744: Level: developer
4746: Note:
4747: Most internal PETSc factorizations use the ordering passed to the factorization routine but external
4748: packages do not, thus we want to skip generating the ordering when it is not needed or used.
4750: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4751: @*/
4752: PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg)
4753: {
4754: PetscFunctionBegin;
4755: *flg = mat->canuseordering;
4756: PetscFunctionReturn(PETSC_SUCCESS);
4757: }
4759: /*@
4760: MatFactorGetPreferredOrdering - Returns the preferred ordering for a particular matrix factor object
4762: Logically Collective
4764: Input Parameters:
4765: + mat - the matrix obtained with `MatGetFactor()`
4766: - ftype - the factorization type to be used
4768: Output Parameter:
4769: . otype - the preferred ordering type
4771: Level: developer
4773: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatOrderingType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4774: @*/
4775: PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOrderingType *otype)
4776: {
4777: PetscFunctionBegin;
4778: *otype = mat->preferredordering[ftype];
4779: PetscCheck(*otype, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatFactor did not have a preferred ordering");
4780: PetscFunctionReturn(PETSC_SUCCESS);
4781: }
4783: /*@
4784: MatGetFactor - Returns a matrix suitable for calls to MatXXFactorSymbolic,Numeric()
4786: Collective
4788: Input Parameters:
4789: + mat - the matrix
4790: . type - name of solver type, for example, superlu, petsc (to use PETSc's solver if it is available); if this is `NULL`, then the first result that satisfies
4791: the other criteria is returned
4792: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4794: Output Parameter:
4795: . f - the factor matrix used with MatXXFactorSymbolic,Numeric() calls. Can be `NULL` in some cases, see notes below.
4797: Options Database Keys:
4798: + -pc_factor_mat_solver_type <type> - choose the type at run time, when using `KSP` solvers
4799: - -mat_factor_bind_factorization <host, device> - where to perform the matrix factorization. The default is device, which may consume more device memory;
4800: choose host to save device memory. Currently only supported with `MATSEQAIJCUSPARSE` matrices.
4802: Level: intermediate
4804: Notes:
4805: The return matrix can be `NULL` if the requested factorization is not available, since some combinations of matrix types and factorization
4806: types registered with `MatSolverTypeRegister()` cannot be fully tested if not at runtime.
4808: Users usually access the factorization solvers via `KSP`
4810: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4811: such as pastix, superlu, mumps, etc. PETSc must have been configured (./configure) to use the external solver, using the option --download-package or --with-package-dir
4813: When `type` is `NULL` the available results are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4814: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4815: For example if one configuration had --download-mumps while a different one had --download-superlu_dist.
4817: Some of the packages have options for controlling the factorization; these are in the form -prefix_mat_packagename_packageoption,
4818: where prefix is normally obtained from the calling `KSP`/`PC`. If `MatGetFactor()` is called directly one can
4819: call `MatSetOptionsPrefixFactor()` on the originating matrix or `MatSetOptionsPrefix()` on the resulting factor matrix.
4821: Developer Note:
4822: This should actually be called `MatCreateFactor()` since it creates a new factor object
4824: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `KSP`, `MatSolverType`, `MatFactorType`, `MatCopy()`, `MatDuplicate()`,
4825: `MatGetFactorAvailable()`, `MatFactorGetCanUseOrdering()`, `MatSolverTypeRegister()`, `MatSolverTypeGet()`,
4826: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatInitializePackage()`
4827: @*/
4828: PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Mat *f)
4829: {
4830: PetscBool foundtype, foundmtype, shell, hasop = PETSC_FALSE;
4831: PetscErrorCode (*conv)(Mat, MatFactorType, Mat *);
4833: PetscFunctionBegin;
4837: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4838: MatCheckPreallocated(mat, 1);
4840: PetscCall(MatIsShell(mat, &shell));
4841: if (shell) PetscCall(MatHasOperation(mat, MATOP_GET_FACTOR, &hasop));
4842: if (hasop) {
4843: PetscUseTypeMethod(mat, getfactor, type, ftype, f);
4844: PetscFunctionReturn(PETSC_SUCCESS);
4845: }
4847: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, &foundtype, &foundmtype, &conv));
4848: if (!foundtype) {
4849: if (type) {
4850: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate solver type %s for factorization type %s and matrix type %s. Perhaps you must ./configure with --download-%s", type, MatFactorTypes[ftype],
4851: ((PetscObject)mat)->type_name, type);
4852: } else {
4853: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate a solver type for factorization type %s and matrix type %s.", MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4854: }
4855: }
4856: PetscCheck(foundmtype, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support matrix type %s", type, ((PetscObject)mat)->type_name);
4857: PetscCheck(conv, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support factorization type %s for matrix type %s", type, MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4859: PetscCall((*conv)(mat, ftype, f));
4860: if (mat->factorprefix) PetscCall(MatSetOptionsPrefix(*f, mat->factorprefix));
4861: PetscFunctionReturn(PETSC_SUCCESS);
4862: }
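/*
   Illustrative usage sketch (editorial addition): the typical factor-and-solve sequence built on
   MatGetFactor(). PETSc's own LU solver and the nested-dissection ordering are used here as an
   example choice; A, b, and x are assumed to exist and A to be assembled.

     Mat           F;
     IS            rowperm, colperm;
     MatFactorInfo info;

     PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
     PetscCall(MatGetOrdering(A, MATORDERINGND, &rowperm, &colperm));
     PetscCall(MatFactorInfoInitialize(&info));
     PetscCall(MatLUFactorSymbolic(F, A, rowperm, colperm, &info));
     PetscCall(MatLUFactorNumeric(F, A, &info));
     PetscCall(MatSolve(F, b, x));       // x = inv(A) b using the factorization stored in F
     PetscCall(ISDestroy(&rowperm));
     PetscCall(ISDestroy(&colperm));
     PetscCall(MatDestroy(&F));
*/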
4864: /*@
4865: MatGetFactorAvailable - Returns a flag indicating whether the matrix supports a particular solver type and factor type
4867: Not Collective
4869: Input Parameters:
4870: + mat - the matrix
4871: . type - name of solver type, for example, superlu, petsc (to use PETSc's default)
4872: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4874: Output Parameter:
4875: . flg - `PETSC_TRUE` if the factorization is available
4877: Level: intermediate
4879: Notes:
4880: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4881: such as pastix, superlu, mumps, etc.
4883: PETSc must have been configured (./configure) to use the external solver, using the option --download-package
4885: Developer Note:
4886: This should actually be called `MatCreateFactorAvailable()` since `MatGetFactor()` creates a new factor object
4888: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolverType`, `MatFactorType`, `MatGetFactor()`, `MatCopy()`, `MatDuplicate()`, `MatSolverTypeRegister()`,
4889: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatSolverTypeGet()`
4890: @*/
4891: PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type, MatFactorType ftype, PetscBool *flg)
4892: {
4893: PetscErrorCode (*gconv)(Mat, MatFactorType, Mat *);
4895: PetscFunctionBegin;
4897: PetscAssertPointer(flg, 4);
4899: *flg = PETSC_FALSE;
4900: if (!((PetscObject)mat)->type_name) PetscFunctionReturn(PETSC_SUCCESS);
4902: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4903: MatCheckPreallocated(mat, 1);
4905: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, NULL, NULL, &gconv));
4906: *flg = gconv ? PETSC_TRUE : PETSC_FALSE;
4907: PetscFunctionReturn(PETSC_SUCCESS);
4908: }
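/*
   Illustrative usage sketch (editorial addition): probing whether a MUMPS Cholesky factorization
   is available for the matrix before requesting it. MATSOLVERMUMPS is only available if PETSc
   was configured with MUMPS support; A is assumed to exist and be assembled.

     Mat       F;
     PetscBool flg;
     PetscCall(MatGetFactorAvailable(A, MATSOLVERMUMPS, MAT_FACTOR_CHOLESKY, &flg));
     if (flg) PetscCall(MatGetFactor(A, MATSOLVERMUMPS, MAT_FACTOR_CHOLESKY, &F));
*/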
4910: /*@
4911: MatDuplicate - Duplicates a matrix including the non-zero structure.
4913: Collective
4915: Input Parameters:
4916: + mat - the matrix
4917: - op - One of `MAT_DO_NOT_COPY_VALUES`, `MAT_COPY_VALUES`, or `MAT_SHARE_NONZERO_PATTERN`.
4918: See the manual page for `MatDuplicateOption()` for an explanation of these options.
4920: Output Parameter:
4921: . M - pointer to place new matrix
4923: Level: intermediate
4925: Notes:
4926: You cannot change the nonzero pattern for the parent or child matrix later if you use `MAT_SHARE_NONZERO_PATTERN`.
4928: If `op` is not `MAT_COPY_VALUES` the numerical values in the new matrix are zeroed.
4930: May be called with an unassembled input `Mat` if `MAT_DO_NOT_COPY_VALUES` is used, in which case the output `Mat` is unassembled as well.
4932: When the original matrix is the product of a matrix operation, e.g., the output of `MatMatMult()` or `MatCreateSubMatrix()`, only the matrix data structure of `mat`
4933: is duplicated; the internal data structures created for the reuse of previous matrix operations are not duplicated.
4934: Users should not use `MatDuplicate()` to create a new matrix `M` if `M` is intended to be reused as the product of a matrix operation.
4936: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatConvert()`, `MatDuplicateOption`
4937: @*/
4938: PetscErrorCode MatDuplicate(Mat mat, MatDuplicateOption op, Mat *M)
4939: {
4940: Mat B;
4941: VecType vtype;
4942: PetscInt i;
4943: PetscObject dm, container_h, container_d;
4944: void (*viewf)(void);
4946: PetscFunctionBegin;
4949: PetscAssertPointer(M, 3);
4950: PetscCheck(op != MAT_COPY_VALUES || mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MAT_COPY_VALUES not allowed for unassembled matrix");
4951: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4952: MatCheckPreallocated(mat, 1);
4954: *M = NULL;
4955: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4956: PetscUseTypeMethod(mat, duplicate, op, M);
4957: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4958: B = *M;
4960: PetscCall(MatGetOperation(mat, MATOP_VIEW, &viewf));
4961: if (viewf) PetscCall(MatSetOperation(B, MATOP_VIEW, viewf));
4962: PetscCall(MatGetVecType(mat, &vtype));
4963: PetscCall(MatSetVecType(B, vtype));
4965: B->stencil.dim = mat->stencil.dim;
4966: B->stencil.noc = mat->stencil.noc;
4967: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4968: B->stencil.dims[i] = mat->stencil.dims[i];
4969: B->stencil.starts[i] = mat->stencil.starts[i];
4970: }
4972: B->nooffproczerorows = mat->nooffproczerorows;
4973: B->nooffprocentries = mat->nooffprocentries;
4975: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_dm", &dm));
4976: if (dm) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_dm", dm));
4977: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Host", &container_h));
4978: if (container_h) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Host", container_h));
4979: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Device", &container_d));
4980: if (container_d) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Device", container_d));
4981: if (op == MAT_COPY_VALUES) PetscCall(MatPropagateSymmetryOptions(mat, B));
4982: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4983: PetscFunctionReturn(PETSC_SUCCESS);
4984: }
4986: /*@
4987: MatGetDiagonal - Gets the diagonal of a matrix as a `Vec`
4989: Logically Collective
4991: Input Parameter:
4992: . mat - the matrix
4994: Output Parameter:
4995: . v - the diagonal of the matrix
4997: Level: intermediate
4999: Note:
5000: If `mat` has local sizes `n` x `m`, this routine fills the first `ndiag = min(n, m)` entries
5001: of `v` with the diagonal values. Thus `v` must have local size of at least `ndiag`. If `v`
5002: is larger than `ndiag`, the values of the remaining entries are unspecified.
5004: Currently only correct in parallel for square matrices.
5006: .seealso: [](ch_matrices), `Mat`, `Vec`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`
5007: @*/
5008: PetscErrorCode MatGetDiagonal(Mat mat, Vec v)
5009: {
5010: PetscFunctionBegin;
5014: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5015: MatCheckPreallocated(mat, 1);
5016: if (PetscDefined(USE_DEBUG)) {
5017: PetscInt nv, row, col, ndiag;
5019: PetscCall(VecGetLocalSize(v, &nv));
5020: PetscCall(MatGetLocalSize(mat, &row, &col));
5021: ndiag = PetscMin(row, col);
5022: PetscCheck(nv >= ndiag, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming Mat and Vec. Vec local size %" PetscInt_FMT " < Mat local diagonal length %" PetscInt_FMT, nv, ndiag);
5023: }
5025: PetscUseTypeMethod(mat, getdiagonal, v);
5026: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5027: PetscFunctionReturn(PETSC_SUCCESS);
5028: }
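/*
   Illustrative usage sketch (editorial addition): extracting the diagonal of a square assembled
   matrix into a compatible vector created with MatCreateVecs().

     Vec diag;
     PetscCall(MatCreateVecs(A, NULL, &diag));   // left vector, conforming to the row layout of A
     PetscCall(MatGetDiagonal(A, diag));
*/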
5030: /*@
5031: MatGetRowMin - Gets the minimum value (of the real part) of each
5032: row of the matrix
5034: Logically Collective
5036: Input Parameter:
5037: . mat - the matrix
5039: Output Parameters:
5040: + v - the vector for storing the minimums
5041: - idx - the indices of the column found for each row (optional, pass `NULL` if not needed)
5043: Level: intermediate
5045: Note:
5046: The results of this call are the same as if one converted the matrix to dense format
5047: and found the minimum value in each row (i.e., the implicit zeros are counted as zeros).
5049: This code is only implemented for a couple of matrix formats.
5051: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`,
5052: `MatGetRowMax()`
5053: @*/
5054: PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[])
5055: {
5056: PetscFunctionBegin;
5060: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5062: if (!mat->cmap->N) {
5063: PetscCall(VecSet(v, PETSC_MAX_REAL));
5064: if (idx) {
5065: PetscInt i, m = mat->rmap->n;
5066: for (i = 0; i < m; i++) idx[i] = -1;
5067: }
5068: } else {
5069: MatCheckPreallocated(mat, 1);
5070: }
5071: PetscUseTypeMethod(mat, getrowmin, v, idx);
5072: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5073: PetscFunctionReturn(PETSC_SUCCESS);
5074: }
5076: /*@
5077: MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
5078: row of the matrix
5080: Logically Collective
5082: Input Parameter:
5083: . mat - the matrix
5085: Output Parameters:
5086: + v - the vector for storing the minimums
5087: - idx - the indices of the column found for each row (or `NULL` if not needed)
5089: Level: intermediate
5091: Notes:
5092: if a row is completely empty or has only 0.0 values, then the `idx` value for that
5093: row is 0 (the first column).
5095: This code is only implemented for a couple of matrix formats.
5097: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
5098: @*/
5099: PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[])
5100: {
5101: PetscFunctionBegin;
5105: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5106: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5108: if (!mat->cmap->N) {
5109: PetscCall(VecSet(v, 0.0));
5110: if (idx) {
5111: PetscInt i, m = mat->rmap->n;
5112: for (i = 0; i < m; i++) idx[i] = -1;
5113: }
5114: } else {
5115: MatCheckPreallocated(mat, 1);
5116: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5117: PetscUseTypeMethod(mat, getrowminabs, v, idx);
5118: }
5119: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5120: PetscFunctionReturn(PETSC_SUCCESS);
5121: }
5123: /*@
5124: MatGetRowMax - Gets the maximum value (of the real part) of each
5125: row of the matrix
5127: Logically Collective
5129: Input Parameter:
5130: . mat - the matrix
5132: Output Parameters:
5133: + v - the vector for storing the maximums
5134: - idx - the indices of the column found for each row (optional, otherwise pass `NULL`)
5136: Level: intermediate
5138: Notes:
5139: The results of this call are the same as if one converted the matrix to dense format
5140: and found the maximum value in each row (i.e., the implicit zeros are counted as zeros).
5142: This code is only implemented for a couple of matrix formats.
5144: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5145: @*/
5146: PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[])
5147: {
5148: PetscFunctionBegin;
5152: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5154: if (!mat->cmap->N) {
5155: PetscCall(VecSet(v, PETSC_MIN_REAL));
5156: if (idx) {
5157: PetscInt i, m = mat->rmap->n;
5158: for (i = 0; i < m; i++) idx[i] = -1;
5159: }
5160: } else {
5161: MatCheckPreallocated(mat, 1);
5162: PetscUseTypeMethod(mat, getrowmax, v, idx);
5163: }
5164: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5165: PetscFunctionReturn(PETSC_SUCCESS);
5166: }
5168: /*@
5169: MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
5170: row of the matrix
5172: Logically Collective
5174: Input Parameter:
5175: . mat - the matrix
5177: Output Parameters:
5178: + v - the vector for storing the maximums
5179: - idx - the indices of the column found for each row (or `NULL` if not needed)
5181: Level: intermediate
5183: Notes:
5184: if a row is completely empty or has only 0.0 values, then the `idx` value for that
5185: row is 0 (the first column).
5187: This code is only implemented for a couple of matrix formats.
5189: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowSum()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5190: @*/
5191: PetscErrorCode MatGetRowMaxAbs(Mat mat, Vec v, PetscInt idx[])
5192: {
5193: PetscFunctionBegin;
5197: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5199: if (!mat->cmap->N) {
5200: PetscCall(VecSet(v, 0.0));
5201: if (idx) {
5202: PetscInt i, m = mat->rmap->n;
5203: for (i = 0; i < m; i++) idx[i] = -1;
5204: }
5205: } else {
5206: MatCheckPreallocated(mat, 1);
5207: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5208: PetscUseTypeMethod(mat, getrowmaxabs, v, idx);
5209: }
5210: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5211: PetscFunctionReturn(PETSC_SUCCESS);
5212: }
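/*
   Illustrative usage sketch (editorial addition): getting the largest absolute entry of each
   local row together with its column index, e.g. for a diagonal-dominance check. The same
   pattern applies to MatGetRowMin(), MatGetRowMinAbs(), and MatGetRowMax().

     Vec      rowmax;
     PetscInt m, *idx;
     PetscCall(MatCreateVecs(A, NULL, &rowmax));    // conforms to the row layout of A
     PetscCall(MatGetLocalSize(A, &m, NULL));
     PetscCall(PetscMalloc1(m, &idx));
     PetscCall(MatGetRowMaxAbs(A, rowmax, idx));    // idx[i] = column of the max in local row i
     PetscCall(PetscFree(idx));
     PetscCall(VecDestroy(&rowmax));
*/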
5214: /*@
5215: MatGetRowSumAbs - Gets the sum of the absolute values of the entries in each row of the matrix
5217: Logically Collective
5219: Input Parameter:
5220: . mat - the matrix
5222: Output Parameter:
5223: . v - the vector for storing the sum
5225: Level: intermediate
5227: Note: This code is only implemented for a couple of matrix formats.
5229: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5230: @*/
5231: PetscErrorCode MatGetRowSumAbs(Mat mat, Vec v)
5232: {
5233: PetscFunctionBegin;
5237: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5239: if (!mat->cmap->N) {
5240: PetscCall(VecSet(v, 0.0));
5241: } else {
5242: MatCheckPreallocated(mat, 1);
5243: PetscUseTypeMethod(mat, getrowsumabs, v);
5244: }
5245: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5246: PetscFunctionReturn(PETSC_SUCCESS);
5247: }
5249: /*@
5250: MatGetRowSum - Gets the sum of each row of the matrix
5252: Logically or Neighborhood Collective
5254: Input Parameter:
5255: . mat - the matrix
5257: Output Parameter:
5258: . v - the vector for storing the sum of rows
5260: Level: intermediate
5262: Note:
5263: This code is slow since it is not currently specialized for different formats
5265: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`, `MatGetRowSumAbs()`
5266: @*/
5267: PetscErrorCode MatGetRowSum(Mat mat, Vec v)
5268: {
5269: Vec ones;
5271: PetscFunctionBegin;
5275: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5276: MatCheckPreallocated(mat, 1);
5277: PetscCall(MatCreateVecs(mat, &ones, NULL));
5278: PetscCall(VecSet(ones, 1.));
5279: PetscCall(MatMult(mat, ones, v));
5280: PetscCall(VecDestroy(&ones));
5281: PetscFunctionReturn(PETSC_SUCCESS);
5282: }
5284: /*@
5285: MatTransposeSetPrecursor - Sets the matrix from which the second matrix will receive numerical transpose data with a call to `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B),
5286: for the case when B was not obtained with `MatTranspose`(A,`MAT_INITIAL_MATRIX`,&B)
5288: Collective
5290: Input Parameter:
5291: . mat - the matrix to provide the transpose
5293: Output Parameter:
5294: . B - the matrix to contain the transpose; it MUST have the nonzero structure of the transpose of A or the code will crash or generate incorrect results
5296: Level: advanced
5298: Note:
5299: Normally the use of `MatTranspose`(A, `MAT_REUSE_MATRIX`, &B) requires that `B` was obtained with a call to `MatTranspose`(A, `MAT_INITIAL_MATRIX`, &B). This
5300: routine allows bypassing that call.
5302: .seealso: [](ch_matrices), `Mat`, `MatTransposeSymbolic()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5303: @*/
5304: PetscErrorCode MatTransposeSetPrecursor(Mat mat, Mat B)
5305: {
5306: MatParentState *rb = NULL;
5308: PetscFunctionBegin;
5309: PetscCall(PetscNew(&rb));
5310: rb->id = ((PetscObject)mat)->id;
5311: rb->state = 0;
5312: PetscCall(MatGetNonzeroState(mat, &rb->nonzerostate));
5313: PetscCall(PetscObjectContainerCompose((PetscObject)B, "MatTransposeParent", rb, PetscContainerUserDestroyDefault));
5314: PetscFunctionReturn(PETSC_SUCCESS);
5315: }
5317: /*@
5318: MatTranspose - Computes the transpose of a matrix, either in-place or out-of-place.
5320: Collective
5322: Input Parameters:
5323: + mat - the matrix to transpose
5324: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5326: Output Parameter:
5327: . B - the transpose of the matrix
5329: Level: intermediate
5331: Notes:
5332: If you use `MAT_INPLACE_MATRIX` then you must pass in `&mat` for `B`
5334: `MAT_REUSE_MATRIX` uses the `B` matrix obtained from a previous call to this function with `MAT_INITIAL_MATRIX` to store the transpose. If you already have a matrix to contain the
5335: transpose, call `MatTransposeSetPrecursor(mat, B)` before calling this routine.
5337: If the nonzero structure of `mat` changed from the previous call to this function with the same matrices an error will be generated for some matrix types.
5339: Consider using `MatCreateTranspose()` instead if you only need a matrix that behaves like the transpose but don't need the storage to be changed.
5340: For example, the result of `MatCreateTranspose()` will compute the transpose of the given matrix times a vector for matrix-vector products computed with `MatMult()`.
5342: If `mat` is unchanged from the last call this function returns immediately without recomputing the result
5344: If you only need the symbolic transpose of a matrix, and not the numerical values, use `MatTransposeSymbolic()`
5346: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`,
5347: `MatTransposeSymbolic()`, `MatCreateTranspose()`
5348: @*/
5349: PetscErrorCode MatTranspose(Mat mat, MatReuse reuse, Mat *B)
5350: {
5351: PetscContainer rB = NULL;
5352: MatParentState *rb = NULL;
5354: PetscFunctionBegin;
5357: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5358: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5359: PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires last matrix to match first");
5360: PetscCheck(reuse != MAT_REUSE_MATRIX || mat != *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Perhaps you mean MAT_INPLACE_MATRIX");
5361: MatCheckPreallocated(mat, 1);
5362: if (reuse == MAT_REUSE_MATRIX) {
5363: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5364: PetscCheck(rB, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose(). Suggest MatTransposeSetPrecursor().");
5365: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5366: PetscCheck(rb->id == ((PetscObject)mat)->id, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5367: if (rb->state == ((PetscObject)mat)->state) PetscFunctionReturn(PETSC_SUCCESS);
5368: }
5370: PetscCall(PetscLogEventBegin(MAT_Transpose, mat, 0, 0, 0));
5371: if (reuse != MAT_INPLACE_MATRIX || mat->symmetric != PETSC_BOOL3_TRUE) {
5372: PetscUseTypeMethod(mat, transpose, reuse, B);
5373: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5374: }
5375: PetscCall(PetscLogEventEnd(MAT_Transpose, mat, 0, 0, 0));
5377: if (reuse == MAT_INITIAL_MATRIX) PetscCall(MatTransposeSetPrecursor(mat, *B));
5378: if (reuse != MAT_INPLACE_MATRIX) {
5379: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5380: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5381: rb->state = ((PetscObject)mat)->state;
5382: rb->nonzerostate = mat->nonzerostate;
5383: }
5384: PetscFunctionReturn(PETSC_SUCCESS);
5385: }
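/*
   Illustrative usage sketch (editorial addition): computing an explicit transpose once and then
   refreshing its numerical values after A changes, reusing the same storage.

     Mat At;
     PetscCall(MatTranspose(A, MAT_INITIAL_MATRIX, &At));  // allocates At = A^T
     // ... modify the entries of A (same nonzero pattern) ...
     PetscCall(MatTranspose(A, MAT_REUSE_MATRIX, &At));    // recomputes the values of At
*/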
5387: /*@
5388: MatTransposeSymbolic - Computes the symbolic part of the transpose of a matrix.
5390: Collective
5392: Input Parameter:
5393: . A - the matrix to transpose
5395: Output Parameter:
5396: . B - the transpose. This is a complete matrix but the numerical portion is invalid. One can call `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B) to compute the
5397: numerical portion.
5399: Level: intermediate
5401: Note:
5402: This is not supported for many matrix types, use `MatTranspose()` in those cases
5404: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5405: @*/
5406: PetscErrorCode MatTransposeSymbolic(Mat A, Mat *B)
5407: {
5408: PetscFunctionBegin;
5411: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5412: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5413: PetscCall(PetscLogEventBegin(MAT_Transpose, A, 0, 0, 0));
5414: PetscUseTypeMethod(A, transposesymbolic, B);
5415: PetscCall(PetscLogEventEnd(MAT_Transpose, A, 0, 0, 0));
5417: PetscCall(MatTransposeSetPrecursor(A, *B));
5418: PetscFunctionReturn(PETSC_SUCCESS);
5419: }
5421: PetscErrorCode MatTransposeCheckNonzeroState_Private(Mat A, Mat B)
5422: {
5423: PetscContainer rB;
5424: MatParentState *rb;
5426: PetscFunctionBegin;
5429: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5430: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5431: PetscCall(PetscObjectQuery((PetscObject)B, "MatTransposeParent", (PetscObject *)&rB));
5432: PetscCheck(rB, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose()");
5433: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5434: PetscCheck(rb->id == ((PetscObject)A)->id, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5435: PetscCheck(rb->nonzerostate == A->nonzerostate, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Reuse matrix has changed nonzero structure");
5436: PetscFunctionReturn(PETSC_SUCCESS);
5437: }
5439: /*@
5440: MatIsTranspose - Test whether a matrix is another one's transpose,
5441: or its own, in which case it tests symmetry.
5443: Collective
5445: Input Parameters:
5446: + A - the matrix to test
5447: . B - the matrix to test against, this can equal the first parameter
5448: - tol - tolerance, differences between entries smaller than this are counted as zero
5450: Output Parameter:
5451: . flg - the result
5453: Level: intermediate
5455: Notes:
5456: The sequential algorithm has a running time of the order of the number of nonzeros; the parallel
5457: test involves parallel copies of the block off-diagonal parts of the matrix.
5459: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`
5460: @*/
5461: PetscErrorCode MatIsTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5462: {
5463: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5465: PetscFunctionBegin;
5468: PetscAssertPointer(flg, 4);
5469: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsTranspose_C", &f));
5470: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsTranspose_C", &g));
5471: *flg = PETSC_FALSE;
5472: if (f && g) {
5473: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for symmetry test");
5474: PetscCall((*f)(A, B, tol, flg));
5475: } else {
5476: MatType mattype;
5478: PetscCall(MatGetType(f ? B : A, &mattype));
5479: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for transpose", mattype);
5480: }
5481: PetscFunctionReturn(PETSC_SUCCESS);
5482: }
5484: /*@
5485: MatHermitianTranspose - Computes the Hermitian (conjugate) transpose of a matrix, either in-place or out-of-place.
5487: Collective
5489: Input Parameters:
5490: + mat - the matrix to transpose and complex conjugate
5491: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5493: Output Parameter:
5494: . B - the Hermitian transpose
5496: Level: intermediate
5498: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
5499: @*/
5500: PetscErrorCode MatHermitianTranspose(Mat mat, MatReuse reuse, Mat *B)
5501: {
5502: PetscFunctionBegin;
5503: PetscCall(MatTranspose(mat, reuse, B));
5504: #if defined(PETSC_USE_COMPLEX)
5505: PetscCall(MatConjugate(*B));
5506: #endif
5507: PetscFunctionReturn(PETSC_SUCCESS);
5508: }
5510: /*@
5511: MatIsHermitianTranspose - Tests whether a matrix is another one's Hermitian transpose.
5513: Collective
5515: Input Parameters:
5516: + A - the matrix to test
5517: . B - the matrix to test against, this can equal the first parameter
5518: - tol - tolerance, differences between entries smaller than this are counted as zero
5520: Output Parameter:
5521: . flg - the result
5523: Level: intermediate
5525: Notes:
5526: Only available for `MATAIJ` matrices.
5528: The sequential algorithm
5529: has a running time of the order of the number of nonzeros; the parallel
5530: test involves parallel copies of the block off-diagonal parts of the matrix.
5532: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsTranspose()`
5533: @*/
5534: PetscErrorCode MatIsHermitianTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5535: {
5536: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5538: PetscFunctionBegin;
5541: PetscAssertPointer(flg, 4);
5542: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsHermitianTranspose_C", &f));
5543: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsHermitianTranspose_C", &g));
5544: if (f && g) {
5545: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for Hermitian test");
5546: PetscCall((*f)(A, B, tol, flg));
5547: }
5548: PetscFunctionReturn(PETSC_SUCCESS);
5549: }
5551: /*@
5552: MatPermute - Creates a new matrix with rows and columns permuted from the
5553: original.
5555: Collective
5557: Input Parameters:
5558: + mat - the matrix to permute
5559: . row - row permutation, each processor supplies only the permutation for its rows
5560: - col - column permutation, each processor supplies only the permutation for its columns
5562: Output Parameter:
5563: . B - the permuted matrix
5565: Level: advanced
5567: Note:
5568: The index sets map from row/col of permuted matrix to row/col of original matrix.
5569: The index sets should be on the same communicator as mat and have the same local sizes.
5571: Developer Note:
5572: If you want to implement `MatPermute()` for a matrix type, and your approach doesn't
5573: exploit the fact that row and col are permutations, consider implementing the
5574: more general `MatCreateSubMatrix()` instead.
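   Example Usage:
   A minimal sketch that permutes a matrix with an ordering computed by `MatGetOrdering()` (assuming `mat` is an assembled matrix; the ordering type is illustrative):
.vb
   IS  rperm, cperm;
   Mat B;
   PetscCall(MatGetOrdering(mat, MATORDERINGRCM, &rperm, &cperm));
   PetscCall(MatPermute(mat, rperm, cperm, &B));
   PetscCall(ISDestroy(&rperm));
   PetscCall(ISDestroy(&cperm));
.ve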
5576: .seealso: [](ch_matrices), `Mat`, `MatGetOrdering()`, `ISAllGather()`, `MatCreateSubMatrix()`
5577: @*/
5578: PetscErrorCode MatPermute(Mat mat, IS row, IS col, Mat *B)
5579: {
5580: PetscFunctionBegin;
5585: PetscAssertPointer(B, 4);
5586: PetscCheckSameComm(mat, 1, row, 2);
5587: if (row != col) PetscCheckSameComm(row, 2, col, 3);
5588: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5589: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5590: PetscCheck(mat->ops->permute || mat->ops->createsubmatrix, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatPermute not available for Mat type %s", ((PetscObject)mat)->type_name);
5591: MatCheckPreallocated(mat, 1);
5593: if (mat->ops->permute) {
5594: PetscUseTypeMethod(mat, permute, row, col, B);
5595: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5596: } else {
5597: PetscCall(MatCreateSubMatrix(mat, row, col, MAT_INITIAL_MATRIX, B));
5598: }
5599: PetscFunctionReturn(PETSC_SUCCESS);
5600: }
5602: /*@
5603: MatEqual - Compares two matrices.
5605: Collective
5607: Input Parameters:
5608: + A - the first matrix
5609: - B - the second matrix
5611: Output Parameter:
5612: . flg - `PETSC_TRUE` if the matrices are equal; `PETSC_FALSE` otherwise.
5614: Level: intermediate
5616: Note:
5617: If either of the matrices is "matrix-free", meaning the matrix entries are not stored explicitly, then equality is determined by comparing the results of several matrix-vector products
5618: using several randomly created vectors, see `MatMultEqual()`.
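   Example Usage:
   A minimal sketch (assuming `A` and `B` are assembled matrices with the same global dimensions):
.vb
   PetscBool eq;
   PetscCall(MatEqual(A, B, &eq));
.ve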
5620: .seealso: [](ch_matrices), `Mat`, `MatMultEqual()`
5621: @*/
5622: PetscErrorCode MatEqual(Mat A, Mat B, PetscBool *flg)
5623: {
5624: PetscFunctionBegin;
5629: PetscAssertPointer(flg, 3);
5630: PetscCheckSameComm(A, 1, B, 2);
5631: MatCheckPreallocated(A, 1);
5632: MatCheckPreallocated(B, 2);
5633: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5634: PetscCheck(B->assembled, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5635: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N, A->cmap->N,
5636: B->cmap->N);
5637: if (A->ops->equal && A->ops->equal == B->ops->equal) {
5638: PetscUseTypeMethod(A, equal, B, flg);
5639: } else {
5640: PetscCall(MatMultEqual(A, B, 10, flg));
5641: }
5642: PetscFunctionReturn(PETSC_SUCCESS);
5643: }
5645: /*@
5646: MatDiagonalScale - Scales a matrix on the left and right by diagonal
5647: matrices that are stored as vectors. Either of the two scaling
5648: matrices can be `NULL`.
5650: Collective
5652: Input Parameters:
5653: + mat - the matrix to be scaled
5654: . l - the left scaling vector (or `NULL`)
5655: - r - the right scaling vector (or `NULL`)
5657: Level: intermediate
5659: Note:
5660: `MatDiagonalScale()` computes $A = LAR$, where
5661: $L$ is a diagonal matrix (stored as the vector `l`) and $R$ is a diagonal matrix (stored as the vector `r`).
5662: $L$ scales the rows of the matrix and $R$ scales the columns.
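   Example Usage:
   A minimal sketch of symmetric scaling, $A \gets D A D$, using the same vector on both sides (assuming `mat` is a square assembled matrix and `d` is a vector with a compatible layout; the names are illustrative):
.vb
   PetscCall(MatDiagonalScale(mat, d, d));
.ve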
5664: .seealso: [](ch_matrices), `Mat`, `MatScale()`, `MatShift()`, `MatDiagonalSet()`
5665: @*/
5666: PetscErrorCode MatDiagonalScale(Mat mat, Vec l, Vec r)
5667: {
5668: PetscFunctionBegin;
5671: if (l) {
5673: PetscCheckSameComm(mat, 1, l, 2);
5674: }
5675: if (r) {
5677: PetscCheckSameComm(mat, 1, r, 3);
5678: }
5679: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5680: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5681: MatCheckPreallocated(mat, 1);
5682: if (!l && !r) PetscFunctionReturn(PETSC_SUCCESS);
5684: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5685: PetscUseTypeMethod(mat, diagonalscale, l, r);
5686: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5687: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5688: if (l != r) mat->symmetric = PETSC_BOOL3_FALSE;
5689: PetscFunctionReturn(PETSC_SUCCESS);
5690: }
5692: /*@
5693: MatScale - Scales all elements of a matrix by a given number.
5695: Logically Collective
5697: Input Parameters:
5698: + mat - the matrix to be scaled
5699: - a - the scaling value
5701: Level: intermediate
5703: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
5704: @*/
5705: PetscErrorCode MatScale(Mat mat, PetscScalar a)
5706: {
5707: PetscFunctionBegin;
5710: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5711: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5713: MatCheckPreallocated(mat, 1);
5715: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5716: if (a != (PetscScalar)1.0) {
5717: PetscUseTypeMethod(mat, scale, a);
5718: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5719: }
5720: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5721: PetscFunctionReturn(PETSC_SUCCESS);
5722: }
5724: /*@
5725: MatNorm - Calculates various norms of a matrix.
5727: Collective
5729: Input Parameters:
5730: + mat - the matrix
5731: - type - the type of norm, `NORM_1`, `NORM_FROBENIUS`, `NORM_INFINITY`
5733: Output Parameter:
5734: . nrm - the resulting norm
5736: Level: intermediate
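   Example Usage:
   A minimal sketch (assuming `mat` is an assembled matrix):
.vb
   PetscReal nrm;
   PetscCall(MatNorm(mat, NORM_FROBENIUS, &nrm));
.ve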
5738: .seealso: [](ch_matrices), `Mat`
5739: @*/
5740: PetscErrorCode MatNorm(Mat mat, NormType type, PetscReal *nrm)
5741: {
5742: PetscFunctionBegin;
5745: PetscAssertPointer(nrm, 3);
5747: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5748: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5749: MatCheckPreallocated(mat, 1);
5751: PetscUseTypeMethod(mat, norm, type, nrm);
5752: PetscFunctionReturn(PETSC_SUCCESS);
5753: }
5755: /*
5756: This variable is used to prevent counting of MatAssemblyBegin() that
5757: are called from within a MatAssemblyEnd().
5758: */
5759: static PetscInt MatAssemblyEnd_InUse = 0;
5760: /*@
5761: MatAssemblyBegin - Begins assembling the matrix. This routine should
5762: be called after completing all calls to `MatSetValues()`.
5764: Collective
5766: Input Parameters:
5767: + mat - the matrix
5768: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5770: Level: beginner
5772: Notes:
5773: `MatSetValues()` generally caches the values that belong to other MPI processes. The matrix is ready to
5774: use only after `MatAssemblyBegin()` and `MatAssemblyEnd()` have been called.
5776: Use `MAT_FLUSH_ASSEMBLY` when switching between `ADD_VALUES` and `INSERT_VALUES`
5777: in `MatSetValues()`; use `MAT_FINAL_ASSEMBLY` for the final assembly before
5778: using the matrix.
5780: ALL processes that share a matrix MUST call `MatAssemblyBegin()` and `MatAssemblyEnd()` the SAME NUMBER of times, and each time with the
5781: same flag of `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY` for all processes. Thus you CANNOT locally change from `ADD_VALUES` to `INSERT_VALUES`, that is
5782: a global collective operation requiring all processes that share the matrix.
5784: Space for preallocated nonzeros that is not filled by a call to `MatSetValues()` or a related routine is compressed
5785: out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5786: before `MAT_FINAL_ASSEMBLY` so the space is not compressed out.
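   Example Usage:
   The usual assembly sequence looks like the following sketch (assuming `mat` has had its type, sizes, and preallocation set; the row, column, and value are illustrative):
.vb
   PetscInt    row = 0, col = 0;
   PetscScalar value = 1.0;
   PetscCall(MatSetValues(mat, 1, &row, 1, &col, &value, INSERT_VALUES)); /* repeated as needed */
   PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
   PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
.ve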
5788: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssembled()`
5789: @*/
5790: PetscErrorCode MatAssemblyBegin(Mat mat, MatAssemblyType type)
5791: {
5792: PetscFunctionBegin;
5795: MatCheckPreallocated(mat, 1);
5796: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix. Did you forget to call MatSetUnfactored()?");
5797: if (mat->assembled) {
5798: mat->was_assembled = PETSC_TRUE;
5799: mat->assembled = PETSC_FALSE;
5800: }
5802: if (!MatAssemblyEnd_InUse) {
5803: PetscCall(PetscLogEventBegin(MAT_AssemblyBegin, mat, 0, 0, 0));
5804: PetscTryTypeMethod(mat, assemblybegin, type);
5805: PetscCall(PetscLogEventEnd(MAT_AssemblyBegin, mat, 0, 0, 0));
5806: } else PetscTryTypeMethod(mat, assemblybegin, type);
5807: PetscFunctionReturn(PETSC_SUCCESS);
5808: }
5810: /*@
5811: MatAssembled - Indicates if a matrix has been assembled and is ready for
5812: use; for example, in matrix-vector product.
5814: Not Collective
5816: Input Parameter:
5817: . mat - the matrix
5819: Output Parameter:
5820: . assembled - `PETSC_TRUE` or `PETSC_FALSE`
5822: Level: advanced
5824: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssemblyBegin()`
5825: @*/
5826: PetscErrorCode MatAssembled(Mat mat, PetscBool *assembled)
5827: {
5828: PetscFunctionBegin;
5830: PetscAssertPointer(assembled, 2);
5831: *assembled = mat->assembled;
5832: PetscFunctionReturn(PETSC_SUCCESS);
5833: }
5835: /*@
5836: MatAssemblyEnd - Completes assembling the matrix. This routine should
5837: be called after `MatAssemblyBegin()`.
5839: Collective
5841: Input Parameters:
5842: + mat - the matrix
5843: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5845: Options Database Keys:
5846: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
5847: . -mat_view ::ascii_info_detail - Prints more detailed info
5848: . -mat_view - Prints matrix in ASCII format
5849: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
5850: . -mat_view draw - draws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
5851: . -display <name> - Sets display name (default is host)
5852: . -draw_pause <sec> - Sets number of seconds to pause after display
5853: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (See [Using MATLAB with PETSc](ch_matlab))
5854: . -viewer_socket_machine <machine> - Machine to use for socket
5855: . -viewer_socket_port <port> - Port number to use for socket
5856: - -mat_view binary:filename[:append] - Save matrix to file in binary format
5858: Level: beginner
5860: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatSetValues()`, `PetscDrawOpenX()`, `PetscDrawCreate()`, `MatView()`, `MatAssembled()`, `PetscViewerSocketOpen()`
5861: @*/
5862: PetscErrorCode MatAssemblyEnd(Mat mat, MatAssemblyType type)
5863: {
5864: static PetscInt inassm = 0;
5865: PetscBool flg = PETSC_FALSE;
5867: PetscFunctionBegin;
5871: inassm++;
5872: MatAssemblyEnd_InUse++;
5873: if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
5874: PetscCall(PetscLogEventBegin(MAT_AssemblyEnd, mat, 0, 0, 0));
5875: PetscTryTypeMethod(mat, assemblyend, type);
5876: PetscCall(PetscLogEventEnd(MAT_AssemblyEnd, mat, 0, 0, 0));
5877: } else PetscTryTypeMethod(mat, assemblyend, type);
5879: /* Flush assembly is not a true assembly */
5880: if (type != MAT_FLUSH_ASSEMBLY) {
5881: if (mat->num_ass) {
5882: if (!mat->symmetry_eternal) {
5883: mat->symmetric = PETSC_BOOL3_UNKNOWN;
5884: mat->hermitian = PETSC_BOOL3_UNKNOWN;
5885: }
5886: if (!mat->structural_symmetry_eternal && mat->ass_nonzerostate != mat->nonzerostate) mat->structurally_symmetric = PETSC_BOOL3_UNKNOWN;
5887: if (!mat->spd_eternal) mat->spd = PETSC_BOOL3_UNKNOWN;
5888: }
5889: mat->num_ass++;
5890: mat->assembled = PETSC_TRUE;
5891: mat->ass_nonzerostate = mat->nonzerostate;
5892: }
5894: mat->insertmode = NOT_SET_VALUES;
5895: MatAssemblyEnd_InUse--;
5896: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5897: if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
5898: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
5900: if (mat->checksymmetryonassembly) {
5901: PetscCall(MatIsSymmetric(mat, mat->checksymmetrytol, &flg));
5902: if (flg) {
5903: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5904: } else {
5905: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is not symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5906: }
5907: }
5908: if (mat->nullsp && mat->checknullspaceonassembly) PetscCall(MatNullSpaceTest(mat->nullsp, mat, NULL));
5909: }
5910: inassm--;
5911: PetscFunctionReturn(PETSC_SUCCESS);
5912: }
5914: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
5915: /*@
5916: MatSetOption - Sets a parameter option for a matrix. Some options
5917: may be specific to certain storage formats. Some options
5918: determine how values will be inserted (or added). Sorted,
5919: row-oriented input will generally assemble the fastest. The default
5920: is row-oriented.
5922: Logically Collective for certain operations, such as `MAT_SPD`, not collective for `MAT_ROW_ORIENTED`, see `MatOption`
5924: Input Parameters:
5925: + mat - the matrix
5926: . op - the option, one of those listed below (and possibly others),
5927: - flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
5929: Options Describing Matrix Structure:
5930: + `MAT_SPD` - symmetric positive definite
5931: . `MAT_SYMMETRIC` - symmetric in terms of both structure and value
5932: . `MAT_HERMITIAN` - transpose is the complex conjugation
5933: . `MAT_STRUCTURALLY_SYMMETRIC` - symmetric nonzero structure
5934: . `MAT_SYMMETRY_ETERNAL` - indicates the symmetry (or Hermitian structure) or its absence will persist through any changes to the matrix
5935: . `MAT_STRUCTURAL_SYMMETRY_ETERNAL` - indicates the structural symmetry or its absence will persist through any changes to the matrix
5936: . `MAT_SPD_ETERNAL` - indicates the value of `MAT_SPD` (true or false) will persist through any changes to the matrix
5938: These are not really options of the matrix; they are knowledge about the structure of the matrix that users may provide so that it
5939: does not need to be computed (usually at a high cost)
5941: Options For Use with `MatSetValues()`:
5942: Insert a logically dense subblock, which can be
5943: . `MAT_ROW_ORIENTED` - row-oriented (default)
5945: These options reflect the data you pass in with `MatSetValues()`; they have
5946: nothing to do with how the data is stored internally in the matrix
5947: data structure.
5949: When (re)assembling a matrix, we can restrict the input for
5950: efficiency/debugging purposes. These options include
5951: . `MAT_NEW_NONZERO_LOCATIONS` - additional insertions will be allowed if they generate a new nonzero (slow)
5952: . `MAT_FORCE_DIAGONAL_ENTRIES` - forces diagonal entries to be allocated
5953: . `MAT_IGNORE_OFF_PROC_ENTRIES` - drops off-processor entries
5954: . `MAT_NEW_NONZERO_LOCATION_ERR` - generates an error for new matrix entry
5955: . `MAT_USE_HASH_TABLE` - uses a hash table to speed up matrix assembly
5956: . `MAT_NO_OFF_PROC_ENTRIES` - you know each process will only set values for its own rows, will generate an error if
5957: any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
5958: performance for very large process counts.
5959: - `MAT_SUBSET_OFF_PROC_ENTRIES` - you know that the first assembly after setting this flag will set a superset
5960: of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
5961: functions, instead sending only neighbor messages.
5963: Level: intermediate
5965: Notes:
5966: Except for `MAT_UNUSED_NONZERO_LOCATION_ERR` and `MAT_ROW_ORIENTED` all processes that share the matrix must pass the same value in flg!
5968: Some options are relevant only for particular matrix types and
5969: are thus ignored by others. Other options are not supported by
5970: certain matrix types and will generate an error message if set.
5972: If using Fortran to compute a matrix, one may need to
5973: use the column-oriented option (or convert to the row-oriented
5974: format).
5976: `MAT_NEW_NONZERO_LOCATIONS` set to `PETSC_FALSE` indicates that any add or insertion
5977: that would generate a new entry in the nonzero structure is instead
5978: ignored. Thus, if memory has not already been allocated for this particular
5979: data, then the insertion is ignored. For dense matrices, in which
5980: the entire array is allocated, no entries are ever ignored.
5981: Set after the first `MatAssemblyEnd()`. If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process has one less global reduction
5983: `MAT_NEW_NONZERO_LOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
5984: that would generate a new entry in the nonzero structure instead produces
5985: an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process has one less global reduction
5987: `MAT_NEW_NONZERO_ALLOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
5988: that would generate a new entry that has not been preallocated will
5989: instead produce an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats
5990: only.) This is a useful flag when debugging matrix memory preallocation.
5991: If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` process has one less global reduction
5993: `MAT_IGNORE_OFF_PROC_ENTRIES` set to `PETSC_TRUE` indicates entries destined for
5994: other processors should be dropped, rather than stashed.
5995: This is useful if you know that the "owning" processor is also
5996: always generating the correct matrix entries, so that PETSc need
5997: not transfer duplicate entries generated on another processor.
5999: `MAT_USE_HASH_TABLE` indicates that a hash table be used to improve the
6000: searches during matrix assembly. When this flag is set, the hash table
6001: is created during the first matrix assembly. This hash table is
6002: used the next time through, during `MatSetValues()`/`MatSetValuesBlocked()`
6003: to improve the searching of indices. `MAT_NEW_NONZERO_LOCATIONS` flag
6004: should be used with `MAT_USE_HASH_TABLE` flag. This option is currently
6005: supported by `MATMPIBAIJ` format only.
6007: `MAT_KEEP_NONZERO_PATTERN` indicates when `MatZeroRows()` is called the zeroed entries
6008: are kept in the nonzero structure. This flag is not used for `MatZeroRowsColumns()`
6010: `MAT_IGNORE_ZERO_ENTRIES` - for `MATAIJ` and `MATIS` matrices this will stop zero values from creating
6011: a zero location in the matrix
6013: `MAT_USE_INODES` - indicates using inode version of the code - works with `MATAIJ` matrix types
6015: `MAT_NO_OFF_PROC_ZERO_ROWS` - you know each process will only zero its own rows. This avoids all reductions in the
6016: zero row routines and thus improves performance for very large process counts.
6018: `MAT_IGNORE_LOWER_TRIANGULAR` - For `MATSBAIJ` matrices will ignore any insertions you make in the lower triangular
6019: part of the matrix (since they should match the upper triangular part).
6021: `MAT_SORTED_FULL` - each process provides exactly its local rows; all column indices for a given row are passed in a
6022: single call to `MatSetValues()`, preallocation is perfect, row-oriented, `INSERT_VALUES` is used. Common
6023: with finite difference schemes with non-periodic boundary conditions.
6025: Developer Note:
6026: `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, and `MAT_SPD_ETERNAL` are used by `MatAssemblyEnd()` and in other
6027: places where otherwise the value of `MAT_SYMMETRIC`, `MAT_STRUCTURALLY_SYMMETRIC` or `MAT_SPD` would need to be changed back
6028: to `PETSC_BOOL3_UNKNOWN` because the matrix values had changed so the code cannot be certain that the related property had
6029: not changed.
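   Example Usage:
   A minimal sketch that records knowledge about a matrix (assuming `mat` is symmetric and its symmetry will not be destroyed by later changes; note that `MAT_SYMMETRIC` must be set before `MAT_SYMMETRY_ETERNAL`):
.vb
   PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));
   PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
.ve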
6031: .seealso: [](ch_matrices), `MatOption`, `Mat`, `MatGetOption()`
6032: @*/
6033: PetscErrorCode MatSetOption(Mat mat, MatOption op, PetscBool flg)
6034: {
6035: PetscFunctionBegin;
6037: if (op > 0) {
6040: }
6042: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
6044: switch (op) {
6045: case MAT_FORCE_DIAGONAL_ENTRIES:
6046: mat->force_diagonals = flg;
6047: PetscFunctionReturn(PETSC_SUCCESS);
6048: case MAT_NO_OFF_PROC_ENTRIES:
6049: mat->nooffprocentries = flg;
6050: PetscFunctionReturn(PETSC_SUCCESS);
6051: case MAT_SUBSET_OFF_PROC_ENTRIES:
6052: mat->assembly_subset = flg;
6053: if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
6054: #if !defined(PETSC_HAVE_MPIUNI)
6055: PetscCall(MatStashScatterDestroy_BTS(&mat->stash));
6056: #endif
6057: mat->stash.first_assembly_done = PETSC_FALSE;
6058: }
6059: PetscFunctionReturn(PETSC_SUCCESS);
6060: case MAT_NO_OFF_PROC_ZERO_ROWS:
6061: mat->nooffproczerorows = flg;
6062: PetscFunctionReturn(PETSC_SUCCESS);
6063: case MAT_SPD:
6064: if (flg) {
6065: mat->spd = PETSC_BOOL3_TRUE;
6066: mat->symmetric = PETSC_BOOL3_TRUE;
6067: mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6068: } else {
6069: mat->spd = PETSC_BOOL3_FALSE;
6070: }
6071: break;
6072: case MAT_SYMMETRIC:
6073: mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6074: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6075: #if !defined(PETSC_USE_COMPLEX)
6076: mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6077: #endif
6078: break;
6079: case MAT_HERMITIAN:
6080: mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6081: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
6082: #if !defined(PETSC_USE_COMPLEX)
6083: mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6084: #endif
6085: break;
6086: case MAT_STRUCTURALLY_SYMMETRIC:
6087: mat->structurally_symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
6088: break;
6089: case MAT_SYMMETRY_ETERNAL:
6090: PetscCheck(mat->symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SYMMETRY_ETERNAL without first setting MAT_SYMMETRIC to true or false");
6091: mat->symmetry_eternal = flg;
6092: if (flg) mat->structural_symmetry_eternal = PETSC_TRUE;
6093: break;
6094: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6095: PetscCheck(mat->structurally_symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_STRUCTURAL_SYMMETRY_ETERNAL without first setting MAT_STRUCTURALLY_SYMMETRIC to true or false");
6096: mat->structural_symmetry_eternal = flg;
6097: break;
6098: case MAT_SPD_ETERNAL:
6099: PetscCheck(mat->spd != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SPD_ETERNAL without first setting MAT_SPD to true or false");
6100: mat->spd_eternal = flg;
6101: if (flg) {
6102: mat->structural_symmetry_eternal = PETSC_TRUE;
6103: mat->symmetry_eternal = PETSC_TRUE;
6104: }
6105: break;
6106: case MAT_STRUCTURE_ONLY:
6107: mat->structure_only = flg;
6108: break;
6109: case MAT_SORTED_FULL:
6110: mat->sortedfull = flg;
6111: break;
6112: default:
6113: break;
6114: }
6115: PetscTryTypeMethod(mat, setoption, op, flg);
6116: PetscFunctionReturn(PETSC_SUCCESS);
6117: }
6119: /*@
6120: MatGetOption - Gets a parameter option that has been set for a matrix.
6122: Logically Collective
6124: Input Parameters:
6125: + mat - the matrix
6126: - op - the option, this only responds to certain options, check the code for which ones
6128: Output Parameter:
6129: . flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
6131: Level: intermediate
6133: Notes:
6134: Can only be called after `MatSetSizes()` and `MatSetType()` have been called.
6136: Certain option values may be unknown, for those use the routines `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, or
6137: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
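   Example Usage:
   A minimal sketch (assuming `mat` has had its type and sizes set):
.vb
   PetscBool flg;
   PetscCall(MatGetOption(mat, MAT_SYMMETRY_ETERNAL, &flg));
.ve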
6139: .seealso: [](ch_matrices), `Mat`, `MatOption`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`,
6140: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6141: @*/
6142: PetscErrorCode MatGetOption(Mat mat, MatOption op, PetscBool *flg)
6143: {
6144: PetscFunctionBegin;
6148: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
6149: PetscCheck(((PetscObject)mat)->type_name, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_TYPENOTSET, "Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");
6151: switch (op) {
6152: case MAT_NO_OFF_PROC_ENTRIES:
6153: *flg = mat->nooffprocentries;
6154: break;
6155: case MAT_NO_OFF_PROC_ZERO_ROWS:
6156: *flg = mat->nooffproczerorows;
6157: break;
6158: case MAT_SYMMETRIC:
6159: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSymmetric() or MatIsSymmetricKnown()");
6160: break;
6161: case MAT_HERMITIAN:
6162: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsHermitian() or MatIsHermitianKnown()");
6163: break;
6164: case MAT_STRUCTURALLY_SYMMETRIC:
6165: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsStructurallySymmetric() or MatIsStructurallySymmetricKnown()");
6166: break;
6167: case MAT_SPD:
6168: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSPDKnown()");
6169: break;
6170: case MAT_SYMMETRY_ETERNAL:
6171: *flg = mat->symmetry_eternal;
6172: break;
6173: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6174: *flg = mat->structural_symmetry_eternal;
6175: break;
6176: default:
6177: break;
6178: }
6179: PetscFunctionReturn(PETSC_SUCCESS);
6180: }
6182: /*@
6183: MatZeroEntries - Zeros all entries of a matrix. For sparse matrices
6184: this routine retains the old nonzero structure.
6186: Logically Collective
6188: Input Parameter:
6189: . mat - the matrix
6191: Level: intermediate
6193: Note:
6194: If the matrix was not preallocated then a default, likely poor preallocation will be set in the matrix, so this should be called after the preallocation phase.
6195: See the Performance chapter of the users manual for information on preallocating matrices.
6197: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`
6198: @*/
6199: PetscErrorCode MatZeroEntries(Mat mat)
6200: {
6201: PetscFunctionBegin;
6204: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6205: PetscCheck(mat->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for matrices where you have set values but not yet assembled");
6206: MatCheckPreallocated(mat, 1);
6208: PetscCall(PetscLogEventBegin(MAT_ZeroEntries, mat, 0, 0, 0));
6209: PetscUseTypeMethod(mat, zeroentries);
6210: PetscCall(PetscLogEventEnd(MAT_ZeroEntries, mat, 0, 0, 0));
6211: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6212: PetscFunctionReturn(PETSC_SUCCESS);
6213: }
6215: /*@
6216: MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
6217: of a set of rows and columns of a matrix.
6219: Collective
6221: Input Parameters:
6222: + mat - the matrix
6223: . numRows - the number of rows/columns to zero
6224: . rows - the global row indices
6225: . diag - value put in the diagonal of the eliminated rows
6226: . x - optional vector of the solution for zeroed rows (other entries in vector are not used), these must be set before this call
6227: - b - optional vector of the right-hand side, that will be adjusted by provided solution entries
6229: Level: intermediate
6231: Notes:
6232: This routine, along with `MatZeroRows()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6234: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6235: The other entries of `b` will be adjusted by the known values of `x` times the corresponding matrix entries in the columns that are being eliminated
6237: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6238: Krylov method to take advantage of the known solution on the zeroed rows.
6240: For the parallel case, all processes that share the matrix (i.e.,
6241: those in the communicator used for matrix creation) MUST call this
6242: routine, regardless of whether any rows being zeroed are owned by
6243: them.
6245: Unlike `MatZeroRows()`, this ignores the `MAT_KEEP_NONZERO_PATTERN` option value set with `MatSetOption()`, it merely zeros those entries in the matrix, but never
6246: removes them from the nonzero pattern. The nonzero pattern of the matrix can still change if a nonzero needs to be inserted on a diagonal entry that was previously
6247: missing.
6249: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6250: list only rows local to itself).
6252: The option `MAT_NO_OFF_PROC_ZERO_ROWS` does not apply to this routine.
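   Example Usage:
   A minimal sketch that eliminates two Dirichlet rows/columns and adjusts the right-hand side (the assembled matrix `mat` and the vectors `x` and `b` are assumed to exist, with the known boundary values already placed in `x`; the row indices are illustrative):
.vb
   PetscInt rows[] = {0, 5};
   PetscCall(MatZeroRowsColumns(mat, 2, rows, 1.0, x, b));
.ve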
6254: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRows()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6255: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6256: @*/
6257: PetscErrorCode MatZeroRowsColumns(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6258: {
6259: PetscFunctionBegin;
6262: if (numRows) PetscAssertPointer(rows, 3);
6263: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6264: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6265: MatCheckPreallocated(mat, 1);
6267: PetscUseTypeMethod(mat, zerorowscolumns, numRows, rows, diag, x, b);
6268: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6269: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6270: PetscFunctionReturn(PETSC_SUCCESS);
6271: }
6273: /*@
6274: MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
6275: of a set of rows and columns of a matrix.
6277: Collective
6279: Input Parameters:
6280: + mat - the matrix
6281: . is - the rows to zero
6282: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6283: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6284: - b - optional vector of right-hand side, that will be adjusted by provided solution
6286: Level: intermediate
6288: Note:
6289: See `MatZeroRowsColumns()` for details on how this routine operates.
6291: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6292: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRows()`, `MatZeroRowsColumnsStencil()`
6293: @*/
6294: PetscErrorCode MatZeroRowsColumnsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6295: {
6296: PetscInt numRows;
6297: const PetscInt *rows;
6299: PetscFunctionBegin;
6304: PetscCall(ISGetLocalSize(is, &numRows));
6305: PetscCall(ISGetIndices(is, &rows));
6306: PetscCall(MatZeroRowsColumns(mat, numRows, rows, diag, x, b));
6307: PetscCall(ISRestoreIndices(is, &rows));
6308: PetscFunctionReturn(PETSC_SUCCESS);
6309: }
6311: /*@
6312: MatZeroRows - Zeros all entries (except possibly the main diagonal)
6313: of a set of rows of a matrix.
6315: Collective
6317: Input Parameters:
6318: + mat - the matrix
6319: . numRows - the number of rows to zero
6320: . rows - the global row indices
6321: . diag - value put in the diagonal of the zeroed rows
6322: . x - optional vector of solutions for zeroed rows (other entries in vector are not used), these must be set before this call
6323: - b - optional vector of right-hand side, that will be adjusted by provided solution entries
6325: Level: intermediate
6327: Notes:
6328: This routine, along with `MatZeroRowsColumns()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6330: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6332: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6333: Krylov method to take advantage of the known solution on the zeroed rows.
6335: May be followed by using a `PC` of type `PCREDISTRIBUTE` to solve the reduced problem (`PCREDISTRIBUTE` completely eliminates the zeroed rows and their corresponding columns
6336: from the matrix).
6338: Unlike `MatZeroRowsColumns()`, for the `MATAIJ` and `MATBAIJ` matrix formats this removes the old nonzero structure from the eliminated rows of the matrix
6339: but does not release memory. Because of this removal, matrix-vector products with the adjusted matrix will be a bit faster. For the dense
6340: formats this does not alter the nonzero structure.
6342: If the option `MatSetOption`(mat,`MAT_KEEP_NONZERO_PATTERN`,`PETSC_TRUE`) is set, the nonzero structure
6343: of the matrix is not changed; the values are
6344: merely zeroed.
6346: The user can set a value in the diagonal entry (or for the `MATAIJ`
6347: formats can optionally remove the main diagonal entry from the
6348: nonzero structure as well, by passing 0.0 as the final argument).
6350: For the parallel case, all processes that share the matrix (i.e.,
6351: those in the communicator used for matrix creation) MUST call this
6352: routine, regardless of whether any rows being zeroed are owned by
6353: them.
6355: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6356: list only rows local to itself).
6358: You can call `MatSetOption`(mat,`MAT_NO_OFF_PROC_ZERO_ROWS`,`PETSC_TRUE`) if each process indicates only rows it
6359: owns that are to be zeroed. This saves a global synchronization in the implementation.
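   Example Usage:
   A minimal sketch that keeps the nonzero pattern while zeroing one row (assuming `mat` is an assembled matrix; the row index is illustrative):
.vb
   PetscInt row = 0;
   PetscCall(MatSetOption(mat, MAT_KEEP_NONZERO_PATTERN, PETSC_TRUE));
   PetscCall(MatZeroRows(mat, 1, &row, 1.0, NULL, NULL));
.ve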
6361: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6362: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `PCREDISTRIBUTE`, `MAT_KEEP_NONZERO_PATTERN`
6363: @*/
6364: PetscErrorCode MatZeroRows(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6365: {
6366: PetscFunctionBegin;
6369: if (numRows) PetscAssertPointer(rows, 3);
6370: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6371: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6372: MatCheckPreallocated(mat, 1);
6374: PetscUseTypeMethod(mat, zerorows, numRows, rows, diag, x, b);
6375: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6376: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6377: PetscFunctionReturn(PETSC_SUCCESS);
6378: }
6380: /*@
6381: MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
6382: of a set of rows of a matrix indicated by an `IS`
6384: Collective
6386: Input Parameters:
6387: + mat - the matrix
6388: . is - index set, `IS`, of rows to remove (if `NULL` then no row is removed)
6389: . diag - value put in all diagonals of eliminated rows
6390: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6391: - b - optional vector of right-hand side, that will be adjusted by provided solution
6393: Level: intermediate
6395: Note:
6396: See `MatZeroRows()` for details on how this routine operates.
6398: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6399: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `IS`
6400: @*/
6401: PetscErrorCode MatZeroRowsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6402: {
6403: PetscInt numRows = 0;
6404: const PetscInt *rows = NULL;
6406: PetscFunctionBegin;
6409: if (is) {
6411: PetscCall(ISGetLocalSize(is, &numRows));
6412: PetscCall(ISGetIndices(is, &rows));
6413: }
6414: PetscCall(MatZeroRows(mat, numRows, rows, diag, x, b));
6415: if (is) PetscCall(ISRestoreIndices(is, &rows));
6416: PetscFunctionReturn(PETSC_SUCCESS);
6417: }
6419: /*@
6420: MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
6421: of a set of rows of a matrix indicated by a `MatStencil`. These rows must be local to the process.
6423: Collective
6425: Input Parameters:
6426: + mat - the matrix
6427: . numRows - the number of rows to remove
6428: . rows - the grid coordinates (and component number when dof > 1) for matrix rows indicated by an array of `MatStencil`
6429: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6430: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6431: - b - optional vector of right-hand side, that will be adjusted by provided solution
6433: Level: intermediate
6435: Notes:
6436: See `MatZeroRows()` for details on how this routine operates.
6438: The grid coordinates are across the entire grid, not just the local portion
6440: For periodic boundary conditions use negative indices for values to the left (below 0; these are
6441: obtained by wrapping values from the right edge). For values to the right of the last entry, use that index plus one,
6442: etc., to obtain values that wrap around from the left edge. This does not work for anything but the
6443: `DM_BOUNDARY_PERIODIC` boundary type.
6445: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
6446: a single value per point) you can skip filling those indices.
6448: Fortran Note:
6449: `idxm` and `idxn` should be declared as
6450: $ MatStencil idxm(4, m)
6451: and the values inserted using
6452: .vb
6453: idxm(MatStencil_i, 1) = i
6454: idxm(MatStencil_j, 1) = j
6455: idxm(MatStencil_k, 1) = k
6456: idxm(MatStencil_c, 1) = c
6457: etc
6458: .ve
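   Example Usage:
   A minimal C sketch for a 2d grid (assuming `mat` was obtained from a `DM` with `DMCreateMatrix()` so the stencil information is available; the grid coordinates are illustrative):
.vb
   MatStencil row = {0}; /* zeros all of k, j, i, c */
   row.i = 2;
   row.j = 3;
   PetscCall(MatZeroRowsStencil(mat, 1, &row, 1.0, NULL, NULL));
.ve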
6460: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRows()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6461: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6462: @*/
6463: PetscErrorCode MatZeroRowsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6464: {
6465: PetscInt dim = mat->stencil.dim;
6466: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6467: PetscInt *dims = mat->stencil.dims + 1;
6468: PetscInt *starts = mat->stencil.starts;
6469: PetscInt *dxm = (PetscInt *)rows;
6470: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6472: PetscFunctionBegin;
6475: if (numRows) PetscAssertPointer(rows, 3);
6477: PetscCall(PetscMalloc1(numRows, &jdxm));
6478: for (i = 0; i < numRows; ++i) {
6479: /* Skip unused dimensions (they are ordered k, j, i, c) */
6480: for (j = 0; j < 3 - sdim; ++j) dxm++;
6481: /* Local index in X dir */
6482: tmp = *dxm++ - starts[0];
6483: /* Loop over remaining dimensions */
6484: for (j = 0; j < dim - 1; ++j) {
6485: /* If nonlocal, set index to be negative */
6486: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6487: /* Update local index */
6488: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6489: }
6490: /* Skip component slot if necessary */
6491: if (mat->stencil.noc) dxm++;
6492: /* Local row number */
6493: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6494: }
6495: PetscCall(MatZeroRowsLocal(mat, numNewRows, jdxm, diag, x, b));
6496: PetscCall(PetscFree(jdxm));
6497: PetscFunctionReturn(PETSC_SUCCESS);
6498: }
6500: /*@
6501: MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
6502: of a set of rows and columns of a matrix.
6504: Collective
6506: Input Parameters:
6507: + mat - the matrix
6508: . numRows - the number of rows/columns to remove
6509: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6510: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6511: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6512: - b - optional vector of right-hand side, that will be adjusted by provided solution
6514: Level: intermediate
6516: Notes:
6517: See `MatZeroRowsColumns()` for details on how this routine operates.
6519: The grid coordinates are across the entire grid, not just the local portion
6521: For periodic boundary conditions use negative indices for values to the left (below 0; these are
6522: obtained by wrapping values from the right edge). For values to the right of the last entry, use that index plus one,
6523: etc., to obtain values that wrap around from the left edge. This does not work for anything but the
6524: `DM_BOUNDARY_PERIODIC` boundary type.
6526: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
6527: a single value per point) you can skip filling those indices.
6529: Fortran Note:
6530: `idxm` and `idxn` should be declared as
6531: $ MatStencil idxm(4, m)
6532: and the values inserted using
6533: .vb
6534: idxm(MatStencil_i, 1) = i
6535: idxm(MatStencil_j, 1) = j
6536: idxm(MatStencil_k, 1) = k
6537: idxm(MatStencil_c, 1) = c
6538: etc
6539: .ve
6541: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6542: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRows()`
6543: @*/
6544: PetscErrorCode MatZeroRowsColumnsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6545: {
6546: PetscInt dim = mat->stencil.dim;
6547: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6548: PetscInt *dims = mat->stencil.dims + 1;
6549: PetscInt *starts = mat->stencil.starts;
6550: PetscInt *dxm = (PetscInt *)rows;
6551: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6553: PetscFunctionBegin;
6556: if (numRows) PetscAssertPointer(rows, 3);
6558: PetscCall(PetscMalloc1(numRows, &jdxm));
6559: for (i = 0; i < numRows; ++i) {
6560: /* Skip unused dimensions (they are ordered k, j, i, c) */
6561: for (j = 0; j < 3 - sdim; ++j) dxm++;
6562: /* Local index in X dir */
6563: tmp = *dxm++ - starts[0];
6564: /* Loop over remaining dimensions */
6565: for (j = 0; j < dim - 1; ++j) {
6566: /* If nonlocal, set index to be negative */
6567: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_INT_MIN;
6568: /* Update local index */
6569: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6570: }
6571: /* Skip component slot if necessary */
6572: if (mat->stencil.noc) dxm++;
6573: /* Local row number */
6574: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6575: }
6576: PetscCall(MatZeroRowsColumnsLocal(mat, numNewRows, jdxm, diag, x, b));
6577: PetscCall(PetscFree(jdxm));
6578: PetscFunctionReturn(PETSC_SUCCESS);
6579: }
6581: /*@
6582: MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6583: of a set of rows of a matrix; using local numbering of rows.
6585: Collective
6587: Input Parameters:
6588: + mat - the matrix
6589: . numRows - the number of rows to remove
6590: . rows - the local row indices
6591: . diag - value put in all diagonals of eliminated rows
6592: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6593: - b - optional vector of right-hand side, that will be adjusted by provided solution
6595: Level: intermediate
6597: Notes:
6598: Before calling `MatZeroRowsLocal()`, the user must first set the
6599: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6601: See `MatZeroRows()` for details on how this routine operates.
6603: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRows()`, `MatSetOption()`,
6604: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6605: @*/
6606: PetscErrorCode MatZeroRowsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6607: {
6608: PetscFunctionBegin;
6611: if (numRows) PetscAssertPointer(rows, 3);
6612: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6613: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6614: MatCheckPreallocated(mat, 1);
6616: if (mat->ops->zerorowslocal) {
6617: PetscUseTypeMethod(mat, zerorowslocal, numRows, rows, diag, x, b);
6618: } else {
6619: IS is, newis;
6620: const PetscInt *newRows;
6622: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6623: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6624: PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
6625: PetscCall(ISGetIndices(newis, &newRows));
6626: PetscUseTypeMethod(mat, zerorows, numRows, newRows, diag, x, b);
6627: PetscCall(ISRestoreIndices(newis, &newRows));
6628: PetscCall(ISDestroy(&newis));
6629: PetscCall(ISDestroy(&is));
6630: }
6631: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6632: PetscFunctionReturn(PETSC_SUCCESS);
6633: }
6635: /*@
6636: MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6637: of a set of rows of a matrix; using local numbering of rows.
6639: Collective
6641: Input Parameters:
6642: + mat - the matrix
6643: . is - index set of rows to remove
6644: . diag - value put in all diagonals of eliminated rows
6645: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6646: - b - optional vector of right-hand side, that will be adjusted by provided solution
6648: Level: intermediate
6650: Notes:
6651: Before calling `MatZeroRowsLocalIS()`, the user must first set the
6652: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6654: See `MatZeroRows()` for details on how this routine operates.
6656: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRows()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6657: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6658: @*/
6659: PetscErrorCode MatZeroRowsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6660: {
6661: PetscInt numRows;
6662: const PetscInt *rows;
6664: PetscFunctionBegin;
6668: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6669: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6670: MatCheckPreallocated(mat, 1);
6672: PetscCall(ISGetLocalSize(is, &numRows));
6673: PetscCall(ISGetIndices(is, &rows));
6674: PetscCall(MatZeroRowsLocal(mat, numRows, rows, diag, x, b));
6675: PetscCall(ISRestoreIndices(is, &rows));
6676: PetscFunctionReturn(PETSC_SUCCESS);
6677: }
6679: /*@
6680: MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6681: of a set of rows and columns of a matrix; using local numbering of rows.
6683: Collective
6685: Input Parameters:
6686: + mat - the matrix
6687: . numRows - the number of rows to remove
6688: . rows - the local row indices
6689: . diag - value put in all diagonals of eliminated rows
6690: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6691: - b - optional vector of right-hand side, that will be adjusted by provided solution
6693: Level: intermediate
6695: Notes:
6696: Before calling `MatZeroRowsColumnsLocal()`, the user must first set the
6697: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6699: See `MatZeroRowsColumns()` for details on how this routine operates.
6701: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6702: `MatZeroRows()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6703: @*/
6704: PetscErrorCode MatZeroRowsColumnsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6705: {
6706: IS is, newis;
6707: const PetscInt *newRows;
6709: PetscFunctionBegin;
6712: if (numRows) PetscAssertPointer(rows, 3);
6713: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6714: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6715: MatCheckPreallocated(mat, 1);
6717: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6718: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6719: PetscCall(ISLocalToGlobalMappingApplyIS(mat->cmap->mapping, is, &newis));
6720: PetscCall(ISGetIndices(newis, &newRows));
6721: PetscUseTypeMethod(mat, zerorowscolumns, numRows, newRows, diag, x, b);
6722: PetscCall(ISRestoreIndices(newis, &newRows));
6723: PetscCall(ISDestroy(&newis));
6724: PetscCall(ISDestroy(&is));
6725: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6726: PetscFunctionReturn(PETSC_SUCCESS);
6727: }
6729: /*@
6730: MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6731: of a set of rows and columns of a matrix; using local numbering of rows.
6733: Collective
6735: Input Parameters:
6736: + mat - the matrix
6737: . is - index set of rows to remove
6738: . diag - value put in all diagonals of eliminated rows
6739: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6740: - b - optional vector of right-hand side, that will be adjusted by provided solution
6742: Level: intermediate
6744: Notes:
6745: Before calling `MatZeroRowsColumnsLocalIS()`, the user must first set the
6746: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6748: See `MatZeroRowsColumns()` for details on how this routine operates.
6750: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6751: `MatZeroRowsColumnsLocal()`, `MatZeroRows()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6752: @*/
6753: PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6754: {
6755: PetscInt numRows;
6756: const PetscInt *rows;
6758: PetscFunctionBegin;
6762: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6763: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6764: MatCheckPreallocated(mat, 1);
6766: PetscCall(ISGetLocalSize(is, &numRows));
6767: PetscCall(ISGetIndices(is, &rows));
6768: PetscCall(MatZeroRowsColumnsLocal(mat, numRows, rows, diag, x, b));
6769: PetscCall(ISRestoreIndices(is, &rows));
6770: PetscFunctionReturn(PETSC_SUCCESS);
6771: }
6773: /*@
6774: MatGetSize - Returns the numbers of rows and columns in a matrix.
6776: Not Collective
6778: Input Parameter:
6779: . mat - the matrix
6781: Output Parameters:
6782: + m - the number of global rows
6783: - n - the number of global columns
6785: Level: beginner
6787: Note:
6788: Both output parameters can be `NULL` on input.
6790: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetLocalSize()`
6791: @*/
6792: PetscErrorCode MatGetSize(Mat mat, PetscInt *m, PetscInt *n)
6793: {
6794: PetscFunctionBegin;
6796: if (m) *m = mat->rmap->N;
6797: if (n) *n = mat->cmap->N;
6798: PetscFunctionReturn(PETSC_SUCCESS);
6799: }
6801: /*@
6802: MatGetLocalSize - For most matrix formats, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the number of local rows and local columns
6803: of a matrix. For all matrices this is the local size of the left and right vectors as returned by `MatCreateVecs()`.
6805: Not Collective
6807: Input Parameter:
6808: . mat - the matrix
6810: Output Parameters:
6811: + m - the number of local rows, use `NULL` to not obtain this value
6812: - n - the number of local columns, use `NULL` to not obtain this value
6814: Level: beginner
6816: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetSize()`
6817: @*/
6818: PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n)
6819: {
6820: PetscFunctionBegin;
6822: if (m) PetscAssertPointer(m, 2);
6823: if (n) PetscAssertPointer(n, 3);
6824: if (m) *m = mat->rmap->n;
6825: if (n) *n = mat->cmap->n;
6826: PetscFunctionReturn(PETSC_SUCCESS);
6827: }
6829: /*@
6830: MatGetOwnershipRangeColumn - Returns the range of matrix columns owned by this MPI process; these columns correspond to the locally owned
6831: entries of a vector that this matrix is multiplied against (the right vector returned by `MatCreateVecs()`).
6833: Not Collective, unless matrix has not been allocated, then collective
6835: Input Parameter:
6836: . mat - the matrix
6838: Output Parameters:
6839: + m - the global index of the first local column, use `NULL` to not obtain this value
6840: - n - one more than the global index of the last local column, use `NULL` to not obtain this value
6842: Level: developer
6844: Notes:
6845: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6847: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6848: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6850: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6851: the local values in the matrix.
6853: Returns the columns of the "diagonal block" for most sparse matrix formats. See [Matrix
6854: Layouts](sec_matlayout) for details on matrix layouts.
6856: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
6857: `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6858: @*/
6859: PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n)
6860: {
6861: PetscFunctionBegin;
6864: if (m) PetscAssertPointer(m, 2);
6865: if (n) PetscAssertPointer(n, 3);
6866: MatCheckPreallocated(mat, 1);
6867: if (m) *m = mat->cmap->rstart;
6868: if (n) *n = mat->cmap->rend;
6869: PetscFunctionReturn(PETSC_SUCCESS);
6870: }
6872: /*@
6873: MatGetOwnershipRange - For matrices that own values by row, excludes `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by
6874: this MPI process.
6876: Not Collective
6878: Input Parameter:
6879: . mat - the matrix
6881: Output Parameters:
6882: + m - the global index of the first local row, use `NULL` to not obtain this value
6883: - n - one more than the global index of the last local row, use `NULL` to not obtain this value
6885: Level: beginner
6887: Notes:
6888: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6890: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6891: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6893: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6894: the local values in the matrix.
6896: The high argument is one more than the last element stored locally.
6898: For all matrices it returns the range of matrix rows associated with rows of a vector that
6899: would contain the result of a matrix vector product with this matrix. See [Matrix
6900: Layouts](sec_matlayout) for details on matrix layouts.
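Example of Usage:
A minimal sketch that loops over the locally owned rows of an assumed matrix `A` with the standard row layout:
.vb
PetscInt rstart, rend;
PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
for (PetscInt row = rstart; row < rend; row++) {
  // set or read entries of this locally owned global row
}
.ve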
6902: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`,
6903: `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`, `DMDAGetGhostCorners()`, `DM`
6904: @*/
6905: PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n)
6906: {
6907: PetscFunctionBegin;
6910: if (m) PetscAssertPointer(m, 2);
6911: if (n) PetscAssertPointer(n, 3);
6912: MatCheckPreallocated(mat, 1);
6913: if (m) *m = mat->rmap->rstart;
6914: if (n) *n = mat->rmap->rend;
6915: PetscFunctionReturn(PETSC_SUCCESS);
6916: }
6918: /*@C
6919: MatGetOwnershipRanges - For matrices that own values by row, excludes `MATELEMENTAL` and
6920: `MATSCALAPACK`, returns the range of matrix rows owned by each process.
6922: Not Collective, unless matrix has not been allocated
6924: Input Parameter:
6925: . mat - the matrix
6927: Output Parameter:
6928: . ranges - start of each process's portion plus one more than the total length at the end, of length `size` + 1
6929: where `size` is the number of MPI processes used by `mat`
6931: Level: beginner
6933: Notes:
6934: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6936: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6937: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6939: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6940: the local values in the matrix.
6942: For all matrices it returns the ranges of matrix rows associated with rows of a vector that
6943: would contain the result of a matrix vector product with this matrix. See [Matrix
6944: Layouts](sec_matlayout) for details on matrix layouts.
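Example of Usage:
A minimal sketch, assuming `A` is an already created matrix; the returned array belongs to the matrix layout and must not be freed:
.vb
const PetscInt *ranges;
PetscMPIInt     size;
PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
PetscCall(MatGetOwnershipRanges(A, &ranges));
// process r owns the rows [ranges[r], ranges[r+1]) for r = 0, ..., size-1
.ve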
6946: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`,
6947: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `MatSetSizes()`, `MatCreateAIJ()`,
6948: `DMDAGetGhostCorners()`, `DM`
6949: @*/
6950: PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt *ranges[])
6951: {
6952: PetscFunctionBegin;
6955: MatCheckPreallocated(mat, 1);
6956: PetscCall(PetscLayoutGetRanges(mat->rmap, ranges));
6957: PetscFunctionReturn(PETSC_SUCCESS);
6958: }
6960: /*@C
6961: MatGetOwnershipRangesColumn - Returns the ranges of matrix columns associated with rows of a
6962: vector one multiplies this matrix by that are owned by each processor.
6964: Not Collective, unless matrix has not been allocated
6966: Input Parameter:
6967: . mat - the matrix
6969: Output Parameter:
6970: . ranges - start of each process's portion plus one more than the total length at the end
6972: Level: beginner
6974: Notes:
6975: If the `Mat` was obtained from a `DM` with `DMCreateMatrix()`, then the range values are determined by the specific `DM`.
6977: If the `Mat` was created directly the range values are determined by the local size passed to `MatSetSizes()` or `MatCreateAIJ()`.
6978: If `PETSC_DECIDE` was passed as the local size, then the vector uses default values for the range using `PetscSplitOwnership()`.
6980: For certain `DM`, such as `DMDA`, it is better to use `DM` specific routines, such as `DMDAGetGhostCorners()`, to determine
6981: the local values in the matrix.
6983: Returns the columns of the "diagonal blocks", for most sparse matrix formats. See [Matrix
6984: Layouts](sec_matlayout) for details on matrix layouts.
6986: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`,
6987: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`, `PetscLayout`, `MatSetSizes()`, `MatCreateAIJ()`,
6988: `DMDAGetGhostCorners()`, `DM`
6989: @*/
6990: PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt *ranges[])
6991: {
6992: PetscFunctionBegin;
6995: MatCheckPreallocated(mat, 1);
6996: PetscCall(PetscLayoutGetRanges(mat->cmap, ranges));
6997: PetscFunctionReturn(PETSC_SUCCESS);
6998: }
7000: /*@
7001: MatGetOwnershipIS - Get the row and column ownership of a matrix's values as index sets.
7003: Not Collective
7005: Input Parameter:
7006: . A - matrix
7008: Output Parameters:
7009: + rows - rows in which this process owns elements, use `NULL` to not obtain this value
7010: - cols - columns in which this process owns elements, use `NULL` to not obtain this value
7012: Level: intermediate
7014: Notes:
7015: You should call `ISDestroy()` on the returned `IS`
7017: For most matrices, excluding `MATELEMENTAL` and `MATSCALAPACK`, this corresponds to values
7018: returned by `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`. For `MATELEMENTAL` and
7019: `MATSCALAPACK` the ownership is more complicated. See [Matrix Layouts](sec_matlayout) for
7020: details on matrix layouts.
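Example of Usage:
A minimal sketch, assuming `A` is an already created matrix; the caller destroys the returned index sets:
.vb
IS rows, cols;
PetscCall(MatGetOwnershipIS(A, &rows, &cols));
// ... use the index sets ...
PetscCall(ISDestroy(&rows));
PetscCall(ISDestroy(&cols));
.ve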
7022: .seealso: [](ch_matrices), `IS`, `Mat`, `MatGetOwnershipRanges()`, `MatSetValues()`, `MATELEMENTAL`, `MATSCALAPACK`
7023: @*/
7024: PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols)
7025: {
7026: PetscErrorCode (*f)(Mat, IS *, IS *);
7028: PetscFunctionBegin;
7031: MatCheckPreallocated(A, 1);
7032: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatGetOwnershipIS_C", &f));
7033: if (f) {
7034: PetscCall((*f)(A, rows, cols));
7035: } else { /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
7036: if (rows) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->n, A->rmap->rstart, 1, rows));
7037: if (cols) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, cols));
7038: }
7039: PetscFunctionReturn(PETSC_SUCCESS);
7040: }
7042: /*@
7043: MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix obtained with `MatGetFactor()`.
7044: Uses levels of fill only, not drop tolerance. Use `MatLUFactorNumeric()`
7045: to complete the factorization.
7047: Collective
7049: Input Parameters:
7050: + fact - the factorized matrix obtained with `MatGetFactor()`
7051: . mat - the matrix
7052: . row - row permutation
7053: . col - column permutation
7054: - info - structure containing
7055: .vb
7056: levels - number of levels of fill.
7057: expected fill - as ratio of original fill.
7058: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
7059: missing diagonal entries)
7060: .ve
7062: Level: developer
7064: Notes:
7065: See [Matrix Factorization](sec_matfactor) for additional information.
7067: Most users should employ the `KSP` interface for linear solvers
7068: instead of working directly with matrix algebra routines such as this.
7069: See, e.g., `KSPCreate()`.
7071: Uses the definition of level of fill as in Y. Saad, {cite}`saad2003`
7073: Developer Note:
7074: The Fortran interface is not autogenerated as the
7075: interface definition cannot be generated correctly [due to `MatFactorInfo`]
7077: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
7078: `MatGetOrdering()`, `MatFactorInfo`
7079: @*/
7080: PetscErrorCode MatILUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
7081: {
7082: PetscFunctionBegin;
7087: PetscAssertPointer(info, 5);
7088: PetscAssertPointer(fact, 1);
7089: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels of fill negative %" PetscInt_FMT, (PetscInt)info->levels);
7090: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7091: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7092: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7093: MatCheckPreallocated(mat, 2);
7095: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ILUFactorSymbolic, mat, row, col, 0));
7096: PetscUseTypeMethod(fact, ilufactorsymbolic, mat, row, col, info);
7097: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ILUFactorSymbolic, mat, row, col, 0));
7098: PetscFunctionReturn(PETSC_SUCCESS);
7099: }
7101: /*@
7102: MatICCFactorSymbolic - Performs symbolic incomplete
7103: Cholesky factorization for a symmetric matrix. Use
7104: `MatCholeskyFactorNumeric()` to complete the factorization.
7106: Collective
7108: Input Parameters:
7109: + fact - the factorized matrix obtained with `MatGetFactor()`
7110: . mat - the matrix to be factored
7111: . perm - row and column permutation
7112: - info - structure containing
7113: .vb
7114: levels - number of levels of fill.
7115: expected fill - as ratio of original fill.
7116: .ve
7118: Level: developer
7120: Notes:
7121: Most users should employ the `KSP` interface for linear solvers
7122: instead of working directly with matrix algebra routines such as this.
7123: See, e.g., `KSPCreate()`.
7125: This uses the definition of level of fill as in Y. Saad {cite}`saad2003`
7127: Developer Note:
7128: The Fortran interface is not autogenerated as the
7129: interface definition cannot be generated correctly [due to `MatFactorInfo`]
7131: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
7132: @*/
7133: PetscErrorCode MatICCFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
7134: {
7135: PetscFunctionBegin;
7139: PetscAssertPointer(info, 4);
7140: PetscAssertPointer(fact, 1);
7141: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7142: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels negative %" PetscInt_FMT, (PetscInt)info->levels);
7143: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
7144: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7145: MatCheckPreallocated(mat, 2);
7147: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7148: PetscUseTypeMethod(fact, iccfactorsymbolic, mat, perm, info);
7149: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7150: PetscFunctionReturn(PETSC_SUCCESS);
7151: }
7153: /*@C
7154: MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
7155: points to an array of valid matrices, they may be reused to store the new
7156: submatrices.
7158: Collective
7160: Input Parameters:
7161: + mat - the matrix
7162: . n - the number of submatrices to be extracted (on this processor, may be zero)
7163: . irow - index set of rows to extract
7164: . icol - index set of columns to extract
7165: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7167: Output Parameter:
7168: . submat - the array of submatrices
7170: Level: advanced
7172: Notes:
7173: `MatCreateSubMatrices()` can extract ONLY sequential submatrices
7174: (from both sequential and parallel matrices). Use `MatCreateSubMatrix()`
7175: to extract a parallel submatrix.
7177: Some matrix types place restrictions on the row and column
7178: indices, such as that they be sorted or that they be equal to each other.
7180: The index sets may not have duplicate entries.
7182: When extracting submatrices from a parallel matrix, each processor can
7183: form a different submatrix by setting the rows and columns of its
7184: individual index sets according to the local submatrix desired.
7186: When finished using the submatrices, the user should destroy
7187: them with `MatDestroySubMatrices()`.
7189: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
7190: original matrix has not changed from that last call to `MatCreateSubMatrices()`.
7192: This routine creates the matrices in submat; you should NOT create them before
7193: calling it. It also allocates the array of matrix pointers submat.
7195: For `MATBAIJ` matrices the index sets must respect the block structure, that is if they
7196: request one row/column in a block, they must request all rows/columns that are in
7197: that block. For example, if the block size is 2 you cannot request just row 0 and
7198: column 0.
7200: Fortran Note:
7201: One must pass in as `submat` a `Mat` array of size at least `n`+1.
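Example of Usage:
A minimal sketch extracting one sequential submatrix per process from an assembled matrix `A`; the index sets `isrow` and `iscol` are assumed to have been created beforehand, for example with `ISCreateStride()` or `ISCreateGeneral()`:
.vb
Mat *submats;
PetscCall(MatCreateSubMatrices(A, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &submats));
// ... use the sequential matrix submats[0] ...
PetscCall(MatDestroySubMatrices(1, &submats));
.ve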
7203: .seealso: [](ch_matrices), `Mat`, `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7204: @*/
7205: PetscErrorCode MatCreateSubMatrices(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7206: {
7207: PetscInt i;
7208: PetscBool eq;
7210: PetscFunctionBegin;
7213: if (n) {
7214: PetscAssertPointer(irow, 3);
7216: PetscAssertPointer(icol, 4);
7218: }
7219: PetscAssertPointer(submat, 6);
7220: if (n && scall == MAT_REUSE_MATRIX) {
7221: PetscAssertPointer(*submat, 6);
7223: }
7224: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7225: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7226: MatCheckPreallocated(mat, 1);
7227: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7228: PetscUseTypeMethod(mat, createsubmatrices, n, irow, icol, scall, submat);
7229: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7230: for (i = 0; i < n; i++) {
7231: (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
7232: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7233: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7234: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
7235: if (mat->boundtocpu && mat->bindingpropagates) {
7236: PetscCall(MatBindToCPU((*submat)[i], PETSC_TRUE));
7237: PetscCall(MatSetBindingPropagates((*submat)[i], PETSC_TRUE));
7238: }
7239: #endif
7240: }
7241: PetscFunctionReturn(PETSC_SUCCESS);
7242: }
7244: /*@C
7245: MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub communicator of mat (by pairs of `IS` that may live on subcomms).
7247: Collective
7249: Input Parameters:
7250: + mat - the matrix
7251: . n - the number of submatrices to be extracted
7252: . irow - index set of rows to extract
7253: . icol - index set of columns to extract
7254: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7256: Output Parameter:
7257: . submat - the array of submatrices
7259: Level: advanced
7261: Note:
7262: This is used by `PCGASM`
7264: .seealso: [](ch_matrices), `Mat`, `PCGASM`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7265: @*/
7266: PetscErrorCode MatCreateSubMatricesMPI(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7267: {
7268: PetscInt i;
7269: PetscBool eq;
7271: PetscFunctionBegin;
7274: if (n) {
7275: PetscAssertPointer(irow, 3);
7277: PetscAssertPointer(icol, 4);
7279: }
7280: PetscAssertPointer(submat, 6);
7281: if (n && scall == MAT_REUSE_MATRIX) {
7282: PetscAssertPointer(*submat, 6);
7284: }
7285: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7286: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7287: MatCheckPreallocated(mat, 1);
7289: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7290: PetscUseTypeMethod(mat, createsubmatricesmpi, n, irow, icol, scall, submat);
7291: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7292: for (i = 0; i < n; i++) {
7293: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7294: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7295: }
7296: PetscFunctionReturn(PETSC_SUCCESS);
7297: }
7299: /*@C
7300: MatDestroyMatrices - Destroys an array of matrices.
7302: Collective
7304: Input Parameters:
7305: + n - the number of local matrices
7306: - mat - the matrices (this is a pointer to the array of matrices)
7308: Level: advanced
7310: Notes:
7311: Frees not only the matrices, but also the array that contains the matrices
7313: For matrices obtained with `MatCreateSubMatrices()` use `MatDestroySubMatrices()`
7315: Fortran Note:
7316: Does not free the `mat` array.
7318: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroySubMatrices()`
7319: @*/
7320: PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[])
7321: {
7322: PetscInt i;
7324: PetscFunctionBegin;
7325: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7326: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7327: PetscAssertPointer(mat, 2);
7329: for (i = 0; i < n; i++) PetscCall(MatDestroy(&(*mat)[i]));
7331: /* memory is allocated even if n = 0 */
7332: PetscCall(PetscFree(*mat));
7333: PetscFunctionReturn(PETSC_SUCCESS);
7334: }
7336: /*@C
7337: MatDestroySubMatrices - Destroys a set of matrices obtained with `MatCreateSubMatrices()`.
7339: Collective
7341: Input Parameters:
7342: + n - the number of local matrices
7343: - mat - the matrices (this is a pointer to the array of matrices, just to match the calling
7344: sequence of `MatCreateSubMatrices()`)
7346: Level: advanced
7348: Note:
7349: Frees not only the matrices, but also the array that contains the matrices
7351: Fortran Note:
7352: Does not free the `mat` array.
7354: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7355: @*/
7356: PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[])
7357: {
7358: Mat mat0;
7360: PetscFunctionBegin;
7361: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7362: /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
7363: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7364: PetscAssertPointer(mat, 2);
7366: mat0 = (*mat)[0];
7367: if (mat0 && mat0->ops->destroysubmatrices) {
7368: PetscCall((*mat0->ops->destroysubmatrices)(n, mat));
7369: } else {
7370: PetscCall(MatDestroyMatrices(n, mat));
7371: }
7372: PetscFunctionReturn(PETSC_SUCCESS);
7373: }
7375: /*@
7376: MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process
7378: Collective
7380: Input Parameter:
7381: . mat - the matrix
7383: Output Parameter:
7384: . matstruct - the sequential matrix with the nonzero structure of `mat`
7386: Level: developer
7388: .seealso: [](ch_matrices), `Mat`, `MatDestroySeqNonzeroStructure()`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7389: @*/
7390: PetscErrorCode MatGetSeqNonzeroStructure(Mat mat, Mat *matstruct)
7391: {
7392: PetscFunctionBegin;
7394: PetscAssertPointer(matstruct, 2);
7397: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7398: MatCheckPreallocated(mat, 1);
7400: PetscCall(PetscLogEventBegin(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7401: PetscUseTypeMethod(mat, getseqnonzerostructure, matstruct);
7402: PetscCall(PetscLogEventEnd(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7403: PetscFunctionReturn(PETSC_SUCCESS);
7404: }
7406: /*@C
7407: MatDestroySeqNonzeroStructure - Destroys matrix obtained with `MatGetSeqNonzeroStructure()`.
7409: Collective
7411: Input Parameter:
7412: . mat - the matrix
7414: Level: advanced
7416: Note:
7417: This is not needed; one can just call `MatDestroy()`
7419: .seealso: [](ch_matrices), `Mat`, `MatGetSeqNonzeroStructure()`
7420: @*/
7421: PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
7422: {
7423: PetscFunctionBegin;
7424: PetscAssertPointer(mat, 1);
7425: PetscCall(MatDestroy(mat));
7426: PetscFunctionReturn(PETSC_SUCCESS);
7427: }
7429: /*@
7430: MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
7431: replaces the index sets by larger ones that represent submatrices with
7432: additional overlap.
7434: Collective
7436: Input Parameters:
7437: + mat - the matrix
7438: . n - the number of index sets
7439: . is - the array of index sets (these index sets will be changed during the call)
7440: - ov - the additional overlap requested
7442: Options Database Key:
7443: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7445: Level: developer
7447: Note:
7448: The computed overlap preserves the matrix block sizes when the blocks are square.
7449: That is: if a matrix nonzero for a given block would increase the overlap, all columns associated with
7450: that block are included in the overlap regardless of whether each specific column would increase the overlap.
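Example of Usage:
A minimal sketch in the style of a domain-decomposition setup; the matrix `A` and the index set `is` (initially describing the local subdomain rows) are assumptions:
.vb
PetscCall(MatIncreaseOverlap(A, 1, &is, 2));  // enlarge the subdomain by two levels of overlap
// is now describes the enlarged subdomain; extract it with MatCreateSubMatrices() if needed
.ve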
7452: .seealso: [](ch_matrices), `Mat`, `PCASM`, `MatSetBlockSize()`, `MatIncreaseOverlapSplit()`, `MatCreateSubMatrices()`
7453: @*/
7454: PetscErrorCode MatIncreaseOverlap(Mat mat, PetscInt n, IS is[], PetscInt ov)
7455: {
7456: PetscInt i, bs, cbs;
7458: PetscFunctionBegin;
7462: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7463: if (n) {
7464: PetscAssertPointer(is, 3);
7466: }
7467: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7468: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7469: MatCheckPreallocated(mat, 1);
7471: if (!ov || !n) PetscFunctionReturn(PETSC_SUCCESS);
7472: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7473: PetscUseTypeMethod(mat, increaseoverlap, n, is, ov);
7474: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7475: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
7476: if (bs == cbs) {
7477: for (i = 0; i < n; i++) PetscCall(ISSetBlockSize(is[i], bs));
7478: }
7479: PetscFunctionReturn(PETSC_SUCCESS);
7480: }
7482: PetscErrorCode MatIncreaseOverlapSplit_Single(Mat, IS *, PetscInt);
7484: /*@
7485: MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
7486: a sub communicator, replaces the index sets by larger ones that represent submatrices with
7487: additional overlap.
7489: Collective
7491: Input Parameters:
7492: + mat - the matrix
7493: . n - the number of index sets
7494: . is - the array of index sets (these index sets will be changed during the call)
7495: - ov - the additional overlap requested
7497: Options Database Key:
7498: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7500: Level: developer
7502: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatIncreaseOverlap()`
7503: @*/
7504: PetscErrorCode MatIncreaseOverlapSplit(Mat mat, PetscInt n, IS is[], PetscInt ov)
7505: {
7506: PetscInt i;
7508: PetscFunctionBegin;
7511: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7512: if (n) {
7513: PetscAssertPointer(is, 3);
7515: }
7516: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7517: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7518: MatCheckPreallocated(mat, 1);
7519: if (!ov) PetscFunctionReturn(PETSC_SUCCESS);
7520: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7521: for (i = 0; i < n; i++) PetscCall(MatIncreaseOverlapSplit_Single(mat, &is[i], ov));
7522: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7523: PetscFunctionReturn(PETSC_SUCCESS);
7524: }
7526: /*@
7527: MatGetBlockSize - Returns the matrix block size.
7529: Not Collective
7531: Input Parameter:
7532: . mat - the matrix
7534: Output Parameter:
7535: . bs - block size
7537: Level: intermediate
7539: Notes:
7540: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7542: If the block size has not been set yet this routine returns 1.
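Example of Usage:
A minimal sketch, assuming `A` is an already created matrix:
.vb
PetscInt bs;
PetscCall(MatGetBlockSize(A, &bs));  // bs is 1 if no block size has been set
.ve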
7544: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSizes()`
7545: @*/
7546: PetscErrorCode MatGetBlockSize(Mat mat, PetscInt *bs)
7547: {
7548: PetscFunctionBegin;
7550: PetscAssertPointer(bs, 2);
7551: *bs = PetscAbs(mat->rmap->bs);
7552: PetscFunctionReturn(PETSC_SUCCESS);
7553: }
7555: /*@
7556: MatGetBlockSizes - Returns the matrix block row and column sizes.
7558: Not Collective
7560: Input Parameter:
7561: . mat - the matrix
7563: Output Parameters:
7564: + rbs - row block size
7565: - cbs - column block size
7567: Level: intermediate
7569: Notes:
7570: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7571: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7573: If a block size has not been set yet this routine returns 1.
7575: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatSetBlockSizes()`
7576: @*/
7577: PetscErrorCode MatGetBlockSizes(Mat mat, PetscInt *rbs, PetscInt *cbs)
7578: {
7579: PetscFunctionBegin;
7581: if (rbs) PetscAssertPointer(rbs, 2);
7582: if (cbs) PetscAssertPointer(cbs, 3);
7583: if (rbs) *rbs = PetscAbs(mat->rmap->bs);
7584: if (cbs) *cbs = PetscAbs(mat->cmap->bs);
7585: PetscFunctionReturn(PETSC_SUCCESS);
7586: }
7588: /*@
7589: MatSetBlockSize - Sets the matrix block size.
7591: Logically Collective
7593: Input Parameters:
7594: + mat - the matrix
7595: - bs - block size
7597: Level: intermediate
7599: Notes:
7600: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7601: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7603: For `MATAIJ` matrix format, this function can be called at a later stage, provided that the specified block size
7604: is compatible with the matrix local sizes.
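Example of Usage:
A minimal sketch creating a `MATBAIJ` matrix with block size 3; the global size of 300 is illustrative and must be divisible by the block size:
.vb
Mat A;
PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 300, 300));
PetscCall(MatSetType(A, MATBAIJ));
PetscCall(MatSetBlockSize(A, 3));  // must precede preallocation or MatSetUp()
PetscCall(MatSetUp(A));
.ve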
7606: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MATAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`
7607: @*/
7608: PetscErrorCode MatSetBlockSize(Mat mat, PetscInt bs)
7609: {
7610: PetscFunctionBegin;
7613: PetscCall(MatSetBlockSizes(mat, bs, bs));
7614: PetscFunctionReturn(PETSC_SUCCESS);
7615: }
7617: typedef struct {
7618: PetscInt n;
7619: IS *is;
7620: Mat *mat;
7621: PetscObjectState nonzerostate;
7622: Mat C;
7623: } EnvelopeData;
7625: static PetscErrorCode EnvelopeDataDestroy(void *ptr)
7626: {
7627: EnvelopeData *edata = (EnvelopeData *)ptr;
7629: PetscFunctionBegin;
7630: for (PetscInt i = 0; i < edata->n; i++) PetscCall(ISDestroy(&edata->is[i]));
7631: PetscCall(PetscFree(edata->is));
7632: PetscCall(PetscFree(edata));
7633: PetscFunctionReturn(PETSC_SUCCESS);
7634: }
7636: /*@
7637: MatComputeVariableBlockEnvelope - Given a matrix whose nonzeros are in blocks along the diagonal this computes and stores
7638: the sizes of these blocks in the matrix. An individual block may lie over several processes.
7640: Collective
7642: Input Parameter:
7643: . mat - the matrix
7645: Level: intermediate
7647: Notes:
7648: There can be zeros within the blocks
7650: The blocks can overlap between processes, including lying on more than two processes
7652: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatSetVariableBlockSizes()`
7653: @*/
7654: PetscErrorCode MatComputeVariableBlockEnvelope(Mat mat)
7655: {
7656: PetscInt n, *sizes, *starts, i = 0, env = 0, tbs = 0, lblocks = 0, rstart, II, ln = 0, cnt = 0, cstart, cend;
7657: PetscInt *diag, *odiag, sc;
7658: VecScatter scatter;
7659: PetscScalar *seqv;
7660: const PetscScalar *parv;
7661: const PetscInt *ia, *ja;
7662: PetscBool set, flag, done;
7663: Mat AA = mat, A;
7664: MPI_Comm comm;
7665: PetscMPIInt rank, size, tag;
7666: MPI_Status status;
7667: PetscContainer container;
7668: EnvelopeData *edata;
7669: Vec seq, par;
7670: IS isglobal;
7672: PetscFunctionBegin;
7674: PetscCall(MatIsSymmetricKnown(mat, &set, &flag));
7675: if (!set || !flag) {
7676: /* TODO: only needs nonzero structure of transpose */
7677: PetscCall(MatTranspose(mat, MAT_INITIAL_MATRIX, &AA));
7678: PetscCall(MatAXPY(AA, 1.0, mat, DIFFERENT_NONZERO_PATTERN));
7679: }
7680: PetscCall(MatAIJGetLocalMat(AA, &A));
7681: PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7682: PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Unable to get IJ structure from matrix");
7684: PetscCall(MatGetLocalSize(mat, &n, NULL));
7685: PetscCall(PetscObjectGetNewTag((PetscObject)mat, &tag));
7686: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
7687: PetscCallMPI(MPI_Comm_size(comm, &size));
7688: PetscCallMPI(MPI_Comm_rank(comm, &rank));
7690: PetscCall(PetscMalloc2(n, &sizes, n, &starts));
7692: if (rank > 0) {
7693: PetscCallMPI(MPI_Recv(&env, 1, MPIU_INT, rank - 1, tag, comm, &status));
7694: PetscCallMPI(MPI_Recv(&tbs, 1, MPIU_INT, rank - 1, tag, comm, &status));
7695: }
7696: PetscCall(MatGetOwnershipRange(mat, &rstart, NULL));
7697: for (i = 0; i < n; i++) {
7698: env = PetscMax(env, ja[ia[i + 1] - 1]);
7699: II = rstart + i;
7700: if (env == II) {
7701: starts[lblocks] = tbs;
7702: sizes[lblocks++] = 1 + II - tbs;
7703: tbs = 1 + II;
7704: }
7705: }
7706: if (rank < size - 1) {
7707: PetscCallMPI(MPI_Send(&env, 1, MPIU_INT, rank + 1, tag, comm));
7708: PetscCallMPI(MPI_Send(&tbs, 1, MPIU_INT, rank + 1, tag, comm));
7709: }
7711: PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7712: if (!set || !flag) PetscCall(MatDestroy(&AA));
7713: PetscCall(MatDestroy(&A));
7715: PetscCall(PetscNew(&edata));
7716: PetscCall(MatGetNonzeroState(mat, &edata->nonzerostate));
7717: edata->n = lblocks;
7718: /* create IS needed for extracting blocks from the original matrix */
7719: PetscCall(PetscMalloc1(lblocks, &edata->is));
7720: for (PetscInt i = 0; i < lblocks; i++) PetscCall(ISCreateStride(PETSC_COMM_SELF, sizes[i], starts[i], 1, &edata->is[i]));
7722: /* Create the resulting inverse matrix nonzero structure with preallocation information */
7723: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &edata->C));
7724: PetscCall(MatSetSizes(edata->C, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
7725: PetscCall(MatSetBlockSizesFromMats(edata->C, mat, mat));
7726: PetscCall(MatSetType(edata->C, MATAIJ));
7728: /* Communicate the start and end of each row, from each block to the correct rank */
7729: /* TODO: Use PetscSF instead of VecScatter */
7730: for (PetscInt i = 0; i < lblocks; i++) ln += sizes[i];
7731: PetscCall(VecCreateSeq(PETSC_COMM_SELF, 2 * ln, &seq));
7732: PetscCall(VecGetArrayWrite(seq, &seqv));
7733: for (PetscInt i = 0; i < lblocks; i++) {
7734: for (PetscInt j = 0; j < sizes[i]; j++) {
7735: seqv[cnt] = starts[i];
7736: seqv[cnt + 1] = starts[i] + sizes[i];
7737: cnt += 2;
7738: }
7739: }
7740: PetscCall(VecRestoreArrayWrite(seq, &seqv));
7741: PetscCallMPI(MPI_Scan(&cnt, &sc, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
7742: sc -= cnt;
7743: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)mat), 2 * mat->rmap->n, 2 * mat->rmap->N, &par));
7744: PetscCall(ISCreateStride(PETSC_COMM_SELF, cnt, sc, 1, &isglobal));
7745: PetscCall(VecScatterCreate(seq, NULL, par, isglobal, &scatter));
7746: PetscCall(ISDestroy(&isglobal));
7747: PetscCall(VecScatterBegin(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7748: PetscCall(VecScatterEnd(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7749: PetscCall(VecScatterDestroy(&scatter));
7750: PetscCall(VecDestroy(&seq));
7751: PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));
7752: PetscCall(PetscMalloc2(mat->rmap->n, &diag, mat->rmap->n, &odiag));
7753: PetscCall(VecGetArrayRead(par, &parv));
7754: cnt = 0;
7755: PetscCall(MatGetSize(mat, NULL, &n));
7756: for (PetscInt i = 0; i < mat->rmap->n; i++) {
7757: PetscInt start, end, d = 0, od = 0;
7759: start = (PetscInt)PetscRealPart(parv[cnt]);
7760: end = (PetscInt)PetscRealPart(parv[cnt + 1]);
7761: cnt += 2;
7763: if (start < cstart) {
7764: od += cstart - start + n - cend;
7765: d += cend - cstart;
7766: } else if (start < cend) {
7767: od += n - cend;
7768: d += cend - start;
7769: } else od += n - start;
7770: if (end <= cstart) {
7771: od -= cstart - end + n - cend;
7772: d -= cend - cstart;
7773: } else if (end < cend) {
7774: od -= n - cend;
7775: d -= cend - end;
7776: } else od -= n - end;
7778: odiag[i] = od;
7779: diag[i] = d;
7780: }
7781: PetscCall(VecRestoreArrayRead(par, &parv));
7782: PetscCall(VecDestroy(&par));
7783: PetscCall(MatXAIJSetPreallocation(edata->C, mat->rmap->bs, diag, odiag, NULL, NULL));
7784: PetscCall(PetscFree2(diag, odiag));
7785: PetscCall(PetscFree2(sizes, starts));
7787: PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &container));
7788: PetscCall(PetscContainerSetPointer(container, edata));
7789: PetscCall(PetscContainerSetUserDestroy(container, (PetscErrorCode (*)(void *))EnvelopeDataDestroy));
7790: PetscCall(PetscObjectCompose((PetscObject)mat, "EnvelopeData", (PetscObject)container));
7791: PetscCall(PetscObjectDereference((PetscObject)container));
7792: PetscFunctionReturn(PETSC_SUCCESS);
7793: }
7795: /*@
7796: MatInvertVariableBlockEnvelope - Sets matrix `C` to be the inverted block diagonal of matrix `A`
7798: Collective
7800: Input Parameters:
7801: + A - the matrix
7802: - reuse - indicates if the `C` matrix was obtained from a previous call to this routine
7804: Output Parameter:
7805: . C - matrix with inverted block diagonal of `A`
7807: Level: advanced
7809: Note:
7810: For efficiency the matrix `A` should have all the nonzero entries clustered in smallish blocks along the diagonal.
7812: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatComputeBlockDiagonal()`
7813: @*/
7814: PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C)
7815: {
7816: PetscContainer container;
7817: EnvelopeData *edata;
7818: PetscObjectState nonzerostate;
7820: PetscFunctionBegin;
7821: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7822: if (!container) {
7823: PetscCall(MatComputeVariableBlockEnvelope(A));
7824: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7825: }
7826: PetscCall(PetscContainerGetPointer(container, (void **)&edata));
7827: PetscCall(MatGetNonzeroState(A, &nonzerostate));
7828: PetscCheck(nonzerostate <= edata->nonzerostate, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot handle changes to matrix nonzero structure");
7829: PetscCheck(reuse != MAT_REUSE_MATRIX || *C == edata->C, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "C matrix must be the same as previously output");
7831: PetscCall(MatCreateSubMatrices(A, edata->n, edata->is, edata->is, MAT_INITIAL_MATRIX, &edata->mat));
7832: *C = edata->C;
7834: for (PetscInt i = 0; i < edata->n; i++) {
7835: Mat D;
7836: PetscScalar *dvalues;
7838: PetscCall(MatConvert(edata->mat[i], MATSEQDENSE, MAT_INITIAL_MATRIX, &D));
7839: PetscCall(MatSetOption(*C, MAT_ROW_ORIENTED, PETSC_FALSE));
7840: PetscCall(MatSeqDenseInvert(D));
7841: PetscCall(MatDenseGetArray(D, &dvalues));
7842: PetscCall(MatSetValuesIS(*C, edata->is[i], edata->is[i], dvalues, INSERT_VALUES));
7843: PetscCall(MatDestroy(&D));
7844: }
7845: PetscCall(MatDestroySubMatrices(edata->n, &edata->mat));
7846: PetscCall(MatAssemblyBegin(*C, MAT_FINAL_ASSEMBLY));
7847: PetscCall(MatAssemblyEnd(*C, MAT_FINAL_ASSEMBLY));
7848: PetscFunctionReturn(PETSC_SUCCESS);
7849: }
7851: /*@
7852: MatSetVariableBlockSizes - Sets diagonal point-blocks of the matrix that need not be of the same size
7854: Not Collective
7856: Input Parameters:
7857: + mat - the matrix
7858: . nblocks - the number of blocks on this process; each block can only exist on a single process
7859: - bsizes - the block sizes
7861: Level: intermediate
7863: Notes:
7864: Currently used by `PCVPBJACOBI` for `MATAIJ` matrices
7866: Each variable point-block set of degrees of freedom must live on a single MPI process. That is a point block cannot straddle two MPI processes.
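Example of Usage:
A minimal sketch describing a local block structure of 2 + 3 + 2 rows for use with `PCVPBJACOBI`; the sizes are illustrative and must sum to the local number of rows of the assumed matrix `A`:
.vb
const PetscInt bsizes[] = {2, 3, 2};
PetscCall(MatSetVariableBlockSizes(A, 3, bsizes));
.ve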
7868: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`,
7869: `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI`
7870: @*/
7871: PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, const PetscInt bsizes[])
7872: {
7873: PetscInt ncnt = 0, nlocal;
7875: PetscFunctionBegin;
7877: PetscCall(MatGetLocalSize(mat, &nlocal, NULL));
7878: PetscCheck(nblocks >= 0 && nblocks <= nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks %" PetscInt_FMT " is not in [0, %" PetscInt_FMT "]", nblocks, nlocal);
7879: for (PetscInt i = 0; i < nblocks; i++) ncnt += bsizes[i];
7880: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
7881: PetscCall(PetscFree(mat->bsizes));
7882: mat->nblocks = nblocks;
7883: PetscCall(PetscMalloc1(nblocks, &mat->bsizes));
7884: PetscCall(PetscArraycpy(mat->bsizes, bsizes, nblocks));
7885: PetscFunctionReturn(PETSC_SUCCESS);
7886: }
7888: /*@C
7889: MatGetVariableBlockSizes - Gets the diagonal blocks of the matrix that need not be of the same size
7891: Not Collective; No Fortran Support
7893: Input Parameter:
7894: . mat - the matrix
7896: Output Parameters:
7897: + nblocks - the number of blocks on this process
7898: - bsizes - the block sizes
7900: Level: intermediate
7902: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
7903: @*/
7904: PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt *bsizes[])
7905: {
7906: PetscFunctionBegin;
7908: if (nblocks) *nblocks = mat->nblocks;
7909: if (bsizes) *bsizes = mat->bsizes;
7910: PetscFunctionReturn(PETSC_SUCCESS);
7911: }
7913: /*@
7914: MatSetBlockSizes - Sets the matrix block row and column sizes.
7916: Logically Collective
7918: Input Parameters:
7919: + mat - the matrix
7920: . rbs - row block size
7921: - cbs - column block size
7923: Level: intermediate
7925: Notes:
7926: Block row formats are `MATBAIJ` and `MATSBAIJ`. These formats ALWAYS have square block storage in the matrix.
7927: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7928: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7930: For `MATAIJ` matrix this function can be called at a later stage, provided that the specified block sizes
7931: are compatible with the matrix local sizes.
7933: The row and column block size determine the blocksize of the "row" and "column" vectors returned by `MatCreateVecs()`.
7935: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatGetBlockSizes()`
7936: @*/
7937: PetscErrorCode MatSetBlockSizes(Mat mat, PetscInt rbs, PetscInt cbs)
7938: {
7939: PetscFunctionBegin;
7943: PetscTryTypeMethod(mat, setblocksizes, rbs, cbs);
7944: if (mat->rmap->refcnt) {
7945: ISLocalToGlobalMapping l2g = NULL;
7946: PetscLayout nmap = NULL;
7948: PetscCall(PetscLayoutDuplicate(mat->rmap, &nmap));
7949: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->rmap->mapping, &l2g));
7950: PetscCall(PetscLayoutDestroy(&mat->rmap));
7951: mat->rmap = nmap;
7952: mat->rmap->mapping = l2g;
7953: }
7954: if (mat->cmap->refcnt) {
7955: ISLocalToGlobalMapping l2g = NULL;
7956: PetscLayout nmap = NULL;
7958: PetscCall(PetscLayoutDuplicate(mat->cmap, &nmap));
7959: if (mat->cmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->cmap->mapping, &l2g));
7960: PetscCall(PetscLayoutDestroy(&mat->cmap));
7961: mat->cmap = nmap;
7962: mat->cmap->mapping = l2g;
7963: }
7964: PetscCall(PetscLayoutSetBlockSize(mat->rmap, rbs));
7965: PetscCall(PetscLayoutSetBlockSize(mat->cmap, cbs));
7966: PetscFunctionReturn(PETSC_SUCCESS);
7967: }
7969: /*@
7970: MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
7972: Logically Collective
7974: Input Parameters:
7975: + mat - the matrix
7976: . fromRow - matrix from which to copy row block size
7977: - fromCol - matrix from which to copy column block size (can be same as fromRow)
7979: Level: developer
7981: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`
7982: @*/
7983: PetscErrorCode MatSetBlockSizesFromMats(Mat mat, Mat fromRow, Mat fromCol)
7984: {
7985: PetscFunctionBegin;
7989: if (fromRow->rmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->rmap, fromRow->rmap->bs));
7990: if (fromCol->cmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->cmap, fromCol->cmap->bs));
7991: PetscFunctionReturn(PETSC_SUCCESS);
7992: }
7994: /*@
7995: MatResidual - Default routine to calculate the residual r = b - Ax
7997: Collective
7999: Input Parameters:
8000: + mat - the matrix
8001: . b - the right-hand-side
8002: - x - the approximate solution
8004: Output Parameter:
8005: . r - location to store the residual
8007: Level: developer
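Example of Usage:
A minimal sketch, assuming `A`, `b`, and `x` are compatible; the work vector is obtained with `VecDuplicate()`:
.vb
Vec r;
PetscCall(VecDuplicate(b, &r));
PetscCall(MatResidual(A, b, x, r));  // r = b - A*x
PetscCall(VecDestroy(&r));
.ve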
8009: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `PCMGSetResidual()`
8010: @*/
8011: PetscErrorCode MatResidual(Mat mat, Vec b, Vec x, Vec r)
8012: {
8013: PetscFunctionBegin;
8019: MatCheckPreallocated(mat, 1);
8020: PetscCall(PetscLogEventBegin(MAT_Residual, mat, 0, 0, 0));
8021: if (!mat->ops->residual) {
8022: PetscCall(MatMult(mat, x, r));
8023: PetscCall(VecAYPX(r, -1.0, b));
8024: } else {
8025: PetscUseTypeMethod(mat, residual, b, x, r);
8026: }
8027: PetscCall(PetscLogEventEnd(MAT_Residual, mat, 0, 0, 0));
8028: PetscFunctionReturn(PETSC_SUCCESS);
8029: }
8031: /*MC
8032: MatGetRowIJF90 - Obtains the compressed row storage i and j indices for the local rows of a sparse matrix
8034: Synopsis:
8035: MatGetRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)
8037: Not Collective
8039: Input Parameters:
8040: + A - the matrix
8041: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8042: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8043: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8044: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8045: always used.
8047: Output Parameters:
8048: + n - number of local rows in the (possibly compressed) matrix
8049: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
8050: . ja - the column indices
8051: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8052: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8054: Level: developer
8056: Note:
8057: Use `MatRestoreRowIJF90()` when you no longer need access to the data
8059: .seealso: [](ch_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatRestoreRowIJF90()`
8060: M*/
8062: /*MC
8063: MatRestoreRowIJF90 - restores the compressed row storage i and j indices for the local rows of a sparse matrix obtained with `MatGetRowIJF90()`
8065: Synopsis:
8066: MatRestoreRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)
8068: Not Collective
8070: Input Parameters:
8071: + A - the matrix
8072: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8073: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8074: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8075: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8076: always used.
8077: . n - number of local rows in the (possibly compressed) matrix
8078: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
8079: . ja - the column indices
8080: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8081: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8083: Level: developer
8085: .seealso: [](ch_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatGetRowIJF90()`
8086: M*/
8088: /*@C
8089: MatGetRowIJ - Returns the compressed row storage i and j indices for the local rows of a sparse matrix
8091: Collective
8093: Input Parameters:
8094: + mat - the matrix
8095: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8096: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8097: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8098: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8099: always used.
8101: Output Parameters:
8102: + n - number of local rows in the (possibly compressed) matrix, use `NULL` if not needed
8103: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix, use `NULL` if not needed
8104: . ja - the column indices, use `NULL` if not needed
8105: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
8106: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
8108: Level: developer
8110: Notes:
8111: You CANNOT change any of the ia[] or ja[] values.
8113: Use `MatRestoreRowIJ()` when you are finished accessing the ia[] and ja[] values.
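Example of Usage:
A minimal sketch counting the stored entries of an assumed `MATSEQAIJ` matrix `A`; callers must handle the case done == `PETSC_FALSE`:
.vb
PetscInt        n, nz = 0;
const PetscInt *ia, *ja;
PetscBool       done;
PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
if (done) nz = ia[n];  // with shift 0, ia[n] is the total number of stored entries
PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
.ve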
8115: Fortran Notes:
8116: Use
8117: .vb
8118: PetscInt, pointer :: ia(:),ja(:)
8119: call MatGetRowIJF90(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
8120: ! Access the ith and jth entries via ia(i) and ja(j)
8121: .ve
8123: `MatGetRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatGetRowIJF90()`
8125: .seealso: [](ch_matrices), `Mat`, `MATAIJ`, `MatGetRowIJF90()`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`, `MatSeqAIJGetArray()`
8126: @*/
8127: PetscErrorCode MatGetRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8128: {
8129: PetscFunctionBegin;
8132: if (n) PetscAssertPointer(n, 5);
8133: if (ia) PetscAssertPointer(ia, 6);
8134: if (ja) PetscAssertPointer(ja, 7);
8135: if (done) PetscAssertPointer(done, 8);
8136: MatCheckPreallocated(mat, 1);
8137: if (!mat->ops->getrowij && done) *done = PETSC_FALSE;
8138: else {
8139: if (done) *done = PETSC_TRUE;
8140: PetscCall(PetscLogEventBegin(MAT_GetRowIJ, mat, 0, 0, 0));
8141: PetscUseTypeMethod(mat, getrowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8142: PetscCall(PetscLogEventEnd(MAT_GetRowIJ, mat, 0, 0, 0));
8143: }
8144: PetscFunctionReturn(PETSC_SUCCESS);
8145: }
8147: /*@C
8148: MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.
8150: Collective
8152: Input Parameters:
8153: + mat - the matrix
8154: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8155: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be
8156: symmetrized
8157: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8158: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8159: always used.
8160: . n - number of columns in the (possibly compressed) matrix
8161: . ia - the column pointers; that is ia[0] = 0, ia[col] = ia[col-1] + number of elements in that col of the matrix
8162: - ja - the row indices
8164: Output Parameter:
8165: . done - `PETSC_TRUE` or `PETSC_FALSE`, indicating whether the values have been returned
8167: Level: developer
8169: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8170: @*/
8171: PetscErrorCode MatGetColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8172: {
8173: PetscFunctionBegin;
8176: PetscAssertPointer(n, 5);
8177: if (ia) PetscAssertPointer(ia, 6);
8178: if (ja) PetscAssertPointer(ja, 7);
8179: PetscAssertPointer(done, 8);
8180: MatCheckPreallocated(mat, 1);
8181: if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
8182: else {
8183: *done = PETSC_TRUE;
8184: PetscUseTypeMethod(mat, getcolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8185: }
8186: PetscFunctionReturn(PETSC_SUCCESS);
8187: }
8189: /*@C
8190: MatRestoreRowIJ - Call after you are finished with the ia,ja indices obtained with `MatGetRowIJ()`.
8192: Collective
8194: Input Parameters:
8195: + mat - the matrix
8196: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8197: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8198: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8199: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8200: always used.
8201: . n - size of (possibly compressed) matrix
8202: . ia - the row pointers
8203: - ja - the column indices
8205: Output Parameter:
8206: . done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been returned
8208: Level: developer
8210: Note:
8211: This routine zeros out `n`, `ia`, and `ja`. This is to prevent accidental
8212: use of the array after it has been restored. If you pass `NULL`, it will
8213: not zero the pointers. Use of ia or ja after `MatRestoreRowIJ()` is invalid.
8215: Fortran Note:
8216: `MatRestoreRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatRestoreRowIJF90()`
8218: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreRowIJF90()`, `MatRestoreColumnIJ()`
8219: @*/
8220: PetscErrorCode MatRestoreRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8221: {
8222: PetscFunctionBegin;
8225: if (ia) PetscAssertPointer(ia, 6);
8226: if (ja) PetscAssertPointer(ja, 7);
8227: if (done) PetscAssertPointer(done, 8);
8228: MatCheckPreallocated(mat, 1);
8230: if (!mat->ops->restorerowij && done) *done = PETSC_FALSE;
8231: else {
8232: if (done) *done = PETSC_TRUE;
8233: PetscUseTypeMethod(mat, restorerowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8234: if (n) *n = 0;
8235: if (ia) *ia = NULL;
8236: if (ja) *ja = NULL;
8237: }
8238: PetscFunctionReturn(PETSC_SUCCESS);
8239: }
8241: /*@C
8242: MatRestoreColumnIJ - Call after you are finished with the ia,ja indices obtained with `MatGetColumnIJ()`.
8244: Collective
8246: Input Parameters:
8247: + mat - the matrix
8248: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8249: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8250: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8251: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8252: always used.
8254: Output Parameters:
8255: + n - size of (possibly compressed) matrix
8256: . ia - the column pointers
8257: . ja - the row indices
8258: - done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been returned
8260: Level: developer
8262: .seealso: [](ch_matrices), `Mat`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`
8263: @*/
8264: PetscErrorCode MatRestoreColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8265: {
8266: PetscFunctionBegin;
8269: if (ia) PetscAssertPointer(ia, 6);
8270: if (ja) PetscAssertPointer(ja, 7);
8271: PetscAssertPointer(done, 8);
8272: MatCheckPreallocated(mat, 1);
8274: if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
8275: else {
8276: *done = PETSC_TRUE;
8277: PetscUseTypeMethod(mat, restorecolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8278: if (n) *n = 0;
8279: if (ia) *ia = NULL;
8280: if (ja) *ja = NULL;
8281: }
8282: PetscFunctionReturn(PETSC_SUCCESS);
8283: }
8285: /*@
8286: MatColoringPatch - Used inside matrix coloring routines that use `MatGetRowIJ()` and/or
8287: `MatGetColumnIJ()`.
8289: Collective
8291: Input Parameters:
8292: + mat - the matrix
8293: . ncolors - maximum color value
8294: . n - number of entries in colorarray
8295: - colorarray - array indicating color for each column
8297: Output Parameter:
8298: . iscoloring - coloring generated using colorarray information
8300: Level: developer
8302: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatGetColumnIJ()`
8303: @*/
8304: PetscErrorCode MatColoringPatch(Mat mat, PetscInt ncolors, PetscInt n, ISColoringValue colorarray[], ISColoring *iscoloring)
8305: {
8306: PetscFunctionBegin;
8309: PetscAssertPointer(colorarray, 4);
8310: PetscAssertPointer(iscoloring, 5);
8311: MatCheckPreallocated(mat, 1);
8313: if (!mat->ops->coloringpatch) {
8314: PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)mat), ncolors, n, colorarray, PETSC_OWN_POINTER, iscoloring));
8315: } else {
8316: PetscUseTypeMethod(mat, coloringpatch, ncolors, n, colorarray, iscoloring);
8317: }
8318: PetscFunctionReturn(PETSC_SUCCESS);
8319: }
8321: /*@
8322: MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
8324: Logically Collective
8326: Input Parameter:
8327: . mat - the factored matrix to be reset
8329: Level: developer
8331: Notes:
8332: This routine should be used only with factored matrices formed by in-place
8333: factorization via ILU(0) (or by in-place LU factorization for the `MATSEQDENSE`
8334: format). This option can save memory, for example, when solving nonlinear
8335: systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
8336: ILU(0) preconditioner.
8338: One can specify in-place ILU(0) factorization by calling
8339: .vb
8340:     PCSetType(pc,PCILU);
8341:     PCFactorSetUseInPlace(pc,PETSC_TRUE);
8342: .ve
8343: or by using the options -pc_type ilu -pc_factor_in_place
8345: In-place factorization ILU(0) can also be used as a local
8346: solver for the blocks within the block Jacobi or additive Schwarz
8347: methods (runtime option: -sub_pc_factor_in_place). See Users-Manual: ch_pc
8348: for details on setting local solver options.
8350: Most users should employ the `KSP` interface for linear solvers
8351: instead of working directly with matrix algebra routines such as this.
8352: See, e.g., `KSPCreate()`.
8354: .seealso: [](ch_matrices), `Mat`, `PCFactorSetUseInPlace()`, `PCFactorGetUseInPlace()`
8355: @*/
8356: PetscErrorCode MatSetUnfactored(Mat mat)
8357: {
8358: PetscFunctionBegin;
8361: MatCheckPreallocated(mat, 1);
8362: mat->factortype = MAT_FACTOR_NONE;
8363: if (!mat->ops->setunfactored) PetscFunctionReturn(PETSC_SUCCESS);
8364: PetscUseTypeMethod(mat, setunfactored);
8365: PetscFunctionReturn(PETSC_SUCCESS);
8366: }
8368: /*MC
8369: MatDenseGetArrayF90 - Accesses a matrix array from Fortran
8371: Synopsis:
8372: MatDenseGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
8374: Not Collective
8376: Input Parameter:
8377: . x - matrix
8379: Output Parameters:
8380: + xx_v - the Fortran pointer to the array
8381: - ierr - error code
8383: Example of Usage:
8384: .vb
8385:       PetscScalar, pointer :: xx_v(:,:)
8386:       ....
8387:       call MatDenseGetArrayF90(x,xx_v,ierr)
8388:       a = xx_v(3,1)
8389: call MatDenseRestoreArrayF90(x,xx_v,ierr)
8390: .ve
8392: Level: advanced
8394: .seealso: [](ch_matrices), `Mat`, `MatDenseRestoreArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJGetArrayF90()`
8395: M*/
8397: /*MC
8398: MatDenseRestoreArrayF90 - Restores a matrix array that has been
8399: accessed with `MatDenseGetArrayF90()`.
8401: Synopsis:
8402: MatDenseRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
8404: Not Collective
8406: Input Parameters:
8407: + x - matrix
8408: - xx_v - the Fortran90 pointer to the array
8410: Output Parameter:
8411: . ierr - error code
8413: Example of Usage:
8414: .vb
8415:       PetscScalar, pointer :: xx_v(:,:)
8416:       ....
8417:       call MatDenseGetArrayF90(x,xx_v,ierr)
8418:       a = xx_v(3,1)
8419: call MatDenseRestoreArrayF90(x,xx_v,ierr)
8420: .ve
8422: Level: advanced
8424: .seealso: [](ch_matrices), `Mat`, `MatDenseGetArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJRestoreArrayF90()`
8425: M*/
8427: /*MC
8428: MatSeqAIJGetArrayF90 - Accesses a matrix array from Fortran.
8430: Synopsis:
8431: MatSeqAIJGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8433: Not Collective
8435: Input Parameter:
8436: . x - matrix
8438: Output Parameters:
8439: + xx_v - the Fortran pointer to the array
8440: - ierr - error code
8442: Example of Usage:
8443: .vb
8444:       PetscScalar, pointer :: xx_v(:)
8445: ....
8446: call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8447: a = xx_v(3)
8448: call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8449: .ve
8451: Level: advanced
8453: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJRestoreArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseGetArrayF90()`
8454: M*/
8456: /*MC
8457: MatSeqAIJRestoreArrayF90 - Restores a matrix array that has been
8458: accessed with `MatSeqAIJGetArrayF90()`.
8460: Synopsis:
8461: MatSeqAIJRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8463: Not Collective
8465: Input Parameters:
8466: + x - matrix
8467: - xx_v - the Fortran90 pointer to the array
8469: Output Parameter:
8470: . ierr - error code
8472: Example of Usage:
8473: .vb
8474:       PetscScalar, pointer :: xx_v(:)
8475: ....
8476: call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8477: a = xx_v(3)
8478: call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8479: .ve
8481: Level: advanced
8483: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseRestoreArrayF90()`
8484: M*/
8486: /*@
8487: MatCreateSubMatrix - Gets a single submatrix on the same number of processors
8488: as the original matrix.
8490: Collective
8492: Input Parameters:
8493: + mat - the original matrix
8494: . isrow - parallel `IS` containing the rows this processor should obtain
8495: . iscol - parallel `IS` containing all columns you wish to keep. Each process should list the columns that will be in its "diagonal part" in the new matrix.
8496: - cll - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
8498: Output Parameter:
8499: . newmat - the new submatrix, of the same type as the original matrix
8501: Level: advanced
8503: Notes:
8504: The submatrix can be multiplied by vectors that use the same layout as `iscol`.
8506: Some matrix types place restrictions on the row and column indices, such
8507: as that they be sorted or that they be equal to each other. For `MATBAIJ` and `MATSBAIJ` matrices the indices must include all rows/columns of a block;
8508: for example, if the block size is 3 one cannot select the 0 and 2 rows without selecting the 1 row.
8510: The index sets may not have duplicate entries.
8512: The first time this is called you should use a cll of `MAT_INITIAL_MATRIX`,
8513: the `MatCreateSubMatrix()` routine will create the newmat for you. Any additional calls
8514: to this routine with a mat of the same nonzero structure and with a call of `MAT_REUSE_MATRIX`
8515: will reuse the matrix generated the first time. You should call `MatDestroy()` on `newmat` when
8516: you are finished using it.
8518: The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
8519: the input matrix.
8521: If `iscol` is `NULL` then all columns are obtained (not supported in Fortran).
8523: If `isrow` and `iscol` have a nontrivial block-size, then the resulting matrix has this block-size as well. This feature
8524: is used by `PCFIELDSPLIT` to allow easy nesting of its use.
8526: Example usage:
8527: Consider the following 8x8 matrix with 34 non-zero values, that is
8528: assembled across 3 processors. Let's assume that proc0 owns 3 rows,
8529: proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
8530: as follows
8531: .vb
8532: 1 2 0 | 0 3 0 | 0 4
8533: Proc0 0 5 6 | 7 0 0 | 8 0
8534: 9 0 10 | 11 0 0 | 12 0
8535: -------------------------------------
8536: 13 0 14 | 15 16 17 | 0 0
8537: Proc1 0 18 0 | 19 20 21 | 0 0
8538: 0 0 0 | 22 23 0 | 24 0
8539: -------------------------------------
8540: Proc2 25 26 27 | 0 0 28 | 29 0
8541: 30 0 0 | 31 32 33 | 0 34
8542: .ve
8544: Suppose `isrow` = [0 1 | 4 | 6 7] and `iscol` = [1 2 | 3 4 5 | 6]. The resulting submatrix is
8546: .vb
8547: 2 0 | 0 3 0 | 0
8548: Proc0 5 6 | 7 0 0 | 8
8549: -------------------------------
8550: Proc1 18 0 | 19 20 21 | 0
8551: -------------------------------
8552: Proc2 26 27 | 0 0 28 | 29
8553: 0 0 | 31 32 33 | 0
8554: .ve
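  A minimal code sketch of the create/reuse pattern (the matrix `A` and the chosen row range are illustrative; error checking is omitted):
.vb
  IS       isrow;
  Mat      sub = NULL;
  PetscInt rstart, rend;

  MatGetOwnershipRange(A, &rstart, &rend);
  ISCreateStride(PetscObjectComm((PetscObject)A), (rend - rstart) / 2, rstart, 1, &isrow);
  MatCreateSubMatrix(A, isrow, NULL, MAT_INITIAL_MATRIX, &sub); /* first call creates sub       */
  MatCreateSubMatrix(A, isrow, NULL, MAT_REUSE_MATRIX, &sub);   /* later calls reuse the matrix */
  ISDestroy(&isrow);
  MatDestroy(&sub);
.ve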
8556: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatCreateSubMatricesMPI()`, `MatCreateSubMatrixVirtual()`, `MatSubMatrixVirtualUpdate()`
8557: @*/
8558: PetscErrorCode MatCreateSubMatrix(Mat mat, IS isrow, IS iscol, MatReuse cll, Mat *newmat)
8559: {
8560: PetscMPIInt size;
8561: Mat *local;
8562: IS iscoltmp;
8563: PetscBool flg;
8565: PetscFunctionBegin;
8569: PetscAssertPointer(newmat, 5);
8572: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
8573: PetscCheck(cll != MAT_IGNORE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_IGNORE_MATRIX");
8575: MatCheckPreallocated(mat, 1);
8576: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
8578: if (!iscol || isrow == iscol) {
8579: PetscBool stride;
8580: PetscMPIInt grabentirematrix = 0, grab;
8581: PetscCall(PetscObjectTypeCompare((PetscObject)isrow, ISSTRIDE, &stride));
8582: if (stride) {
8583: PetscInt first, step, n, rstart, rend;
8584: PetscCall(ISStrideGetInfo(isrow, &first, &step));
8585: if (step == 1) {
8586: PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
8587: if (rstart == first) {
8588: PetscCall(ISGetLocalSize(isrow, &n));
8589: if (n == rend - rstart) grabentirematrix = 1;
8590: }
8591: }
8592: }
8593: PetscCallMPI(MPIU_Allreduce(&grabentirematrix, &grab, 1, MPI_INT, MPI_MIN, PetscObjectComm((PetscObject)mat)));
8594: if (grab) {
8595: PetscCall(PetscInfo(mat, "Getting entire matrix as submatrix\n"));
8596: if (cll == MAT_INITIAL_MATRIX) {
8597: *newmat = mat;
8598: PetscCall(PetscObjectReference((PetscObject)mat));
8599: }
8600: PetscFunctionReturn(PETSC_SUCCESS);
8601: }
8602: }
8604: if (!iscol) {
8605: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), mat->cmap->n, mat->cmap->rstart, 1, &iscoltmp));
8606: } else {
8607: iscoltmp = iscol;
8608: }
8610: /* if original matrix is on just one processor then use submatrix generated */
8611: if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
8612: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_REUSE_MATRIX, &newmat));
8613: goto setproperties;
8614: } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
8615: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_INITIAL_MATRIX, &local));
8616: *newmat = *local;
8617: PetscCall(PetscFree(local));
8618: goto setproperties;
8619: } else if (!mat->ops->createsubmatrix) {
8620: /* Create a new matrix type that implements the operation using the full matrix */
8621: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8622: switch (cll) {
8623: case MAT_INITIAL_MATRIX:
8624: PetscCall(MatCreateSubMatrixVirtual(mat, isrow, iscoltmp, newmat));
8625: break;
8626: case MAT_REUSE_MATRIX:
8627: PetscCall(MatSubMatrixVirtualUpdate(*newmat, mat, isrow, iscoltmp));
8628: break;
8629: default:
8630: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX");
8631: }
8632: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8633: goto setproperties;
8634: }
8636: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8637: PetscUseTypeMethod(mat, createsubmatrix, isrow, iscoltmp, cll, newmat);
8638: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8640: setproperties:
8641: if ((*newmat)->symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->structurally_symmetric == PETSC_BOOL3_UNKNOWN && (*newmat)->spd == PETSC_BOOL3_UNKNOWN && (*newmat)->hermitian == PETSC_BOOL3_UNKNOWN) {
8642: PetscCall(ISEqualUnsorted(isrow, iscoltmp, &flg));
8643: if (flg) PetscCall(MatPropagateSymmetryOptions(mat, *newmat));
8644: }
8645: if (!iscol) PetscCall(ISDestroy(&iscoltmp));
8646: if (*newmat && cll == MAT_INITIAL_MATRIX) PetscCall(PetscObjectStateIncrease((PetscObject)*newmat));
8647: PetscFunctionReturn(PETSC_SUCCESS);
8648: }
8650: /*@
8651: MatPropagateSymmetryOptions - Propagates symmetry options set on a matrix to another matrix
8653: Not Collective
8655: Input Parameters:
8656: + A - the matrix we wish to propagate options from
8657: - B - the matrix we wish to propagate options to
8659: Level: beginner
8661: Note:
8662: Propagates the options associated to `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_HERMITIAN`, `MAT_SPD`, `MAT_SYMMETRIC`, and `MAT_STRUCTURAL_SYMMETRY_ETERNAL`
8664: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatIsSymmetricKnown()`, `MatIsSPDKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
8665: @*/
8666: PetscErrorCode MatPropagateSymmetryOptions(Mat A, Mat B)
8667: {
8668: PetscFunctionBegin;
8671: B->symmetry_eternal = A->symmetry_eternal;
8672: B->structural_symmetry_eternal = A->structural_symmetry_eternal;
8673: B->symmetric = A->symmetric;
8674: B->structurally_symmetric = A->structurally_symmetric;
8675: B->spd = A->spd;
8676: B->hermitian = A->hermitian;
8677: PetscFunctionReturn(PETSC_SUCCESS);
8678: }
8680: /*@
8681: MatStashSetInitialSize - sets the sizes of the matrix stash, that is
8682: used during the assembly process to store values that belong to
8683: other processors.
8685: Not Collective
8687: Input Parameters:
8688: + mat - the matrix
8689: . size - the initial size of the stash
8690: - bsize - the initial size of the block stash (if used)
8692: Options Database Keys:
8693: + -matstash_initial_size <size> or <size0,size1,...sizep-1> - set initial size
8694: - -matstash_block_initial_size <bsize> or <bsize0,bsize1,...bsizep-1> - set initial block size
8696: Level: intermediate
8698: Notes:
8699: The block-stash is used for values set with `MatSetValuesBlocked()` while
8700: the stash is used for values set with `MatSetValues()`
8702: Run with the option -info and look for output of the form
8703: MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs.
8704: to determine the appropriate value, MM, to use for size and
8705: MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs.
8706: to determine the value, BMM, to use for bsize
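  Example Usage:
  A minimal sketch (the stash sizes used here are illustrative values; the routine is called before setting off-process entries):
.vb
  MatStashSetInitialSize(mat, 10000, 1000); /* room for stashed scalar and block entries */
  /* ... MatSetValues() including rows owned by other processes ... */
  MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY);
.ve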
8708: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashGetInfo()`
8709: @*/
8710: PetscErrorCode MatStashSetInitialSize(Mat mat, PetscInt size, PetscInt bsize)
8711: {
8712: PetscFunctionBegin;
8715: PetscCall(MatStashSetInitialSize_Private(&mat->stash, size));
8716: PetscCall(MatStashSetInitialSize_Private(&mat->bstash, bsize));
8717: PetscFunctionReturn(PETSC_SUCCESS);
8718: }
8720: /*@
8721: MatInterpolateAdd - $w = y + A*x$ or $w = y + A^T*x$ depending on the shape of
8722: the matrix
8724: Neighbor-wise Collective
8726: Input Parameters:
8727: + A - the matrix
8728: . x - the vector to be multiplied by the interpolation operator
8729: - y - the vector to be added to the result
8731: Output Parameter:
8732: . w - the resulting vector
8734: Level: intermediate
8736: Notes:
8737: `w` may be the same vector as `y`.
8739: This allows one to use either the restriction or interpolation (its transpose)
8740: matrix to do the interpolation
8742: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8743: @*/
8744: PetscErrorCode MatInterpolateAdd(Mat A, Vec x, Vec y, Vec w)
8745: {
8746: PetscInt M, N, Ny;
8748: PetscFunctionBegin;
8753: PetscCall(MatGetSize(A, &M, &N));
8754: PetscCall(VecGetSize(y, &Ny));
8755: if (M == Ny) {
8756: PetscCall(MatMultAdd(A, x, y, w));
8757: } else {
8758: PetscCall(MatMultTransposeAdd(A, x, y, w));
8759: }
8760: PetscFunctionReturn(PETSC_SUCCESS);
8761: }
8763: /*@
8764: MatInterpolate - $y = A*x$ or $A^T*x$ depending on the shape of
8765: the matrix
8767: Neighbor-wise Collective
8769: Input Parameters:
8770: + A - the matrix
8771: - x - the vector to be interpolated
8773: Output Parameter:
8774: . y - the resulting vector
8776: Level: intermediate
8778: Note:
8779: This allows one to use either the restriction or interpolation (its transpose)
8780: matrix to do the interpolation
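  Example Usage:
  A minimal sketch, assuming `A` is an interpolation operator created elsewhere (error checking omitted):
.vb
  Vec xc, xf;

  MatCreateVecs(A, &xc, &xf); /* xc conforms to the columns of A, xf to its rows */
  MatInterpolate(A, xc, xf);  /* applies A or A^T, whichever matches the sizes   */
  MatRestrict(A, xf, xc);
  VecDestroy(&xc);
  VecDestroy(&xf);
.ve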
8782: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8783: @*/
8784: PetscErrorCode MatInterpolate(Mat A, Vec x, Vec y)
8785: {
8786: PetscInt M, N, Ny;
8788: PetscFunctionBegin;
8792: PetscCall(MatGetSize(A, &M, &N));
8793: PetscCall(VecGetSize(y, &Ny));
8794: if (M == Ny) {
8795: PetscCall(MatMult(A, x, y));
8796: } else {
8797: PetscCall(MatMultTranspose(A, x, y));
8798: }
8799: PetscFunctionReturn(PETSC_SUCCESS);
8800: }
8802: /*@
8803: MatRestrict - $y = A*x$ or $A^T*x$
8805: Neighbor-wise Collective
8807: Input Parameters:
8808: + A - the matrix
8809: - x - the vector to be restricted
8811: Output Parameter:
8812: . y - the resulting vector
8814: Level: intermediate
8816: Note:
8817: This allows one to use either the restriction or interpolation (its transpose)
8818: matrix to do the restriction
8820: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatInterpolate()`, `PCMG`
8821: @*/
8822: PetscErrorCode MatRestrict(Mat A, Vec x, Vec y)
8823: {
8824: PetscInt M, N, Nx;
8826: PetscFunctionBegin;
8830: PetscCall(MatGetSize(A, &M, &N));
8831: PetscCall(VecGetSize(x, &Nx));
8832: if (M == Nx) {
8833: PetscCall(MatMultTranspose(A, x, y));
8834: } else {
8835: PetscCall(MatMult(A, x, y));
8836: }
8837: PetscFunctionReturn(PETSC_SUCCESS);
8838: }
8840: /*@
8841: MatMatInterpolateAdd - $Y = W + A*X$ or $W + A^T*X$ depending on the shape of `A`
8843: Neighbor-wise Collective
8845: Input Parameters:
8846: + A - the matrix
8847: . x - the input dense matrix to be multiplied
8848: - w - the input dense matrix to be added to the result
8850: Output Parameter:
8851: . y - the output dense matrix
8853: Level: intermediate
8855: Note:
8856: This allows one to use either the restriction or interpolation (its transpose)
8857: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8858: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8860: .seealso: [](ch_matrices), `Mat`, `MatInterpolateAdd()`, `MatMatInterpolate()`, `MatMatRestrict()`, `PCMG`
8861: @*/
8862: PetscErrorCode MatMatInterpolateAdd(Mat A, Mat x, Mat w, Mat *y)
8863: {
8864: PetscInt M, N, Mx, Nx, Mo, My = 0, Ny = 0;
8865: PetscBool trans = PETSC_TRUE;
8866: MatReuse reuse = MAT_INITIAL_MATRIX;
8868: PetscFunctionBegin;
8874: PetscCall(MatGetSize(A, &M, &N));
8875: PetscCall(MatGetSize(x, &Mx, &Nx));
8876: if (N == Mx) trans = PETSC_FALSE;
8877: else PetscCheck(M == Mx, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx);
8878: Mo = trans ? N : M;
8879: if (*y) {
8880: PetscCall(MatGetSize(*y, &My, &Ny));
8881: if (Mo == My && Nx == Ny) {
8882: reuse = MAT_REUSE_MATRIX;
8883: } else {
8884: PetscCheck(w || *y != w, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot reuse y and w, size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT ", Y %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx, My, Ny);
8885: PetscCall(MatDestroy(y));
8886: }
8887: }
8889: if (w && *y == w) { /* this is to minimize changes in PCMG */
8890: PetscBool flg;
8892: PetscCall(PetscObjectQuery((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject *)&w));
8893: if (w) {
8894: PetscInt My, Ny, Mw, Nw;
8896: PetscCall(PetscObjectTypeCompare((PetscObject)*y, ((PetscObject)w)->type_name, &flg));
8897: PetscCall(MatGetSize(*y, &My, &Ny));
8898: PetscCall(MatGetSize(w, &Mw, &Nw));
8899: if (!flg || My != Mw || Ny != Nw) w = NULL;
8900: }
8901: if (!w) {
8902: PetscCall(MatDuplicate(*y, MAT_COPY_VALUES, &w));
8903: PetscCall(PetscObjectCompose((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject)w));
8904: PetscCall(PetscObjectDereference((PetscObject)w));
8905: } else {
8906: PetscCall(MatCopy(*y, w, UNKNOWN_NONZERO_PATTERN));
8907: }
8908: }
8909: if (!trans) {
8910: PetscCall(MatMatMult(A, x, reuse, PETSC_DETERMINE, y));
8911: } else {
8912: PetscCall(MatTransposeMatMult(A, x, reuse, PETSC_DETERMINE, y));
8913: }
8914: if (w) PetscCall(MatAXPY(*y, 1.0, w, UNKNOWN_NONZERO_PATTERN));
8915: PetscFunctionReturn(PETSC_SUCCESS);
8916: }
8918: /*@
8919: MatMatInterpolate - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8921: Neighbor-wise Collective
8923: Input Parameters:
8924: + A - the matrix
8925: - x - the input dense matrix
8927: Output Parameter:
8928: . y - the output dense matrix
8930: Level: intermediate
8932: Note:
8933: This allows one to use either the restriction or interpolation (its transpose)
8934: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8935: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
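  Example Usage:
  A minimal sketch of the reuse pattern, assuming `A` and a dense matrix `X` already exist (note that `Y` starts as `NULL` so the first call creates it):
.vb
  Mat Y = NULL;

  MatMatInterpolate(A, X, &Y); /* creates Y with the proper sizes */
  MatMatInterpolate(A, X, &Y); /* subsequent calls reuse Y        */
  MatDestroy(&Y);
.ve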
8937: .seealso: [](ch_matrices), `Mat`, `MatInterpolate()`, `MatRestrict()`, `MatMatRestrict()`, `PCMG`
8938: @*/
8939: PetscErrorCode MatMatInterpolate(Mat A, Mat x, Mat *y)
8940: {
8941: PetscFunctionBegin;
8942: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8943: PetscFunctionReturn(PETSC_SUCCESS);
8944: }
8946: /*@
8947: MatMatRestrict - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8949: Neighbor-wise Collective
8951: Input Parameters:
8952: + A - the matrix
8953: - x - the input dense matrix
8955: Output Parameter:
8956: . y - the output dense matrix
8958: Level: intermediate
8960: Note:
8961: This allows one to use either the restriction or interpolation (its transpose)
8962: matrix to do the restriction. `y` matrix can be reused if already created with the proper sizes,
8963: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8965: .seealso: [](ch_matrices), `Mat`, `MatRestrict()`, `MatInterpolate()`, `MatMatInterpolate()`, `PCMG`
8966: @*/
8967: PetscErrorCode MatMatRestrict(Mat A, Mat x, Mat *y)
8968: {
8969: PetscFunctionBegin;
8970: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8971: PetscFunctionReturn(PETSC_SUCCESS);
8972: }
8974: /*@
8975: MatGetNullSpace - retrieves the null space of a matrix.
8977: Logically Collective
8979: Input Parameter:
8980: . mat - the matrix

  Output Parameter:
8981: . nullsp - the null space object
8983: Level: developer
8985: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetNullSpace()`, `MatNullSpace`
8986: @*/
8987: PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp)
8988: {
8989: PetscFunctionBegin;
8991: PetscAssertPointer(nullsp, 2);
8992: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->nullsp) ? mat->transnullsp : mat->nullsp;
8993: PetscFunctionReturn(PETSC_SUCCESS);
8994: }
8996: /*@C
8997: MatGetNullSpaces - gets the null spaces, transpose null spaces, and near null spaces from an array of matrices
8999: Logically Collective
9001: Input Parameters:
9002: + n - the number of matrices
9003: - mat - the array of matrices
9005: Output Parameter:
9006: . nullsp - an array of null spaces, `NULL` for each matrix that does not have a null space, length 3 * `n`
9008: Level: developer
9010: Note:
9011: Call `MatRestoreNullSpaces()` to provide these to another array of matrices
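  Example Usage:
  A minimal sketch moving the null space information from one array of matrices to another of the same length `n` (the arrays `matsA` and `matsB` are illustrative):
.vb
  MatNullSpace *nsp;

  MatGetNullSpaces(n, matsA, &nsp);     /* references the null spaces of matsA        */
  MatRestoreNullSpaces(n, matsB, &nsp); /* attaches them to matsB and frees the array */
.ve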
9013: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
9014: `MatNullSpaceRemove()`, `MatRestoreNullSpaces()`
9015: @*/
9016: PetscErrorCode MatGetNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
9017: {
9018: PetscFunctionBegin;
9019: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
9020: PetscAssertPointer(mat, 2);
9021: PetscAssertPointer(nullsp, 3);
9023: PetscCall(PetscCalloc1(3 * n, nullsp));
9024: for (PetscInt i = 0; i < n; i++) {
9026: (*nullsp)[i] = mat[i]->nullsp;
9027: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[i]));
9028: (*nullsp)[n + i] = mat[i]->nearnullsp;
9029: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[n + i]));
9030: (*nullsp)[2 * n + i] = mat[i]->transnullsp;
9031: PetscCall(PetscObjectReference((PetscObject)(*nullsp)[2 * n + i]));
9032: }
9033: PetscFunctionReturn(PETSC_SUCCESS);
9034: }
9036: /*@C
9037: MatRestoreNullSpaces - sets the null spaces, transpose null spaces, and near null spaces obtained with `MatGetNullSpaces()` for an array of matrices
9039: Logically Collective
9041: Input Parameters:
9042: + n - the number of matrices
9043: . mat - the array of matrices
9044: - nullsp - an array of null spaces
9046: Level: developer
9048: Note:
9049: Call `MatGetNullSpaces()` to create `nullsp`
9051: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`,
9052: `MatNullSpaceRemove()`, `MatGetNullSpaces()`
9053: @*/
9054: PetscErrorCode MatRestoreNullSpaces(PetscInt n, Mat mat[], MatNullSpace *nullsp[])
9055: {
9056: PetscFunctionBegin;
9057: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of matrices %" PetscInt_FMT " must be non-negative", n);
9058: PetscAssertPointer(mat, 2);
9059: PetscAssertPointer(nullsp, 3);
9060: PetscAssertPointer(*nullsp, 3);
9062: for (PetscInt i = 0; i < n; i++) {
9064: PetscCall(MatSetNullSpace(mat[i], (*nullsp)[i]));
9065: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[i]));
9066: PetscCall(MatSetNearNullSpace(mat[i], (*nullsp)[n + i]));
9067: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[n + i]));
9068: PetscCall(MatSetTransposeNullSpace(mat[i], (*nullsp)[2 * n + i]));
9069: PetscCall(PetscObjectDereference((PetscObject)(*nullsp)[2 * n + i]));
9070: }
9071: PetscCall(PetscFree(*nullsp));
9072: PetscFunctionReturn(PETSC_SUCCESS);
9073: }
9075: /*@
9076: MatSetNullSpace - attaches a null space to a matrix.
9078: Logically Collective
9080: Input Parameters:
9081: + mat - the matrix
9082: - nullsp - the null space object
9084: Level: advanced
9086: Notes:
9087: This null space is used by the `KSP` linear solvers to solve singular systems.
9089: Overwrites any previous null space that may have been attached. You can remove the null space from the matrix object by calling this routine with a `nullsp` of `NULL`
9091: For inconsistent singular systems (linear systems where the right-hand side is not in the range of the operator) the `KSP` residuals will not converge
9092: to zero but the linear system will still be solved in a least squares sense.
9094: The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that for a matrix $A$
9095: mapping $R^n$ to $R^m$ ($m$ rows, $n$ columns), $R^n$ is the direct sum of the null space of $A$, $n(A)$, and the range of $A^T$, $R(A^T)$.
9096: Similarly, $R^m$ is the direct sum of $n(A^T)$ and $R(A)$. Hence the linear system $A x = b$ has a solution only if $b$ is in $R(A)$ (or, equivalently, $b$ is orthogonal to
9097: $n(A^T)$), and if $x$ is a solution then $x + \alpha n(A)$ is a solution for any $\alpha$. The minimum norm solution is orthogonal to $n(A)$. For problems without a solution,
9098: the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving $A x = \hat{b}$, where $\hat{b}$ is $b$ orthogonalized against $n(A^T)$.
9099: This $\hat{b}$ can be obtained by calling `MatNullSpaceRemove()` with the null space of the transpose of the matrix.
9101: If the matrix is known to be symmetric, because it is a `MATSBAIJ` matrix or one has called
9102: `MatSetOption`(mat,`MAT_SYMMETRIC` or possibly `MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`), this
9103: routine also automatically calls `MatSetTransposeNullSpace()`.
9105: The user should call `MatNullSpaceDestroy()` when the null space is no longer needed.
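  Example Usage:
  A minimal sketch attaching the null space of constant vectors, as arises for example with a pure Neumann Laplacian (error checking omitted):
.vb
  MatNullSpace nullsp;

  MatNullSpaceCreate(PetscObjectComm((PetscObject)A), PETSC_TRUE, 0, NULL, &nullsp);
  MatSetNullSpace(A, nullsp);
  MatNullSpaceDestroy(&nullsp);
.ve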
9107: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`,
9108: `KSPSetPCSide()`
9109: @*/
9110: PetscErrorCode MatSetNullSpace(Mat mat, MatNullSpace nullsp)
9111: {
9112: PetscFunctionBegin;
9115: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9116: PetscCall(MatNullSpaceDestroy(&mat->nullsp));
9117: mat->nullsp = nullsp;
9118: if (mat->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetTransposeNullSpace(mat, nullsp));
9119: PetscFunctionReturn(PETSC_SUCCESS);
9120: }
9122: /*@
9123: MatGetTransposeNullSpace - retrieves the null space of the transpose of a matrix.
9125: Logically Collective
9127: Input Parameter:
9128: . mat - the matrix

  Output Parameter:
9129: . nullsp - the null space object
9131: Level: developer
9133: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetTransposeNullSpace()`, `MatSetNullSpace()`, `MatGetNullSpace()`
9134: @*/
9135: PetscErrorCode MatGetTransposeNullSpace(Mat mat, MatNullSpace *nullsp)
9136: {
9137: PetscFunctionBegin;
9140: PetscAssertPointer(nullsp, 2);
9141: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->transnullsp) ? mat->nullsp : mat->transnullsp;
9142: PetscFunctionReturn(PETSC_SUCCESS);
9143: }
9145: /*@
9146: MatSetTransposeNullSpace - attaches the null space of a transpose of a matrix to the matrix
9148: Logically Collective
9150: Input Parameters:
9151: + mat - the matrix
9152: - nullsp - the null space object
9154: Level: advanced
9156: Notes:
9157: This allows solving singular linear systems defined by the transpose of the matrix using `KSP` solvers with left preconditioning.
9159: See `MatSetNullSpace()`
9161: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`, `KSPSetPCSide()`
9162: @*/
9163: PetscErrorCode MatSetTransposeNullSpace(Mat mat, MatNullSpace nullsp)
9164: {
9165: PetscFunctionBegin;
9168: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9169: PetscCall(MatNullSpaceDestroy(&mat->transnullsp));
9170: mat->transnullsp = nullsp;
9171: PetscFunctionReturn(PETSC_SUCCESS);
9172: }
9174: /*@
9175: MatSetNearNullSpace - attaches a null space to a matrix, which is often the null space (rigid body modes) of the operator without boundary conditions.
9176: This null space will be used to provide near null space vectors to a multigrid preconditioner built from this matrix.
9178: Logically Collective
9180: Input Parameters:
9181: + mat - the matrix
9182: - nullsp - the null space object
9184: Level: advanced
9186: Notes:
9187: Overwrites any previous near null space that may have been attached
9189: You can remove the null space by calling this routine with a `nullsp` of `NULL`
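  Example Usage:
  A minimal sketch attaching rigid body modes for use by an algebraic multigrid preconditioner such as `PCGAMG` (the vector `coords` of nodal coordinates, with the appropriate block size, is assumed to exist):
.vb
  MatNullSpace nearnull;

  MatNullSpaceCreateRigidBody(coords, &nearnull);
  MatSetNearNullSpace(A, nearnull);
  MatNullSpaceDestroy(&nearnull);
.ve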
9191: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatNullSpaceCreateRigidBody()`, `MatGetNearNullSpace()`
9192: @*/
9193: PetscErrorCode MatSetNearNullSpace(Mat mat, MatNullSpace nullsp)
9194: {
9195: PetscFunctionBegin;
9199: MatCheckPreallocated(mat, 1);
9200: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
9201: PetscCall(MatNullSpaceDestroy(&mat->nearnullsp));
9202: mat->nearnullsp = nullsp;
9203: PetscFunctionReturn(PETSC_SUCCESS);
9204: }
9206: /*@
9207: MatGetNearNullSpace - Get null space attached with `MatSetNearNullSpace()`
9209: Not Collective
9211: Input Parameter:
9212: . mat - the matrix
9214: Output Parameter:
9215: . nullsp - the null space object, `NULL` if not set
9217: Level: advanced
9219: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatNullSpaceCreate()`
9220: @*/
9221: PetscErrorCode MatGetNearNullSpace(Mat mat, MatNullSpace *nullsp)
9222: {
9223: PetscFunctionBegin;
9226: PetscAssertPointer(nullsp, 2);
9227: MatCheckPreallocated(mat, 1);
9228: *nullsp = mat->nearnullsp;
9229: PetscFunctionReturn(PETSC_SUCCESS);
9230: }
9232: /*@
9233: MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix.
9235: Collective
9237: Input Parameters:
9238: + mat - the matrix
9239: . row - row/column permutation
9240: - info - information on desired factorization process
9242: Level: developer
9244: Notes:
9245: This is probably truly in-place only when the level of fill is zero; otherwise new space is allocated
9246: to store the factored matrix and the previous memory is freed.
9248: Most users should employ the `KSP` interface for linear solvers
9249: instead of working directly with matrix algebra routines such as this.
9250: See, e.g., `KSPCreate()`.
9252: Developer Note:
9253: The Fortran interface is not autogenerated as the
9254: interface definition cannot be generated correctly [due to `MatFactorInfo`]
9256: .seealso: [](ch_matrices), `Mat`, `MatFactorInfo`, `MatGetFactor()`, `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
9257: @*/
9258: PetscErrorCode MatICCFactor(Mat mat, IS row, const MatFactorInfo *info)
9259: {
9260: PetscFunctionBegin;
9264: PetscAssertPointer(info, 3);
9265: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
9266: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
9267: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
9268: MatCheckPreallocated(mat, 1);
9269: PetscUseTypeMethod(mat, iccfactor, row, info);
9270: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9271: PetscFunctionReturn(PETSC_SUCCESS);
9272: }
9274: /*@
9275: MatDiagonalScaleLocal - Scales columns of a matrix given the scaling values including the
9276: ghosted ones.
9278: Not Collective
9280: Input Parameters:
9281: + mat - the matrix
9282: - diag - the diagonal values, including ghost ones
9284: Level: developer
9286: Notes:
9287: Works only for `MATMPIAIJ` and `MATMPIBAIJ` matrices
9289: This allows one to avoid the communication that would otherwise be needed to perform the scaling done with `MatDiagonalScale()`
9291: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
9292: @*/
9293: PetscErrorCode MatDiagonalScaleLocal(Mat mat, Vec diag)
9294: {
9295: PetscMPIInt size;
9297: PetscFunctionBegin;
9302: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be already assembled");
9303: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
9304: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
9305: if (size == 1) {
9306: PetscInt n, m;
9307: PetscCall(VecGetSize(diag, &n));
9308: PetscCall(MatGetSize(mat, NULL, &m));
9309: PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supported for sequential matrices when no ghost points/periodic conditions");
9310: PetscCall(MatDiagonalScale(mat, NULL, diag));
9311: } else {
9312: PetscUseMethod(mat, "MatDiagonalScaleLocal_C", (Mat, Vec), (mat, diag));
9313: }
9314: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
9315: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9316: PetscFunctionReturn(PETSC_SUCCESS);
9317: }
9319: /*@
9320: MatGetInertia - Gets the inertia from a factored matrix
9322: Collective
9324: Input Parameter:
9325: . mat - the matrix
9327: Output Parameters:
9328: + nneg - number of negative eigenvalues
9329: . nzero - number of zero eigenvalues
9330: - npos - number of positive eigenvalues
9332: Level: advanced
9334: Note:
9335: Matrix must have been factored by `MatCholeskyFactor()`
9337: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactor()`
9338: @*/
9339: PetscErrorCode MatGetInertia(Mat mat, PetscInt *nneg, PetscInt *nzero, PetscInt *npos)
9340: {
9341: PetscFunctionBegin;
9344: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9345: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Numeric factor mat is not assembled");
9346: PetscUseTypeMethod(mat, getinertia, nneg, nzero, npos);
9347: PetscFunctionReturn(PETSC_SUCCESS);
9348: }
9350: /*@C
9351: MatSolves - Solves $A x = b$, given a factored matrix, for a collection of vectors
9353: Neighbor-wise Collective
9355: Input Parameters:
9356: + mat - the factored matrix obtained with `MatGetFactor()`
9357: - b - the right-hand-side vectors
9359: Output Parameter:
9360: . x - the result vectors
9362: Level: developer
9364: Note:
9365: The vectors `b` and `x` cannot be the same. I.e., one cannot
9366: call `MatSolves`(A,x,x).
9368: .seealso: [](ch_matrices), `Mat`, `Vecs`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`, `MatSolve()`
9369: @*/
9370: PetscErrorCode MatSolves(Mat mat, Vecs b, Vecs x)
9371: {
9372: PetscFunctionBegin;
9375: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
9376: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9377: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
9379: MatCheckPreallocated(mat, 1);
9380: PetscCall(PetscLogEventBegin(MAT_Solves, mat, 0, 0, 0));
9381: PetscUseTypeMethod(mat, solves, b, x);
9382: PetscCall(PetscLogEventEnd(MAT_Solves, mat, 0, 0, 0));
9383: PetscFunctionReturn(PETSC_SUCCESS);
9384: }
9386: /*@
9387: MatIsSymmetric - Test whether a matrix is symmetric
9389: Collective
9391: Input Parameters:
9392: + A - the matrix to test
9393: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact transpose)
9395: Output Parameter:
9396: . flg - the result
9398: Level: intermediate
9400: Notes:
9401: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9403: If the matrix does not yet know if it is symmetric or not this can be an expensive operation, also available `MatIsSymmetricKnown()`
9405: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9406: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
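  Example Usage:
  A minimal sketch that avoids the expensive check when the symmetry state is already known:
.vb
  PetscBool set, flg;

  MatIsSymmetricKnown(A, &set, &flg);
  if (!set) MatIsSymmetric(A, 0.0, &flg); /* explicit (possibly expensive) check */
.ve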
9408: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetricKnown()`,
9409: `MAT_SYMMETRIC`, `MAT_SYMMETRY_ETERNAL`
9410: @*/
9411: PetscErrorCode MatIsSymmetric(Mat A, PetscReal tol, PetscBool *flg)
9412: {
9413: PetscFunctionBegin;
9415: PetscAssertPointer(flg, 3);
9416: if (A->symmetric != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->symmetric);
9417: else {
9418: if (A->ops->issymmetric) PetscUseTypeMethod(A, issymmetric, tol, flg);
9419: else PetscCall(MatIsTranspose(A, A, tol, flg));
9420: if (!tol) PetscCall(MatSetOption(A, MAT_SYMMETRIC, *flg));
9421: }
9422: PetscFunctionReturn(PETSC_SUCCESS);
9423: }
9425: /*@
9426: MatIsHermitian - Test whether a matrix is Hermitian
9428: Collective
9430: Input Parameters:
9431: + A - the matrix to test
9432: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact Hermitian)
9434: Output Parameter:
9435: . flg - the result
9437: Level: intermediate
9439: Notes:
9440: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9442: If the matrix does not yet know if it is Hermitian or not this can be an expensive operation, also available `MatIsHermitianKnown()`
9444: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9445: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9447: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetric()`, `MatSetOption()`,
9448: `MatIsSymmetricKnown()`, `MatIsSymmetric()`, `MAT_HERMITIAN`, `MAT_SYMMETRY_ETERNAL`
9449: @*/
9450: PetscErrorCode MatIsHermitian(Mat A, PetscReal tol, PetscBool *flg)
9451: {
9452: PetscFunctionBegin;
9454: PetscAssertPointer(flg, 3);
9455: if (A->hermitian != PETSC_BOOL3_UNKNOWN && !tol) *flg = PetscBool3ToBool(A->hermitian);
9456: else {
9457: if (A->ops->ishermitian) PetscUseTypeMethod(A, ishermitian, tol, flg);
9458: else PetscCall(MatIsHermitianTranspose(A, A, tol, flg));
9459: if (!tol) PetscCall(MatSetOption(A, MAT_HERMITIAN, *flg));
9460: }
9461: PetscFunctionReturn(PETSC_SUCCESS);
9462: }
9464: /*@
9465: MatIsSymmetricKnown - Checks if a matrix knows if it is symmetric or not and its symmetric state
9467: Not Collective
9469: Input Parameter:
9470: . A - the matrix to check
9472: Output Parameters:
9473: + set - `PETSC_TRUE` if the matrix knows its symmetry state (this tells you if the next flag is valid)
9474: - flg - the result (only valid if set is `PETSC_TRUE`)
9476: Level: advanced
9478: Notes:
9479: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsSymmetric()`
9480: if you want it explicitly checked
9482: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9483: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9485: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9486: @*/
9487: PetscErrorCode MatIsSymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9488: {
9489: PetscFunctionBegin;
9491: PetscAssertPointer(set, 2);
9492: PetscAssertPointer(flg, 3);
9493: if (A->symmetric != PETSC_BOOL3_UNKNOWN) {
9494: *set = PETSC_TRUE;
9495: *flg = PetscBool3ToBool(A->symmetric);
9496: } else {
9497: *set = PETSC_FALSE;
9498: }
9499: PetscFunctionReturn(PETSC_SUCCESS);
9500: }
9502: /*@
9503: MatIsSPDKnown - Checks if a matrix knows if it is symmetric positive definite or not and its symmetric positive definite state
9505: Not Collective
9507: Input Parameter:
9508: . A - the matrix to check
9510: Output Parameters:
9511: + set - `PETSC_TRUE` if the matrix knows its symmetric positive definite state (this tells you if the next flag is valid)
9512: - flg - the result (only valid if set is `PETSC_TRUE`)
9514: Level: advanced
9516: Notes:
9517: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`).
9519: One can declare that a matrix is SPD with `MatSetOption`(mat,`MAT_SPD`,`PETSC_TRUE`) and if it is known to remain SPD
9520: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SPD_ETERNAL`,`PETSC_TRUE`)
9522: .seealso: [](ch_matrices), `Mat`, `MAT_SPD_ETERNAL`, `MAT_SPD`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9523: @*/
9524: PetscErrorCode MatIsSPDKnown(Mat A, PetscBool *set, PetscBool *flg)
9525: {
9526: PetscFunctionBegin;
9528: PetscAssertPointer(set, 2);
9529: PetscAssertPointer(flg, 3);
9530: if (A->spd != PETSC_BOOL3_UNKNOWN) {
9531: *set = PETSC_TRUE;
9532: *flg = PetscBool3ToBool(A->spd);
9533: } else {
9534: *set = PETSC_FALSE;
9535: }
9536: PetscFunctionReturn(PETSC_SUCCESS);
9537: }
9539: /*@
9540: MatIsHermitianKnown - Checks if a matrix knows if it is Hermitian or not and its Hermitian state
9542: Not Collective
9544: Input Parameter:
9545: . A - the matrix to check
9547: Output Parameters:
9548: + set - `PETSC_TRUE` if the matrix knows its Hermitian state (this tells you if the next flag is valid)
9549: - flg - the result (only valid if set is `PETSC_TRUE`)
9551: Level: advanced
9553: Notes:
9554: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsHermitian()`
9555: if you want it explicitly checked
9557: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9558: after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9560: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MAT_HERMITIAN`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`
9561: @*/
9562: PetscErrorCode MatIsHermitianKnown(Mat A, PetscBool *set, PetscBool *flg)
9563: {
9564: PetscFunctionBegin;
9566: PetscAssertPointer(set, 2);
9567: PetscAssertPointer(flg, 3);
9568: if (A->hermitian != PETSC_BOOL3_UNKNOWN) {
9569: *set = PETSC_TRUE;
9570: *flg = PetscBool3ToBool(A->hermitian);
9571: } else {
9572: *set = PETSC_FALSE;
9573: }
9574: PetscFunctionReturn(PETSC_SUCCESS);
9575: }
9577: /*@
9578: MatIsStructurallySymmetric - Test whether a matrix is structurally symmetric
9580: Collective
9582: Input Parameter:
9583: . A - the matrix to test
9585: Output Parameter:
9586: . flg - the result
9588: Level: intermediate
9590: Notes:
9591: If the matrix does not yet know whether it is structurally symmetric this can be an expensive operation; also available is `MatIsStructurallySymmetricKnown()`
9593: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9594: symmetric after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9596: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsSymmetric()`, `MatSetOption()`, `MatIsStructurallySymmetricKnown()`
9597: @*/
9598: PetscErrorCode MatIsStructurallySymmetric(Mat A, PetscBool *flg)
9599: {
9600: PetscFunctionBegin;
9602: PetscAssertPointer(flg, 2);
9603: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9604: *flg = PetscBool3ToBool(A->structurally_symmetric);
9605: } else {
9606: PetscUseTypeMethod(A, isstructurallysymmetric, flg);
9607: PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, *flg));
9608: }
9609: PetscFunctionReturn(PETSC_SUCCESS);
9610: }
9612: /*@
9613: MatIsStructurallySymmetricKnown - Checks if a matrix knows if it is structurally symmetric or not and its structurally symmetric state
9615: Not Collective
9617: Input Parameter:
9618: . A - the matrix to check
9620: Output Parameters:
9621: + set - `PETSC_TRUE` if the matrix knows its structurally symmetric state (this tells you if the next flag is valid)
9622: - flg - the result (only valid if set is `PETSC_TRUE`)
9624: Level: advanced
9626: Notes:
9627: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9628: symmetric after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9630: Use `MatIsStructurallySymmetric()` to explicitly check if a matrix is structurally symmetric (this is an expensive operation)
9632: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9633: @*/
9634: PetscErrorCode MatIsStructurallySymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9635: {
9636: PetscFunctionBegin;
9638: PetscAssertPointer(set, 2);
9639: PetscAssertPointer(flg, 3);
9640: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9641: *set = PETSC_TRUE;
9642: *flg = PetscBool3ToBool(A->structurally_symmetric);
9643: } else {
9644: *set = PETSC_FALSE;
9645: }
9646: PetscFunctionReturn(PETSC_SUCCESS);
9647: }
9649: /*@
9650: MatStashGetInfo - Gets how many values are currently in the matrix stash, i.e. need
9651: to be communicated to other processors during the `MatAssemblyBegin()`/`MatAssemblyEnd()` process
9653: Not Collective
9655: Input Parameter:
9656: . mat - the matrix
9658: Output Parameters:
9659: + nstash - the size of the stash
9660: . reallocs - the number of additional mallocs incurred
9661: . bnstash - the size of the block stash
9662: - breallocs - the number of additional mallocs incurred in the block stash
9664: Level: advanced
9666: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashSetInitialSize()`
9667: @*/
9668: PetscErrorCode MatStashGetInfo(Mat mat, PetscInt *nstash, PetscInt *reallocs, PetscInt *bnstash, PetscInt *breallocs)
9669: {
9670: PetscFunctionBegin;
9671: PetscCall(MatStashGetInfo_Private(&mat->stash, nstash, reallocs));
9672: PetscCall(MatStashGetInfo_Private(&mat->bstash, bnstash, breallocs));
9673: PetscFunctionReturn(PETSC_SUCCESS);
9674: }
9676: /*@
9677: MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same
9678: parallel layout, `PetscLayout` for rows and columns
9680: Collective
9682: Input Parameter:
9683: . mat - the matrix
9685: Output Parameters:
9686: + right - (optional) vector that the matrix can be multiplied against
9687: - left - (optional) vector that the matrix vector product can be stored in
9689: Level: advanced
9691: Notes:
9692: The blocksize of the returned vectors is determined by the row and column block sizes set with `MatSetBlockSizes()` or the single blocksize (same for both) set by `MatSetBlockSize()`.
9694: These are new vectors that are not owned by the `Mat`; they should be destroyed with `VecDestroy()` when no longer needed
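  Example Usage:
  A minimal sketch creating work vectors that conform to the layout of a matrix `A` (error checking omitted):
.vb
  Vec x, b;

  MatCreateVecs(A, &x, &b); /* x conforms to the columns of A, b to its rows */
  MatMult(A, x, b);
  VecDestroy(&x);
  VecDestroy(&b);
.ve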
9696: .seealso: [](ch_matrices), `Mat`, `Vec`, `VecCreate()`, `VecDestroy()`, `DMCreateGlobalVector()`
9697: @*/
9698: PetscErrorCode MatCreateVecs(Mat mat, Vec *right, Vec *left)
9699: {
9700: PetscFunctionBegin;
9703: if (mat->ops->getvecs) {
9704: PetscUseTypeMethod(mat, getvecs, right, left);
9705: } else {
9706: if (right) {
9707: PetscCheck(mat->cmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for columns not yet setup");
9708: PetscCall(VecCreateWithLayout_Private(mat->cmap, right));
9709: PetscCall(VecSetType(*right, mat->defaultvectype));
9710: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9711: if (mat->boundtocpu && mat->bindingpropagates) {
9712: PetscCall(VecSetBindingPropagates(*right, PETSC_TRUE));
9713: PetscCall(VecBindToCPU(*right, PETSC_TRUE));
9714: }
9715: #endif
9716: }
9717: if (left) {
9718: PetscCheck(mat->rmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for rows not yet setup");
9719: PetscCall(VecCreateWithLayout_Private(mat->rmap, left));
9720: PetscCall(VecSetType(*left, mat->defaultvectype));
9721: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9722: if (mat->boundtocpu && mat->bindingpropagates) {
9723: PetscCall(VecSetBindingPropagates(*left, PETSC_TRUE));
9724: PetscCall(VecBindToCPU(*left, PETSC_TRUE));
9725: }
9726: #endif
9727: }
9728: }
9729: PetscFunctionReturn(PETSC_SUCCESS);
9730: }
9732: /*@
9733: MatFactorInfoInitialize - Initializes a `MatFactorInfo` data structure
9734: with default values.
9736: Not Collective
9738: Input Parameter:
9739: . info - the `MatFactorInfo` data structure
9741: Level: developer
9743: Notes:
9744: The solvers are generally used through the `KSP` and `PC` objects, for example
9745: `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`
9747: Once the data structure is initialized one may change certain entries as desired for the particular factorization to be performed
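  Example Usage:
  A minimal sketch (changing the `fill` entry is shown only as an illustration of adjusting the defaults):
.vb
  MatFactorInfo info;

  MatFactorInfoInitialize(&info);
  info.fill = 2.0; /* expected fill ratio for the factorization */
.ve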
9749: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorInfo`
9750: @*/
9751: PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info)
9752: {
9753: PetscFunctionBegin;
9754: PetscCall(PetscMemzero(info, sizeof(MatFactorInfo)));
9755: PetscFunctionReturn(PETSC_SUCCESS);
9756: }
9758: /*@
9759: MatFactorSetSchurIS - Set indices corresponding to the Schur complement you wish to have computed
9761: Collective
9763: Input Parameters:
9764: + mat - the factored matrix
9765: - is - the index set defining the Schur indices (0-based)
9767: Level: advanced
9769: Notes:
9770: Call `MatFactorSolveSchurComplement()` or `MatFactorSolveSchurComplementTranspose()` after this call to solve a Schur complement system.
9772: You can call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` after this call.
9774: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
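  Example Usage:
  A minimal sketch with `MATSOLVERMUMPS` (the index set `is` selecting the Schur rows/columns and the vectors `rhs` and `sol` are assumed to exist; error checking omitted):
.vb
  Mat           F;
  IS            rperm, cperm;
  MatFactorInfo info;

  MatFactorInfoInitialize(&info);
  MatGetFactor(A, MATSOLVERMUMPS, MAT_FACTOR_LU, &F);
  MatFactorSetSchurIS(F, is);
  MatGetOrdering(A, MATORDERINGNATURAL, &rperm, &cperm);
  MatLUFactorSymbolic(F, A, rperm, cperm, &info);
  MatLUFactorNumeric(F, A, &info);
  MatFactorSolveSchurComplement(F, rhs, sol);
  ISDestroy(&rperm);
  ISDestroy(&cperm);
  MatDestroy(&F);
.ve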
9776: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorGetSchurComplement()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSolveSchurComplement()`,
9777: `MatFactorSolveSchurComplementTranspose()`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9778: @*/
9779: PetscErrorCode MatFactorSetSchurIS(Mat mat, IS is)
9780: {
9781: PetscErrorCode (*f)(Mat, IS);
9783: PetscFunctionBegin;
9788: PetscCheckSameComm(mat, 1, is, 2);
9789: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
9790: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorSetSchurIS_C", &f));
9791: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "The selected MatSolverType does not support Schur complement computation. You should use MATSOLVERMUMPS or MATSOLVERMKL_PARDISO");
9792: PetscCall(MatDestroy(&mat->schur));
9793: PetscCall((*f)(mat, is));
9794: PetscCheck(mat->schur, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "Schur complement has not been created");
9795: PetscFunctionReturn(PETSC_SUCCESS);
9796: }
9798: /*@
9799: MatFactorCreateSchurComplement - Create a Schur complement matrix object using Schur data computed during the factorization step
9801: Logically Collective
9803: Input Parameters:
9804: + F - the factored matrix obtained by calling `MatGetFactor()`
9805: . S - location where to return the Schur complement, can be `NULL`
9806: - status - the status of the Schur complement matrix, can be `NULL`
9808: Level: advanced
9810: Notes:
9811: You must call `MatFactorSetSchurIS()` before calling this routine.
9813: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9815: The routine provides a copy of the Schur matrix stored within the solver data structures.
9816: The caller must destroy the object when it is no longer needed.
9817: If `MatFactorInvertSchurComplement()` has been called, the routine gets back the inverse.
9819: Use `MatFactorGetSchurComplement()` to get access to the Schur complement matrix inside the factored matrix instead of making a copy of it (which this function does)
9821: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9823: Developer Note:
9824: The reason this routine exists is because the representation of the Schur complement within the factor matrix may be different than a standard PETSc
9825: matrix representation and we normally do not want to use the time or memory to make a copy as a regular PETSc matrix.
9827: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorSchurStatus`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9828: @*/
9829: PetscErrorCode MatFactorCreateSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9830: {
9831: PetscFunctionBegin;
9833: if (S) PetscAssertPointer(S, 2);
9834: if (status) PetscAssertPointer(status, 3);
9835: if (S) {
9836: PetscErrorCode (*f)(Mat, Mat *);
9838: PetscCall(PetscObjectQueryFunction((PetscObject)F, "MatFactorCreateSchurComplement_C", &f));
9839: if (f) {
9840: PetscCall((*f)(F, S));
9841: } else {
9842: PetscCall(MatDuplicate(F->schur, MAT_COPY_VALUES, S));
9843: }
9844: }
9845: if (status) *status = F->schur_status;
9846: PetscFunctionReturn(PETSC_SUCCESS);
9847: }
9849: /*@
9850: MatFactorGetSchurComplement - Gets access to a Schur complement matrix using the current Schur data within a factored matrix
9852: Logically Collective
9854: Input Parameters:
9855: + F - the factored matrix obtained by calling `MatGetFactor()`
9856: . S - location where to return the Schur complement, can be `NULL`
9857: - status - the status of the Schur complement matrix, can be `NULL`
9859: Level: advanced
9861: Notes:
9862: You must call `MatFactorSetSchurIS()` before calling this routine.
9864: Schur complement mode is currently implemented for sequential matrices with factor type of `MATSOLVERMUMPS`
9866: The routine returns the Schur complement stored within the data structures of the solver.
9868: If `MatFactorInvertSchurComplement()` has previously been called, the returned matrix is actually the inverse of the Schur complement.
9870: The returned matrix should not be destroyed; the caller should call `MatFactorRestoreSchurComplement()` when the object is no longer needed.
9872: Use `MatFactorCreateSchurComplement()` to create a copy of the Schur complement matrix that is within a factored matrix
9874: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
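Example Usage:
A minimal sketch of the get/restore pattern; `F` is assumed to be a factored matrix for which `MatFactorSetSchurIS()` was called before factorization.
.vb
  Mat                  S;
  MatFactorSchurStatus status;

  PetscCall(MatFactorGetSchurComplement(F, &S, &status));
  // ... use S in place (do not destroy it) ...
  PetscCall(MatFactorRestoreSchurComplement(F, &S, status));
.ve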
9876: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9877: @*/
9878: PetscErrorCode MatFactorGetSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9879: {
9880: PetscFunctionBegin;
9882: if (S) {
9883: PetscAssertPointer(S, 2);
9884: *S = F->schur;
9885: }
9886: if (status) {
9887: PetscAssertPointer(status, 3);
9888: *status = F->schur_status;
9889: }
9890: PetscFunctionReturn(PETSC_SUCCESS);
9891: }
9893: static PetscErrorCode MatFactorUpdateSchurStatus_Private(Mat F)
9894: {
9895: Mat S = F->schur;
9897: PetscFunctionBegin;
9898: switch (F->schur_status) {
9899: case MAT_FACTOR_SCHUR_UNFACTORED: // fall-through
9900: case MAT_FACTOR_SCHUR_INVERTED:
9901: if (S) {
9902: S->ops->solve = NULL;
9903: S->ops->matsolve = NULL;
9904: S->ops->solvetranspose = NULL;
9905: S->ops->matsolvetranspose = NULL;
9906: S->ops->solveadd = NULL;
9907: S->ops->solvetransposeadd = NULL;
9908: S->factortype = MAT_FACTOR_NONE;
9909: PetscCall(PetscFree(S->solvertype));
9910: }
9911: case MAT_FACTOR_SCHUR_FACTORED: // fall-through
9912: break;
9913: default:
9914: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9915: }
9916: PetscFunctionReturn(PETSC_SUCCESS);
9917: }
9919: /*@
9920: MatFactorRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to `MatFactorGetSchurComplement()`
9922: Logically Collective
9924: Input Parameters:
9925: + F - the factored matrix obtained by calling `MatGetFactor()`
9926: . S - location where the Schur complement is stored
9927: - status - the status of the Schur complement matrix (see `MatFactorSchurStatus`)
9929: Level: advanced
9931: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9932: @*/
9933: PetscErrorCode MatFactorRestoreSchurComplement(Mat F, Mat *S, MatFactorSchurStatus status)
9934: {
9935: PetscFunctionBegin;
9937: if (S) {
9939: *S = NULL;
9940: }
9941: F->schur_status = status;
9942: PetscCall(MatFactorUpdateSchurStatus_Private(F));
9943: PetscFunctionReturn(PETSC_SUCCESS);
9944: }
9946: /*@
9947: MatFactorSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed during the factorization step
9949: Logically Collective
9951: Input Parameters:
9952: + F - the factored matrix obtained by calling `MatGetFactor()`
9953: . rhs - location where the right-hand side of the Schur complement system is stored
9954: - sol - location where the solution of the Schur complement system has to be returned
9956: Level: advanced
9958: Notes:
9959: The sizes of the vectors should match the size of the Schur complement
9961: Must be called after `MatFactorSetSchurIS()`
9963: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplement()`
9964: @*/
9965: PetscErrorCode MatFactorSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
9966: {
9967: PetscFunctionBegin;
9974: PetscCheckSameComm(F, 1, rhs, 2);
9975: PetscCheckSameComm(F, 1, sol, 3);
9976: PetscCall(MatFactorFactorizeSchurComplement(F));
9977: switch (F->schur_status) {
9978: case MAT_FACTOR_SCHUR_FACTORED:
9979: PetscCall(MatSolveTranspose(F->schur, rhs, sol));
9980: break;
9981: case MAT_FACTOR_SCHUR_INVERTED:
9982: PetscCall(MatMultTranspose(F->schur, rhs, sol));
9983: break;
9984: default:
9985: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9986: }
9987: PetscFunctionReturn(PETSC_SUCCESS);
9988: }
9990: /*@
9991: MatFactorSolveSchurComplement - Solve the Schur complement system computed during the factorization step
9993: Logically Collective
9995: Input Parameters:
9996: + F - the factored matrix obtained by calling `MatGetFactor()`
9997: . rhs - location where the right-hand side of the Schur complement system is stored
9998: - sol - location where the solution of the Schur complement system has to be returned
10000: Level: advanced
10002: Notes:
10003: The sizes of the vectors should match the size of the Schur complement
10005: Must be called after `MatFactorSetSchurIS()`
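Example Usage:
A minimal sketch; `F` is assumed factored with a Schur `IS` set, and `rhs`/`sol` are vectors created by the caller with sizes matching the Schur complement.
.vb
  PetscCall(MatFactorSolveSchurComplement(F, rhs, sol));
  // or, for the transposed system
  PetscCall(MatFactorSolveSchurComplementTranspose(F, rhs, sol));
.ve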
10007: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplementTranspose()`
10008: @*/
10009: PetscErrorCode MatFactorSolveSchurComplement(Mat F, Vec rhs, Vec sol)
10010: {
10011: PetscFunctionBegin;
10018: PetscCheckSameComm(F, 1, rhs, 2);
10019: PetscCheckSameComm(F, 1, sol, 3);
10020: PetscCall(MatFactorFactorizeSchurComplement(F));
10021: switch (F->schur_status) {
10022: case MAT_FACTOR_SCHUR_FACTORED:
10023: PetscCall(MatSolve(F->schur, rhs, sol));
10024: break;
10025: case MAT_FACTOR_SCHUR_INVERTED:
10026: PetscCall(MatMult(F->schur, rhs, sol));
10027: break;
10028: default:
10029: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
10030: }
10031: PetscFunctionReturn(PETSC_SUCCESS);
10032: }
10034: PETSC_EXTERN PetscErrorCode MatSeqDenseInvertFactors_Private(Mat);
10035: #if PetscDefined(HAVE_CUDA)
10036: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseCUDAInvertFactors_Internal(Mat);
10037: #endif
10039: /* Schur status updated in the interface */
10040: static PetscErrorCode MatFactorInvertSchurComplement_Private(Mat F)
10041: {
10042: Mat S = F->schur;
10044: PetscFunctionBegin;
10045: if (S) {
10046: PetscMPIInt size;
10047: PetscBool isdense, isdensecuda;
10049: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)S), &size));
10050: PetscCheck(size <= 1, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not yet implemented");
10051: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSE, &isdense));
10052: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSECUDA, &isdensecuda));
10053: PetscCheck(isdense || isdensecuda, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not implemented for type %s", ((PetscObject)S)->type_name);
10054: PetscCall(PetscLogEventBegin(MAT_FactorInvS, F, 0, 0, 0));
10055: if (isdense) {
10056: PetscCall(MatSeqDenseInvertFactors_Private(S));
10057: } else if (isdensecuda) {
10058: #if defined(PETSC_HAVE_CUDA)
10059: PetscCall(MatSeqDenseCUDAInvertFactors_Internal(S));
10060: #endif
10061: }
10062: // TODO: handle a MATSEQDENSEHIP Schur complement here as well
10063: PetscCall(PetscLogEventEnd(MAT_FactorInvS, F, 0, 0, 0));
10064: }
10065: PetscFunctionReturn(PETSC_SUCCESS);
10066: }
10068: /*@
10069: MatFactorInvertSchurComplement - Invert the Schur complement matrix computed during the factorization step
10071: Logically Collective
10073: Input Parameter:
10074: . F - the factored matrix obtained by calling `MatGetFactor()`
10076: Level: advanced
10078: Notes:
10079: Must be called after `MatFactorSetSchurIS()`.
10081: Call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` AFTER this call to actually compute the inverse and get access to it.
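Example Usage:
A minimal sketch; `F` is assumed factored with a Schur `IS` set, and `Sinv` is an illustrative name for the copy obtained afterwards.
.vb
  PetscCall(MatFactorInvertSchurComplement(F));
  PetscCall(MatFactorCreateSchurComplement(F, &Sinv, NULL)); // Sinv now holds the inverse
  // ... use Sinv ...
  PetscCall(MatDestroy(&Sinv));
.ve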
10083: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorCreateSchurComplement()`
10084: @*/
10085: PetscErrorCode MatFactorInvertSchurComplement(Mat F)
10086: {
10087: PetscFunctionBegin;
10090: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED) PetscFunctionReturn(PETSC_SUCCESS);
10091: PetscCall(MatFactorFactorizeSchurComplement(F));
10092: PetscCall(MatFactorInvertSchurComplement_Private(F));
10093: F->schur_status = MAT_FACTOR_SCHUR_INVERTED;
10094: PetscFunctionReturn(PETSC_SUCCESS);
10095: }
10097: /*@
10098: MatFactorFactorizeSchurComplement - Factorize the Schur complement matrix computed during the factorization step
10100: Logically Collective
10102: Input Parameter:
10103: . F - the factored matrix obtained by calling `MatGetFactor()`
10105: Level: advanced
10107: Note:
10108: Must be called after `MatFactorSetSchurIS()`
10110: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorInvertSchurComplement()`
10111: @*/
10112: PetscErrorCode MatFactorFactorizeSchurComplement(Mat F)
10113: {
10114: MatFactorInfo info;
10116: PetscFunctionBegin;
10119: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED || F->schur_status == MAT_FACTOR_SCHUR_FACTORED) PetscFunctionReturn(PETSC_SUCCESS);
10120: PetscCall(PetscLogEventBegin(MAT_FactorFactS, F, 0, 0, 0));
10121: PetscCall(PetscMemzero(&info, sizeof(MatFactorInfo)));
10122: if (F->factortype == MAT_FACTOR_CHOLESKY) { /* LDL^t regarded as Cholesky */
10123: PetscCall(MatCholeskyFactor(F->schur, NULL, &info));
10124: } else {
10125: PetscCall(MatLUFactor(F->schur, NULL, NULL, &info));
10126: }
10127: PetscCall(PetscLogEventEnd(MAT_FactorFactS, F, 0, 0, 0));
10128: F->schur_status = MAT_FACTOR_SCHUR_FACTORED;
10129: PetscFunctionReturn(PETSC_SUCCESS);
10130: }
10132: /*@
10133: MatPtAP - Creates the matrix product $C = P^T * A * P$
10135: Neighbor-wise Collective
10137: Input Parameters:
10138: + A - the matrix
10139: . P - the projection matrix
10140: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10141: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(P)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10142: if the result is a dense matrix this is irrelevant
10144: Output Parameter:
10145: . C - the product matrix
10147: Level: intermediate
10149: Notes:
10150: C will be created and must be destroyed by the user with `MatDestroy()`.
10152: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
10154: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
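Example Usage:
A minimal sketch of the initial/reuse pattern; `A`, `P`, and `C` are illustrative names, and the nonzero patterns of `A` and `P` are assumed unchanged between calls.
.vb
  PetscCall(MatPtAP(A, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C)); // C = P^T * A * P
  // ... the values (but not the nonzero patterns) of A or P change ...
  PetscCall(MatPtAP(A, P, MAT_REUSE_MATRIX, PETSC_CURRENT, &C));
  PetscCall(MatDestroy(&C));
.ve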
10156: Developer Note:
10157: For matrix types without a special implementation, the function falls back to `MatMatMult()` followed by `MatTransposeMatMult()`.
10159: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatRARt()`
10160: @*/
10161: PetscErrorCode MatPtAP(Mat A, Mat P, MatReuse scall, PetscReal fill, Mat *C)
10162: {
10163: PetscFunctionBegin;
10164: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10165: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10167: if (scall == MAT_INITIAL_MATRIX) {
10168: PetscCall(MatProductCreate(A, P, NULL, C));
10169: PetscCall(MatProductSetType(*C, MATPRODUCT_PtAP));
10170: PetscCall(MatProductSetAlgorithm(*C, "default"));
10171: PetscCall(MatProductSetFill(*C, fill));
10173: (*C)->product->api_user = PETSC_TRUE;
10174: PetscCall(MatProductSetFromOptions(*C));
10175: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and P %s", MatProductTypes[MATPRODUCT_PtAP], ((PetscObject)A)->type_name, ((PetscObject)P)->type_name);
10176: PetscCall(MatProductSymbolic(*C));
10177: } else { /* scall == MAT_REUSE_MATRIX */
10178: PetscCall(MatProductReplaceMats(A, P, NULL, *C));
10179: }
10181: PetscCall(MatProductNumeric(*C));
10182: (*C)->symmetric = A->symmetric;
10183: (*C)->spd = A->spd;
10184: PetscFunctionReturn(PETSC_SUCCESS);
10185: }
10187: /*@
10188: MatRARt - Creates the matrix product $C = R * A * R^T$
10190: Neighbor-wise Collective
10192: Input Parameters:
10193: + A - the matrix
10194: . R - the projection matrix
10195: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10196: - fill - expected fill as ratio of nnz(C)/nnz(A), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10197: if the result is a dense matrix this is irrelevant
10199: Output Parameter:
10200: . C - the product matrix
10202: Level: intermediate
10204: Notes:
10205: `C` will be created and must be destroyed by the user with `MatDestroy()`.
10207: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
10209: This routine is currently only implemented for pairs of `MATAIJ` matrices and classes
10210: which inherit from `MATAIJ`. Due to PETSc sparse matrix block row distribution among processes,
10211: the parallel `MatRARt()` is implemented computing the explicit transpose of `R`, which can be very expensive.
10212: We recommend using `MatPtAP()` when possible.
10214: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10216: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatPtAP()`
10217: @*/
10218: PetscErrorCode MatRARt(Mat A, Mat R, MatReuse scall, PetscReal fill, Mat *C)
10219: {
10220: PetscFunctionBegin;
10221: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
10222: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10224: if (scall == MAT_INITIAL_MATRIX) {
10225: PetscCall(MatProductCreate(A, R, NULL, C));
10226: PetscCall(MatProductSetType(*C, MATPRODUCT_RARt));
10227: PetscCall(MatProductSetAlgorithm(*C, "default"));
10228: PetscCall(MatProductSetFill(*C, fill));
10230: (*C)->product->api_user = PETSC_TRUE;
10231: PetscCall(MatProductSetFromOptions(*C));
10232: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and R %s", MatProductTypes[MATPRODUCT_RARt], ((PetscObject)A)->type_name, ((PetscObject)R)->type_name);
10233: PetscCall(MatProductSymbolic(*C));
10234: } else { /* scall == MAT_REUSE_MATRIX */
10235: PetscCall(MatProductReplaceMats(A, R, NULL, *C));
10236: }
10238: PetscCall(MatProductNumeric(*C));
10239: if (A->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10240: PetscFunctionReturn(PETSC_SUCCESS);
10241: }
10243: static PetscErrorCode MatProduct_Private(Mat A, Mat B, MatReuse scall, PetscReal fill, MatProductType ptype, Mat *C)
10244: {
10245: PetscBool flg = PETSC_TRUE;
10247: PetscFunctionBegin;
10248: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX product not supported");
10249: if (scall == MAT_INITIAL_MATRIX) {
10250: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_INITIAL_MATRIX and product type %s\n", MatProductTypes[ptype]));
10251: PetscCall(MatProductCreate(A, B, NULL, C));
10252: PetscCall(MatProductSetAlgorithm(*C, MATPRODUCTALGORITHMDEFAULT));
10253: PetscCall(MatProductSetFill(*C, fill));
10254: } else { /* scall == MAT_REUSE_MATRIX */
10255: Mat_Product *product = (*C)->product;
10257: PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)*C, &flg, MATSEQDENSE, MATMPIDENSE, ""));
10258: if (flg && product && product->type != ptype) {
10259: PetscCall(MatProductClear(*C));
10260: product = NULL;
10261: }
10262: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_REUSE_MATRIX %s product present and product type %s\n", product ? "with" : "without", MatProductTypes[ptype]));
10263: if (!product) { /* the user provided the dense matrix *C without calling MatProductCreate() or reusing it from previous calls */
10264: PetscCheck(flg, PetscObjectComm((PetscObject)*C), PETSC_ERR_SUP, "Call MatProductCreate() first");
10265: PetscCall(MatProductCreate_Private(A, B, NULL, *C));
10266: product = (*C)->product;
10267: product->fill = fill;
10268: product->clear = PETSC_TRUE;
10269: } else { /* user may change input matrices A or B when MAT_REUSE_MATRIX */
10270: flg = PETSC_FALSE;
10271: PetscCall(MatProductReplaceMats(A, B, NULL, *C));
10272: }
10273: }
10274: if (flg) {
10275: (*C)->product->api_user = PETSC_TRUE;
10276: PetscCall(MatProductSetType(*C, ptype));
10277: PetscCall(MatProductSetFromOptions(*C));
10278: PetscCall(MatProductSymbolic(*C));
10279: }
10280: PetscCall(MatProductNumeric(*C));
10281: PetscFunctionReturn(PETSC_SUCCESS);
10282: }
10284: /*@
10285: MatMatMult - Performs matrix-matrix multiplication $C = A*B$.
10287: Neighbor-wise Collective
10289: Input Parameters:
10290: + A - the left matrix
10291: . B - the right matrix
10292: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10293: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10294: if the result is a dense matrix this is irrelevant
10296: Output Parameter:
10297: . C - the product matrix
10299: Notes:
10300: Unless `scall` is `MAT_REUSE_MATRIX`, `C` will be created.
10302: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call and C was obtained from a previous
10303: call to this function with `MAT_INITIAL_MATRIX`.
10305: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value actually needed.
10307: In the special case where matrix `B` (and hence `C`) are dense you can create the correctly sized matrix `C` yourself and then call this routine with `MAT_REUSE_MATRIX`,
10308: rather than first having `MatMatMult()` create it for you. You can NEVER do this if the matrix `C` is sparse.
10310: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
10312: Example of Usage:
10313: .vb
10314: MatProductCreate(A,B,NULL,&C);
10315: MatProductSetType(C,MATPRODUCT_AB);
10316: MatProductSymbolic(C);
10317: MatProductNumeric(C); // compute C=A * B
10318: MatProductReplaceMats(A1,B1,NULL,C); // compute C=A1 * B1
10319: MatProductNumeric(C);
10320: MatProductReplaceMats(A2,NULL,NULL,C); // compute C=A2 * B1
10321: MatProductNumeric(C);
10322: .ve
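For comparison, a minimal sketch using this routine directly (an illustration, assuming the nonzero patterns of `A` and `B` do not change between calls):
.vb
  PetscCall(MatMatMult(A, B, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C));
  // ... the values of A or B change ...
  PetscCall(MatMatMult(A, B, MAT_REUSE_MATRIX, PETSC_CURRENT, &C));
  PetscCall(MatDestroy(&C));
.ve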
10324: Level: intermediate
10326: .seealso: [](ch_matrices), `Mat`, `MatProductType`, `MATPRODUCT_AB`, `MatTransposeMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`, `MatProductCreate()`, `MatProductSymbolic()`, `MatProductReplaceMats()`, `MatProductNumeric()`
10327: @*/
10328: PetscErrorCode MatMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10329: {
10330: PetscFunctionBegin;
10331: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AB, C));
10332: PetscFunctionReturn(PETSC_SUCCESS);
10333: }
10335: /*@
10336: MatMatTransposeMult - Performs matrix-matrix multiplication $C = A*B^T$.
10338: Neighbor-wise Collective
10340: Input Parameters:
10341: + A - the left matrix
10342: . B - the right matrix
10343: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10344: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10346: Output Parameter:
10347: . C - the product matrix
10349: Options Database Key:
10350: . -matmattransmult_mpidense_mpidense_via {allgatherv,cyclic} - Choose between algorithms for `MATMPIDENSE` matrices: the
10351: first redundantly copies the transposed `B` matrix on each process and requires O(log P) communication complexity;
10352: the second never stores more than one portion of the `B` matrix at a time but requires O(P) communication complexity.
10354: Level: intermediate
10356: Notes:
10357: C will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10359: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call
10361: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10362: actually needed.
10364: This routine is currently only implemented for pairs of `MATSEQAIJ` matrices, for the `MATSEQDENSE` class,
10365: and for pairs of `MATMPIDENSE` matrices.
10367: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABt`
10369: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
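Example Usage:
A minimal sketch; `A`, `B`, and `C` are illustrative names, with `A` and `B` assumed to have the same number of columns.
.vb
  PetscCall(MatMatTransposeMult(A, B, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C)); // C = A * B^T
  PetscCall(MatDestroy(&C));
.ve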
10371: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABt`, `MatMatMult()`, `MatTransposeMatMult()` `MatPtAP()`, `MatProductAlgorithm`, `MatProductType`
10372: @*/
10373: PetscErrorCode MatMatTransposeMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10374: {
10375: PetscFunctionBegin;
10376: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_ABt, C));
10377: if (A == B) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10378: PetscFunctionReturn(PETSC_SUCCESS);
10379: }
10381: /*@
10382: MatTransposeMatMult - Performs matrix-matrix multiplication $C = A^T*B$.
10384: Neighbor-wise Collective
10386: Input Parameters:
10387: + A - the left matrix
10388: . B - the right matrix
10389: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10390: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if not known
10392: Output Parameter:
10393: . C - the product matrix
10395: Level: intermediate
10397: Notes:
10398: `C` will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10400: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call.
10402: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_AtB`
10404: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10405: actually needed.
10407: This routine is currently implemented for pairs of `MATAIJ` matrices and pairs of `MATSEQDENSE` matrices and classes
10408: which inherit from `MATSEQAIJ`. `C` will be of the same type as the input matrices.
10410: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
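Example Usage:
A minimal sketch forming a normal-equations-like product; `N` is an illustrative variable name.
.vb
  PetscCall(MatTransposeMatMult(A, A, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &N)); // N = A^T * A
  PetscCall(MatDestroy(&N));
.ve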
10412: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_AtB`, `MatMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`
10413: @*/
10414: PetscErrorCode MatTransposeMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10415: {
10416: PetscFunctionBegin;
10417: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AtB, C));
10418: PetscFunctionReturn(PETSC_SUCCESS);
10419: }
10421: /*@
10422: MatMatMatMult - Performs matrix-matrix-matrix multiplication $D = A*B*C$.
10424: Neighbor-wise Collective
10426: Input Parameters:
10427: + A - the left matrix
10428: . B - the middle matrix
10429: . C - the right matrix
10430: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10431: - fill - expected fill as ratio of nnz(D)/(nnz(A) + nnz(B)+nnz(C)), use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
10432: if the result is a dense matrix this is irrelevant
10434: Output Parameter:
10435: . D - the product matrix
10437: Level: intermediate
10439: Notes:
10440: Unless `scall` is `MAT_REUSE_MATRIX` `D` will be created.
10442: `MAT_REUSE_MATRIX` can only be used if the matrices `A`, `B`, and `C` have the same nonzero pattern as in the previous call
10444: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABC`
10446: To determine the correct fill value, run with `-info` and search for the string "Fill ratio" to see the value
10447: actually needed.
10449: If you have many matrices with the same non-zero structure to multiply, you
10450: should use `MAT_REUSE_MATRIX` in all calls but the first
10452: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
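Example Usage:
A minimal sketch of a Galerkin-style triple product; `R`, `A`, `P`, and `D` are illustrative names for assembled, conforming matrices.
.vb
  PetscCall(MatMatMatMult(R, A, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &D)); // D = R * A * P
  // ... the values (but not the nonzero patterns) of R, A, or P change ...
  PetscCall(MatMatMatMult(R, A, P, MAT_REUSE_MATRIX, PETSC_CURRENT, &D));
  PetscCall(MatDestroy(&D));
.ve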
10454: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABC`, `MatMatMult`, `MatPtAP()`, `MatMatTransposeMult()`, `MatTransposeMatMult()`
10455: @*/
10456: PetscErrorCode MatMatMatMult(Mat A, Mat B, Mat C, MatReuse scall, PetscReal fill, Mat *D)
10457: {
10458: PetscFunctionBegin;
10459: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*D, 6);
10460: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10462: if (scall == MAT_INITIAL_MATRIX) {
10463: PetscCall(MatProductCreate(A, B, C, D));
10464: PetscCall(MatProductSetType(*D, MATPRODUCT_ABC));
10465: PetscCall(MatProductSetAlgorithm(*D, "default"));
10466: PetscCall(MatProductSetFill(*D, fill));
10468: (*D)->product->api_user = PETSC_TRUE;
10469: PetscCall(MatProductSetFromOptions(*D));
10470: PetscCheck((*D)->ops->productsymbolic, PetscObjectComm((PetscObject)*D), PETSC_ERR_SUP, "MatProduct %s not supported for A %s, B %s and C %s", MatProductTypes[MATPRODUCT_ABC], ((PetscObject)A)->type_name, ((PetscObject)B)->type_name,
10471: ((PetscObject)C)->type_name);
10472: PetscCall(MatProductSymbolic(*D));
10473: } else { /* user may change input matrices when REUSE */
10474: PetscCall(MatProductReplaceMats(A, B, C, *D));
10475: }
10476: PetscCall(MatProductNumeric(*D));
10477: PetscFunctionReturn(PETSC_SUCCESS);
10478: }
10480: /*@
10481: MatCreateRedundantMatrix - Create redundant matrices and put them into processors of subcommunicators.
10483: Collective
10485: Input Parameters:
10486: + mat - the matrix
10487: . nsubcomm - the number of subcommunicators (= number of redundant parallel or sequential matrices)
10488: . subcomm - MPI communicator split from the communicator in which mat resides (or `MPI_COMM_NULL` if nsubcomm is used)
10489: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10491: Output Parameter:
10492: . matredundant - redundant matrix
10494: Level: advanced
10496: Notes:
10497: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
10498: original matrix has not changed since the last call to `MatCreateRedundantMatrix()`.
10500: This routine creates the duplicated matrices in the subcommunicators; you should NOT create them before
10501: calling it.
10503: `PetscSubcommCreate()` can be used to manage the creation of the subcomm but need not be.
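Example Usage:
A minimal sketch duplicating `A` onto two subcommunicators and letting PETSc split the communicator; the count of 2 and the name `Ared` are illustrative.
.vb
  Mat Ared;

  PetscCall(MatCreateRedundantMatrix(A, 2, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &Ared));
  // ... the values (but not the nonzero structure) of A change ...
  PetscCall(MatCreateRedundantMatrix(A, 2, MPI_COMM_NULL, MAT_REUSE_MATRIX, &Ared));
  PetscCall(MatDestroy(&Ared));
.ve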
10505: .seealso: [](ch_matrices), `Mat`, `MatDestroy()`, `PetscSubcommCreate()`, `PetscSubcomm`
10506: @*/
10507: PetscErrorCode MatCreateRedundantMatrix(Mat mat, PetscInt nsubcomm, MPI_Comm subcomm, MatReuse reuse, Mat *matredundant)
10508: {
10509: MPI_Comm comm;
10510: PetscMPIInt size;
10511: PetscInt mloc_sub, nloc_sub, rstart, rend, M = mat->rmap->N, N = mat->cmap->N, bs = mat->rmap->bs;
10512: Mat_Redundant *redund = NULL;
10513: PetscSubcomm psubcomm = NULL;
10514: MPI_Comm subcomm_in = subcomm;
10515: Mat *matseq;
10516: IS isrow, iscol;
10517: PetscBool newsubcomm = PETSC_FALSE;
10519: PetscFunctionBegin;
10521: if (nsubcomm && reuse == MAT_REUSE_MATRIX) {
10522: PetscAssertPointer(*matredundant, 5);
10524: }
10526: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
10527: if (size == 1 || nsubcomm == 1) {
10528: if (reuse == MAT_INITIAL_MATRIX) {
10529: PetscCall(MatDuplicate(mat, MAT_COPY_VALUES, matredundant));
10530: } else {
10531: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10532: PetscCall(MatCopy(mat, *matredundant, SAME_NONZERO_PATTERN));
10533: }
10534: PetscFunctionReturn(PETSC_SUCCESS);
10535: }
10537: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10538: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10539: MatCheckPreallocated(mat, 1);
10541: PetscCall(PetscLogEventBegin(MAT_RedundantMat, mat, 0, 0, 0));
10542: if (subcomm_in == MPI_COMM_NULL && reuse == MAT_INITIAL_MATRIX) { /* get subcomm if user does not provide subcomm */
10543: /* create psubcomm, then get subcomm */
10544: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10545: PetscCallMPI(MPI_Comm_size(comm, &size));
10546: PetscCheck(nsubcomm >= 1 && nsubcomm <= size, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "nsubcomm must be between 1 and %d", size);
10548: PetscCall(PetscSubcommCreate(comm, &psubcomm));
10549: PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomm));
10550: PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
10551: PetscCall(PetscSubcommSetFromOptions(psubcomm));
10552: PetscCall(PetscCommDuplicate(PetscSubcommChild(psubcomm), &subcomm, NULL));
10553: newsubcomm = PETSC_TRUE;
10554: PetscCall(PetscSubcommDestroy(&psubcomm));
10555: }
10557: /* get isrow, iscol and a local sequential matrix matseq[0] */
10558: if (reuse == MAT_INITIAL_MATRIX) {
10559: mloc_sub = PETSC_DECIDE;
10560: nloc_sub = PETSC_DECIDE;
10561: if (bs < 1) {
10562: PetscCall(PetscSplitOwnership(subcomm, &mloc_sub, &M));
10563: PetscCall(PetscSplitOwnership(subcomm, &nloc_sub, &N));
10564: } else {
10565: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &mloc_sub, &M));
10566: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &nloc_sub, &N));
10567: }
10568: PetscCallMPI(MPI_Scan(&mloc_sub, &rend, 1, MPIU_INT, MPI_SUM, subcomm));
10569: rstart = rend - mloc_sub;
10570: PetscCall(ISCreateStride(PETSC_COMM_SELF, mloc_sub, rstart, 1, &isrow));
10571: PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol));
10572: PetscCall(ISSetIdentity(iscol));
10573: } else { /* reuse == MAT_REUSE_MATRIX */
10574: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10575: /* retrieve subcomm */
10576: PetscCall(PetscObjectGetComm((PetscObject)*matredundant, &subcomm));
10577: redund = (*matredundant)->redundant;
10578: isrow = redund->isrow;
10579: iscol = redund->iscol;
10580: matseq = redund->matseq;
10581: }
10582: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, reuse, &matseq));
10584: /* get matredundant over subcomm */
10585: if (reuse == MAT_INITIAL_MATRIX) {
10586: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], nloc_sub, reuse, matredundant));
10588: /* create a supporting struct and attach it to C for reuse */
10589: PetscCall(PetscNew(&redund));
10590: (*matredundant)->redundant = redund;
10591: redund->isrow = isrow;
10592: redund->iscol = iscol;
10593: redund->matseq = matseq;
10594: if (newsubcomm) {
10595: redund->subcomm = subcomm;
10596: } else {
10597: redund->subcomm = MPI_COMM_NULL;
10598: }
10599: } else {
10600: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], PETSC_DECIDE, reuse, matredundant));
10601: }
10602: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
10603: if (matseq[0]->boundtocpu && matseq[0]->bindingpropagates) {
10604: PetscCall(MatBindToCPU(*matredundant, PETSC_TRUE));
10605: PetscCall(MatSetBindingPropagates(*matredundant, PETSC_TRUE));
10606: }
10607: #endif
10608: PetscCall(PetscLogEventEnd(MAT_RedundantMat, mat, 0, 0, 0));
10609: PetscFunctionReturn(PETSC_SUCCESS);
10610: }
10612: /*@C
10613: MatGetMultiProcBlock - Create multiple 'parallel submatrices' from
10614: a given `Mat`. Each submatrix can span multiple procs.
10616: Collective
10618: Input Parameters:
10619: + mat - the matrix
10620: . subComm - the sub communicator obtained as if by `MPI_Comm_split(PetscObjectComm((PetscObject)mat))`
10621: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10623: Output Parameter:
10624: . subMat - parallel sub-matrices each spanning a given `subcomm`
10626: Level: advanced
10628: Notes:
10629: The submatrix partition across processes is dictated by `subComm`, a
10630: communicator obtained by `MPI_Comm_split()` or via `PetscSubcommCreate()`. The `subComm`
10631: is not restricted to be grouped with consecutive original MPI processes.
10633: Due to the `MPI_Comm_split()` usage, the parallel layout of the submatrices
10634: maps directly to the layout of the original matrix [with respect to the local
10635: row,col partitioning]. So the original 'DiagonalMat' naturally maps
10636: into the 'DiagonalMat' of the `subMat`, and hence is used directly from
10637: the `subMat`. However, the offDiagMat loses some columns, and these are
10638: reconstructed with `MatSetValues()`
10640: This is used by `PCBJACOBI` when a single block spans multiple MPI processes.
10642: .seealso: [](ch_matrices), `Mat`, `MatCreateRedundantMatrix()`, `MatCreateSubMatrices()`, `PCBJACOBI`
10643: @*/
10644: PetscErrorCode MatGetMultiProcBlock(Mat mat, MPI_Comm subComm, MatReuse scall, Mat *subMat)
10645: {
10646: PetscMPIInt commsize, subCommSize;
10648: PetscFunctionBegin;
10649: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &commsize));
10650: PetscCallMPI(MPI_Comm_size(subComm, &subCommSize));
10651: PetscCheck(subCommSize <= commsize, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "CommSize %d < SubCommSize %d", commsize, subCommSize);
10653: PetscCheck(scall != MAT_REUSE_MATRIX || *subMat != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10654: PetscCall(PetscLogEventBegin(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10655: PetscUseTypeMethod(mat, getmultiprocblock, subComm, scall, subMat);
10656: PetscCall(PetscLogEventEnd(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10657: PetscFunctionReturn(PETSC_SUCCESS);
10658: }
10660: /*@
10661: MatGetLocalSubMatrix - Gets a reference to a submatrix specified in local numbering
10663: Not Collective
10665: Input Parameters:
10666: + mat - matrix to extract local submatrix from
10667: . isrow - local row indices for submatrix
10668: - iscol - local column indices for submatrix
10670: Output Parameter:
10671: . submat - the submatrix
10673: Level: intermediate
10675: Notes:
10676: `submat` should be disposed of with `MatRestoreLocalSubMatrix()`.
10678: Depending on the format of `mat`, the returned `submat` may not implement `MatMult()`. Its communicator may be
10679: the same as `mat`, it may be `PETSC_COMM_SELF`, or some other sub-communicator of `mat`'s.
10681: `submat` always implements `MatSetValuesLocal()`. If `isrow` and `iscol` have the same block size, then
10682: `MatSetValuesBlockedLocal()` will also be implemented.
10684: `mat` must have had a `ISLocalToGlobalMapping` provided to it with `MatSetLocalToGlobalMapping()`.
10685: Matrices obtained with `DMCreateMatrix()` generally already have the local to global mapping provided.
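Example Usage:
A minimal sketch; `isrow`/`iscol` and the local indices below are illustrative, and `A` is assumed to already have a local-to-global mapping set.
.vb
  Mat         sub;
  PetscInt    row = 0, col = 0; // local indices, illustrative
  PetscScalar v   = 1.0;

  PetscCall(MatGetLocalSubMatrix(A, isrow, iscol, &sub));
  PetscCall(MatSetValuesLocal(sub, 1, &row, 1, &col, &v, ADD_VALUES));
  PetscCall(MatRestoreLocalSubMatrix(A, isrow, iscol, &sub));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve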
10687: .seealso: [](ch_matrices), `Mat`, `MatRestoreLocalSubMatrix()`, `MatCreateLocalRef()`, `MatSetLocalToGlobalMapping()`
10688: @*/
10689: PetscErrorCode MatGetLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10690: {
10691: PetscFunctionBegin;
10695: PetscCheckSameComm(isrow, 2, iscol, 3);
10696: PetscAssertPointer(submat, 4);
10697: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must have local to global mapping provided before this call");
10699: if (mat->ops->getlocalsubmatrix) {
10700: PetscUseTypeMethod(mat, getlocalsubmatrix, isrow, iscol, submat);
10701: } else {
10702: PetscCall(MatCreateLocalRef(mat, isrow, iscol, submat));
10703: }
10704: PetscFunctionReturn(PETSC_SUCCESS);
10705: }
10707: /*@
10708: MatRestoreLocalSubMatrix - Restores a reference to a submatrix specified in local numbering obtained with `MatGetLocalSubMatrix()`
10710: Not Collective
10712: Input Parameters:
10713: + mat - matrix to extract local submatrix from
10714: . isrow - local row indices for submatrix
10715: . iscol - local column indices for submatrix
10716: - submat - the submatrix
10718: Level: intermediate
10720: .seealso: [](ch_matrices), `Mat`, `MatGetLocalSubMatrix()`
10721: @*/
10722: PetscErrorCode MatRestoreLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10723: {
10724: PetscFunctionBegin;
10728: PetscCheckSameComm(isrow, 2, iscol, 3);
10729: PetscAssertPointer(submat, 4);
10732: if (mat->ops->restorelocalsubmatrix) {
10733: PetscUseTypeMethod(mat, restorelocalsubmatrix, isrow, iscol, submat);
10734: } else {
10735: PetscCall(MatDestroy(submat));
10736: }
10737: *submat = NULL;
10738: PetscFunctionReturn(PETSC_SUCCESS);
10739: }
10741: /*@
10742: MatFindZeroDiagonals - Finds all the rows of a matrix that have zero or no diagonal entry in the matrix
10744: Collective
10746: Input Parameter:
10747: . mat - the matrix
10749: Output Parameter:
10750: . is - if any rows have zero diagonals this contains the list of them
10752: Level: developer
10754: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10755: @*/
10756: PetscErrorCode MatFindZeroDiagonals(Mat mat, IS *is)
10757: {
10758: PetscFunctionBegin;
10761: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10762: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10764: if (!mat->ops->findzerodiagonals) {
10765: Vec diag;
10766: const PetscScalar *a;
10767: PetscInt *rows;
10768: PetscInt rStart, rEnd, r, nrow = 0;
10770: PetscCall(MatCreateVecs(mat, &diag, NULL));
10771: PetscCall(MatGetDiagonal(mat, diag));
10772: PetscCall(MatGetOwnershipRange(mat, &rStart, &rEnd));
10773: PetscCall(VecGetArrayRead(diag, &a));
10774: for (r = 0; r < rEnd - rStart; ++r)
10775: if (a[r] == 0.0) ++nrow;
10776: PetscCall(PetscMalloc1(nrow, &rows));
10777: nrow = 0;
10778: for (r = 0; r < rEnd - rStart; ++r)
10779: if (a[r] == 0.0) rows[nrow++] = r + rStart;
10780: PetscCall(VecRestoreArrayRead(diag, &a));
10781: PetscCall(VecDestroy(&diag));
10782: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nrow, rows, PETSC_OWN_POINTER, is));
10783: } else {
10784: PetscUseTypeMethod(mat, findzerodiagonals, is);
10785: }
10786: PetscFunctionReturn(PETSC_SUCCESS);
10787: }
10789: /*@
10790: MatFindOffBlockDiagonalEntries - Finds all the rows of a matrix that have entries outside of the main diagonal block (defined by the matrix block size)
10792: Collective
10794: Input Parameter:
10795: . mat - the matrix
10797: Output Parameter:
10798: . is - contains the list of rows with off block diagonal entries
10800: Level: developer
10802: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10803: @*/
10804: PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat, IS *is)
10805: {
10806: PetscFunctionBegin;
10809: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10810: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10812: PetscUseTypeMethod(mat, findoffblockdiagonalentries, is);
10813: PetscFunctionReturn(PETSC_SUCCESS);
10814: }
10816: /*@C
10817: MatInvertBlockDiagonal - Inverts the block diagonal entries.
10819: Collective; No Fortran Support
10821: Input Parameter:
10822: . mat - the matrix
10824: Output Parameter:
10825: . values - the block inverses in column major order (FORTRAN-like)
10827: Level: advanced
10829: Notes:
10830: The size of the blocks is determined by the block size of the matrix.
10832: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10834: The blocks all have the same size, use `MatInvertVariableBlockDiagonal()` for variable block size
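Example Usage:
A minimal sketch; as in `MatInvertBlockDiagonalMat()` below, the returned array is not freed by the caller.
.vb
  const PetscScalar *vals;
  PetscInt           bs;

  PetscCall(MatGetBlockSize(A, &bs));
  PetscCall(MatInvertBlockDiagonal(A, &vals));
  // vals holds the local diagonal-block inverses, each bs x bs, in column major order
.ve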
10836: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatInvertBlockDiagonalMat()`
10837: @*/
10838: PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar *values[])
10839: {
10840: PetscFunctionBegin;
10842: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10843: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10844: PetscUseTypeMethod(mat, invertblockdiagonal, values);
10845: PetscFunctionReturn(PETSC_SUCCESS);
10846: }
10848: /*@
10849: MatInvertVariableBlockDiagonal - Inverts the point block diagonal entries.
10851: Collective; No Fortran Support
10853: Input Parameters:
10854: + mat - the matrix
10855: . nblocks - the number of blocks on the process, set with `MatSetVariableBlockSizes()`
10856: - bsizes - the size of each block on the process, set with `MatSetVariableBlockSizes()`
10858: Output Parameter:
10859: . values - the block inverses in column major order (FORTRAN-like)
10861: Level: advanced
10863: Notes:
10864: Use `MatInvertBlockDiagonal()` if all blocks have the same size
10866: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10868: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatSetVariableBlockSizes()`, `MatInvertVariableBlockEnvelope()`
10869: @*/
10870: PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat, PetscInt nblocks, const PetscInt bsizes[], PetscScalar values[])
10871: {
10872: PetscFunctionBegin;
10874: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10875: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10876: PetscUseTypeMethod(mat, invertvariableblockdiagonal, nblocks, bsizes, values);
10877: PetscFunctionReturn(PETSC_SUCCESS);
10878: }
10880: /*@
10881: MatInvertBlockDiagonalMat - set the values of matrix C to be the inverted block diagonal of matrix A
10883: Collective
10885: Input Parameters:
10886: + A - the matrix
10887: - C - matrix with inverted block diagonal of `A`. This matrix should be created and may have its type set.
10889: Level: advanced
10891: Note:
10892: The blocksize of the matrix is used to determine the blocks on the diagonal of `C`
10894: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`
10895: @*/
10896: PetscErrorCode MatInvertBlockDiagonalMat(Mat A, Mat C)
10897: {
10898: const PetscScalar *vals;
10899: PetscInt *dnnz;
10900: PetscInt m, rstart, rend, bs, i, j;
10902: PetscFunctionBegin;
10903: PetscCall(MatInvertBlockDiagonal(A, &vals));
10904: PetscCall(MatGetBlockSize(A, &bs));
10905: PetscCall(MatGetLocalSize(A, &m, NULL));
10906: PetscCall(MatSetLayouts(C, A->rmap, A->cmap));
10907: PetscCall(PetscMalloc1(m / bs, &dnnz));
10908: for (j = 0; j < m / bs; j++) dnnz[j] = 1;
10909: PetscCall(MatXAIJSetPreallocation(C, bs, dnnz, NULL, NULL, NULL));
10910: PetscCall(PetscFree(dnnz));
10911: PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
10912: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_FALSE));
10913: for (i = rstart / bs; i < rend / bs; i++) PetscCall(MatSetValuesBlocked(C, 1, &i, 1, &i, &vals[(i - rstart / bs) * bs * bs], INSERT_VALUES));
10914: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
10915: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
10916: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_TRUE));
10917: PetscFunctionReturn(PETSC_SUCCESS);
10918: }
10920: /*@
10921: MatTransposeColoringDestroy - Destroys a coloring context for matrix product $C = A*B^T$ that was created
10922: via `MatTransposeColoringCreate()`.
10924: Collective
10926: Input Parameter:
10927: . c - coloring context
10929: Level: intermediate
10931: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`
10932: @*/
10933: PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c)
10934: {
10935: MatTransposeColoring matcolor = *c;
10937: PetscFunctionBegin;
10938: if (!matcolor) PetscFunctionReturn(PETSC_SUCCESS);
10939: if (--((PetscObject)matcolor)->refct > 0) {
10940: matcolor = NULL;
10941: PetscFunctionReturn(PETSC_SUCCESS);
10942: }
10944: PetscCall(PetscFree3(matcolor->ncolumns, matcolor->nrows, matcolor->colorforrow));
10945: PetscCall(PetscFree(matcolor->rows));
10946: PetscCall(PetscFree(matcolor->den2sp));
10947: PetscCall(PetscFree(matcolor->colorforcol));
10948: PetscCall(PetscFree(matcolor->columns));
10949: if (matcolor->brows > 0) PetscCall(PetscFree(matcolor->lstart));
10950: PetscCall(PetscHeaderDestroy(c));
10951: PetscFunctionReturn(PETSC_SUCCESS);
10952: }
10954: /*@
10955: MatTransColoringApplySpToDen - Given a symbolic matrix product $C = A*B^T$ for which
10956: a `MatTransposeColoring` context has been created, computes a dense $B^T$ by applying
10957: `MatTransposeColoring` to sparse `B`.
10959: Collective
10961: Input Parameters:
10962: + coloring - coloring context created with `MatTransposeColoringCreate()`
10963: - B - sparse matrix
10965: Output Parameter:
10966: . Btdense - dense matrix $B^T$
10968: Level: developer
10970: Note:
10971: These are used internally for some implementations of `MatRARt()`
10973: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplyDenToSp()`
10974: @*/
10975: PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring, Mat B, Mat Btdense)
10976: {
10977: PetscFunctionBegin;
10982: PetscCall((*B->ops->transcoloringapplysptoden)(coloring, B, Btdense));
10983: PetscFunctionReturn(PETSC_SUCCESS);
10984: }
10986: /*@
10987: MatTransColoringApplyDenToSp - Given a symbolic matrix product $C_{sp} = A*B^T$ for which
10988: a `MatTransposeColoring` context has been created and a dense matrix $C_{den} = A*B^T_{dense}$
10989: in which $B^T_{dense}$ is obtained from `MatTransColoringApplySpToDen()`, recover the sparse matrix
10990: $C_{sp}$ from $C_{den}$.
10992: Collective
10994: Input Parameters:
10995: + matcoloring - coloring context created with `MatTransposeColoringCreate()`
10996: - Cden - matrix product of a sparse matrix and a dense matrix Btdense
10998: Output Parameter:
10999: . Csp - sparse matrix
11001: Level: developer
11003: Note:
11004: These are used internally for some implementations of `MatRARt()`
11006: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`
11007: @*/
11008: PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring, Mat Cden, Mat Csp)
11009: {
11010: PetscFunctionBegin;
11015: PetscCall((*Csp->ops->transcoloringapplydentosp)(matcoloring, Cden, Csp));
11016: PetscCall(MatAssemblyBegin(Csp, MAT_FINAL_ASSEMBLY));
11017: PetscCall(MatAssemblyEnd(Csp, MAT_FINAL_ASSEMBLY));
11018: PetscFunctionReturn(PETSC_SUCCESS);
11019: }
11021: /*@
11022: MatTransposeColoringCreate - Creates a matrix coloring context for the matrix product $C = A*B^T$.
11024: Collective
11026: Input Parameters:
11027: + mat - the matrix product C
11028: - iscoloring - the coloring of the matrix; usually obtained with `MatColoringCreate()` or `DMCreateColoring()`
11030: Output Parameter:
11031: . color - the new coloring context
11033: Level: intermediate
11035: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`,
11036: `MatTransColoringApplyDenToSp()`
11037: @*/
11038: PetscErrorCode MatTransposeColoringCreate(Mat mat, ISColoring iscoloring, MatTransposeColoring *color)
11039: {
11040: MatTransposeColoring c;
11041: MPI_Comm comm;
11043: PetscFunctionBegin;
11044: PetscAssertPointer(color, 3);
11046: PetscCall(PetscLogEventBegin(MAT_TransposeColoringCreate, mat, 0, 0, 0));
11047: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
11048: PetscCall(PetscHeaderCreate(c, MAT_TRANSPOSECOLORING_CLASSID, "MatTransposeColoring", "Matrix product C=A*B^T via coloring", "Mat", comm, MatTransposeColoringDestroy, NULL));
11049: c->ctype = iscoloring->ctype;
11050: PetscUseTypeMethod(mat, transposecoloringcreate, iscoloring, c);
11051: *color = c;
11052: PetscCall(PetscLogEventEnd(MAT_TransposeColoringCreate, mat, 0, 0, 0));
11053: PetscFunctionReturn(PETSC_SUCCESS);
11054: }
11056: /*@
11057: MatGetNonzeroState - Returns a 64-bit integer representing the current state of nonzeros in the matrix. If the
11058: matrix has had new nonzero locations added to (or removed from) it since the previous call, the value will be larger.
11060: Not Collective
11062: Input Parameter:
11063: . mat - the matrix
11065: Output Parameter:
11066: . state - the current state
11068: Level: intermediate
11070: Notes:
11071: You can only compare states from two different calls to the SAME matrix, you cannot compare calls between
11072: different matrices
11074: Use `PetscObjectStateGet()` to check for changes to the numerical values in a matrix
11076: Use the result of `PetscObjectGetId()` to compare if a previously checked matrix is the same as the current matrix, do not compare object pointers.
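Example Usage:
A minimal sketch comparing two states of the same matrix; `A` is assumed to be a valid `Mat`.
.vb
  PetscObjectState s0, s1;

  PetscCall(MatGetNonzeroState(A, &s0));
  // ... assembly or other operations on A ...
  PetscCall(MatGetNonzeroState(A, &s1));
  PetscBool changed = (PetscBool)(s1 > s0); // nonzero structure of A changed since s0 was taken
.ve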
11078: .seealso: [](ch_matrices), `Mat`, `PetscObjectStateGet()`, `PetscObjectGetId()`
11079: @*/
11080: PetscErrorCode MatGetNonzeroState(Mat mat, PetscObjectState *state)
11081: {
11082: PetscFunctionBegin;
11084: *state = mat->nonzerostate;
11085: PetscFunctionReturn(PETSC_SUCCESS);
11086: }
11088: /*@
11089: MatCreateMPIMatConcatenateSeqMat - Creates a single large PETSc matrix by concatenating sequential
11090: matrices from each processor
11092: Collective
11094: Input Parameters:
11095: + comm - the communicator the parallel matrix will live on
11096: . seqmat - the input sequential matrices
11097: . n - number of local columns (or `PETSC_DECIDE`)
11098: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11100: Output Parameter:
11101: . mpimat - the parallel matrix generated
11103: Level: developer
11105: Note:
11106: The number of columns of the matrix in EACH processor MUST be the same.
11108: .seealso: [](ch_matrices), `Mat`
11109: @*/
11110: PetscErrorCode MatCreateMPIMatConcatenateSeqMat(MPI_Comm comm, Mat seqmat, PetscInt n, MatReuse reuse, Mat *mpimat)
11111: {
11112: PetscMPIInt size;
11114: PetscFunctionBegin;
11115: PetscCallMPI(MPI_Comm_size(comm, &size));
11116: if (size == 1) {
11117: if (reuse == MAT_INITIAL_MATRIX) {
11118: PetscCall(MatDuplicate(seqmat, MAT_COPY_VALUES, mpimat));
11119: } else {
11120: PetscCall(MatCopy(seqmat, *mpimat, SAME_NONZERO_PATTERN));
11121: }
11122: PetscFunctionReturn(PETSC_SUCCESS);
11123: }
11125: PetscCheck(reuse != MAT_REUSE_MATRIX || seqmat != *mpimat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
11127: PetscCall(PetscLogEventBegin(MAT_Merge, seqmat, 0, 0, 0));
11128: PetscCall((*seqmat->ops->creatempimatconcatenateseqmat)(comm, seqmat, n, reuse, mpimat));
11129: PetscCall(PetscLogEventEnd(MAT_Merge, seqmat, 0, 0, 0));
11130: PetscFunctionReturn(PETSC_SUCCESS);
11131: }
11133: /*@
11134: MatSubdomainsCreateCoalesce - Creates index subdomains by coalescing adjacent MPI processes' ownership ranges.
11136: Collective
11138: Input Parameters:
11139: + A - the matrix to create subdomains from
11140: - N - requested number of subdomains
11142: Output Parameters:
11143: + n - number of subdomains resulting on this MPI process
11144: - iss - `IS` list with indices of subdomains on this MPI process
11146: Level: advanced
11148: Note:
11149: The number of subdomains must be smaller than the communicator size
11151: .seealso: [](ch_matrices), `Mat`, `IS`
11152: @*/
11153: PetscErrorCode MatSubdomainsCreateCoalesce(Mat A, PetscInt N, PetscInt *n, IS *iss[])
11154: {
11155: MPI_Comm comm, subcomm;
11156: PetscMPIInt size, rank, color;
11157: PetscInt rstart, rend, k;
11159: PetscFunctionBegin;
11160: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
11161: PetscCallMPI(MPI_Comm_size(comm, &size));
11162: PetscCallMPI(MPI_Comm_rank(comm, &rank));
11163: PetscCheck(N >= 1 && N < (PetscInt)size, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of subdomains must be > 0 and < %d, got N = %" PetscInt_FMT, size, N);
11164: *n = 1;
11165: k = ((PetscInt)size) / N + ((PetscInt)size % N > 0); /* There are up to k ranks to a color */
11166: color = rank / k;
11167: PetscCallMPI(MPI_Comm_split(comm, color, rank, &subcomm));
11168: PetscCall(PetscMalloc1(1, iss));
11169: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
11170: PetscCall(ISCreateStride(subcomm, rend - rstart, rstart, 1, iss[0]));
11171: PetscCallMPI(MPI_Comm_free(&subcomm));
11172: PetscFunctionReturn(PETSC_SUCCESS);
11173: }
11175: /*@
11176: MatGalerkin - Constructs the coarse grid problem matrix via Galerkin projection.
11178: If the interpolation and restriction operators are the same, uses `MatPtAP()`.
11179: If they are not the same, uses `MatMatMatMult()`.
11181: Once the coarse grid problem is constructed, correct for interpolation operators
11182: that are not of full rank, which can legitimately happen in the case of non-nested
11183: geometric multigrid.
11185: Input Parameters:
11186: + restrct - restriction operator
11187: . dA - fine grid matrix
11188: . interpolate - interpolation operator
11189: . reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
11190: - fill - expected fill, use `PETSC_DETERMINE` or `PETSC_CURRENT` if you do not have a good estimate
11192: Output Parameter:
11193: . A - the Galerkin coarse matrix
11195: Options Database Key:
11196: . -pc_mg_galerkin <both,pmat,mat,none> - for what matrices the Galerkin process should be used
11198: Level: developer
11200: Note:
11201: The deprecated `PETSC_DEFAULT` in `fill` also means use the current value
11203: .seealso: [](ch_matrices), `Mat`, `MatPtAP()`, `MatMatMatMult()`
11204: @*/
11205: PetscErrorCode MatGalerkin(Mat restrct, Mat dA, Mat interpolate, MatReuse reuse, PetscReal fill, Mat *A)
11206: {
11207: IS zerorows;
11208: Vec diag;
11210: PetscFunctionBegin;
11211: PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)restrct), PETSC_ERR_SUP, "Inplace product not supported");
11212: /* Construct the coarse grid matrix */
11213: if (interpolate == restrct) {
11214: PetscCall(MatPtAP(dA, interpolate, reuse, fill, A));
11215: } else {
11216: PetscCall(MatMatMatMult(restrct, dA, interpolate, reuse, fill, A));
11217: }
11219: /* If the interpolation matrix is not of full rank, A will have zero rows.
11220: This can legitimately happen in the case of non-nested geometric multigrid.
11221: In that event, we set the rows of the matrix to the rows of the identity,
11222: ignoring the equations (as the RHS will also be zero). */
11224: PetscCall(MatFindZeroRows(*A, &zerorows));
11226: if (zerorows != NULL) { /* if there are any zero rows */
11227: PetscCall(MatCreateVecs(*A, &diag, NULL));
11228: PetscCall(MatGetDiagonal(*A, diag));
11229: PetscCall(VecISSet(diag, zerorows, 1.0));
11230: PetscCall(MatDiagonalSet(*A, diag, INSERT_VALUES));
11231: PetscCall(VecDestroy(&diag));
11232: PetscCall(ISDestroy(&zerorows));
11233: }
11234: PetscFunctionReturn(PETSC_SUCCESS);
11235: }
11237: /*@C
11238: MatSetOperation - Allows user to set a matrix operation for any matrix type
11240: Logically Collective
11242: Input Parameters:
11243: + mat - the matrix
11244: . op - the name of the operation
11245: - f - the function that provides the operation
11247: Level: developer
11249: Example Usage:
11250: .vb
11251: extern PetscErrorCode usermult(Mat, Vec, Vec);
11253: PetscCall(MatCreateXXX(comm, ..., &A));
11254: PetscCall(MatSetOperation(A, MATOP_MULT, (PetscVoidFn *)usermult));
11255: .ve
11257: Notes:
11258: See the file `include/petscmat.h` for a complete list of matrix
11259: operations, which all have the form MATOP_<OPERATION>, where
11260: <OPERATION> is the name (in all capital letters) of the
11261: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11263: All user-provided functions (except for `MATOP_DESTROY`) should have the same calling
11264: sequence as the usual matrix interface routines, since they
11265: are intended to be accessed via the usual matrix interface
11266: routines, e.g.,
11267: .vb
11268: MatMult(Mat, Vec, Vec) -> usermult(Mat, Vec, Vec)
11269: .ve
11271: In particular each function MUST return `PETSC_SUCCESS` on success and
11272: nonzero on failure.
11274: This routine is distinct from `MatShellSetOperation()` in that it can be called on any matrix type.
11276: .seealso: [](ch_matrices), `Mat`, `MatGetOperation()`, `MatCreateShell()`, `MatShellSetContext()`, `MatShellSetOperation()`
11277: @*/
11278: PetscErrorCode MatSetOperation(Mat mat, MatOperation op, void (*f)(void))
11279: {
11280: PetscFunctionBegin;
11282: if (op == MATOP_VIEW && !mat->ops->viewnative && f != (void (*)(void))mat->ops->view) mat->ops->viewnative = mat->ops->view;
11283: (((void (**)(void))mat->ops)[op]) = f;
11284: PetscFunctionReturn(PETSC_SUCCESS);
11285: }
11287: /*@C
11288: MatGetOperation - Gets a matrix operation for any matrix type.
11290: Not Collective
11292: Input Parameters:
11293: + mat - the matrix
11294: - op - the name of the operation
11296: Output Parameter:
11297: . f - the function that provides the operation
11299: Level: developer
11301: Example Usage:
11302: .vb
11303: PetscErrorCode (*usermult)(Mat, Vec, Vec);
11305: MatGetOperation(A, MATOP_MULT, (void (**)(void))&usermult);
11306: .ve
11308: Notes:
11309: See the file include/petscmat.h for a complete list of matrix
11310: operations, which all have the form MATOP_<OPERATION>, where
11311: <OPERATION> is the name (in all capital letters) of the
11312: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11314: This routine is distinct from `MatShellGetOperation()` in that it can be called on any matrix type.
11316: .seealso: [](ch_matrices), `Mat`, `MatSetOperation()`, `MatCreateShell()`, `MatShellGetContext()`, `MatShellGetOperation()`
11317: @*/
11318: PetscErrorCode MatGetOperation(Mat mat, MatOperation op, void (**f)(void))
11319: {
11320: PetscFunctionBegin;
11322: *f = (((void (**)(void))mat->ops)[op]);
11323: PetscFunctionReturn(PETSC_SUCCESS);
11324: }
11326: /*@
11327: MatHasOperation - Determines whether the given matrix supports the particular operation.
11329: Not Collective
11331: Input Parameters:
11332: + mat - the matrix
11333: - op - the operation, for example, `MATOP_GET_DIAGONAL`
11335: Output Parameter:
11336: . has - either `PETSC_TRUE` or `PETSC_FALSE`
11338: Level: advanced
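  Example Usage:
  A minimal sketch (assuming `mat` is an assembled matrix and `d` a compatible vector):
.vb
  PetscBool has;

  PetscCall(MatHasOperation(mat, MATOP_GET_DIAGONAL, &has));
  if (has) PetscCall(MatGetDiagonal(mat, d));
.ve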
11340: Note:
11341: See `MatSetOperation()` for additional discussion on naming convention and usage of `op`.
11343: .seealso: [](ch_matrices), `Mat`, `MatCreateShell()`, `MatGetOperation()`, `MatSetOperation()`
11344: @*/
11345: PetscErrorCode MatHasOperation(Mat mat, MatOperation op, PetscBool *has)
11346: {
11347: PetscFunctionBegin;
11349: PetscAssertPointer(has, 3);
11350: if (mat->ops->hasoperation) {
11351: PetscUseTypeMethod(mat, hasoperation, op, has);
11352: } else {
11353: if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
11354: else {
11355: *has = PETSC_FALSE;
11356: if (op == MATOP_CREATE_SUBMATRIX) {
11357: PetscMPIInt size;
11359: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
11360: if (size == 1) PetscCall(MatHasOperation(mat, MATOP_CREATE_SUBMATRICES, has));
11361: }
11362: }
11363: }
11364: PetscFunctionReturn(PETSC_SUCCESS);
11365: }
11367: /*@
11368: MatHasCongruentLayouts - Determines whether the row and column layouts of the matrix are congruent
11370: Collective
11372: Input Parameter:
11373: . mat - the matrix
11375: Output Parameter:
11376: . cong - either `PETSC_TRUE` or `PETSC_FALSE`
11378: Level: beginner
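  Example Usage:
  A minimal sketch (assuming `mat` is a valid `Mat`):
.vb
  PetscBool cong;

  PetscCall(MatHasCongruentLayouts(mat, &cong));
  if (cong) { /* the row and column layouts match, so, e.g., the diagonal entries are locally owned */ }
.ve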
11380: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatSetSizes()`, `PetscLayout`
11381: @*/
11382: PetscErrorCode MatHasCongruentLayouts(Mat mat, PetscBool *cong)
11383: {
11384: PetscFunctionBegin;
11387: PetscAssertPointer(cong, 2);
11388: if (!mat->rmap || !mat->cmap) {
11389: *cong = mat->rmap == mat->cmap ? PETSC_TRUE : PETSC_FALSE;
11390: PetscFunctionReturn(PETSC_SUCCESS);
11391: }
11392: if (mat->congruentlayouts == PETSC_DECIDE) { /* first time we compare rows and cols layouts */
11393: PetscCall(PetscLayoutSetUp(mat->rmap));
11394: PetscCall(PetscLayoutSetUp(mat->cmap));
11395: PetscCall(PetscLayoutCompare(mat->rmap, mat->cmap, cong));
11396: if (*cong) mat->congruentlayouts = 1;
11397: else mat->congruentlayouts = 0;
11398: } else *cong = mat->congruentlayouts ? PETSC_TRUE : PETSC_FALSE;
11399: PetscFunctionReturn(PETSC_SUCCESS);
11400: }
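/* MatSetInf() - developer-level helper with no manual page; it simply dispatches to the matrix type's `setinf` method */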
11402: PetscErrorCode MatSetInf(Mat A)
11403: {
11404: PetscFunctionBegin;
11405: PetscUseTypeMethod(A, setinf);
11406: PetscFunctionReturn(PETSC_SUCCESS);
11407: }
11409: /*@
11410: MatCreateGraph - creates a scalar matrix (that is, a matrix with one vertex for each block vertex of the original matrix) for use in graph algorithms,
11411: optionally removing small values from the graph structure.
11413: Collective
11415: Input Parameters:
11416: + A - the matrix
11417: . sym - `PETSC_TRUE` indicates that the graph should be symmetrized
11418: . scale - `PETSC_TRUE` indicates that the graph edge weights should be symmetrically scaled with the diagonal entry
11419: . filter - filtering threshold: < 0 does nothing; == 0 removes only exact 0.0 entries; > 0 removes entries with absolute value <= filter
11420: . num_idx - size of the `index` array
11421: - index - array of block indices to use for the graph strength-of-connection weights
11423: Output Parameter:
11424: . graph - the resulting graph
11426: Level: advanced
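  Example Usage:
  A possible call sequence (a sketch; the threshold 0.01 and the single block index are illustrative only):
.vb
  Mat      graph;
  PetscInt idx[] = {0}; /* block indices used for the strength-of-connection weights */

  PetscCall(MatCreateGraph(A, PETSC_TRUE, PETSC_TRUE, 0.01, 1, idx, &graph));
  /* ... use the graph, e.g. for coarsening ... */
  PetscCall(MatDestroy(&graph));
.ve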
11428: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PCGAMG`
11429: @*/
11430: PetscErrorCode MatCreateGraph(Mat A, PetscBool sym, PetscBool scale, PetscReal filter, PetscInt num_idx, PetscInt index[], Mat *graph)
11431: {
11432: PetscFunctionBegin;
11436: PetscAssertPointer(graph, 7);
11437: PetscCall(PetscLogEventBegin(MAT_CreateGraph, A, 0, 0, 0));
11438: PetscUseTypeMethod(A, creategraph, sym, scale, filter, num_idx, index, graph);
11439: PetscCall(PetscLogEventEnd(MAT_CreateGraph, A, 0, 0, 0));
11440: PetscFunctionReturn(PETSC_SUCCESS);
11441: }
11443: /*@
11444: MatEliminateZeros - eliminates the off-diagonal zero entries in place from the nonzero structure of a sparse `Mat`,
11445: meaning the same memory is used for the matrix and no new memory is allocated.
11447: Collective
11449: Input Parameters:
11450: + A - the matrix
11451: - keep - when the diagonal coefficient of a row of `A` is zero, indicates whether it should be kept in the structure or eliminated as well
11453: Level: intermediate
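  Example Usage:
  A minimal sketch (assuming `A` is an assembled sparse matrix):
.vb
  /* drop stored off-diagonal zeros; keep zero diagonal entries in the structure */
  PetscCall(MatEliminateZeros(A, PETSC_TRUE));
.ve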
11455: Developer Note:
11456: The entries in the sparse matrix data structure are shifted down to fill the eliminated locations, so the
11457: tail ends of the internal arrays become unused.
11459: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateGraph()`, `MatFilter()`
11460: @*/
11461: PetscErrorCode MatEliminateZeros(Mat A, PetscBool keep)
11462: {
11463: PetscFunctionBegin;
11465: PetscUseTypeMethod(A, eliminatezeros, keep);
11466: PetscFunctionReturn(PETSC_SUCCESS);
11467: }