/*
   pastix.c  (petsc-3.4.2, 2013-07-02)

   Provides an interface to the PaStiX sparse solver
*/
#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/mat/impls/sbaij/seq/sbaij.h>
#include <../src/mat/impls/sbaij/mpi/mpisbaij.h>

#if defined(PETSC_USE_COMPLEX) && defined(__cplusplus)
#define _H_COMPLEX
#endif

EXTERN_C_BEGIN
#include <pastix.h>
EXTERN_C_END
#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
#define PASTIX_CALL        c_pastix
#define PASTIX_CHECKMATRIX c_pastix_checkMatrix
#define PastixScalar       COMPLEX
#else
#define PASTIX_CALL        z_pastix
#define PASTIX_CHECKMATRIX z_pastix_checkMatrix
#define PastixScalar       DCOMPLEX
#endif

#else /* PETSC_USE_COMPLEX */

#if defined(PETSC_USE_REAL_SINGLE)
#define PASTIX_CALL        s_pastix
#define PASTIX_CHECKMATRIX s_pastix_checkMatrix
#define PastixScalar       float
#else
#define PASTIX_CALL        d_pastix
#define PASTIX_CHECKMATRIX d_pastix_checkMatrix
#define PastixScalar       double
#endif

#endif /* PETSC_USE_COMPLEX */
typedef struct Mat_Pastix_ {
  pastix_data_t *pastix_data;    /* PaStiX data storage structure */
  MatStructure   matstruc;
  PetscInt       n;              /* Number of columns in the matrix */
  PetscInt      *colptr;         /* Index of the first element of each column in row and val */
  PetscInt      *row;            /* Row of each element of the matrix */
  PetscScalar   *val;            /* Value of each element of the matrix */
  PetscInt      *perm;           /* Permutation table */
  PetscInt      *invp;           /* Inverse permutation table */
  PetscScalar   *rhs;            /* Right-hand side */
  PetscInt       rhsnbr;         /* Number of right-hand sides (must be 1) */
  PetscInt       iparm[64];      /* Integer parameters */
  double         dparm[64];      /* Floating-point parameters */
  MPI_Comm       pastix_comm;    /* PaStiX MPI communicator */
  PetscMPIInt    commRank;       /* MPI rank */
  PetscMPIInt    commSize;       /* MPI communicator size */
  PetscBool      CleanUpPastix;  /* Boolean indicating if we call the PaStiX clean step */
  VecScatter     scat_rhs;
  VecScatter     scat_sol;
  Vec            b_seq;
  PetscBool      isAIJ;
  PetscErrorCode (*Destroy)(Mat);
} Mat_Pastix;

extern PetscErrorCode MatDuplicate_Pastix(Mat,MatDuplicateOption,Mat*);

/*
   Convert a PETSc matrix to the CSC format used by PaStiX: colptr[n], row[nz], val[nz]

   input:
     A       - matrix in seqaij or mpisbaij (bs=1) format
     valOnly - FALSE: allocate the CSC arrays and fill both structure and values
               TRUE:  fill only the values (structure unchanged)
   output:
     n       - size of the matrix
     colptr  - index of the first element of each column in row and val
     row     - row of each element of the matrix
     values  - value of each element of the matrix
*/
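/*
   For illustration (not part of the original source): with base = 1, the 3x3 matrix

        [ 4 0 1 ]
        [ 0 5 0 ]
        [ 2 0 6 ]

   stored column by column gives

        colptr = { 1, 3, 4, 6 }    (column j occupies entries colptr[j]-1 .. colptr[j+1]-2)
        row    = { 1, 3, 2, 1, 3 }
        val    = { 4, 2, 5, 1, 6 }
*/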
PetscErrorCode MatConvertToCSC(Mat A,PetscBool valOnly,PetscInt *n,PetscInt **colptr,PetscInt **row,PetscScalar **values)
{
  Mat_SeqAIJ  *aa      = (Mat_SeqAIJ*)A->data;
  PetscInt    *rowptr  = aa->i;
  PetscInt    *col     = aa->j;
  PetscScalar *rvalues = aa->a;
  PetscInt     m       = A->rmap->N;
  PetscInt     nnz;
  PetscInt     i,j,k;
  PetscInt     base = 1;
  PetscInt     idx;
  PetscInt     colidx;
  PetscInt    *colcount;
  PetscBool    isSBAIJ;
  PetscBool    isSeqSBAIJ;
  PetscBool    isMpiSBAIJ;
  PetscBool    isSym;
  PetscBool    flg;
  PetscInt     icntl;
  PetscInt     verb;
  PetscInt     check;

  MatIsSymmetric(A,0.0,&isSym);
  PetscObjectTypeCompare((PetscObject)A,MATSBAIJ,&isSBAIJ);
  PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
  PetscObjectTypeCompare((PetscObject)A,MATMPISBAIJ,&isMpiSBAIJ);

  *n = A->cmap->N;

  /* PaStiX only needs the (lower) triangular part when the matrix is symmetric */
  if (isSym && !(isSBAIJ || isSeqSBAIJ || isMpiSBAIJ)) nnz = (aa->nz - *n)/2 + *n;
  else nnz = aa->nz;
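  /* e.g. a symmetric 3x3 matrix stored with all aa->nz = 9 entries keeps only its
     lower triangle: nnz = (9 - 3)/2 + 3 = 6 entries */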

  if (!valOnly) {
    PetscMalloc(((*n)+1)*sizeof(PetscInt),colptr);
    PetscMalloc(nnz*sizeof(PetscInt),row);
    PetscMalloc(nnz*sizeof(PetscScalar),values);

    if (isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) {
      PetscMemcpy(*colptr,rowptr,((*n)+1)*sizeof(PetscInt));
      for (i = 0; i < *n+1; i++) (*colptr)[i] += base;
      PetscMemcpy(*row,col,nnz*sizeof(PetscInt));
      for (i = 0; i < nnz; i++) (*row)[i] += base;
      PetscMemcpy(*values,rvalues,nnz*sizeof(PetscScalar));
    } else {
      PetscMalloc((*n)*sizeof(PetscInt),&colcount);

      for (i = 0; i < m; i++) colcount[i] = 0;
      /* Fill in colptr */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i+1]; j++) {
          if (!isSym || col[j] <= i) colcount[col[j]]++;
        }
      }

      (*colptr)[0] = base;
      for (j = 0; j < *n; j++) {
        (*colptr)[j+1] = (*colptr)[j] + colcount[j];
        /* in the next loop we fill starting from (*colptr)[colidx] - base */
        colcount[j] = -base;
      }

      /* Fill in rows and values */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i+1]; j++) {
          if (!isSym || col[j] <= i) {
            colidx         = col[j];
            idx            = (*colptr)[colidx] + colcount[colidx];
            (*row)[idx]    = i + base;
            (*values)[idx] = rvalues[j];
            colcount[colidx]++;
          }
        }
      }
      PetscFree(colcount);
    }
  } else {
    /* Fill in only the values */
    for (i = 0; i < m; i++) {
      for (j = rowptr[i]; j < rowptr[i+1]; j++) {
        colidx = col[j];
        if ((isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) || !isSym || col[j] <= i) {
          /* look for the value to fill */
          for (k = (*colptr)[colidx] - base; k < (*colptr)[colidx+1] - base; k++) {
            if (((*row)[k]-base) == i) {
              (*values)[k] = rvalues[j];
              break;
            }
          }
          /* data structure of the sparse matrix has changed */
          if (k == (*colptr)[colidx+1] - base) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"overflow on k %D",k);
        }
      }
    }
  }

  icntl = -1;
  check = 0;
  PetscOptionsInt("-mat_pastix_check","Check the matrix (0: no, 1: yes)","None",check,&icntl,&flg);
  if ((flg && icntl >= 0) || PetscLogPrintInfo) check = icntl;

  if (check == 1) {
    PetscScalar *tmpvalues;
    PetscInt    *tmprows,*tmpcolptr;
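
    /* Raw malloc()/free() is used for these buffers (rather than PetscMalloc()),
       presumably because the PaStiX matrix-check routine may reallocate them
       internally with the C allocator when it corrects the structure. */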
    tmpvalues = (PetscScalar*)malloc(nnz*sizeof(PetscScalar)); if (!tmpvalues) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");
    tmprows   = (PetscInt*)   malloc(nnz*sizeof(PetscInt));    if (!tmprows)   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");
    tmpcolptr = (PetscInt*)   malloc((*n+1)*sizeof(PetscInt)); if (!tmpcolptr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");

    PetscMemcpy(tmpcolptr,*colptr,(*n+1)*sizeof(PetscInt));
    PetscMemcpy(tmprows,*row,nnz*sizeof(PetscInt));
    PetscMemcpy(tmpvalues,*values,nnz*sizeof(PetscScalar));
    PetscFree(*row);
    PetscFree(*values);

    icntl = -1;
    verb  = API_VERBOSE_NOT;
    PetscOptionsInt("-mat_pastix_verbose","iparm[IPARM_VERBOSE] : level of printing (0 to 2)","None",verb,&icntl,&flg);
    if ((flg && icntl >= 0) || PetscLogPrintInfo) {
      verb = icntl;
    }
    PASTIX_CHECKMATRIX(MPI_COMM_WORLD,verb,((isSym != 0) ? API_SYM_YES : API_SYM_NO),API_YES,*n,&tmpcolptr,&tmprows,(PastixScalar**)&tmpvalues,NULL,1);

    PetscMemcpy(*colptr,tmpcolptr,(*n+1)*sizeof(PetscInt));
    PetscMalloc(((*colptr)[*n]-1)*sizeof(PetscInt),row);
    PetscMemcpy(*row,tmprows,((*colptr)[*n]-1)*sizeof(PetscInt));
    PetscMalloc(((*colptr)[*n]-1)*sizeof(PetscScalar),values);
    PetscMemcpy(*values,tmpvalues,((*colptr)[*n]-1)*sizeof(PetscScalar));
    free(tmpvalues);
    free(tmprows);
    free(tmpcolptr);
  }
  return(0);
}

/*
  Call the clean step of PaStiX if lu->CleanUpPastix == true.
  Free the CSC matrix.
*/
PetscErrorCode MatDestroy_Pastix(Mat A)
{
  Mat_Pastix *lu   = (Mat_Pastix*)A->spptr;
  PetscMPIInt size = lu->commSize;

  if (lu && lu->CleanUpPastix) {
    /* Terminate the instance, deallocate memory */
    if (size > 1) {
      VecScatterDestroy(&lu->scat_rhs);
      VecDestroy(&lu->b_seq);
      VecScatterDestroy(&lu->scat_sol);
    }

    lu->iparm[IPARM_START_TASK] = API_TASK_CLEAN;
    lu->iparm[IPARM_END_TASK]   = API_TASK_CLEAN;

    PASTIX_CALL(&(lu->pastix_data),
                lu->pastix_comm,
                lu->n,
                lu->colptr,
                lu->row,
                (PastixScalar*)lu->val,
                lu->perm,
                lu->invp,
                (PastixScalar*)lu->rhs,
                lu->rhsnbr,
                lu->iparm,
                lu->dparm);

    PetscFree(lu->colptr);
    PetscFree(lu->row);
    PetscFree(lu->val);
    PetscFree(lu->perm);
    PetscFree(lu->invp);
    MPI_Comm_free(&(lu->pastix_comm));
  }
  if (lu && lu->Destroy) {
    (lu->Destroy)(A);
  }
  PetscFree(A->spptr);
  return(0);
}

/*
  Gather the right-hand side.
  Call the solve step.
  Scatter the solution.
*/
PetscErrorCode MatSolve_PaStiX(Mat A,Vec b,Vec x)
{
  Mat_Pastix  *lu = (Mat_Pastix*)A->spptr;
  PetscScalar *array;
  Vec          x_seq;

  lu->rhsnbr = 1;
  x_seq      = lu->b_seq;
  if (lu->commSize > 1) {
    /* PaStiX only supports a centralized rhs. Scatter b into a sequential rhs vector */
    VecScatterBegin(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecGetArray(x_seq,&array);
  } else { /* size == 1 */
    VecCopy(b,x);
    VecGetArray(x,&array);
  }
  lu->rhs = array;
  if (lu->commSize == 1) {
    VecRestoreArray(x,&array);
  } else {
    VecRestoreArray(x_seq,&array);
  }

  /* solve phase */
  /*-------------*/
  lu->iparm[IPARM_START_TASK] = API_TASK_SOLVE;
  lu->iparm[IPARM_END_TASK]   = API_TASK_REFINE;
  lu->iparm[IPARM_RHS_MAKING] = API_RHS_B;
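  /* With API_RHS_B, PaStiX overwrites the supplied right-hand side with the solution
     in place, which is why x's array (or the sequential x_seq) is passed as the rhs */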

  PASTIX_CALL(&(lu->pastix_data),
              lu->pastix_comm,
              lu->n,
              lu->colptr,
              lu->row,
              (PastixScalar*)lu->val,
              lu->perm,
              lu->invp,
              (PastixScalar*)lu->rhs,
              lu->rhsnbr,
              lu->iparm,
              lu->dparm);

  if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in solve phase: lu->iparm[IPARM_ERROR_NUMBER] = %d\n",lu->iparm[IPARM_ERROR_NUMBER]);

  if (lu->commSize == 1) {
    VecRestoreArray(x,&(lu->rhs));
  } else {
    VecRestoreArray(x_seq,&(lu->rhs));
  }

  if (lu->commSize > 1) { /* convert the PaStiX centralized solution to a PETSc mpi x */
    VecScatterBegin(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
  }
  return(0);
}

/*
  Numerical factorization using the PaStiX solver.
*/
PetscErrorCode MatFactorNumeric_PaStiX(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_Pastix     *lu = (Mat_Pastix*)(F)->spptr;
  Mat            *tseq;
  PetscErrorCode  ierr = 0;
  PetscInt        icntl;
  PetscInt        M = A->rmap->N;
  PetscBool       valOnly,flg,isSym;
  Mat             F_diag;
  IS              is_iden;
  Vec             b;
  IS              isrow;
  PetscBool       isSeqAIJ,isSeqSBAIJ,isMPIAIJ;

  PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);
  PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);
  PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
    (F)->ops->solve = MatSolve_PaStiX;

    /* Initialize a PaStiX instance */
    MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->pastix_comm));
    MPI_Comm_rank(lu->pastix_comm,&lu->commRank);
    MPI_Comm_size(lu->pastix_comm,&lu->commSize);

    /* Set PaStiX options */
    lu->iparm[IPARM_MODIFY_PARAMETER] = API_NO;
    lu->iparm[IPARM_START_TASK]       = API_TASK_INIT;
    lu->iparm[IPARM_END_TASK]         = API_TASK_INIT;

    lu->rhsnbr = 1;

    /* Call PaStiX to set the default options */
    PASTIX_CALL(&(lu->pastix_data),
                lu->pastix_comm,
                lu->n,
                lu->colptr,
                lu->row,
                (PastixScalar*)lu->val,
                lu->perm,
                lu->invp,
                (PastixScalar*)lu->rhs,
                lu->rhsnbr,
                lu->iparm,
                lu->dparm);

    PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"PaStiX Options","Mat");

    icntl = -1;

    lu->iparm[IPARM_VERBOSE] = API_VERBOSE_NOT;

    PetscOptionsInt("-mat_pastix_verbose","iparm[IPARM_VERBOSE] : level of printing (0 to 2)","None",lu->iparm[IPARM_VERBOSE],&icntl,&flg);
    if ((flg && icntl >= 0) || PetscLogPrintInfo) {
      lu->iparm[IPARM_VERBOSE] = icntl;
    }
    icntl = -1;
    PetscOptionsInt("-mat_pastix_threadnbr","iparm[IPARM_THREAD_NBR] : number of threads per MPI node","None",lu->iparm[IPARM_THREAD_NBR],&icntl,&flg);
    if (flg && icntl > 0) {
      lu->iparm[IPARM_THREAD_NBR] = icntl;
    }
    PetscOptionsEnd();
    valOnly = PETSC_FALSE;
  } else {
    if (isSeqAIJ || isMPIAIJ) {
      PetscFree(lu->colptr);
      PetscFree(lu->row);
      PetscFree(lu->val);
      valOnly = PETSC_FALSE;
    } else valOnly = PETSC_TRUE;
  }

  lu->iparm[IPARM_MATRIX_VERIFICATION] = API_YES;

  /* convert mpi A to seq mat A */
  ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);
  MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);
  ISDestroy(&isrow);

  MatConvertToCSC(*tseq,valOnly,&lu->n,&lu->colptr,&lu->row,&lu->val);
  MatIsSymmetric(*tseq,0.0,&isSym);
  MatDestroyMatrices(1,&tseq);

  if (!lu->perm) {
    PetscMalloc((lu->n)*sizeof(PetscInt),&(lu->perm));
    PetscMalloc((lu->n)*sizeof(PetscInt),&(lu->invp));
  }

  if (isSym) {
    /* On a symmetric matrix, use LDL^t factorization */
    lu->iparm[IPARM_SYM]           = API_SYM_YES;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LDLT;
  } else {
    /* On an unsymmetric matrix, use LU */
    lu->iparm[IPARM_SYM]           = API_SYM_NO;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  }

  /*----------------*/
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
    if (!(isSeqAIJ || isSeqSBAIJ)) {
      /* PaStiX only supports a centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
      VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&lu->b_seq);
      ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);
      VecCreate(PetscObjectComm((PetscObject)A),&b);
      VecSetSizes(b,A->rmap->n,PETSC_DECIDE);
      VecSetFromOptions(b);

      VecScatterCreate(b,is_iden,lu->b_seq,is_iden,&lu->scat_rhs);
      VecScatterCreate(lu->b_seq,is_iden,b,is_iden,&lu->scat_sol);
      ISDestroy(&is_iden);
      VecDestroy(&b);
    }
    lu->iparm[IPARM_START_TASK] = API_TASK_ORDERING;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;

    PASTIX_CALL(&(lu->pastix_data),
                lu->pastix_comm,
                lu->n,
                lu->colptr,
                lu->row,
                (PastixScalar*)lu->val,
                lu->perm,
                lu->invp,
                (PastixScalar*)lu->rhs,
                lu->rhsnbr,
                lu->iparm,
                lu->dparm);
    if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in analysis phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
  } else {
    lu->iparm[IPARM_START_TASK] = API_TASK_NUMFACT;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;
    PASTIX_CALL(&(lu->pastix_data),
                lu->pastix_comm,
                lu->n,
                lu->colptr,
                lu->row,
                (PastixScalar*)lu->val,
                lu->perm,
                lu->invp,
                (PastixScalar*)lu->rhs,
                lu->rhsnbr,
                lu->iparm,
                lu->dparm);

    if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in numerical factorization phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
  }

  if (lu->commSize > 1) {
    if ((F)->factortype == MAT_FACTOR_LU) {
      F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
    } else {
      F_diag = ((Mat_MPISBAIJ*)(F)->data)->A;
    }
    F_diag->assembled = PETSC_TRUE;
  }
  (F)->assembled    = PETSC_TRUE;
  lu->matstruc      = SAME_NONZERO_PATTERN;
  lu->CleanUpPastix = PETSC_TRUE;
  return(0);
}

/* Note: the PETSc r and c permutations are ignored */
PetscErrorCode MatLUFactorSymbolic_AIJPASTIX(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix*)F->spptr;

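  /* These defaults are provisional; MatFactorNumeric_PaStiX() resets IPARM_SYM and
     IPARM_FACTORIZATION based on MatIsSymmetric() before factoring */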
  lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  lu->iparm[IPARM_SYM]           = API_SYM_YES;
  lu->matstruc                   = DIFFERENT_NONZERO_PATTERN;
  F->ops->lufactornumeric        = MatFactorNumeric_PaStiX;
  return(0);
}

/* Note: the PETSc r permutation is ignored */
PetscErrorCode MatCholeskyFactorSymbolic_SBAIJPASTIX(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix*)(F)->spptr;

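  /* These defaults are provisional; MatFactorNumeric_PaStiX() resets IPARM_SYM and
     IPARM_FACTORIZATION based on MatIsSymmetric() before factoring */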
  lu->iparm[IPARM_FACTORIZATION]  = API_FACT_LLT;
  lu->iparm[IPARM_SYM]            = API_SYM_NO;
  lu->matstruc                    = DIFFERENT_NONZERO_PATTERN;
  (F)->ops->choleskyfactornumeric = MatFactorNumeric_PaStiX;
  return(0);
}

PetscErrorCode MatView_PaStiX(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      Mat_Pastix *lu = (Mat_Pastix*)A->spptr;

      PetscViewerASCIIPrintf(viewer,"PaStiX run parameters:\n");
      PetscViewerASCIIPrintf(viewer,"  Matrix type :                     %s \n",((lu->iparm[IPARM_SYM] == API_SYM_YES) ? "Symmetric" : "Unsymmetric"));
      PetscViewerASCIIPrintf(viewer,"  Level of printing (0,1,2) :       %d \n",lu->iparm[IPARM_VERBOSE]);
      PetscViewerASCIIPrintf(viewer,"  Number of refinement iterations : %d \n",lu->iparm[IPARM_NBITER]);
      PetscViewerASCIIPrintf(viewer,"  Relative error :                  %g \n",lu->dparm[DPARM_RELATIVE_ERROR]);
    }
  }
  return(0);
}

/*MC
  MATSOLVERPASTIX - A solver package providing direct solvers (LU and Cholesky)
  for distributed and sequential matrices via the external package PaStiX.

  Use ./configure --download-pastix to install PETSc with PaStiX.

  Options Database Keys:
+ -mat_pastix_verbose   <0,1,2>   - print level
- -mat_pastix_threadnbr <integer> - number of threads per MPI task

  Level: beginner

.seealso: PCFactorSetMatSolverPackage(), MatSolverPackage

M*/
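
/*
   Usage sketch (illustration only, not part of the original file): selecting PaStiX
   as the factorization package for a direct solve. The matrix A and vectors b,x are
   assumed to be already assembled.

     KSP ksp;
     PC  pc;
     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetOperators(ksp,A,A,SAME_NONZERO_PATTERN);   // petsc-3.4 signature
     KSPSetType(ksp,KSPPREONLY);                      // pure direct solve, no Krylov iteration
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCLU);                              // or PCCHOLESKY with an SBAIJ matrix
     PCFactorSetMatSolverPackage(pc,MATSOLVERPASTIX);
     KSPSetFromOptions(ksp);                          // picks up -mat_pastix_* options
     KSPSolve(ksp,b,x);

   The same selection can be made on the command line with
     -ksp_type preonly -pc_type lu -pc_factor_mat_solver_package pastix
*/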

PetscErrorCode MatGetInfo_PaStiX(Mat A,MatInfoType flag,MatInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix*)A->spptr;

  info->block_size        = 1.0;
  info->nz_allocated      = lu->iparm[IPARM_NNZEROS];
  info->nz_used           = lu->iparm[IPARM_NNZEROS];
  info->nz_unneeded       = 0.0;
  info->assemblies        = 0.0;
  info->mallocs           = 0.0;
  info->memory            = 0.0;
  info->fill_ratio_given  = 0;
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  return(0);
}

PetscErrorCode MatFactorGetSolverPackage_pastix(Mat A,const MatSolverPackage *type)
{
  *type = MATSOLVERPASTIX;
  return(0);
}

/*
  The seq and mpi versions of this function are nearly the same
*/
PETSC_EXTERN PetscErrorCode MatGetFactor_seqaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_LU) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqAIJSetPreallocation(B,0,NULL);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
  B->ops->view             = MatView_PaStiX;
  B->ops->getinfo          = MatGetInfo_PaStiX;

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);

  B->factortype = MAT_FACTOR_LU;

  PetscNewLog(B,Mat_Pastix,&pastix);

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_TRUE;
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  pastix->Destroy       = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatGetFactor_mpiaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_LU) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqAIJSetPreallocation(B,0,NULL);
  MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
  B->ops->view             = MatView_PaStiX;

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);

  B->factortype = MAT_FACTOR_LU;

  PetscNewLog(B,Mat_Pastix,&pastix);

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_TRUE;
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  pastix->Destroy       = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatGetFactor_seqsbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqSBAIJSetPreallocation(B,1,0,NULL);
  MatMPISBAIJSetPreallocation(B,1,0,NULL,0,NULL);

  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
  B->ops->view                   = MatView_PaStiX;

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);

  B->factortype = MAT_FACTOR_CHOLESKY;

  PetscNewLog(B,Mat_Pastix,&pastix);

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_FALSE;  /* SBAIJ input, not AIJ */
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  pastix->Destroy       = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatGetFactor_mpisbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");

  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqSBAIJSetPreallocation(B,1,0,NULL);
  MatMPISBAIJSetPreallocation(B,1,0,NULL,0,NULL);

  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
  B->ops->view                   = MatView_PaStiX;

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);

  B->factortype = MAT_FACTOR_CHOLESKY;

  PetscNewLog(B,Mat_Pastix,&pastix);

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->isAIJ         = PETSC_FALSE;  /* SBAIJ input, not AIJ */
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  pastix->Destroy       = B->ops->destroy;
  B->ops->destroy       = MatDestroy_Pastix;
  B->spptr              = (void*)pastix;

  *F = B;
  return(0);
}