Actual source code: gamg.c
petsc-3.4.2 2013-07-02
1: /*
2: GAMG geometric-algebraic multigrid PC - Mark Adams 2011
3: */
4: #include "petsc-private/matimpl.h"
5: #include <../src/ksp/pc/impls/gamg/gamg.h> /*I "petscpc.h" I*/
6: #include <petsc-private/kspimpl.h>
8: #if defined PETSC_GAMG_USE_LOG
9: PetscLogEvent petsc_gamg_setup_events[NUM_SET];
10: #endif
12: #if defined PETSC_USE_LOG
13: PetscLogEvent PC_GAMGGgraph_AGG;
14: PetscLogEvent PC_GAMGGgraph_GEO;
15: PetscLogEvent PC_GAMGCoarsen_AGG;
16: PetscLogEvent PC_GAMGCoarsen_GEO;
17: PetscLogEvent PC_GAMGProlongator_AGG;
18: PetscLogEvent PC_GAMGProlongator_GEO;
19: PetscLogEvent PC_GAMGOptprol_AGG;
20: PetscLogEvent PC_GAMGKKTProl_AGG;
21: #endif
23: #define GAMG_MAXLEVELS 30
25: /* #define GAMG_STAGES */
26: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
27: static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
28: #endif
30: static PetscFunctionList GAMGList = 0;
31: static PetscBool PCGAMGPackageInitialized;
33: /* ----------------------------------------------------------------------------- */
36: PetscErrorCode PCReset_GAMG(PC pc)
37: {
39: PC_MG *mg = (PC_MG*)pc->data;
40: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
43: if (pc_gamg->data) { /* this should not happen, cleaned up in SetUp */
44: PetscPrintf(PetscObjectComm((PetscObject)pc),"***[%d]%s this should not happen, cleaned up in SetUp\n",0,__FUNCT__);
45: PetscFree(pc_gamg->data);
46: }
47: pc_gamg->data = NULL; pc_gamg->data_sz = 0;
49: if (pc_gamg->orig_data) {
50: PetscFree(pc_gamg->orig_data);
51: }
52: return(0);
53: }
55: /* private 2x2 Mat Nest for Stokes */
56: typedef struct {
57: Mat A11,A21,A12,Amat;
58: IS prim_is,constr_is;
59: } GAMGKKTMat;
63: static PetscErrorCode GAMGKKTMatCreate(Mat A, PetscBool iskkt, GAMGKKTMat *out)
64: {
66: out->Amat = A;
67: if (iskkt) {
69: IS is_constraint, is_prime;
70: PetscInt nmin,nmax;
72: MatGetOwnershipRange(A, &nmin, &nmax);
73: MatFindZeroDiagonals(A, &is_constraint);
74: ISComplement(is_constraint, nmin, nmax, &is_prime);
76: out->prim_is = is_prime;
77: out->constr_is = is_constraint;
79: MatGetSubMatrix(A, is_prime, is_prime, MAT_INITIAL_MATRIX, &out->A11);
80: MatGetSubMatrix(A, is_prime, is_constraint, MAT_INITIAL_MATRIX, &out->A12);
81: MatGetSubMatrix(A, is_constraint, is_prime, MAT_INITIAL_MATRIX, &out->A21);
82: } else {
83: out->A11 = A;
84: out->A21 = NULL;
85: out->A12 = NULL;
86: out->prim_is = NULL;
87: out->constr_is = NULL;
88: }
89: return(0);
90: }
94: static PetscErrorCode GAMGKKTMatDestroy(GAMGKKTMat *mat)
95: {
99: if (mat->A11 && mat->A11 != mat->Amat) {
100: MatDestroy(&mat->A11);
101: }
102: MatDestroy(&mat->A21);
103: MatDestroy(&mat->A12);
105: ISDestroy(&mat->prim_is);
106: ISDestroy(&mat->constr_is);
107: return(0);
108: }
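/*
   A minimal sketch (not part of the PETSc sources) of the primal/constraint split
   performed by GAMGKKTMatCreate() above, for any assembled saddle-point matrix 'K':
   constraint rows are detected by their zero diagonal and the primal set is the
   complement.

     IS       is_constraint, is_prime;
     Mat      A11;
     PetscInt nmin, nmax;
     MatGetOwnershipRange(K, &nmin, &nmax);
     MatFindZeroDiagonals(K, &is_constraint);
     ISComplement(is_constraint, nmin, nmax, &is_prime);
     MatGetSubMatrix(K, is_prime, is_prime, MAT_INITIAL_MATRIX, &A11);
*/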
110: /* -------------------------------------------------------------------------- */
111: /*
112: createLevel: create the coarse grid operator with RAP; repartition and/or reduce
113: the number of active processors.
115: Input Parameters:
116: . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
117: 'pc_gamg->data_sz' are changed via repartitioning/reduction.
118: . Amat_fine - matrix on this fine (k) level
119: . cr_bs - coarse block size
120: . isLast - this is the coarsest (last) level being created
121: . stokes - treat the system as a saddle-point (Stokes/KKT) problem
122: In/Output Parameter:
123: . a_P_inout - prolongation operator to the next level (k-->k-1)
124: . a_nactive_proc - number of active procs
125: Output Parameter:
126: . a_Amat_crs - coarse matrix that is created (k-1)
127: */
131: static PetscErrorCode createLevel(const PC pc,const Mat Amat_fine,const PetscInt cr_bs,const PetscBool isLast,const PetscBool stokes,Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc)
132: {
133: PetscErrorCode ierr;
134: PC_MG *mg = (PC_MG*)pc->data;
135: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
136: const PetscBool repart = pc_gamg->repart;
137: const PetscInt min_eq_proc = pc_gamg->min_eq_proc, coarse_max = pc_gamg->coarse_eq_limit;
138: Mat Cmat,Pold=*a_P_inout;
139: MPI_Comm comm;
140: PetscMPIInt rank,size,new_size,nactive=*a_nactive_proc;
141: PetscInt ncrs_eq,ncrs_prim,f_bs;
144: PetscObjectGetComm((PetscObject)Amat_fine,&comm);
145: MPI_Comm_rank(comm, &rank);
146: MPI_Comm_size(comm, &size);
147: MatGetBlockSize(Amat_fine, &f_bs);
148: /* RAP */
149: MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);
151: /* set 'ncrs_prim' (nodes), 'ncrs_eq' (equations)*/
152: ncrs_prim = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
153: if (pc_gamg->data_sz % (pc_gamg->data_cell_cols*pc_gamg->data_cell_rows)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"pc_gamg->data_sz %D not divisible by (pc_gamg->data_cell_cols %D *pc_gamg->data_cell_rows %D)",pc_gamg->data_sz,pc_gamg->data_cell_cols,pc_gamg->data_cell_rows);
154: MatGetLocalSize(Cmat, &ncrs_eq, NULL);
156: /* choose the number of PEs to make active, 'new_size'; can be any integer in 1..P */
157: {
158: PetscInt ncrs_eq_glob;
159: MatGetSize(Cmat, &ncrs_eq_glob, NULL);
160: new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
161: if (new_size == 0 || ncrs_eq_glob < coarse_max) new_size = 1;
162: else if (new_size >= nactive) new_size = nactive; /* no change, rare */
163: if (isLast) new_size = 1;
164: }
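    /* Worked example: with the default min_eq_proc = 50 and coarse_eq_limit = 800, a
       coarse grid with 10000 global equations gives new_size = (int)(10000/50. + 0.5) = 200
       (clamped to 'nactive'), while one with 500 equations (< 800) collapses onto a
       single active process. */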
166: if (!repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
167: else {
168: const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
169: PetscInt *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_prim_new,ncrs_eq_new,nloc_old;
170: IS is_eq_newproc,is_eq_newproc_prim,is_eq_num,is_eq_num_prim,isscat,new_eq_indices;
171: VecScatter vecscat;
172: PetscScalar *array;
173: Vec src_crd, dest_crd;
175: nloc_old = ncrs_eq/cr_bs;
176: if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
177: #if defined PETSC_GAMG_USE_LOG
178: PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);
179: #endif
180: /* make 'is_eq_newproc' */
181: PetscMalloc(size*sizeof(PetscInt), &counts);
182: if (repart && !stokes) {
183: /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
184: Mat adj;
186: if (pc_gamg->verbose>0) {
187: if (pc_gamg->verbose==1) PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,ncrs_eq);
188: else {
189: PetscInt n;
190: MPI_Allreduce(&ncrs_eq, &n, 1, MPIU_INT, MPI_SUM, comm);
191: PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,n);
192: }
193: }
195: /* get 'adj' */
196: if (cr_bs == 1) {
197: MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);
198: } else {
199: /* make a scalar matrix to partition (no Stokes here) */
200: Mat tMat;
201: PetscInt Istart_crs,Iend_crs,ncols,jj,Ii;
202: const PetscScalar *vals;
203: const PetscInt *idx;
204: PetscInt *d_nnz, *o_nnz, M, N;
205: static PetscInt llev = 0;
207: PetscMalloc(ncrs_prim*sizeof(PetscInt), &d_nnz);
208: PetscMalloc(ncrs_prim*sizeof(PetscInt), &o_nnz);
209: MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);
210: MatGetSize(Cmat, &M, &N);
211: for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
212: MatGetRow(Cmat,Ii,&ncols,0,0);
213: d_nnz[jj] = ncols/cr_bs;
214: o_nnz[jj] = ncols/cr_bs;
215: MatRestoreRow(Cmat,Ii,&ncols,0,0);
216: if (d_nnz[jj] > ncrs_prim) d_nnz[jj] = ncrs_prim;
217: if (o_nnz[jj] > (M/cr_bs-ncrs_prim)) o_nnz[jj] = M/cr_bs-ncrs_prim;
218: }
220: MatCreate(comm, &tMat);
221: MatSetSizes(tMat, ncrs_prim, ncrs_prim,PETSC_DETERMINE, PETSC_DETERMINE);
222: MatSetType(tMat,MATAIJ);
223: MatSeqAIJSetPreallocation(tMat,0,d_nnz);
224: MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);
225: PetscFree(d_nnz);
226: PetscFree(o_nnz);
228: for (ii = Istart_crs; ii < Iend_crs; ii++) {
229: PetscInt dest_row = ii/cr_bs;
230: MatGetRow(Cmat,ii,&ncols,&idx,&vals);
231: for (jj = 0; jj < ncols; jj++) {
232: PetscInt dest_col = idx[jj]/cr_bs;
233: PetscScalar v = 1.0;
234: MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);
235: }
236: MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);
237: }
238: MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);
239: MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);
241: if (llev++ == -1) {
242: PetscViewer viewer; char fname[32];
243: PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);
244: PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);
245: MatView(tMat, viewer);
246: PetscViewerDestroy(&viewer);
247: }
249: MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);
251: MatDestroy(&tMat);
252: } /* create 'adj' */
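      /* Example of the collapse above: with cr_bs = 3, Cmat rows {0,1,2} all map to
         row 0 of tMat, and every nonzero (i,j) of Cmat adds 1.0 at (i/3, j/3), so tMat
         is the node-level (scalar) connectivity graph the partitioner needs. */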
254: { /* partition: get newproc_idx */
255: char prefix[256];
256: const char *pcpre;
257: const PetscInt *is_idx;
258: MatPartitioning mpart;
259: IS proc_is;
260: PetscInt targetPE;
262: MatPartitioningCreate(comm, &mpart);
263: MatPartitioningSetAdjacency(mpart, adj);
264: PCGetOptionsPrefix(pc, &pcpre);
265: PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");
266: PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);
267: MatPartitioningSetFromOptions(mpart);
268: MatPartitioningSetNParts(mpart, new_size);
269: MatPartitioningApply(mpart, &proc_is);
270: MatPartitioningDestroy(&mpart);
272: /* collect IS info */
273: PetscMalloc(ncrs_eq*sizeof(PetscInt), &newproc_idx);
274: ISGetIndices(proc_is, &is_idx);
275: targetPE = 1; /* bring to "front" of machine */
276: /*targetPE = size/new_size;*/ /* spread partitioning across machine */
277: for (kk = jj = 0 ; kk < nloc_old ; kk++) {
278: for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
279: newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
280: }
281: }
282: ISRestoreIndices(proc_is, &is_idx);
283: ISDestroy(&proc_is);
284: }
285: MatDestroy(&adj);
287: ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);
288: if (newproc_idx != 0) {
289: PetscFree(newproc_idx);
290: }
291: } else { /* simple aggregation of parts -- 'is_eq_newproc' */
293: PetscInt rfactor,targetPE;
294: /* find factor */
295: if (new_size == 1) rfactor = size; /* easy */
296: else {
297: PetscReal best_fact = 0.;
298: jj = -1;
299: for (kk = 1 ; kk <= size ; kk++) {
300: if (size%kk==0) { /* a candidate */
301: PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
302: if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
303: if (fact > best_fact) {
304: best_fact = fact; jj = kk;
305: }
306: }
307: }
308: if (jj != -1) rfactor = jj;
309: else rfactor = 1; /* fallback; should not be reached since kk=1 is always a candidate */
310: }
311: new_size = size/rfactor;
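      /* Worked example: size = 12 ranks, requested new_size = 5.  The divisors kk of 12
         give size/kk = 12, 6, 4, 3, 2, 1 active ranks; comparing each to 5 (ratio kept
         below 1), the best fit is kk = 2 (5/6 ~ 0.83), so rfactor = 2 and the actual
         new_size becomes 12/2 = 6. */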
313: if (new_size==nactive) {
314: *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
315: PetscFree(counts);
316: if (pc_gamg->verbose>0) {
317: PetscPrintf(comm,"\t[%d]%s aggregate processors noop: new_size=%d, neq(loc)=%d\n",rank,__FUNCT__,new_size,ncrs_eq);
318: }
319: return(0);
320: }
322: if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s number of equations (loc) %d with simple aggregation\n",rank,__FUNCT__,ncrs_eq);
323: targetPE = rank/rfactor;
324: ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);
326: if (stokes) {
327: ISCreateStride(comm, ncrs_prim*cr_bs, targetPE, 0, &is_eq_newproc_prim);
328: }
329: } /* end simple 'is_eq_newproc' */
331: /*
332: Create an index set (a numbering) from the is_eq_newproc partitioning to indicate where each equation is mapped TO
333: */
334: ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);
335: if (stokes) {
336: ISPartitioningToNumbering(is_eq_newproc_prim, &is_eq_num_prim);
337: } else is_eq_num_prim = is_eq_num;
338: /*
339: Determine how many equations/vertices are assigned to each processor
340: */
341: ISPartitioningCount(is_eq_newproc, size, counts);
342: ncrs_eq_new = counts[rank];
343: ISDestroy(&is_eq_newproc);
344: if (stokes) {
345: ISPartitioningCount(is_eq_newproc_prim, size, counts);
346: ISDestroy(&is_eq_newproc_prim);
347: ncrs_prim_new = counts[rank]/cr_bs; /* nodes */
348: } else ncrs_prim_new = ncrs_eq_new/cr_bs; /* eqs */
350: PetscFree(counts);
351: #if defined PETSC_GAMG_USE_LOG
352: PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);
353: #endif
355: /* move data (for primal equations only) */
356: /* Create a vector to contain the newly ordered element information */
357: VecCreate(comm, &dest_crd);
358: VecSetSizes(dest_crd, node_data_sz*ncrs_prim_new, PETSC_DECIDE);
359: VecSetFromOptions(dest_crd); /* this is needed! */
360: /*
361: There are 'ndata_rows*ndata_cols' data items per node (one can think of the vectors
362: as having a block size of 'node_data_sz'). Note, ISs are expanded into equation space by 'cr_bs'.
363: */
364: PetscMalloc((ncrs_prim*node_data_sz)*sizeof(PetscInt), &tidx);
365: ISGetIndices(is_eq_num_prim, &idx);
366: for (ii=0,jj=0; ii<ncrs_prim; ii++) {
367: PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
368: for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
369: }
370: ISRestoreIndices(is_eq_num_prim, &idx);
371: ISCreateGeneral(comm, node_data_sz*ncrs_prim, tidx, PETSC_COPY_VALUES, &isscat);
372: PetscFree(tidx);
373: /*
374: Create a vector to contain the original vertex information for each element
375: */
376: VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs_prim, &src_crd);
377: for (jj=0; jj<ndata_cols; jj++) {
378: const PetscInt stride0=ncrs_prim*pc_gamg->data_cell_rows;
379: for (ii=0; ii<ncrs_prim; ii++) {
380: for (kk=0; kk<ndata_rows; kk++) {
381: PetscInt ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
382: PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
383: VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);
384: }
385: }
386: }
387: VecAssemblyBegin(src_crd);
388: VecAssemblyEnd(src_crd);
389: /*
390: Scatter the element vertex information (still in the original vertex ordering)
391: to the correct processor
392: */
393: VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);
394: ISDestroy(&isscat);
395: VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
396: VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
397: VecScatterDestroy(&vecscat);
398: VecDestroy(&src_crd);
399: /*
400: Put the element vertex data into a new allocation of pc_gamg->data
401: */
402: PetscFree(pc_gamg->data);
403: PetscMalloc(node_data_sz*ncrs_prim_new*sizeof(PetscReal), &pc_gamg->data);
405: pc_gamg->data_sz = node_data_sz*ncrs_prim_new;
406: strideNew = ncrs_prim_new*ndata_rows;
408: VecGetArray(dest_crd, &array);
409: for (jj=0; jj<ndata_cols; jj++) {
410: for (ii=0; ii<ncrs_prim_new; ii++) {
411: for (kk=0; kk<ndata_rows; kk++) {
412: PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
413: pc_gamg->data[ix] = PetscRealPart(array[jx]);
414: }
415: }
416: }
417: VecRestoreArray(dest_crd, &array);
418: VecDestroy(&dest_crd);
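      /* Indexing example for the pack/unpack above: with ndata_rows = 1 and ndata_cols = 3
         (x,y,z coordinates), pc_gamg->data stores all x's, then all y's, then all z's
         (component-major, stride 'strideNew'), while the scattered vector holds
         node-interleaved triples (x0,y0,z0,x1,...); 'ix' and 'jx' translate between
         the two layouts. */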
420: /* move A and P (columns) with new layout */
421: #if defined PETSC_GAMG_USE_LOG
422: PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);
423: #endif
425: /*
426: Invert for MatGetSubMatrix
427: */
428: ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);
429: ISSort(new_eq_indices); /* is this needed? */
430: ISSetBlockSize(new_eq_indices, cr_bs);
431: if (is_eq_num != is_eq_num_prim) {
432: ISDestroy(&is_eq_num_prim); /* could be same as 'is_eq_num' */
433: }
434: ISDestroy(&is_eq_num);
435: #if defined PETSC_GAMG_USE_LOG
436: PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);
437: PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);
438: #endif
439: /* 'a_Amat_crs' output */
440: {
441: Mat mat;
442: MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);
443: *a_Amat_crs = mat;
445: if (!PETSC_TRUE) { /* debug: check block sizes (disabled) */
446: PetscInt cbs, rbs;
447: MatGetBlockSizes(Cmat, &rbs, &cbs);
448: PetscPrintf(MPI_COMM_SELF,"[%d]%s Old Mat rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);
449: MatGetBlockSizes(mat, &rbs, &cbs);
450: PetscPrintf(MPI_COMM_SELF,"[%d]%s New Mat rbs=%d cbs=%d cr_bs=%d\n",rank,__FUNCT__,rbs,cbs,cr_bs);
451: }
452: }
453: MatDestroy(&Cmat);
455: #if defined PETSC_GAMG_USE_LOG
456: PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);
457: #endif
458: /* prolongator */
459: {
460: IS findices;
461: PetscInt Istart,Iend;
462: Mat Pnew;
463: MatGetOwnershipRange(Pold, &Istart, &Iend);
464: #if defined PETSC_GAMG_USE_LOG
465: PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);
466: #endif
467: ISCreateStride(comm,Iend-Istart,Istart,1,&findices);
468: ISSetBlockSize(findices,f_bs);
469: MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);
470: ISDestroy(&findices);
472: if (!PETSC_TRUE) { /* debug: check block sizes (disabled) */
473: PetscInt cbs, rbs;
474: MatGetBlockSizes(Pold, &rbs, &cbs);
475: PetscPrintf(MPI_COMM_SELF,"[%d]%s Pold rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);
476: MatGetBlockSizes(Pnew, &rbs, &cbs);
477: PetscPrintf(MPI_COMM_SELF,"[%d]%s Pnew rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);
478: }
479: #if defined PETSC_GAMG_USE_LOG
480: PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);
481: #endif
482: MatDestroy(a_P_inout);
484: /* output - repartitioned */
485: *a_P_inout = Pnew;
486: }
487: ISDestroy(&new_eq_indices);
489: *a_nactive_proc = new_size; /* output */
490: }
492: /* output matrix data for debugging (disabled) */
493: if (!PETSC_TRUE) {
494: PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs;
495: if (llev==0) {
496: sprintf(fname,"Cmat_%d.m",llev++);
497: PetscViewerASCIIOpen(comm,fname,&viewer);
498: PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);
499: MatView(Amat_fine, viewer);
500: PetscViewerDestroy(&viewer);
501: }
502: sprintf(fname,"Cmat_%d.m",llev++);
503: PetscViewerASCIIOpen(comm,fname,&viewer);
504: PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);
505: MatView(Cmat, viewer);
506: PetscViewerDestroy(&viewer);
507: }
508: return(0);
509: }
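/*
   The core of createLevel() in isolation -- a sketch with repartitioning and error
   checking stripped out; 'Afine' is the level-k operator, 'P' the new prolongator,
   and 'new_eq_indices'/'findices' renumberings as built above:

     Mat Acoarse, Amoved, Pmoved;
     MatPtAP(Afine, P, MAT_INITIAL_MATRIX, 2.0, &Acoarse);
     MatGetSubMatrix(Acoarse, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &Amoved);
     MatGetSubMatrix(P, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pmoved);
*/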
511: /* -------------------------------------------------------------------------- */
512: /*
513: PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
514: by setting data structures and options.
516: Input Parameter:
517: . pc - the preconditioner context
519: Application Interface Routine: PCSetUp()
521: Notes:
522: The interface routine PCSetUp() is not usually called directly by
523: the user, but instead is called by PCApply() if necessary.
524: */
527: PetscErrorCode PCSetUp_GAMG(PC pc)
528: {
530: PC_MG *mg = (PC_MG*)pc->data;
531: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
532: Mat Pmat = pc->pmat;
533: PetscInt fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS];
534: MPI_Comm comm;
535: PetscMPIInt rank,size,nactivepe;
536: Mat Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS];
537: PetscReal emaxs[GAMG_MAXLEVELS];
538: IS *ASMLocalIDsArr[GAMG_MAXLEVELS];
539: GAMGKKTMat kktMatsArr[GAMG_MAXLEVELS];
540: PetscLogDouble nnz0=0.,nnztot=0.;
541: MatInfo info;
542: PetscBool stokes = PETSC_FALSE, redo_mesh_setup = (PetscBool)(!pc_gamg->reuse_prol);
545: PetscObjectGetComm((PetscObject)pc,&comm);
546: MPI_Comm_rank(comm,&rank);
547: MPI_Comm_size(comm,&size);
549: if (pc_gamg->verbose>2) PetscPrintf(comm,"[%d]%s pc_gamg->setup_count=%d pc->setupcalled=%d\n",rank,__FUNCT__,pc_gamg->setup_count,pc->setupcalled);
551: if (pc_gamg->setup_count++ > 0) {
552: if (redo_mesh_setup) {
553: /* reset everything */
554: PCReset_MG(pc);
555: pc->setupcalled = 0;
556: } else {
557: PC_MG_Levels **mglevels = mg->levels;
558: /* just do Galerkin grids */
559: Mat B,dA,dB;
561: if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
562: if (pc_gamg->Nlevels > 1) {
563: /* currently only handle case where mat and pmat are the same on coarser levels */
564: KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB,NULL);
565: /* (re)set to get dirty flag */
566: KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB,SAME_NONZERO_PATTERN);
568: for (level=pc_gamg->Nlevels-2; level>=0; level--) {
569: /* the first time through, the matrix structure has changed from repartitioning */
570: if (pc_gamg->setup_count==2 && (pc_gamg->repart || level==0)) {
571: MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);
572: MatDestroy(&mglevels[level]->A);
574: mglevels[level]->A = B;
575: } else {
576: KSPGetOperators(mglevels[level]->smoothd,NULL,&B,NULL);
577: MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);
578: }
579: KSPSetOperators(mglevels[level]->smoothd,B,B,SAME_NONZERO_PATTERN);
580: dB = B;
581: }
582: }
584: PCSetUp_MG(pc);
586: /* PCSetUp_MG seems to insist on setting this to GMRES */
587: KSPSetType(mglevels[0]->smoothd, KSPPREONLY);
588: return(0);
589: }
590: }
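  /* Reuse in practice: with -pc_gamg_reuse_interpolation true, later PCSetUp() calls
     take the branch above and only redo the Galerkin products with MatPtAP()
     (MAT_REUSE_MATRIX where the nonzero structure is unchanged) instead of rebuilding
     the whole grid hierarchy. */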
592: PetscOptionsGetBool(((PetscObject)pc)->prefix,"-pc_fieldsplit_detect_saddle_point",&stokes,NULL);
594: GAMGKKTMatCreate(Pmat, stokes, &kktMatsArr[0]);
596: if (!pc_gamg->data) {
597: if (pc_gamg->orig_data) {
598: MatGetBlockSize(Pmat, &bs);
599: MatGetLocalSize(Pmat, &qq, NULL);
601: pc_gamg->data_sz = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
602: pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
603: pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;
605: PetscMalloc(pc_gamg->data_sz*sizeof(PetscReal), &pc_gamg->data);
606: for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
607: } else {
608: if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
609: if (stokes) SETERRQ(comm,PETSC_ERR_PLIB,"Need data (eg, PCSetCoordinates) for Stokes problems");
610: pc_gamg->ops->createdefaultdata(pc, kktMatsArr[0].A11);
611: }
612: }
614: /* cache original data for reuse */
615: if (!pc_gamg->orig_data && redo_mesh_setup) {
616: PetscMalloc(pc_gamg->data_sz*sizeof(PetscReal), &pc_gamg->orig_data);
617: for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
618: pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
619: pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
620: }
622: /* get basic dims */
623: if (stokes) bs = pc_gamg->data_cell_rows; /* this is agg-mg specific */
624: else {
625: MatGetBlockSize(Pmat, &bs);
626: }
628: MatGetSize(Pmat, &M, &qq);
629: if (pc_gamg->verbose) {
630: PetscInt NN = M;
631: if (pc_gamg->verbose==1) {
632: MatGetInfo(Pmat,MAT_LOCAL,&info);
633: MatGetLocalSize(Pmat, &NN, &qq);
634: } else {
635: MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);
636: }
637: nnz0 = info.nz_used;
638: nnztot = info.nz_used;
639: PetscPrintf(comm,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",
640: rank,__FUNCT__,0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,
641: (int)(nnz0/(PetscReal)NN),size);
642: }
644: /* Get A_i and R_i */
645: for (level=0, Aarr[0]=Pmat, nactivepe = size; /* hard wired stopping logic */
646: level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit); /* && (size==1 || nactivepe>1); */
647: level++) {
648: level1 = level + 1;
649: #if defined PETSC_GAMG_USE_LOG
650: PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);
651: #if (defined GAMG_STAGES)
652: PetscLogStagePush(gamg_stages[level]);
653: #endif
654: #endif
655: /* deal with Stokes, get sub matrices */
656: if (level > 0) {
657: GAMGKKTMatCreate(Aarr[level], stokes, &kktMatsArr[level]);
658: }
659: { /* construct prolongator */
660: Mat Gmat;
661: PetscCoarsenData *agg_lists;
662: Mat Prol11,Prol22;
664: pc_gamg->ops->graph(pc,kktMatsArr[level].A11, &Gmat);
665: pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);
666: pc_gamg->ops->prolongator(pc, kktMatsArr[level].A11, Gmat, agg_lists, &Prol11);
668: /* could have failed to create new level */
669: if (Prol11) {
670: /* get new block size of coarse matrices */
671: MatGetBlockSizes(Prol11, NULL, &bs);
673: if (stokes) {
674: if (!pc_gamg->ops->formkktprol) SETERRQ(comm,PETSC_ERR_USER,"Stokes not supported by AMG method.");
675: /* R A12 == (T = A21 P)'; G = T' T; coarsen G; form plain agg with G */
676: pc_gamg->ops->formkktprol(pc, Prol11, kktMatsArr[level].A21, &Prol22);
677: }
679: if (pc_gamg->ops->optprol) {
680: /* smooth */
681: pc_gamg->ops->optprol(pc, kktMatsArr[level].A11, &Prol11);
682: }
684: if (stokes) {
685: IS is_row[2];
686: Mat a[4];
688: is_row[0] = kktMatsArr[level].prim_is; is_row[1] = kktMatsArr[level].constr_is;
689: a[0] = Prol11; a[1] = NULL; a[2] = NULL; a[3] = Prol22;
690: MatCreateNest(comm,2,is_row, 2, is_row, a, &Parr[level1]);
691: } else Parr[level1] = Prol11;
692: } else Parr[level1] = NULL;
694: if (pc_gamg->use_aggs_in_gasm) {
695: PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);
696: }
698: MatDestroy(&Gmat);
699: PetscCDDestroy(agg_lists);
700: } /* construct prolongator scope */
701: #if defined PETSC_GAMG_USE_LOG
702: PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);
703: #endif
704: /* cache eigen estimate */
705: if (pc_gamg->emax_id != -1) {
706: PetscBool flag;
707: PetscObjectComposedDataGetReal((PetscObject)kktMatsArr[level].A11, pc_gamg->emax_id, emaxs[level], flag);
708: if (!flag) emaxs[level] = -1.;
709: } else emaxs[level] = -1.;
710: if (level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */
711: if (!Parr[level1]) {
712: if (pc_gamg->verbose) {
713: PetscPrintf(comm,"\t[%d]%s stop gridding, level %d\n",rank,__FUNCT__,level);
714: }
715: break;
716: }
717: #if defined PETSC_GAMG_USE_LOG
718: PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);
719: #endif
721: createLevel(pc, Aarr[level], bs, (PetscBool)(level==pc_gamg->Nlevels-2),
722: stokes, &Parr[level1], &Aarr[level1], &nactivepe);
724: #if defined PETSC_GAMG_USE_LOG
725: PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);
726: #endif
727: MatGetSize(Aarr[level1], &M, &qq);
729: if (pc_gamg->verbose > 0) {
730: PetscInt NN = M;
731: if (pc_gamg->verbose==1) {
732: MatGetInfo(Aarr[level1],MAT_LOCAL,&info);
733: MatGetLocalSize(Aarr[level1], &NN, &qq);
734: } else {
735: MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);
736: }
738: nnztot += info.nz_used;
739: PetscPrintf(comm,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",
740: rank,__FUNCT__,(int)level1,M,pc_gamg->data_cell_cols,
741: (int)(info.nz_used/(PetscReal)NN), nactivepe);
742: }
744: /* stop if one node -- could pull back for singular problems */
745: if (M/pc_gamg->data_cell_cols < 2) {
746: level++;
747: break;
748: }
749: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
750: PetscLogStagePop();
751: #endif
752: } /* levels */
754: if (pc_gamg->data) {
755: PetscFree(pc_gamg->data);
756: pc_gamg->data = NULL;
757: }
759: if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s %d levels, grid complexity = %g\n",0,__FUNCT__,level+1,nnztot/nnz0);
760: pc_gamg->Nlevels = level + 1;
761: fine_level = level;
762: PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);
764: /* simple setup (disabled code path) */
765: if (!PETSC_TRUE) {
766: PC_MG_Levels **mglevels = mg->levels;
767: for (lidx=0,level=pc_gamg->Nlevels-1;
768: lidx<fine_level;
769: lidx++, level--) {
770: PCMGSetInterpolation(pc, lidx+1, Parr[level]);
771: KSPSetOperators(mglevels[lidx]->smoothd, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN);
772: MatDestroy(&Parr[level]);
773: MatDestroy(&Aarr[level]);
774: }
775: KSPSetOperators(mglevels[fine_level]->smoothd, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN);
777: PCSetUp_MG(pc);
778: } else if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
779: /* set default smoothers & set operators */
780: for (lidx = 1, level = pc_gamg->Nlevels-2;
781: lidx <= fine_level;
782: lidx++, level--) {
783: KSP smoother;
784: PC subpc;
786: PCMGGetSmoother(pc, lidx, &smoother);
787: KSPGetPC(smoother, &subpc);
789: KSPSetNormType(smoother, KSP_NORM_NONE);
790: /* set ops */
791: KSPSetOperators(smoother, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN);
792: PCMGSetInterpolation(pc, lidx, Parr[level+1]);
794: /* create field split PC, get subsmoother */
795: if (stokes) {
796: KSP *ksps;
797: PetscInt nn;
798: PCFieldSplitSetIS(subpc,"0",kktMatsArr[level].prim_is);
799: PCFieldSplitSetIS(subpc,"1",kktMatsArr[level].constr_is);
800: PCFieldSplitGetSubKSP(subpc,&nn,&ksps);
801: smoother = ksps[0];
802: KSPGetPC(smoother, &subpc);
803: PetscFree(ksps);
804: }
805: GAMGKKTMatDestroy(&kktMatsArr[level]);
807: /* set defaults */
808: KSPSetType(smoother, KSPCHEBYSHEV);
810: /* override defaults and command line args (!) */
811: if (pc_gamg->use_aggs_in_gasm) {
812: PetscInt sz;
813: IS *is;
815: sz = nASMBlocksArr[level];
816: is = ASMLocalIDsArr[level];
817: PCSetType(subpc, PCGASM);
818: if (sz==0) {
819: IS is;
820: PetscInt my0,kk;
821: MatGetOwnershipRange(Aarr[level], &my0, &kk);
822: ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is);
823: PCGASMSetSubdomains(subpc, 1, &is, NULL);
824: ISDestroy(&is);
825: } else {
826: PetscInt kk;
827: PCGASMSetSubdomains(subpc, sz, is, NULL);
828: for (kk=0; kk<sz; kk++) {
829: ISDestroy(&is[kk]);
830: }
831: PetscFree(is);
832: }
833: PCGASMSetOverlap(subpc, 0);
835: ASMLocalIDsArr[level] = NULL;
836: nASMBlocksArr[level] = 0;
837: PCGASMSetType(subpc, PC_GASM_BASIC);
838: } else {
839: PCSetType(subpc, PCJACOBI);
840: }
841: }
842: {
843: /* coarse grid */
844: KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
845: Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
846: PCMGGetSmoother(pc, lidx, &smoother);
847: KSPSetOperators(smoother, Lmat, Lmat, SAME_NONZERO_PATTERN);
848: KSPSetNormType(smoother, KSP_NORM_NONE);
849: KSPGetPC(smoother, &subpc);
850: PCSetType(subpc, PCBJACOBI);
851: PCSetUp(subpc);
852: PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);
853: if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
854: KSPGetPC(k2[0],&pc2);
855: PCSetType(pc2, PCLU);
856: PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);
857: KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);
858: }
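  /* The coarse solve configured above (block Jacobi with a shifted LU on the single
     local block) can be overridden at run time through the usual PCMG prefixes,
     e.g. -mg_coarse_ksp_type preonly -mg_coarse_pc_type lu, and the level smoothers
     through -mg_levels_ksp_type / -mg_levels_pc_type. */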
860: /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
861: PetscObjectOptionsBegin((PetscObject)pc);
862: PCSetFromOptions_MG(pc);
863: PetscOptionsEnd();
864: if (mg->galerkin != 2) SETERRQ(comm,PETSC_ERR_USER,"GAMG does Galerkin manually so the -pc_mg_galerkin option must not be used.");
866: /* create cheby smoothers */
867: for (lidx = 1, level = pc_gamg->Nlevels-2;
868: lidx <= fine_level;
869: lidx++, level--) {
870: KSP smoother;
871: PetscBool flag;
872: PC subpc;
874: PCMGGetSmoother(pc, lidx, &smoother);
875: KSPGetPC(smoother, &subpc);
877: /* create field split PC, get subsmoother */
878: if (stokes) {
879: KSP *ksps;
880: PetscInt nn;
881: PCFieldSplitGetSubKSP(subpc,&nn,&ksps);
882: smoother = ksps[0];
883: KSPGetPC(smoother, &subpc);
884: PetscFree(ksps);
885: }
887: /* do my own cheby */
888: PetscObjectTypeCompare((PetscObject)smoother, KSPCHEBYSHEV, &flag);
889: if (flag) {
890: PetscReal emax, emin;
891: PetscObjectTypeCompare((PetscObject)subpc, PCJACOBI, &flag);
892: if (flag && emaxs[level] > 0.0) emax=emaxs[level]; /* eigen estimate only for diagonal PC */
893: else { /* eigen estimate 'emax' */
894: KSP eksp;
895: Mat Lmat = Aarr[level];
896: Vec bb, xx;
898: MatGetVecs(Lmat, &bb, 0);
899: MatGetVecs(Lmat, &xx, 0);
900: {
901: PetscRandom rctx;
902: PetscRandomCreate(comm,&rctx);
903: PetscRandomSetFromOptions(rctx);
904: VecSetRandom(bb,rctx);
905: PetscRandomDestroy(&rctx);
906: }
908: /* zeroing out BC rows -- needed for crazy matrices */
909: {
910: PetscInt Istart,Iend,ncols,jj,Ii;
911: PetscScalar zero = 0.0;
912: MatGetOwnershipRange(Lmat, &Istart, &Iend);
913: for (Ii = Istart, jj = 0; Ii < Iend; Ii++, jj++) {
914: MatGetRow(Lmat,Ii,&ncols,0,0);
915: if (ncols <= 1) {
916: VecSetValues(bb, 1, &Ii, &zero, INSERT_VALUES);
917: }
918: MatRestoreRow(Lmat,Ii,&ncols,0,0);
919: }
920: VecAssemblyBegin(bb);
921: VecAssemblyEnd(bb);
922: }
924: KSPCreate(comm, &eksp);
925: KSPSetTolerances(eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10);
926: KSPSetNormType(eksp, KSP_NORM_NONE);
927: KSPSetOptionsPrefix(eksp,((PetscObject)pc)->prefix);
928: KSPAppendOptionsPrefix(eksp, "gamg_est_");
929: KSPSetFromOptions(eksp);
931: KSPSetInitialGuessNonzero(eksp, PETSC_FALSE);
932: KSPSetOperators(eksp, Lmat, Lmat, SAME_NONZERO_PATTERN);
933: KSPSetComputeSingularValues(eksp,PETSC_TRUE);
935: /* set PC type to be same as smoother */
936: KSPSetPC(eksp, subpc);
938: /* solve - keep stuff out of logging */
939: PetscLogEventDeactivate(KSP_Solve);
940: PetscLogEventDeactivate(PC_Apply);
941: KSPSolve(eksp, bb, xx);
942: PetscLogEventActivate(KSP_Solve);
943: PetscLogEventActivate(PC_Apply);
945: KSPComputeExtremeSingularValues(eksp, &emax, &emin);
947: VecDestroy(&xx);
948: VecDestroy(&bb);
949: KSPDestroy(&eksp);
951: if (pc_gamg->verbose > 0) {
952: PetscInt N1, tt;
953: MatGetSize(Aarr[level], &N1, &tt);
954: PetscPrintf(comm,"\t\t\t%s PC setup max eigen=%e min=%e on level %d (N=%d)\n",__FUNCT__,emax,emin,lidx,N1);
955: }
956: }
957: {
958: PetscInt N1, N0;
959: MatGetSize(Aarr[level], &N1, NULL);
960: MatGetSize(Aarr[level+1], &N0, NULL);
961: /* heuristic - is this crap? */
962: /* emin = 1.*emax/((PetscReal)N1/(PetscReal)N0); */
963: emin = emax * pc_gamg->eigtarget[0];
964: emax *= pc_gamg->eigtarget[1];
965: }
966: KSPChebyshevSetEigenvalues(smoother, emax, emin);
967: } /* setup cheby flag */
968: } /* non-coarse levels */
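  /* The eigen estimate above, reduced to its essentials -- a sketch for any operator
     'L' (the real code also attaches the smoother's PC via KSPSetPC() and zeroes out
     boundary rows of the random right-hand side):

       KSP eksp; Vec b, x; PetscReal emax, emin;
       MatGetVecs(L, &b, NULL);
       MatGetVecs(L, &x, NULL);
       VecSetRandom(b, NULL);
       KSPCreate(comm, &eksp);
       KSPSetOperators(eksp, L, L, SAME_NONZERO_PATTERN);
       KSPSetTolerances(eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10);
       KSPSetComputeSingularValues(eksp, PETSC_TRUE);
       KSPSolve(eksp, b, x);
       KSPComputeExtremeSingularValues(eksp, &emax, &emin);
  */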
970: /* clean up */
971: for (level=1; level<pc_gamg->Nlevels; level++) {
972: MatDestroy(&Parr[level]);
973: MatDestroy(&Aarr[level]);
974: }
976: PCSetUp_MG(pc);
978: if (PETSC_TRUE) {
979: KSP smoother; /* PCSetUp_MG seems to insist on setting this to GMRES on coarse grid */
980: PCMGGetSmoother(pc, 0, &smoother);
981: KSPSetType(smoother, KSPPREONLY);
982: }
983: } else {
984: KSP smoother;
985: if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s one level solver used (system is seen as DD). Using default solver.\n",rank,__FUNCT__);
986: PCMGGetSmoother(pc, 0, &smoother);
987: KSPSetOperators(smoother, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN);
988: KSPSetType(smoother, KSPPREONLY);
989: PCSetUp_MG(pc);
990: }
991: return(0);
992: }
994: /* ------------------------------------------------------------------------- */
995: /*
996: PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
997: that was created with PCCreate_GAMG().
999: Input Parameter:
1000: . pc - the preconditioner context
1002: Application Interface Routine: PCDestroy()
1003: */
1006: PetscErrorCode PCDestroy_GAMG(PC pc)
1007: {
1009: PC_MG *mg = (PC_MG*)pc->data;
1010: PC_GAMG *pc_gamg= (PC_GAMG*)mg->innerctx;
1013: PCReset_GAMG(pc);
1014: if (pc_gamg->ops->destroy) {
1015: (*pc_gamg->ops->destroy)(pc);
1016: }
1017: PetscFree(pc_gamg->ops);
1018: PetscFree(pc_gamg->gamg_type_name);
1019: PetscFree(pc_gamg);
1020: PCDestroy_MG(pc);
1021: return(0);
1022: }
1027: /*@
1028: PCGAMGSetProcEqLim - Set the number of equations to aim for per process on
1029: coarse grids, reached via processor reduction.
1031: Not Collective on PC
1033: Input Parameters:
1034: + pc - the preconditioner context
1035: - n - the number of equations to aim for per process
1037: Options Database Key:
1038: . -pc_gamg_process_eq_limit
1040: Level: intermediate
1042: Concepts: Unstructured multigrid preconditioner
1044: .seealso: ()
1045: @*/
1046: PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n)
1047: {
1052: PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));
1053: return(0);
1054: }
1058: static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
1059: {
1060: PC_MG *mg = (PC_MG*)pc->data;
1061: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1064: if (n>0) pc_gamg->min_eq_proc = n;
1065: return(0);
1066: }
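/*
   Typical programmatic use of the setters in this section (each also has an
   options-database equivalent, see PCSetFromOptions_GAMG() below):

     PCSetType(pc, PCGAMG);
     PCGAMGSetType(pc, PCGAMGAGG);
     PCGAMGSetProcEqLim(pc, 100);     aim for at least ~100 equations per process
     PCGAMGSetCoarseEqLim(pc, 1000);  stop coarsening near 1000 equations
     PCGAMGSetThreshold(pc, 0.05);    drop weak edges from the aggregation graph
*/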
1070: /*@
1071: PCGAMGSetCoarseEqLim - Set the maximum number of equations on the coarsest grid.
1073: Collective on PC
1075: Input Parameters:
1076: + pc - the preconditioner context
1077: - n - the maximum number of equations on the coarsest grid
1079: Options Database Key:
1080: . -pc_gamg_coarse_eq_limit
1082: Level: intermediate
1084: Concepts: Unstructured multigrid preconditioner
1086: .seealso: ()
1087: @*/
1088: PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
1089: {
1094: PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));
1095: return(0);
1096: }
1100: static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
1101: {
1102: PC_MG *mg = (PC_MG*)pc->data;
1103: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1106: if (n>0) pc_gamg->coarse_eq_limit = n;
1107: return(0);
1108: }
1112: /*@
1113: PCGAMGSetRepartitioning - Repartition the coarse grids
1115: Collective on PC
1117: Input Parameters:
1118: + pc - the preconditioner context
1119: - n - PETSC_TRUE to repartition the coarse grids
1121: Options Database Key:
1122: . -pc_gamg_repartition
1124: Level: intermediate
1126: Concepts: Unstructured multigrid preconditioner
1128: .seealso: ()
1129: @*/
1130: PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
1131: {
1136: PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));
1137: return(0);
1138: }
1142: static PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
1143: {
1144: PC_MG *mg = (PC_MG*)pc->data;
1145: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1148: pc_gamg->repart = n;
1149: return(0);
1150: }
1154: /*@
1155: PCGAMGSetReuseProl - Reuse the prolongation operator across PCSetUp() calls
1157: Collective on PC
1159: Input Parameters:
1160: + pc - the preconditioner context
1161: - n - PETSC_TRUE to reuse the prolongation
1163: Options Database Key:
1164: . -pc_gamg_reuse_interpolation
1166: Level: intermediate
1168: Concepts: Unstructured multigrid preconditioner
1170: .seealso: ()
1171: @*/
1172: PetscErrorCode PCGAMGSetReuseProl(PC pc, PetscBool n)
1173: {
1178: PetscTryMethod(pc,"PCGAMGSetReuseProl_C",(PC,PetscBool),(pc,n));
1179: return(0);
1180: }
1184: static PetscErrorCode PCGAMGSetReuseProl_GAMG(PC pc, PetscBool n)
1185: {
1186: PC_MG *mg = (PC_MG*)pc->data;
1187: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1190: pc_gamg->reuse_prol = n;
1191: return(0);
1192: }
1196: /*@
1197: PCGAMGSetUseASMAggs - Use the aggregation aggregates to define the subdomains of the GASM smoother
1199: Collective on PC
1201: Input Parameters:
1202: + pc - the preconditioner context
1203: - n - PETSC_TRUE to use the aggregates as GASM subdomains
1205: Options Database Key:
1206: . -pc_gamg_use_agg_gasm
1208: Level: intermediate
1210: Concepts: Unstructured multigrid preconditioner
1212: .seealso: ()
1213: @*/
1214: PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n)
1215: {
1220: PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));
1221: return(0);
1222: }
1226: static PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n)
1227: {
1228: PC_MG *mg = (PC_MG*)pc->data;
1229: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1232: pc_gamg->use_aggs_in_gasm = n;
1233: return(0);
1234: }
1238: /*@
1239: PCGAMGSetNlevels - Set the maximum number of multigrid levels
1241: Not collective on PC
1243: Input Parameters:
1244: + pc - the preconditioner context
1245: - n - the maximum number of levels
1247: Options Database Key:
1248: . -pc_mg_levels
1250: Level: intermediate
1252: Concepts: Unstructured multigrid preconditioner
1254: .seealso: ()
1255: @*/
1256: PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
1257: {
1262: PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));
1263: return(0);
1264: }
1268: static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
1269: {
1270: PC_MG *mg = (PC_MG*)pc->data;
1271: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1274: pc_gamg->Nlevels = n;
1275: return(0);
1276: }
1280: /*@
1281: PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph
1283: Not collective on PC
1285: Input Parameters:
1286: + pc - the preconditioner context
1287: - n - the relative drop tolerance (0.0 keeps all nonzero entries)
1289: Options Database Key:
1290: . -pc_gamg_threshold
1292: Level: intermediate
1294: Concepts: Unstructured multigrid preconditioner
1296: .seealso: ()
1297: @*/
1298: PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
1299: {
1304: PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));
1305: return(0);
1306: }
1310: static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
1311: {
1312: PC_MG *mg = (PC_MG*)pc->data;
1313: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1316: pc_gamg->threshold = n;
1317: return(0);
1318: }
1322: /*@
1323: PCGAMGSetType - Set the type of coarsening algorithm (calls the subtype's create method)
1325: Collective on PC
1327: Input Parameters:
1328: + pc - the preconditioner context
1329: - type - PCGAMGAGG or PCGAMGGEO
1331: Options Database Key:
1332: . -pc_gamg_type
1334: Level: intermediate
1336: Concepts: Unstructured multigrid preconditioner
1338: .seealso: ()
1339: @*/
1340: PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
1341: {
1346: PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));
1347: return(0);
1348: }
1352: static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
1353: {
1354: PetscErrorCode ierr,(*r)(PC);
1355: PC_MG *mg = (PC_MG*)pc->data;
1356: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1359: PetscFunctionListFind(GAMGList,type,&r);
1360: if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
1361: if (pc_gamg->ops->destroy) {
1362: (*pc_gamg->ops->destroy)(pc);
1363: PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));
1364: }
1365: PetscFree(pc_gamg->gamg_type_name);
1366: PetscStrallocpy(type,&pc_gamg->gamg_type_name);
1367: (*r)(pc);
1368: return(0);
1369: }
1373: PetscErrorCode PCSetFromOptions_GAMG(PC pc)
1374: {
1376: PC_MG *mg = (PC_MG*)pc->data;
1377: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1378: PetscBool flag;
1379: PetscInt two = 2;
1380: MPI_Comm comm;
1383: PetscObjectGetComm((PetscObject)pc,&comm);
1384: PetscOptionsHead("GAMG options");
1385: {
1386: /* -pc_gamg_type */
1387: {
1388: char tname[256] = PCGAMGAGG;
1389: const char *deftype = pc_gamg->gamg_type_name ? pc_gamg->gamg_type_name : tname;
1390: PetscOptionsList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, tname, tname, sizeof(tname), &flag);
1391: /* call PCCreateGAMG_XYZ */
1392: if (flag || !pc_gamg->gamg_type_name) {
1393: PCGAMGSetType(pc, flag ? tname : deftype);
1394: }
1395: }
1396: /* -pc_gamg_verbose */
1397: PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG",
1398: "none", pc_gamg->verbose,
1399: &pc_gamg->verbose, NULL);
1400: /* -pc_gamg_repartition */
1401: PetscOptionsBool("-pc_gamg_repartition",
1402: "Repartion coarse grids (false)",
1403: "PCGAMGRepartitioning",
1404: pc_gamg->repart,
1405: &pc_gamg->repart,
1406: &flag);
1407: /* -pc_gamg_reuse_interpolation */
1408: PetscOptionsBool("-pc_gamg_reuse_interpolation",
1409: "Reuse prolongation operator (true)",
1410: "PCGAMGReuseProl",
1411: pc_gamg->reuse_prol,
1412: &pc_gamg->reuse_prol,
1413: &flag);
1414: /* -pc_gamg_use_agg_gasm */
1415: PetscOptionsBool("-pc_gamg_use_agg_gasm",
1416: "Use aggregation agragates for GASM smoother (false)",
1417: "PCGAMGUseASMAggs",
1418: pc_gamg->use_aggs_in_gasm,
1419: &pc_gamg->use_aggs_in_gasm,
1420: &flag);
1421: /* -pc_gamg_process_eq_limit */
1422: PetscOptionsInt("-pc_gamg_process_eq_limit",
1423: "Limit (goal) on number of equations per process on coarse grids",
1424: "PCGAMGSetProcEqLim",
1425: pc_gamg->min_eq_proc,
1426: &pc_gamg->min_eq_proc,
1427: &flag);
1428: /* -pc_gamg_coarse_eq_limit */
1429: PetscOptionsInt("-pc_gamg_coarse_eq_limit",
1430: "Limit on number of equations for the coarse grid",
1431: "PCGAMGSetCoarseEqLim",
1432: pc_gamg->coarse_eq_limit,
1433: &pc_gamg->coarse_eq_limit,
1434: &flag);
1435: /* -pc_gamg_threshold */
1436: PetscOptionsReal("-pc_gamg_threshold",
1437: "Relative threshold to use for dropping edges in aggregation graph",
1438: "PCGAMGSetThreshold",
1439: pc_gamg->threshold,
1440: &pc_gamg->threshold,
1441: &flag);
1442: if (flag && pc_gamg->verbose) {
1443: PetscPrintf(comm,"\t[%d]%s threshold set %e\n",0,__FUNCT__,pc_gamg->threshold);
1444: }
1445: /* -pc_gamg_eigtarget */
1446: PetscOptionsRealArray("-pc_gamg_eigtarget","Target eigenvalue range as fraction of estimated maximum eigenvalue","PCGAMGSetEigTarget",pc_gamg->eigtarget,&two,NULL);
1447: PetscOptionsInt("-pc_mg_levels",
1448: "Set number of MG levels",
1449: "PCGAMGSetNlevels",
1450: pc_gamg->Nlevels,
1451: &pc_gamg->Nlevels,
1452: &flag);
1454: /* set options for subtype */
1455: if (pc_gamg->ops->setfromoptions) {(*pc_gamg->ops->setfromoptions)(pc);}
1456: }
1457: PetscOptionsTail();
1458: return(0);
1459: }
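/*
   Example command line exercising the options handled above:

     -pc_type gamg -pc_gamg_type agg -pc_gamg_verbose 1
     -pc_gamg_process_eq_limit 200 -pc_gamg_coarse_eq_limit 1000
     -pc_gamg_threshold 0.05 -pc_gamg_repartition true
*/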
1461: /* -------------------------------------------------------------------------- */
1462: /*MC
1463: PCGAMG - Geometric algebraic multigrid (AMG) preconditioning framework.
1464: - This is the entry point to GAMG, registered in pcregis.c
1466: Options Database Keys:
1467: Multigrid options(inherited)
1468: + -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (PCMGSetCycleType)
1469: . -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
1470: . -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
1471: - -pc_mg_type <multiplicative>: (one of) additive multiplicative full kaskade
1473: Level: intermediate
1475: Concepts: multigrid
1477: .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
1478: PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycleType(), PCMGSetNumberSmoothDown(),
1479: PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
1480: PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
1481: PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
1482: M*/
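/*
   A minimal usage sketch (assumes an assembled Mat A and Vecs b, x; coordinates
   are optional but help the aggregation method build near-null-space data):

     KSP ksp;
     PC  pc;
     KSPCreate(PETSC_COMM_WORLD, &ksp);
     KSPSetOperators(ksp, A, A, SAME_NONZERO_PATTERN);
     KSPGetPC(ksp, &pc);
     PCSetType(pc, PCGAMG);
     PCSetCoordinates(pc, 3, nloc, coords);
     KSPSetFromOptions(ksp);
     KSPSolve(ksp, b, x);
*/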
1486: PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
1487: {
1489: PC_GAMG *pc_gamg;
1490: PC_MG *mg;
1491: #if defined PETSC_GAMG_USE_LOG
1492: static long count = 0;
1493: #endif
1496: /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
1497: PCSetType(pc, PCMG); /* calls PCCreate_MG() and MGCreate_Private() */
1498: PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);
1500: /* create a supporting struct and attach it to pc */
1501: PetscNewLog(pc, PC_GAMG, &pc_gamg);
1502: mg = (PC_MG*)pc->data;
1503: mg->galerkin = 2; /* Use Galerkin, but it is computed externally */
1504: mg->innerctx = pc_gamg;
1506: PetscNewLog(pc,struct _PCGAMGOps,&pc_gamg->ops);
1508: pc_gamg->setup_count = 0;
1509: /* these should be in subctx but repartitioning needs simple arrays */
1510: pc_gamg->data_sz = 0;
1511: pc_gamg->data = 0;
1513: /* register AMG type */
1514: #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
1515: PCGAMGInitializePackage();
1516: #endif
1518: /* overwrite the pointers of PCMG with the functions of the derived class PCGAMG */
1519: pc->ops->setfromoptions = PCSetFromOptions_GAMG;
1520: pc->ops->setup = PCSetUp_GAMG;
1521: pc->ops->reset = PCReset_GAMG;
1522: pc->ops->destroy = PCDestroy_GAMG;
1524: PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);
1525: PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);
1526: PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartitioning_C",PCGAMGSetRepartitioning_GAMG);
1527: PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseProl_C",PCGAMGSetReuseProl_GAMG);
1528: PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseASMAggs_C",PCGAMGSetUseASMAggs_GAMG);
1529: PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);
1530: PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);
1531: PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);
1532: pc_gamg->repart = PETSC_FALSE;
1533: pc_gamg->reuse_prol = PETSC_FALSE;
1534: pc_gamg->use_aggs_in_gasm = PETSC_FALSE;
1535: pc_gamg->min_eq_proc = 50;
1536: pc_gamg->coarse_eq_limit = 800;
1537: pc_gamg->threshold = 0.;
1538: pc_gamg->Nlevels = GAMG_MAXLEVELS;
1539: pc_gamg->verbose = 0;
1540: pc_gamg->emax_id = -1;
1541: pc_gamg->eigtarget[0] = 0.05;
1542: pc_gamg->eigtarget[1] = 1.05;
1544: /* private events */
1545: #if defined PETSC_GAMG_USE_LOG
1546: if (count++ == 0) {
1547: PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);
1548: PetscLogEventRegister(" Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);
1549: /* PetscLogEventRegister(" G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
1550: /* PetscLogEventRegister(" G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
1551: /* PetscLogEventRegister(" G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
1552: PetscLogEventRegister(" MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);
1553: PetscLogEventRegister(" geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);
1554: PetscLogEventRegister(" geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);
1555: PetscLogEventRegister(" search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);
1556: PetscLogEventRegister(" SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);
1557: PetscLogEventRegister(" SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);
1558: PetscLogEventRegister(" SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);
1559: PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);
1560: PetscLogEventRegister(" repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);
1561: PetscLogEventRegister(" Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);
1562: PetscLogEventRegister(" Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);
1563: PetscLogEventRegister(" Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);
1565: /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
1566: /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
1567: /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
1568: /* create timer stages */
1569: #if defined GAMG_STAGES
1570: {
1571: char str[32];
1572: PetscInt lidx;
1573: sprintf(str,"MG Level %d (finest)",0);
1574: PetscLogStageRegister(str, &gamg_stages[0]);
1575: for (lidx=1; lidx<9; lidx++) {
1576: sprintf(str,"MG Level %d",lidx);
1577: PetscLogStageRegister(str, &gamg_stages[lidx]);
1578: }
1579: }
1580: #endif
1581: }
1582: #endif
1583: /* general events */
1584: #if defined PETSC_USE_LOG
1585: PetscLogEventRegister("PCGAMGgraph_AGG", 0, &PC_GAMGGgraph_AGG);
1586: PetscLogEventRegister("PCGAMGgraph_GEO", PC_CLASSID, &PC_GAMGGgraph_GEO);
1587: PetscLogEventRegister("PCGAMGcoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);
1588: PetscLogEventRegister("PCGAMGcoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);
1589: PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);
1590: PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);
1591: PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptprol_AGG);
1592: PetscLogEventRegister("GAMGKKTProl_AGG", PC_CLASSID, &PC_GAMGKKTProl_AGG);
1593: #endif
1595: return(0);
1596: }
1600: /*@C
1601: PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
1602: from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG()
1603: when using static libraries.
1605: Level: developer
1607: .keywords: PC, PCGAMG, initialize, package
1608: .seealso: PetscInitialize()
1609: @*/
1610: PetscErrorCode PCGAMGInitializePackage(void)
1611: {
1615: if (PCGAMGPackageInitialized) return(0);
1616: PCGAMGPackageInitialized = PETSC_TRUE;
1617: PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);
1618: PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);
1619: PetscRegisterFinalize(PCGAMGFinalizePackage);
1620: return(0);
1621: }
1625: /*@C
1626: PCGAMGFinalizePackage - This function destroys everything in the PCGAMG package. It is
1627: called from PetscFinalize().
1629: Level: developer
1631: .keywords: Petsc, destroy, package
1632: .seealso: PetscFinalize()
1633: @*/
1634: PetscErrorCode PCGAMGFinalizePackage(void)
1635: {
1639: PCGAMGPackageInitialized = PETSC_FALSE;
1640: PetscFunctionListDestroy(&GAMGList);
1641: return(0);
1642: }