Actual source code: superlu_dist.c
/*
  Provides an interface to the SuperLU_DIST sparse solver
*/

#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <petscpkg_version.h>

EXTERN_C_BEGIN
#if defined(PETSC_USE_COMPLEX)
#include <superlu_zdefs.h>
#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(6,3,0)
#define LUstructInit zLUstructInit
#define ScalePermstructInit zScalePermstructInit
#define ScalePermstructFree zScalePermstructFree
#define LUstructFree zLUstructFree
#define Destroy_LU zDestroy_LU
#define ScalePermstruct_t zScalePermstruct_t
#define LUstruct_t zLUstruct_t
#define SOLVEstruct_t zSOLVEstruct_t
#endif
#else
#include <superlu_ddefs.h>
#if PETSC_PKG_SUPERLU_DIST_VERSION_GE(6,3,0)
#define LUstructInit dLUstructInit
#define ScalePermstructInit dScalePermstructInit
#define ScalePermstructFree dScalePermstructFree
#define LUstructFree dLUstructFree
#define Destroy_LU dDestroy_LU
#define ScalePermstruct_t dScalePermstruct_t
#define LUstruct_t dLUstruct_t
#define SOLVEstruct_t dSOLVEstruct_t
#endif
#endif
EXTERN_C_END

typedef struct {
  int_t                  nprow,npcol,*row,*col; /* process grid dimensions and local CSR index arrays */
  gridinfo_t             grid;
  superlu_dist_options_t options;
  SuperMatrix            A_sup;
  ScalePermstruct_t      ScalePermstruct;
  LUstruct_t             LUstruct;
  int                    StatPrint;
  SOLVEstruct_t          SOLVEstruct;
  fact_t                 FactPattern;
  MPI_Comm               comm_superlu;          /* private duplicate communicator, used only when the grid cached in the communicator attribute is busy */
#if defined(PETSC_USE_COMPLEX)
  doublecomplex          *val;
#else
  double                 *val;
#endif
  PetscBool              matsolve_iscalled,matmatsolve_iscalled;
  PetscBool              CleanUpSuperLU_Dist;   /* Flag to clean up (non-global) SuperLU objects during Destroy */
} Mat_SuperLU_DIST;

PetscErrorCode MatSuperluDistGetDiagU_SuperLU_DIST(Mat F,PetscScalar *diagU)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;

  PetscFunctionBegin;
#if defined(PETSC_USE_COMPLEX)
  PetscStackCall("SuperLU_DIST:pzGetDiagU",pzGetDiagU(F->rmap->N,&lu->LUstruct,&lu->grid,(doublecomplex*)diagU));
#else
  PetscStackCall("SuperLU_DIST:pdGetDiagU",pdGetDiagU(F->rmap->N,&lu->LUstruct,&lu->grid,diagU));
#endif
  PetscFunctionReturn(0);
}

PetscErrorCode MatSuperluDistGetDiagU(Mat F,PetscScalar *diagU)
{
  PetscFunctionBegin;
  PetscTryMethod(F,"MatSuperluDistGetDiagU_C",(Mat,PetscScalar*),(F,diagU));
  PetscFunctionReturn(0);
}

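/*
   Usage sketch (not part of this file): after the numeric factorization one can read out
   the diagonal of the factor U. Here F is a factored matrix obtained from MatGetFactor()
   or PCFactorGetMatrix(); the array name "diag" is illustrative only.

     PetscScalar *diag;
     PetscInt    N;

     MatGetSize(F,&N,NULL);
     PetscMalloc1(N,&diag);
     MatSuperluDistGetDiagU(F,diag);  // call after MatLUFactorNumeric()
     ...
     PetscFree(diag);
*/
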
/* This allows reusing the SuperLU_DIST communicator and grid when only a single SuperLU_DIST matrix is used at a time */
typedef struct {
  MPI_Comm   comm;
  PetscBool  busy;
  gridinfo_t grid;
} PetscSuperLU_DIST;

static PetscMPIInt Petsc_Superlu_dist_keyval = MPI_KEYVAL_INVALID;

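/*
   A minimal sketch of the MPI attribute-caching pattern implemented in
   MatGetFactor_aij_superlu_dist() below (variable names here are illustrative only):
   the first factor created on a communicator caches a PetscSuperLU_DIST context and
   its grid on that communicator; later factors reuse it whenever it is not busy.

     PetscSuperLU_DIST *ctx;
     PetscMPIInt       found;

     MPI_Comm_get_attr(comm,Petsc_Superlu_dist_keyval,&ctx,&found);
     if (!found) {               // first factor on this communicator: create and cache a context
       PetscNew(&ctx);
       MPI_Comm_dup(comm,&ctx->comm);
       MPI_Comm_set_attr(comm,Petsc_Superlu_dist_keyval,ctx);
     }
*/
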
PETSC_EXTERN PetscMPIInt MPIAPI Petsc_Superlu_dist_keyval_Delete_Fn(MPI_Comm comm,PetscMPIInt keyval,void *attr_val,void *extra_state)
{
  PetscSuperLU_DIST *context = (PetscSuperLU_DIST *) attr_val;

  PetscFunctionBegin;
  if (keyval != Petsc_Superlu_dist_keyval) SETERRMPI(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Unexpected keyval");
  PetscInfo(NULL,"Removing Petsc_Superlu_dist_keyval attribute from communicator that is being freed\n");
  PetscStackCall("SuperLU_DIST:superlu_gridexit",superlu_gridexit(&context->grid));
  MPI_Comm_free(&context->comm);
  PetscFree(context);
  PetscFunctionReturn(MPI_SUCCESS);
}

/*
   Performs MPI_Comm_free_keyval() on Petsc_Superlu_dist_keyval but keeps the global variable for
   users who do not destroy all PETSc objects before PetscFinalize().

   The value of Petsc_Superlu_dist_keyval is retained so that Petsc_Superlu_dist_keyval_Delete_Fn()
   can still check that the keyval associated with the MPI communicator is correct when the MPI
   communicator is destroyed.

   This is called in PetscFinalize().
*/
static PetscErrorCode Petsc_Superlu_dist_keyval_free(void)
{
  PetscMPIInt Petsc_Superlu_dist_keyval_temp = Petsc_Superlu_dist_keyval;

  PetscFunctionBegin;
  PetscInfo(NULL,"Freeing Petsc_Superlu_dist_keyval\n");
  MPI_Comm_free_keyval(&Petsc_Superlu_dist_keyval_temp);
  PetscFunctionReturn(0);
}

static PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;

  PetscFunctionBegin;
  if (lu->CleanUpSuperLU_Dist) {
    /* Deallocate SuperLU_DIST storage */
    PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
    if (lu->options.SolveInitialized) {
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
      PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
    }
    PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct));
    PetscStackCall("SuperLU_DIST:ScalePermstructFree",ScalePermstructFree(&lu->ScalePermstruct));
    PetscStackCall("SuperLU_DIST:LUstructFree",LUstructFree(&lu->LUstruct));

    /* Release the SuperLU_DIST process grid, but only if the matrix has its own copy, that is, the grid is not stored in the communicator attribute */
    if (lu->comm_superlu) {
      PetscStackCall("SuperLU_DIST:superlu_gridexit",superlu_gridexit(&lu->grid));
    }
  }
  /*
   * We always need to release the communicator that was created in MatGetFactor_aij_superlu_dist.
   * lu->CleanUpSuperLU_Dist is turned on in MatLUFactorSymbolic_SuperLU_DIST. There are some use
   * cases where we only create a matrix but never solve with it. In those cases lu->CleanUpSuperLU_Dist
   * is off, and in the old code the communicator was neither released nor marked as "not busy".
   * Here we try to release the communicator regardless.
   */
  if (lu->comm_superlu) {
    MPI_Comm_free(&(lu->comm_superlu));
  } else {
    PetscSuperLU_DIST *context;
    MPI_Comm          comm;
    PetscMPIInt       flg;

    PetscObjectGetComm((PetscObject)A,&comm);
    MPI_Comm_get_attr(comm,Petsc_Superlu_dist_keyval,&context,&flg);
    if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Communicator does not have expected Petsc_Superlu_dist_keyval attribute");
    context->busy = PETSC_FALSE;
  }

  PetscFree(A->data);
  /* clear composed functions */
  PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatSuperluDistGetDiagU_C",NULL);
  PetscFunctionReturn(0);
}

static PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;
  PetscInt         m=A->rmap->n;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr=NULL;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  static PetscBool cite = PETSC_FALSE;

  PetscFunctionBegin;
  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  PetscCitationsRegister("@article{lidemmel03,\n author = {Xiaoye S. Li and James W. Demmel},\n title = {{SuperLU_DIST}: A Scalable Distributed-Memory Sparse Direct\n Solver for Unsymmetric Linear Systems},\n journal = {ACM Trans. Mathematical Software},\n volume = {29},\n number = {2},\n pages = {110-140},\n year = 2003\n}\n",&cite);

  if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
    /* see comments in MatMatSolve() */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
    PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
    lu->options.SolveInitialized = NO;
  }
  VecCopy(b_mpi,x);
  VecGetArray(x,&bptr);

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */
#if defined(PETSC_USE_COMPLEX)
  PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,1,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
  PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,1,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"p*gssvx fails, info: %d\n",info);

  if (lu->options.PrintStat) PetscStackCall("SuperLU_DIST:PStatPrint",PStatPrint(&lu->options, &stat, &lu->grid)); /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));

  VecRestoreArray(x,&bptr);
  lu->matsolve_iscalled    = PETSC_TRUE;
  lu->matmatsolve_iscalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}

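/*
   Usage sketch (not part of this file): MatSolve_SuperLU_DIST() is normally reached
   through KSP/PC rather than called directly; A, b, x, and ksp are the caller's objects.

     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetOperators(ksp,A,A);
     KSPSetFromOptions(ksp);  // run with -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu_dist
     KSPSolve(ksp,b,x);
*/
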
static PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->data;
  PetscInt         m=A->rmap->n,nrhs;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  int              info; /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  PetscBool        flg;

  PetscFunctionBegin;
  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be a MATDENSE matrix");
  if (X != B_mpi) {
    PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);
    if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be a MATDENSE matrix");
  }

  if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
    /* The communication pattern in SOLVEstruct was probably not created for this MatMatSolve(),
       so destroy it and create a new SOLVEstruct.
       Otherwise it may result in memory corruption or an incorrect solution.
       See src/mat/tests/ex125.c */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
    PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
    lu->options.SolveInitialized = NO;
  }
  if (X != B_mpi) {
    MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);
  }

  MatGetSize(B_mpi,NULL,&nrhs);

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */
  MatDenseGetArray(X,&bptr);

#if defined(PETSC_USE_COMPLEX)
  PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
  PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif

  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"p*gssvx fails, info: %d\n",info);
  MatDenseRestoreArray(X,&bptr);

  if (lu->options.PrintStat) PetscStackCall("SuperLU_DIST:PStatPrint",PStatPrint(&lu->options, &stat, &lu->grid)); /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}

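/*
   Usage sketch (not part of this file): solving for several right-hand sides at once.
   F is a factored matrix from MatGetFactor()/MatLUFactorNumeric(); B and X are the
   caller's MATDENSE matrices of matching sizes.

     MatMatSolve(F,B,X);  // X and B may be the same matrix; the code above handles that case
*/
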
/*
  input:
   F:     numeric Cholesky factor
  output:
   nneg:  total number of negative pivots
   nzero: total number of zero pivots
   npos:  (global dimension of F) - nneg - nzero
*/
static PetscErrorCode MatGetInertia_SuperLU_DIST(Mat F,PetscInt *nneg,PetscInt *nzero,PetscInt *npos)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;
  PetscScalar      *diagU=NULL;
  PetscInt         M,i,neg=0,zero=0,pos=0;
  PetscReal        r;

  PetscFunctionBegin;
  if (!F->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Matrix factor F is not assembled");
  if (lu->options.RowPerm != NOROWPERM) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Must set NOROWPERM, e.g. with -mat_superlu_dist_rowperm NOROWPERM");
  MatGetSize(F,&M,NULL);
  PetscMalloc1(M,&diagU);
  MatSuperluDistGetDiagU(F,diagU);
  for (i=0; i<M; i++) {
#if defined(PETSC_USE_COMPLEX)
    r = PetscImaginaryPart(diagU[i])/10.0;
    if (r< -PETSC_MACHINE_EPSILON || r>PETSC_MACHINE_EPSILON) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"diagU[%d]=%g + i %g is non-real",i,PetscRealPart(diagU[i]),r*10.0);
    r = PetscRealPart(diagU[i]);
#else
    r = diagU[i];
#endif
    if (r > 0) {
      pos++;
    } else if (r < 0) {
      neg++;
    } else zero++;
  }

  PetscFree(diagU);
  if (nneg)  *nneg  = neg;
  if (nzero) *nzero = zero;
  if (npos)  *npos  = pos;
  PetscFunctionReturn(0);
}

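/*
   Usage sketch (not part of this file): inertia is typically queried after a Cholesky
   factorization, for example in spectrum-slicing eigensolvers. F is the factored matrix
   and nneg/nzero/npos are the caller's PetscInt variables.

     MatGetInertia(F,&nneg,&nzero,&npos);  // requires -mat_superlu_dist_rowperm NOROWPERM
*/
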
static PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_SuperLU_DIST  *lu = (Mat_SuperLU_DIST*)F->data;
  Mat               Aloc;
  const PetscScalar *av;
  const PetscInt    *ai=NULL,*aj=NULL;
  PetscInt          nz,dummy;
  int               sinfo; /* SuperLU_Dist info flag is always an int even with long long indices */
  SuperLUStat_t     stat;
  double            *berr=NULL;
  PetscBool         ismpiaij,isseqaij,flg;

  PetscFunctionBegin;
  PetscObjectBaseTypeCompare((PetscObject)A,MATSEQAIJ,&isseqaij);
  PetscObjectBaseTypeCompare((PetscObject)A,MATMPIAIJ,&ismpiaij);
  if (ismpiaij) {
    MatMPIAIJGetLocalMat(A,MAT_INITIAL_MATRIX,&Aloc);
  } else if (isseqaij) {
    PetscObjectReference((PetscObject)A);
    Aloc = A;
  } else SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Not for type %s",((PetscObject)A)->type_name);

  MatGetRowIJ(Aloc,0,PETSC_FALSE,PETSC_FALSE,&dummy,&ai,&aj,&flg);
  if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"GetRowIJ failed");
  MatSeqAIJGetArrayRead(Aloc,&av);
  nz = ai[Aloc->rmap->n];

  /* Allocations for A_sup */
  if (lu->options.Fact == DOFACT) { /* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(Aloc->rmap->n, nz, &lu->val, &lu->col, &lu->row));
#else
    PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(Aloc->rmap->n, nz, &lu->val, &lu->col, &lu->row));
#endif
  } else { /* successive numeric factorization; the sparsity pattern and perm_c are reused */
    if (lu->FactPattern == SamePattern_SameRowPerm) {
      lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
    } else if (lu->FactPattern == SamePattern) {
      PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->rmap->N, &lu->grid, &lu->LUstruct)); /* Deallocate L and U matrices. */
      lu->options.Fact = SamePattern;
    } else if (lu->FactPattern == DOFACT) {
      PetscStackCall("SuperLU_DIST:Destroy_CompRowLoc_Matrix_dist",Destroy_CompRowLoc_Matrix_dist(&lu->A_sup));
      PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(A->rmap->N, &lu->grid, &lu->LUstruct));
      lu->options.Fact = DOFACT;

#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(Aloc->rmap->n, nz, &lu->val, &lu->col, &lu->row));
#else
      PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(Aloc->rmap->n, nz, &lu->val, &lu->col, &lu->row));
#endif
    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"options.Fact must be one of SamePattern, SamePattern_SameRowPerm, DOFACT");
  }

  /* Copy AIJ matrix to superlu_dist matrix */
  PetscArraycpy(lu->row,ai,Aloc->rmap->n+1);
  PetscArraycpy(lu->col,aj,nz);
  PetscArraycpy(lu->val,av,nz);
  MatRestoreRowIJ(Aloc,0,PETSC_FALSE,PETSC_FALSE,&dummy,&ai,&aj,&flg);
  if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"RestoreRowIJ failed");
  MatSeqAIJRestoreArrayRead(Aloc,&av);
  MatDestroy(&Aloc);

  /* Create and setup A_sup */
  if (lu->options.Fact == DOFACT) {
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCreate_CompRowLoc_Matrix_dist",zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, A->rmap->N, A->cmap->N, nz, A->rmap->n, A->rmap->rstart, lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE));
#else
    PetscStackCall("SuperLU_DIST:dCreate_CompRowLoc_Matrix_dist",dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, A->rmap->N, A->cmap->N, nz, A->rmap->n, A->rmap->rstart, lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE));
#endif
  }

  /* Factor the matrix. */
  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */
#if defined(PETSC_USE_COMPLEX)
  PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, A->rmap->n, 0, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
#else
  PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, A->rmap->n, 0, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
#endif

  if (sinfo > 0) {
    if (A->erroriffailure) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_MAT_LU_ZRPVT,"Zero pivot in row %d",sinfo);
    else {
      if (sinfo <= lu->A_sup.ncol) {
        F->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
        PetscInfo1(F,"U(i,i) is exactly zero, i= %d\n",sinfo);
      } else if (sinfo > lu->A_sup.ncol) {
        /*
         number of bytes allocated when the memory allocation
         failure occurred, plus A->ncol
        */
        F->factorerrortype = MAT_FACTOR_OUTMEMORY;
        PetscInfo1(F,"Number of bytes allocated when memory allocation fails %d\n",sinfo);
      }
    }
  } else if (sinfo < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB, "info = %d, argument in p*gssvx() had an illegal value", sinfo);

  if (lu->options.PrintStat) {
    PetscStackCall("SuperLU_DIST:PStatPrint",PStatPrint(&lu->options, &stat, &lu->grid)); /* Print the statistics. */
  }
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));
  F->assembled     = PETSC_TRUE;
  F->preallocated  = PETSC_TRUE;
  lu->options.Fact = FACTORED; /* The factored form of A is supplied. Local option used by this function only */
  PetscFunctionReturn(0);
}

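/*
   Usage sketch (not part of this file): when a matrix with the same nonzero pattern is
   factored repeatedly, -mat_superlu_dist_fact selects how much of the previous
   factorization is reused; from user code the loop looks like

     MatLUFactorNumeric(F,A,&info);    // first factorization
     for (;;) {
       // ... update the numerical values of A, keeping its nonzero pattern ...
       MatLUFactorNumeric(F,A,&info);  // reuses pattern/permutations per -mat_superlu_dist_fact
     }
*/
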
/* Note the PETSc r and c permutations are ignored */
static PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)F->data;
  PetscInt         M = A->rmap->N,N=A->cmap->N;

  PetscFunctionBegin;
  /* Initialize ScalePermstruct and LUstruct. */
  PetscStackCall("SuperLU_DIST:ScalePermstructInit",ScalePermstructInit(M, N, &lu->ScalePermstruct));
  PetscStackCall("SuperLU_DIST:LUstructInit",LUstructInit(N, &lu->LUstruct));
  F->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  F->ops->solve           = MatSolve_SuperLU_DIST;
  F->ops->matsolve        = MatMatSolve_SuperLU_DIST;
  F->ops->getinertia      = NULL;

  if (A->symmetric || A->hermitian) F->ops->getinertia = MatGetInertia_SuperLU_DIST;
  lu->CleanUpSuperLU_Dist = PETSC_TRUE;
  PetscFunctionReturn(0);
}

static PetscErrorCode MatCholeskyFactorSymbolic_SuperLU_DIST(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  PetscFunctionBegin;
  MatLUFactorSymbolic_SuperLU_DIST(F,A,r,r,info);
  F->ops->choleskyfactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  PetscFunctionReturn(0);
}

static PetscErrorCode MatFactorGetSolverType_aij_superlu_dist(Mat A,MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERSUPERLU_DIST;
  PetscFunctionReturn(0);
}

static PetscErrorCode MatView_Info_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  Mat_SuperLU_DIST       *lu=(Mat_SuperLU_DIST*)A->data;
  superlu_dist_options_t options;

  PetscFunctionBegin;
  /* check if matrix is superlu_dist type */
  if (A->ops->solve != MatSolve_SuperLU_DIST) PetscFunctionReturn(0);

  options = lu->options;
  PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");
  PetscViewerASCIIPrintf(viewer,"  Process grid nprow %D x npcol %D \n",lu->nprow,lu->npcol);
  PetscViewerASCIIPrintf(viewer,"  Equilibrate matrix %s \n",PetscBools[options.Equil != NO]);
  PetscViewerASCIIPrintf(viewer,"  Replace tiny pivots %s \n",PetscBools[options.ReplaceTinyPivot != NO]);
  PetscViewerASCIIPrintf(viewer,"  Use iterative refinement %s \n",PetscBools[options.IterRefine == SLU_DOUBLE]);
  PetscViewerASCIIPrintf(viewer,"  Processors in row %d col partition %d \n",lu->nprow,lu->npcol);

  switch (options.RowPerm) {
  case NOROWPERM:
    PetscViewerASCIIPrintf(viewer,"  Row permutation NOROWPERM\n");
    break;
  case LargeDiag_MC64:
    PetscViewerASCIIPrintf(viewer,"  Row permutation LargeDiag_MC64\n");
    break;
  case LargeDiag_AWPM:
    PetscViewerASCIIPrintf(viewer,"  Row permutation LargeDiag_AWPM\n");
    break;
  case MY_PERMR:
    PetscViewerASCIIPrintf(viewer,"  Row permutation MY_PERMR\n");
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown row permutation");
  }

  switch (options.ColPerm) {
  case NATURAL:
    PetscViewerASCIIPrintf(viewer,"  Column permutation NATURAL\n");
    break;
  case MMD_AT_PLUS_A:
    PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_AT_PLUS_A\n");
    break;
  case MMD_ATA:
    PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_ATA\n");
    break;
  /* Even though this is called METIS, the SuperLU_DIST code sets it by default when PARMETIS is defined, not METIS */
  case METIS_AT_PLUS_A:
    PetscViewerASCIIPrintf(viewer,"  Column permutation METIS_AT_PLUS_A\n");
    break;
  case PARMETIS:
    PetscViewerASCIIPrintf(viewer,"  Column permutation PARMETIS\n");
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
  }

  PetscViewerASCIIPrintf(viewer,"  Parallel symbolic factorization %s \n",PetscBools[options.ParSymbFact != NO]);

  if (lu->FactPattern == SamePattern) {
    PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern\n");
  } else if (lu->FactPattern == SamePattern_SameRowPerm) {
    PetscViewerASCIIPrintf(viewer,"  Repeated factorization SamePattern_SameRowPerm\n");
  } else if (lu->FactPattern == DOFACT) {
    PetscViewerASCIIPrintf(viewer,"  Repeated factorization DOFACT\n");
  } else {
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown factorization pattern");
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      MatView_Info_SuperLU_DIST(A,viewer);
    }
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatGetFactor_aij_superlu_dist(Mat A,MatFactorType ftype,Mat *F)
{
  Mat                    B;
  Mat_SuperLU_DIST       *lu;
  PetscInt               M=A->rmap->N,N=A->cmap->N,indx;
  PetscMPIInt            size;
  superlu_dist_options_t options;
  PetscBool              flg;
  const char             *colperm[]     = {"NATURAL","MMD_AT_PLUS_A","MMD_ATA","METIS_AT_PLUS_A","PARMETIS"};
  const char             *rowperm[]     = {"NOROWPERM","LargeDiag_MC64","LargeDiag_AWPM","MY_PERMR"};
  const char             *factPattern[] = {"SamePattern","SamePattern_SameRowPerm","DOFACT"};
  PetscBool              set;

  PetscFunctionBegin;
  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,M,N);
  PetscStrallocpy("superlu_dist",&((PetscObject)B)->type_name);
  MatSetUp(B);
  B->ops->getinfo = MatGetInfo_External;
  B->ops->view    = MatView_SuperLU_DIST;
  B->ops->destroy = MatDestroy_SuperLU_DIST;

  /* Set the default input options:
     options.Fact              = DOFACT;
     options.Equil             = YES;
     options.ParSymbFact       = NO;
     options.ColPerm           = METIS_AT_PLUS_A;
     options.RowPerm           = LargeDiag_MC64;
     options.ReplaceTinyPivot  = YES;
     options.IterRefine        = DOUBLE;
     options.Trans             = NOTRANS;
     options.SolveInitialized  = NO; - holds the communication pattern used by MatSolve() and MatMatSolve()
     options.RefineInitialized = NO;
     options.PrintStat         = YES;
     options.SymPattern        = NO;
  */
  set_default_options_dist(&options);

  B->trivialsymbolic = PETSC_TRUE;
  if (ftype == MAT_FACTOR_LU) {
    B->factortype            = MAT_FACTOR_LU;
    B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
  } else {
    B->factortype                  = MAT_FACTOR_CHOLESKY;
    B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SuperLU_DIST;
    options.SymPattern             = YES;
  }

  /* set solvertype */
  PetscFree(B->solvertype);
  PetscStrallocpy(MATSOLVERSUPERLU_DIST,&B->solvertype);

  PetscNewLog(B,&lu);
  B->data = lu;
  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);

  {
    PetscMPIInt       flg;
    MPI_Comm          comm;
    PetscSuperLU_DIST *context = NULL;

    PetscObjectGetComm((PetscObject)A,&comm);
    if (Petsc_Superlu_dist_keyval == MPI_KEYVAL_INVALID) {
      MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,Petsc_Superlu_dist_keyval_Delete_Fn,&Petsc_Superlu_dist_keyval,(void*)0);
      PetscRegisterFinalize(Petsc_Superlu_dist_keyval_free);
    }
    MPI_Comm_get_attr(comm,Petsc_Superlu_dist_keyval,&context,&flg);
    if (!flg || context->busy) {
      if (!flg) {
        PetscNew(&context);
        context->busy = PETSC_TRUE;
        MPI_Comm_dup(comm,&context->comm);
        MPI_Comm_set_attr(comm,Petsc_Superlu_dist_keyval,context);
      } else {
        MPI_Comm_dup(comm,&lu->comm_superlu);
      }

      /* Default number of process columns and rows */
      lu->nprow = (int_t) (0.5 + PetscSqrtReal((PetscReal)size));
      if (!lu->nprow) lu->nprow = 1;
      while (lu->nprow > 0) {
        lu->npcol = (int_t) (size/lu->nprow);
        if (size == lu->nprow * lu->npcol) break;
        lu->nprow--;
      }
      PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");
      PetscOptionsInt("-mat_superlu_dist_r","Number rows in processor partition","None",lu->nprow,(PetscInt*)&lu->nprow,NULL);
      PetscOptionsInt("-mat_superlu_dist_c","Number columns in processor partition","None",lu->npcol,(PetscInt*)&lu->npcol,NULL);
      PetscOptionsEnd();
      if (size != lu->nprow * lu->npcol) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of processes %d must equal nprow %d * npcol %d",size,lu->nprow,lu->npcol);
      PetscStackCall("SuperLU_DIST:superlu_gridinit",superlu_gridinit(context ? context->comm : lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid));
      if (context) context->grid = lu->grid;
      PetscInfo(NULL,"Duplicating a communicator for SuperLU_DIST and calling superlu_gridinit()\n");
      if (!flg) {
        PetscInfo(NULL,"Storing communicator and SuperLU_DIST grid in communicator attribute\n");
      } else {
        PetscInfo(NULL,"Communicator attribute already in use so not saving communicator and SuperLU_DIST grid in communicator attribute\n");
      }
    } else {
      PetscInfo(NULL,"Reusing communicator and superlu_gridinit() for SuperLU_DIST from communicator attribute\n");
      context->busy = PETSC_TRUE;
      lu->grid      = context->grid;
    }
  }

  PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"SuperLU_Dist Options","Mat");
  PetscOptionsBool("-mat_superlu_dist_equil","Equilibrate matrix","None",options.Equil ? PETSC_TRUE : PETSC_FALSE,&flg,&set);
  if (set && !flg) options.Equil = NO;

  PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",rowperm,4,rowperm[1],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      options.RowPerm = NOROWPERM;
      break;
    case 1:
      options.RowPerm = LargeDiag_MC64;
      break;
    case 2:
      options.RowPerm = LargeDiag_AWPM;
      break;
    case 3:
      options.RowPerm = MY_PERMR;
      break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown row permutation");
    }
  }

  PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",colperm,5,colperm[3],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      options.ColPerm = NATURAL;
      break;
    case 1:
      options.ColPerm = MMD_AT_PLUS_A;
      break;
    case 2:
      options.ColPerm = MMD_ATA;
      break;
    case 3:
      options.ColPerm = METIS_AT_PLUS_A;
      break;
    case 4:
      options.ColPerm = PARMETIS; /* only works for np>1 */
      break;
    default:
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown column permutation");
    }
  }

  options.ReplaceTinyPivot = NO;
  PetscOptionsBool("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",options.ReplaceTinyPivot ? PETSC_TRUE : PETSC_FALSE,&flg,&set);
  if (set && flg) options.ReplaceTinyPivot = YES;

  options.ParSymbFact = NO;
  PetscOptionsBool("-mat_superlu_dist_parsymbfact","Parallel symbolic factorization","None",PETSC_FALSE,&flg,&set);
  if (set && flg && size>1) {
#if defined(PETSC_HAVE_PARMETIS)
    options.ParSymbFact = YES;
    options.ColPerm     = PARMETIS; /* in v2.2, PARMETIS is forced for ParSymbFact regardless of user ordering setting */
#else
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"parsymbfact needs PARMETIS");
#endif
  }

  lu->FactPattern = SamePattern;
  PetscOptionsEList("-mat_superlu_dist_fact","Sparsity pattern for repeated matrix factorization","None",factPattern,3,factPattern[0],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      lu->FactPattern = SamePattern;
      break;
    case 1:
      lu->FactPattern = SamePattern_SameRowPerm;
      break;
    case 2:
      lu->FactPattern = DOFACT;
      break;
    }
  }

  options.IterRefine = NOREFINE;
  PetscOptionsBool("-mat_superlu_dist_iterrefine","Use iterative refinement","None",options.IterRefine == NOREFINE ? PETSC_FALSE : PETSC_TRUE,&flg,&set);
  if (set) {
    if (flg) options.IterRefine = SLU_DOUBLE;
    else     options.IterRefine = NOREFINE;
  }

  if (PetscLogPrintInfo) options.PrintStat = YES;
  else                   options.PrintStat = NO;
  PetscOptionsBool("-mat_superlu_dist_statprint","Print factorization information","None",(PetscBool)options.PrintStat,(PetscBool*)&options.PrintStat,NULL);
  PetscOptionsEnd();

  lu->options              = options;
  lu->options.Fact         = DOFACT;
  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_FALSE;

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_aij_superlu_dist);
  PetscObjectComposeFunction((PetscObject)B,"MatSuperluDistGetDiagU_C",MatSuperluDistGetDiagU_SuperLU_DIST);

  *F = B;
  PetscFunctionReturn(0);
}

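/*
   Usage sketch (not part of this file): the factor interface can also be driven directly,
   without a KSP. A, b, x are the caller's objects; the orderings are created only to satisfy
   the interface, since the note above MatLUFactorSymbolic_SuperLU_DIST() says they are ignored.

     Mat           F;
     IS            rperm,cperm;
     MatFactorInfo info;

     MatFactorInfoInitialize(&info);
     MatGetFactor(A,MATSOLVERSUPERLU_DIST,MAT_FACTOR_LU,&F);
     MatGetOrdering(A,MATORDERINGNATURAL,&rperm,&cperm);
     MatLUFactorSymbolic(F,A,rperm,cperm,&info);
     MatLUFactorNumeric(F,A,&info);
     MatSolve(F,b,x);
     ISDestroy(&rperm);
     ISDestroy(&cperm);
     MatDestroy(&F);
*/
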
PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_SuperLU_DIST(void)
{
  PetscFunctionBegin;
  MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATMPIAIJ,MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);
  MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_superlu_dist);
  MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATMPIAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_superlu_dist);
  MatSolverTypeRegister(MATSOLVERSUPERLU_DIST,MATSEQAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_superlu_dist);
  PetscFunctionReturn(0);
}

/*MC
  MATSOLVERSUPERLU_DIST - Parallel direct solver package for LU factorization

  Use ./configure --download-superlu_dist --download-parmetis --download-metis --download-ptscotch to have PETSc installed with SuperLU_DIST

  Use -pc_type lu -pc_factor_mat_solver_type superlu_dist to use this direct solver (see the example below)

  Works with AIJ matrices

  Options Database Keys:
+ -mat_superlu_dist_r <n> - number of rows in processor partition
. -mat_superlu_dist_c <n> - number of columns in processor partition
. -mat_superlu_dist_equil - equilibrate the matrix
. -mat_superlu_dist_rowperm <NOROWPERM,LargeDiag_MC64,LargeDiag_AWPM,MY_PERMR> - row permutation
. -mat_superlu_dist_colperm <NATURAL,MMD_AT_PLUS_A,MMD_ATA,METIS_AT_PLUS_A,PARMETIS> - column permutation
. -mat_superlu_dist_replacetinypivot - replace tiny pivots
. -mat_superlu_dist_fact <SamePattern> - (choose one of) SamePattern SamePattern_SameRowPerm DOFACT
. -mat_superlu_dist_iterrefine - use iterative refinement
- -mat_superlu_dist_statprint - print factorization information
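
  Example usage, a minimal sketch (./app stands for the user's own executable, which may be any code that solves a linear system through KSP):
.vb
   mpiexec -n 4 ./app -ksp_type preonly -pc_type lu -pc_factor_mat_solver_type superlu_dist -mat_superlu_dist_statprint
.ve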
  Level: beginner

.seealso: PCLU, PCFactorSetMatSolverType(), MatSolverType
M*/