/* ex134.c */
static const char help[] = "Test parallel assembly of SBAIJ matrices\n\n";

#include <petscmat.h>
5: PetscErrorCode Assemble(MPI_Comm comm,PetscInt bs,MatType mtype)
6: {
7: const PetscInt rc[] = {0,1,2,3};
8: const PetscScalar vals[] = {100, 2, 3, 4, 5, 600, 7, 8,
9: 9,100,11,1200,13,14,15,1600,
10: 17,18,19,20,21,22,23,24,
11: 25,26,27,2800,29,30,31,32,
12: 33,34,35,36,37,38,39,40,
13: 41,42,43,44,45,46,47,48,
14: 49,50,51,52,53,54,55,56,
15: 57,58,49,60,61,62,63,64};
16: Mat A;
17: #if defined(PETSC_HAVE_MUMPS) || defined(PETSC_HAVE_MKL_CPARDISO)
18: Mat F;
19: MatSolverType stype = MATSOLVERPETSC;
20: PetscRandom rdm;
21: Vec b,x,y;
22: PetscInt i,j;
23: PetscReal norm2,tol=100*PETSC_SMALL;
24: PetscBool issbaij;
25: #endif
26: PetscViewer viewer;
28: MatCreate(comm,&A);
29: MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,4*bs,4*bs);
30: MatSetType(A,mtype);
31: MatMPIBAIJSetPreallocation(A,bs,2,NULL,2,NULL);
32: MatMPISBAIJSetPreallocation(A,bs,2,NULL,2,NULL);
33: MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE);
34: /* All processes contribute a global matrix */
35: MatSetValuesBlocked(A,4,rc,4,rc,vals,ADD_VALUES);
36: MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
37: MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
38: PetscPrintf(comm,"Matrix %s(%" PetscInt_FMT ")\n",mtype,bs);
39: PetscViewerASCIIGetStdout(comm,&viewer);
40: PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO_DETAIL);
41: MatView(A,viewer);
42: PetscViewerPopFormat(viewer);
43: MatView(A,viewer);
44: #if defined(PETSC_HAVE_MUMPS) || defined(PETSC_HAVE_MKL_CPARDISO)
45: PetscStrcmp(mtype,MATMPISBAIJ,&issbaij);
46: if (!issbaij) {
47: MatShift(A,10);
48: }
49: PetscRandomCreate(PETSC_COMM_WORLD,&rdm);
50: PetscRandomSetFromOptions(rdm);
51: MatCreateVecs(A,&x,&y);
52: VecDuplicate(x,&b);
53: for (j=0; j<2; j++) {
54: #if defined(PETSC_HAVE_MUMPS)
55: if (j==0) stype = MATSOLVERMUMPS;
56: #else
57: if (j==0) continue;
58: #endif
59: #if defined(PETSC_HAVE_MKL_CPARDISO)
60: if (j==1) stype = MATSOLVERMKL_CPARDISO;
61: #else
62: if (j==1) continue;
63: #endif
64: if (issbaij) {
65: MatGetFactor(A,stype,MAT_FACTOR_CHOLESKY,&F);
66: MatCholeskyFactorSymbolic(F,A,NULL,NULL);
67: MatCholeskyFactorNumeric(F,A,NULL);
68: } else {
69: MatGetFactor(A,stype,MAT_FACTOR_LU,&F);
70: MatLUFactorSymbolic(F,A,NULL,NULL,NULL);
71: MatLUFactorNumeric(F,A,NULL);
72: }
73: for (i=0; i<10; i++) {
74: VecSetRandom(b,rdm);
75: MatSolve(F,b,y);
76: /* Check the error */
77: MatMult(A,y,x);
78: VecAXPY(x,-1.0,b);
79: VecNorm(x,NORM_2,&norm2);
80: if (norm2>tol) {
81: PetscPrintf(PETSC_COMM_WORLD,"Error:MatSolve(), norm2: %g\n",(double)norm2);
82: }
83: }
84: MatDestroy(&F);
85: }
86: VecDestroy(&x);
87: VecDestroy(&y);
88: VecDestroy(&b);
89: PetscRandomDestroy(&rdm);
90: #endif
91: MatDestroy(&A);
92: return 0;
93: }
95: int main(int argc,char *argv[])
96: {
97: MPI_Comm comm;
98: PetscMPIInt size;
100: PetscInitialize(&argc,&argv,NULL,help);
101: comm = PETSC_COMM_WORLD;
102: MPI_Comm_size(comm,&size);
104: Assemble(comm,2,MATMPIBAIJ);
105: Assemble(comm,2,MATMPISBAIJ);
106: Assemble(comm,1,MATMPIBAIJ);
107: Assemble(comm,1,MATMPISBAIJ);
108: PetscFinalize();
109: return 0;
110: }
/*TEST

   test:
      nsize: 2
      args: -mat_ignore_lower_triangular
      filter: sed -e "s~mem [0-9]*~mem~g"

TEST*/