Actual source code: ex231.cxx
static char help[] = "A test for MatAssembly that heavily relies on PetscSortIntWithArrayPair\n";

/*
  A characteristic of the array to be sorted in this test (about 4M entries long) is that
  it contains many duplicated values that are already clustered together (around 95
  duplicates per unique integer).

  The array came from a PETSc performance bug report from the MOOSE project. One can
  use it for future performance studies.

  Contributed-by: Fande Kong <fdkong.jd@gmail.com>, John Peterson <jwpeterson@gmail.com>
*/

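/*
  For reference, the PetscSortIntWithArrayPair() pattern that MatAssembly relies on
  looks roughly like the sketch below. The values are made up for illustration, and
  the exact signature (e.g. the type of the length argument) may differ across PETSc
  versions; the real call happens deep inside PETSc's assembly code, not in this test.

    PetscInt key[6] = {7, 7, 7, 3, 3, 5}; // many duplicates, already clustered
    PetscInt va[6]  = {0, 1, 2, 3, 4, 5};
    PetscInt vb[6]  = {5, 4, 3, 2, 1, 0};
    PetscSortIntWithArrayPair(6, key, va, vb); // sorts key ascending, applying the
                                               // same permutation to va and vb
*/
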
// PETSc includes
#include <petscmat.h>

// C++ includes
#include <iostream>
#include <fstream>
#include <sstream>
#include <algorithm>
#include <vector>
#include <string>
#include <set>

int main(int argc, char **argv)
{
  PetscMPIInt        size, rank;
  char               file[2][PETSC_MAX_PATH_LEN];
  PetscBool          flg;
  const unsigned int n_dofs = 26559;
  unsigned int       first_local_index;
  unsigned int       last_local_index;

  PetscInitialize(&argc, &argv, (char *)0, help);
  MPI_Comm_size(PETSC_COMM_WORLD, &size);
  MPI_Comm_rank(PETSC_COMM_WORLD, &rank);

  PetscOptionsGetString(NULL, NULL, "-f0", file[0], sizeof(file[0]), &flg);

  if (size == 2) {
    PetscOptionsGetString(NULL, NULL, "-f1", file[1], sizeof(file[1]), &flg);
  }

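  // Hard-wired row ownership ranges: with one rank all 26559 dofs are local; with
  // two ranks, rank 0 owns rows [0, 13911) and rank 1 owns rows [13911, 26559).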
  if (size == 1) {
    first_local_index = 0;
    last_local_index  = 26559;
  } else {
    if (rank == 0) {
      first_local_index = 0;
      last_local_index  = 13911;
    } else {
      first_local_index = 13911;
      last_local_index  = 26559;
    }
  }

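  // Each -fN file stores the elements local to rank N, one element per line, as
  // space-separated global dof indices; that is the format the reader below assumes.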
  // Read element dof indices from files
  std::vector<std::vector<std::vector<PetscInt> > > elem_dof_indices(size);
  for (PetscInt proc_id = 0; proc_id < size; ++proc_id) {
    std::string   line;
    std::ifstream dof_file(file[proc_id]);
    if (dof_file.good()) {
      while (std::getline(dof_file, line)) {
        std::vector<PetscInt> dof_indices;
        std::stringstream     sstream(line);
        std::string           token;
        while (std::getline(sstream, token, ' ')) { dof_indices.push_back(std::atoi(token.c_str())); }
        elem_dof_indices[proc_id].push_back(dof_indices);
      }
    } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Could not open file %s", file[proc_id]);
  }

  // Debugging: Verify we read in elem_dof_indices correctly
  // for (unsigned int i=0; i<elem_dof_indices.size(); ++i)
  //   {
  //     for (unsigned int j=0; j<elem_dof_indices[i].size(); ++j)
  //       {
  //         for (unsigned int k=0; k<elem_dof_indices[i][j].size(); ++k)
  //           std::cout << elem_dof_indices[i][j][k] << " ";
  //         std::cout << std::endl;
  //       }
  //     std::cout << std::endl;
  //   }

  // Set up the (global) sparsity pattern
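  // Any two dofs that appear together in some element couple, so each element
  // contributes a dense block of (row, column) pairs; the union over all elements
  // is the usual finite-element sparsity graph.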
  std::vector<std::set<unsigned int> > sparsity(n_dofs);
  for (PetscInt proc_id = 0; proc_id < size; ++proc_id)
    for (unsigned int k = 0; k < elem_dof_indices[proc_id].size(); k++) {
      std::vector<PetscInt> &dof_indices = elem_dof_indices[proc_id][k];
      for (unsigned int i = 0; i < dof_indices.size(); ++i)
        for (unsigned int j = 0; j < dof_indices.size(); ++j)
          sparsity[dof_indices[i]].insert(dof_indices[j]);
    }

  // Determine the local nonzeros on this processor
  const unsigned int n_local_dofs = last_local_index - first_local_index;
  std::vector<PetscInt> n_nz(n_local_dofs);
  std::vector<PetscInt> n_oz(n_local_dofs);

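  // For an MPIAIJ matrix each rank stores its rows as two sequential blocks: the
  // "diagonal" block (columns the rank also owns, here [first_local_index,
  // last_local_index)) and the "off-diagonal" block (all other columns). n_nz and
  // n_oz hold the per-row counts for those two blocks, which is exactly what the
  // preallocation routines below expect.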
  for (unsigned int i = 0; i < n_local_dofs; ++i) {
    for (std::set<unsigned int>::iterator iter = sparsity[i + first_local_index].begin(); iter != sparsity[i + first_local_index].end(); iter++) {
      unsigned int dof = *iter;
      if ((dof >= first_local_index) && (dof < last_local_index)) n_nz[i]++;
      else n_oz[i]++;
    }
  }

  // Debugging: print number of on/off diagonal nonzeros
  // for (unsigned int i=0; i<n_nz.size(); ++i)
  //   std::cout << n_nz[i] << " ";
  // std::cout << std::endl;

  // for (unsigned int i=0; i<n_oz.size(); ++i)
  //   std::cout << n_oz[i] << " ";
  // std::cout << std::endl;

  // Compute and print max number of on- and off-diagonal nonzeros.
  PetscInt n_nz_max = *std::max_element(n_nz.begin(), n_nz.end());
  PetscInt n_oz_max = *std::max_element(n_oz.begin(), n_oz.end());

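  // PetscSynchronizedPrintf() queues each rank's output; the flush below then
  // emits it ordered by rank instead of interleaved.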
  PetscSynchronizedPrintf(PETSC_COMM_WORLD, "Max on-diagonal non-zeros: = %" PetscInt_FMT "\n", n_nz_max);
  PetscSynchronizedPrintf(PETSC_COMM_WORLD, "Max off-diagonal non-zeros: = %" PetscInt_FMT "\n", n_oz_max);
  PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);

  // Initialize the matrix similar to what we do in the PetscMatrix
  // ctor and init() routines.
  Mat mat;
  MatCreate(PETSC_COMM_WORLD, &mat);
  MatSetSizes(mat, n_local_dofs, n_local_dofs, n_dofs, n_dofs);
  MatSetBlockSize(mat, 1);
  MatSetType(mat, MATAIJ); // Automatically chooses seqaij or mpiaij
  MatSeqAIJSetPreallocation(mat, 0, &n_nz[0]);
  MatMPIAIJSetPreallocation(mat, 0, &n_nz[0], 0, &n_oz[0]);
  MatSetOption(mat, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);
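  // Note: it is safe to call both preallocation routines above; each is ignored
  // unless mat is of the matching type. With MAT_NEW_NONZERO_ALLOCATION_ERR set,
  // assembly errors out if the counts turn out to be too small.
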
  // Local "element" loop
  for (unsigned int k = 0; k < elem_dof_indices[rank].size(); k++) {
    std::vector<PetscInt> &dof_indices = elem_dof_indices[rank][k];
    // DenseMatrix< Number > zero_mat( dof_indices.size(), dof_indices.size());
    // B.add_matrix( zero_mat, dof_indices);
    std::vector<PetscScalar> ones(dof_indices.size() * dof_indices.size(), 1.);
    MatSetValues(mat, dof_indices.size(), &dof_indices[0], dof_indices.size(), &dof_indices[0], &ones[0], ADD_VALUES);
  }

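  // With ADD_VALUES, entries destined for rows owned by another rank are not
  // applied immediately: they accumulate in a local stash and are communicated
  // during assembly. Processing that stream of indices is where, per the help
  // string, PetscSortIntWithArrayPair() gets exercised on the duplicate-heavy
  // array described at the top of this file.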
  // Matrix assembly
  MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY);

  // Clean up
  MatDestroy(&mat);
  PetscFinalize();
  return 0;
}

/*TEST

   build:
     # Skip the test for the Sun C++ compiler because of its warnings/errors in petscmat.h
     requires: !defined(PETSC_HAVE_SUN_CXX)

   test:
     nsize: 2
     requires: datafilespath
     args: -f0 ${DATAFILESPATH}/meshes/moose_dof_indices_np_2_proc_0.txt -f1 ${DATAFILESPATH}/meshes/moose_dof_indices_np_2_proc_1.txt

TEST*/