/* packm.c: DMCreateMatrix implementations for DMComposite (MatNest and assembled AIJ paths) */
2: #include <../src/dm/impls/composite/packimpl.h>
4: static PetscErrorCode DMCreateMatrix_Composite_Nest(DM dm,Mat *J)
5: {
6: const DM_Composite *com = (DM_Composite*)dm->data;
7: const struct DMCompositeLink *rlink,*clink;
8: IS *isg;
9: Mat *submats;
10: PetscInt i,j,n;
12: n = com->nDM; /* Total number of entries */
14: /* Explicit index sets are not required for MatCreateNest, but getting them here allows MatNest to do consistency
15: * checking and allows ISEqual to compare by identity instead of by contents. */
16: DMCompositeGetGlobalISs(dm,&isg);
18: /* Get submatrices */
19: PetscMalloc1(n*n,&submats);
20: for (i=0,rlink=com->next; rlink; i++,rlink=rlink->next) {
21: for (j=0,clink=com->next; clink; j++,clink=clink->next) {
22: Mat sub = NULL;
23: if (i == j) {
24: DMCreateMatrix(rlink->dm,&sub);
26: submats[i*n+j] = sub;
27: }
28: }
30: MatCreateNest(PetscObjectComm((PetscObject)dm),n,isg,n,isg,submats,J);
32: /* Disown references */
33: for (i=0; i<n; i++) ISDestroy(&isg[i]);
34: PetscFree(isg);
36: for (i=0; i<n*n; i++) {
37: if (submats[i]) MatDestroy(&submats[i]);
38: }
39: PetscFree(submats);
40: return 0;
41: }
/*
  DMCreateMatrix_Composite_AIJ - Build the composite Jacobian as an assembled
  (Seq/MPI)AIJ matrix. Makes two passes over the packed sub-DMs: first to
  compute per-row preallocation counts, then to copy each sub-DM matrix's
  nonzero structure (with zero values) into the global matrix at the shifted
  row/column locations.
*/
static PetscErrorCode DMCreateMatrix_Composite_AIJ(DM dm,Mat *J)
{
  PetscErrorCode         ierr; /* NOTE(review): appears unused after removal of explicit error checking — confirm the preallocation macros below do not expand to reference it */
  DM_Composite           *com = (DM_Composite*)dm->data;
  struct DMCompositeLink *next;
  PetscInt               m,*dnz,*onz,i,j,mA;
  Mat                    Atmp;
  PetscMPIInt            rank;
  PetscBool              dense = PETSC_FALSE;

  /* use global vector to determine layout needed for matrix */
  m = com->n; /* local number of rows/columns of the composite matrix */

  MatCreate(PetscObjectComm((PetscObject)dm),J);
  MatSetSizes(*J,m,m,PETSC_DETERMINE,PETSC_DETERMINE);
  MatSetType(*J,dm->mattype);

  /*
     Extremely inefficient but will compute entire Jacobian for testing
  */
  PetscOptionsGetBool(((PetscObject)dm)->options,((PetscObject)dm)->prefix,"-dmcomposite_dense_jacobian",&dense,NULL);
  if (dense) {
    PetscInt    rstart,rend,*indices;
    PetscScalar *values;

    /* Preallocate every row fully dense: mA = global size, so each local row
       holds m diagonal-block entries and mA-m off-diagonal entries. */
    mA = com->N;
    MatMPIAIJSetPreallocation(*J,mA,NULL,mA-m,NULL);
    MatSeqAIJSetPreallocation(*J,mA,NULL);

    /* Insert explicit zeros in every column of every owned row so the full
       nonzero pattern exists for later assembly. */
    MatGetOwnershipRange(*J,&rstart,&rend);
    PetscMalloc2(mA,&values,mA,&indices);
    PetscArrayzero(values,mA);
    for (i=0; i<mA; i++) indices[i] = i;
    for (i=rstart; i<rend; i++) {
      MatSetValues(*J,1,&i,mA,indices,values,INSERT_VALUES);
    }
    PetscFree2(values,indices);
    MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);
    return 0;
  }

  /* Pass 1: count diagonal/off-diagonal nonzeros per local row for preallocation.
     MatPreallocateInitialize is a macro that allocates dnz/onz and declares helper
     variables (__rstart,__nrows,__start,__end) used below. */
  MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank);
  MatPreallocateInitialize(PetscObjectComm((PetscObject)dm),m,m,dnz,onz);
  /* loop over packed objects, handling one at at time */
  next = com->next;
  while (next) {
    PetscInt       nc,rstart,*ccols,maxnc;
    const PetscInt *cols,*rstarts;
    PetscMPIInt    proc;

    DMCreateMatrix(next->dm,&Atmp);
    MatGetOwnershipRange(Atmp,&rstart,NULL);
    MatGetOwnershipRanges(Atmp,&rstarts);
    MatGetLocalSize(Atmp,&mA,NULL);

    /* Find the widest row so one scratch column buffer suffices for all rows */
    maxnc = 0;
    for (i=0; i<mA; i++) {
      MatGetRow(Atmp,rstart+i,&nc,NULL,NULL);
      maxnc = PetscMax(nc,maxnc);
      MatRestoreRow(Atmp,rstart+i,&nc,NULL,NULL);
    }
    PetscMalloc1(maxnc,&ccols);
    for (i=0; i<mA; i++) {
      MatGetRow(Atmp,rstart+i,&nc,&cols,NULL);
      /* remap the columns taking into how much they are shifted on each process */
      for (j=0; j<nc; j++) {
        proc = 0;
        /* locate the owning process of column cols[j] in the sub-matrix layout */
        while (cols[j] >= rstarts[proc+1]) proc++;
        ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
      }
      MatPreallocateSet(com->rstart+next->rstart+i,nc,ccols,dnz,onz);
      MatRestoreRow(Atmp,rstart+i,&nc,&cols,NULL);
    }
    PetscFree(ccols);
    MatDestroy(&Atmp);
    next = next->next;
  }
  if (com->FormCoupleLocations) {
    /* __rstart,__nrows,__start,__end come from the MatPreallocateInitialize macro */
    (*com->FormCoupleLocations)(dm,NULL,dnz,onz,__rstart,__nrows,__start,__end);
  }
  MatMPIAIJSetPreallocation(*J,0,dnz,0,onz);
  MatSeqAIJSetPreallocation(*J,0,dnz);
  MatPreallocateFinalize(dnz,onz);

  /* Caller only wanted preallocation; skip filling in the nonzero pattern */
  if (dm->prealloc_only) return 0;

  /* Pass 2: re-create each sub-DM matrix and copy its entries (structure) into
     the composite matrix at the globally shifted row/column indices. */
  next = com->next;
  while (next) {
    PetscInt          nc,rstart,row,maxnc,*ccols;
    const PetscInt    *cols,*rstarts;
    const PetscScalar *values;
    PetscMPIInt       proc;

    DMCreateMatrix(next->dm,&Atmp);
    MatGetOwnershipRange(Atmp,&rstart,NULL);
    MatGetOwnershipRanges(Atmp,&rstarts);
    MatGetLocalSize(Atmp,&mA,NULL);
    maxnc = 0;
    for (i=0; i<mA; i++) {
      MatGetRow(Atmp,rstart+i,&nc,NULL,NULL);
      maxnc = PetscMax(nc,maxnc);
      MatRestoreRow(Atmp,rstart+i,&nc,NULL,NULL);
    }
    PetscMalloc1(maxnc,&ccols);
    for (i=0; i<mA; i++) {
      MatGetRow(Atmp,rstart+i,&nc,(const PetscInt**)&cols,&values);
      for (j=0; j<nc; j++) {
        proc = 0;
        /* same column remapping as in the preallocation pass */
        while (cols[j] >= rstarts[proc+1]) proc++;
        ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
      }
      row = com->rstart+next->rstart+i;
      MatSetValues(*J,1,&row,nc,ccols,values,INSERT_VALUES);
      MatRestoreRow(Atmp,rstart+i,&nc,(const PetscInt**)&cols,&values);
    }
    PetscFree(ccols);
    MatDestroy(&Atmp);
    next = next->next;
  }
  if (com->FormCoupleLocations) {
    PetscInt __rstart;
    MatGetOwnershipRange(*J,&__rstart,NULL);
    /* Second call: fill coupling entries directly into *J (dnz/onz no longer needed) */
    (*com->FormCoupleLocations)(dm,*J,NULL,NULL,__rstart,0,0,0);
  }
  MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);
  return 0;
}
173: PetscErrorCode DMCreateMatrix_Composite(DM dm,Mat *J)
174: {
175: PetscBool usenest;
176: ISLocalToGlobalMapping ltogmap;
178: DMSetFromOptions(dm);
179: DMSetUp(dm);
180: PetscStrcmp(dm->mattype,MATNEST,&usenest);
181: if (usenest) {
182: DMCreateMatrix_Composite_Nest(dm,J);
183: } else {
184: DMCreateMatrix_Composite_AIJ(dm,J);
185: }
187: DMGetLocalToGlobalMapping(dm,<ogmap);
188: MatSetLocalToGlobalMapping(*J,ltogmap,ltogmap);
189: MatSetDM(*J,dm);
190: return 0;
191: }