Actual source code: grvtk.c

#include <petsc/private/dmdaimpl.h>
/*
   Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires
   including the private vtkvimpl.h file. The code should be refactored.
*/
#include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>
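/*
   This file implements the DMDA back-ends for the VTK XML viewers: .vts
   (StructuredGrid) and .vtr (RectilinearGrid) output.  Both writers follow the
   same scheme: rank 0 writes an XML header that declares one <Piece> per MPI
   rank with precomputed byte offsets, then all data is funneled to rank 0 and
   written as raw appended binary in exactly the order the header promised.
*/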

/* Helper function which determines if any DMDA fields are named.  This is used
   as a proxy for the user's intention to use DMDA fields as distinct
   scalar-valued fields as opposed to a single vector-valued field. */
static PetscErrorCode DMDAGetFieldsNamed(DM da,PetscBool *fieldsnamed)
{
  PetscInt f,bs;

  *fieldsnamed = PETSC_FALSE;
  DMDAGetDof(da,&bs);
  for (f=0; f<bs; ++f) {
    const char *fieldname;
    DMDAGetFieldName(da,f,&fieldname);
    if (fieldname) {
      *fieldsnamed = PETSC_TRUE;
      break;
    }
  }
  return 0;
}
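/*
   Writer for the VTK XML StructuredGrid (.vts) format.  The file is produced
   in two passes over the same loop structure: the first pass prints the XML
   header and accumulates boffset, the byte offset of each appended DataArray;
   the second pass streams the actual binary data.  Any drift between the two
   passes would corrupt the file, so the loops must stay in sync.
*/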

static PetscErrorCode DMDAVTKWriteAll_VTS(DM da,PetscViewer viewer)
{
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  Vec                      Coords;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,cdim,bs,boffset,maxnnodes,maxbs,i,j,k,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL;
  PetscScalar              *array,*array2;

  PetscObjectGetComm((PetscObject)da,&comm);
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);
  DMDAGetInfo(da,&dim,&mx,&my,&mz,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);
  DMDAGetLocalInfo(da,&info);
  DMGetCoordinates(da,&Coords);
  if (Coords) {
    PetscInt csize;
    VecGetSize(Coords,&csize);
    /* Guard the integer division below */
    if (csize % (mx*my*mz)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
    cdim = csize/(mx*my*mz);
    if (cdim < 1 || cdim > 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
  } else {
    cdim = dim;
  }

  PetscFOpen(comm,vtk->filename,"wb",&fp);
  PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");
  PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order);
  PetscFPrintf(comm,fp,"  <StructuredGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);

  if (rank == 0) PetscMalloc1(size*6,&grloc);
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);
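  /* Only rank 0 allocated grloc, and only its receive arguments are
     significant in the gather above.  The table of per-rank corners and sizes
     gathered here drives both the header pass and the data pass below. */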

  /* Write XML header */
  maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
  maxbs     = 0;                /* Used for the temporary array size on rank 0 */
  boffset   = 0;                /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
    PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);
    PetscFPrintf(comm,fp,"      <Points>\n");
    PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);
    boffset += 3*nnodes*sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm,fp,"      </Points>\n");

    PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");
    for (link=vtk->link; link; link=link->next) {
      Vec        X = (Vec)link->vec;
      PetscInt   bs,f;
      DM         daCurr;
      PetscBool  fieldsnamed;
      const char *vecname = "Unnamed";

      VecGetDM(X,&daCurr);
      DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);
      maxbs = PetscMax(maxbs,bs);

      if (((PetscObject)X)->name || link != vtk->link) {
        PetscObjectGetName((PetscObject)X,&vecname);
      }
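      /* PetscObjectGetName() auto-generates a name when none was set, so this
         condition keeps the first, truly unnamed vector labeled "Unnamed"
         while giving every subsequent vector a distinct DataArray name. */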

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      DMDAGetFieldsNamed(daCurr,&fieldsnamed);
      if (fieldsnamed) {
        for (f=0; f<bs; f++) {
          char       buf[256];
          const char *fieldname;
          DMDAGetFieldName(daCurr,f,&fieldname);
          if (!fieldname) {
            PetscSNPrintf(buf,sizeof(buf),"%D",f);
            fieldname = buf;
          }
          PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);
          boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
        }
      } else {
        PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);
        boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    PetscFPrintf(comm,fp,"      </PointData>\n");
    PetscFPrintf(comm,fp,"    </Piece>\n");
  }
  PetscFPrintf(comm,fp,"  </StructuredGrid>\n");
  PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");
  PetscFPrintf(comm,fp,"_");
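  /* Everything after the '_' marker is raw binary.  Each appended block is an
     int byte count followed by the payload, which is why every offset above
     was advanced by sizeof(int) in addition to the data size. */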

  /* Now write the arrays. */
  tag  = ((PetscObject)viewer)->tag;
  PetscMalloc2(maxnnodes*PetscMax(3,maxbs),&array,maxnnodes*PetscMax(3,maxbs),&array2);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      nnodes = info.xm*info.ym*info.zm;
    }
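    /* Round-robin data movement: when r is a remote rank, rank 0 posts the
       receive and rank r sends; when r == 0, rank 0 just copies its own data. */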

    /* Write the coordinates */
    if (Coords) {
      const PetscScalar *coords;
      VecGetArrayRead(Coords,&coords);
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          MPI_Recv(array,nnodes*cdim,MPIU_SCALAR,r,tag,comm,&status);
          MPI_Get_count(&status,MPIU_SCALAR,&nn);
          if (nn != nnodes*cdim) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
        } else {
          PetscArraycpy(array,coords,nnodes*cdim);
        }
        /* Pad the coordinate tuples out to the 3 interleaved components VTK expects */
        for (k=0; k<zm; k++) {
          for (j=0; j<ym; j++) {
            for (i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array2[Iloc*3+0] = array[Iloc*cdim + 0];
              array2[Iloc*3+1] = cdim > 1 ? array[Iloc*cdim + 1] : 0.0;
              array2[Iloc*3+2] = cdim > 2 ? array[Iloc*cdim + 2] : 0.0;
            }
          }
        }
      } else if (r == rank) {
        MPI_Send((void*)coords,nnodes*cdim,MPIU_SCALAR,0,tag,comm);
      }
      VecRestoreArrayRead(Coords,&coords);
    } else {       /* Fabricate some coordinates using grid index */
      for (k=0; k<zm; k++) {
        for (j=0; j<ym; j++) {
          for (i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[Iloc*3+0] = xs+i;
            array2[Iloc*3+1] = ys+j;
            array2[Iloc*3+2] = zs+k;
          }
        }
      }
    }
    PetscViewerVTKFWrite(viewer,fp,array2,nnodes*3,MPIU_SCALAR);
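    /* Like PetscFPrintf, PetscViewerVTKFWrite only touches the file on rank 0;
       it also emits the int byte-count prefix that boffset accounted for. */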

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt          bs,f;
      DM                daCurr;
      PetscBool         fieldsnamed;
      VecGetDM(X,&daCurr);
      DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);
      VecGetArrayRead(X,&x);
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);
          MPI_Get_count(&status,MPIU_SCALAR,&nn);
          if (nn != nnodes*bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
        } else {
          PetscArraycpy(array,x,nnodes*bs);
        }

        /* If any fields are named, add scalar fields. Otherwise, add a vector field */
        DMDAGetFieldsNamed(daCurr,&fieldsnamed);
        if (fieldsnamed) {
          for (f=0; f<bs; f++) {
            /* Extract (de-interleave) the f'th field */
            for (k=0; k<zm; k++) {
              for (j=0; j<ym; j++) {
                for (i=0; i<xm; i++) {
                  PetscInt Iloc = i+xm*(j+ym*k);
                  array2[Iloc] = array[Iloc*bs + f];
                }
              }
            }
            PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);
          }
        } else {
          PetscViewerVTKFWrite(viewer,fp,array,bs*nnodes,MPIU_SCALAR);
        }
      } else if (r == rank) {
        MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);
      }
      VecRestoreArrayRead(X,&x);
    }
  }
  PetscFree2(array,array2);
  PetscFree(grloc);

  PetscFPrintf(comm,fp,"\n </AppendedData>\n");
  PetscFPrintf(comm,fp,"</VTKFile>\n");
  PetscFClose(comm,fp);
  return 0;
}
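/*
   Writer for the VTK XML RectilinearGrid (.vtr) format.  The structure mirrors
   DMDAVTKWriteAll_VTS() above, except that a rectilinear grid stores one 1-D
   coordinate array per axis (Xcoord/Ycoord/Zcoord, xm+ym+zm scalars per piece)
   instead of a full 3-component Position array at every node.
*/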

static PetscErrorCode DMDAVTKWriteAll_VTR(DM da,PetscViewer viewer)
{
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,boffset,maxnnodes,maxbs,i,j,k,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL;
  PetscScalar              *array,*array2;

  PetscObjectGetComm((PetscObject)da,&comm);
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);
  DMDAGetInfo(da,&dim,&mx,&my,&mz,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
  DMDAGetLocalInfo(da,&info);
  PetscFOpen(comm,vtk->filename,"wb",&fp);
  PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");
  PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order);
  PetscFPrintf(comm,fp,"  <RectilinearGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);

  if (rank == 0) PetscMalloc1(size*6,&grloc);
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);

  /* Write XML header */
  maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
  maxbs     = 0;                /* Used for the temporary array size on rank 0 */
  boffset   = 0;                /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
    PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);
    PetscFPrintf(comm,fp,"      <Coordinates>\n");
    PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Xcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);
    boffset += xm*sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Ycoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);
    boffset += ym*sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Zcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);
    boffset += zm*sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm,fp,"      </Coordinates>\n");
    PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");
    for (link=vtk->link; link; link=link->next) {
      Vec        X = (Vec)link->vec;
      PetscInt   bs,f;
      DM         daCurr;
      PetscBool  fieldsnamed;
      const char *vecname = "Unnamed";

      VecGetDM(X,&daCurr);
      DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);
      maxbs = PetscMax(maxbs,bs);
      if (((PetscObject)X)->name || link != vtk->link) {
        PetscObjectGetName((PetscObject)X,&vecname);
      }

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      DMDAGetFieldsNamed(daCurr,&fieldsnamed);
      if (fieldsnamed) {
        for (f=0; f<bs; f++) {
          char       buf[256];
          const char *fieldname;
          DMDAGetFieldName(daCurr,f,&fieldname);
          if (!fieldname) {
            PetscSNPrintf(buf,sizeof(buf),"%D",f);
            fieldname = buf;
          }
          PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);
          boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
        }
      } else {
        PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);
        boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    PetscFPrintf(comm,fp,"      </PointData>\n");
    PetscFPrintf(comm,fp,"    </Piece>\n");
  }
  PetscFPrintf(comm,fp,"  </RectilinearGrid>\n");
  PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");
  PetscFPrintf(comm,fp,"_");

  /* Now write the arrays. */
  tag  = ((PetscObject)viewer)->tag;
  PetscMalloc2(PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array,PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array2);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      nnodes = info.xm*info.ym*info.zm;
    }
    {                           /* Write the coordinates */
      Vec Coords;

      DMGetCoordinates(da,&Coords);
      if (Coords) {
        const PetscScalar *coords;
        VecGetArrayRead(Coords,&coords);
        if (rank == 0) {
          if (r) {
            PetscMPIInt nn;
            MPI_Recv(array,xm+ym+zm,MPIU_SCALAR,r,tag,comm,&status);
            MPI_Get_count(&status,MPIU_SCALAR,&nn);
            if (nn != xm+ym+zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
          } else {
            /* x coordinates */
            for (j=0, k=0, i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[i] = coords[Iloc*dim + 0];
            }
            /* y coordinates */
            for (i=0, k=0, j=0; j<ym; j++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
            }
            /* z coordinates */
            for (i=0, j=0, k=0; k<zm; k++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
            }
          }
        } else if (r == rank) {
          xm = info.xm;
          ym = info.ym;
          zm = info.zm;
          for (j=0, k=0, i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[i] = coords[Iloc*dim + 0];
          }
          for (i=0, k=0, j=0; j<ym; j++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
          }
          for (i=0, j=0, k=0; k<zm; k++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
          }
          MPI_Send((void*)array2,xm+ym+zm,MPIU_SCALAR,0,tag,comm);
        }
        VecRestoreArrayRead(Coords,&coords);
      } else {       /* Fabricate some coordinates using grid index */
        for (i=0; i<xm; i++) {
          array[i] = xs+i;
        }
        for (j=0; j<ym; j++) {
          array[j+xm] = ys+j;
        }
        for (k=0; k<zm; k++) {
          array[k+xm+ym] = zs+k;
        }
      }
      if (rank == 0) {
        PetscViewerVTKFWrite(viewer,fp,&(array[0]),xm,MPIU_SCALAR);
        PetscViewerVTKFWrite(viewer,fp,&(array[xm]),ym,MPIU_SCALAR);
        PetscViewerVTKFWrite(viewer,fp,&(array[xm+ym]),zm,MPIU_SCALAR);
      }
    }
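    /* Note the economy relative to the .vts writer: a rectilinear piece ships
       only xm+ym+zm axis values, sampled along one grid line per axis, rather
       than 3 components at each of the xm*ym*zm nodes. */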

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt          bs,f;
      DM                daCurr;
      PetscBool         fieldsnamed;
      VecGetDM(X,&daCurr);
      DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);

      VecGetArrayRead(X,&x);
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);
          MPI_Get_count(&status,MPIU_SCALAR,&nn);
          if (nn != nnodes*bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
        } else {
          PetscArraycpy(array,x,nnodes*bs);
        }
        /* If any fields are named, add scalar fields. Otherwise, add a vector field */
        DMDAGetFieldsNamed(daCurr,&fieldsnamed);
        if (fieldsnamed) {
          for (f=0; f<bs; f++) {
            /* Extract (de-interleave) the f'th field */
            for (k=0; k<zm; k++) {
              for (j=0; j<ym; j++) {
                for (i=0; i<xm; i++) {
                  PetscInt Iloc = i+xm*(j+ym*k);
                  array2[Iloc] = array[Iloc*bs + f];
                }
              }
            }
            PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);
          }
        } else {
          /* The vector write belongs in the else branch: the header declared
             either bs scalar arrays or one vector array, never both. */
          PetscViewerVTKFWrite(viewer,fp,array,nnodes*bs,MPIU_SCALAR);
        }
      } else if (r == rank) {
        MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);
      }
      VecRestoreArrayRead(X,&x);
    }
  }
  PetscFree2(array,array2);
  PetscFree(grloc);

  PetscFPrintf(comm,fp,"\n </AppendedData>\n");
  PetscFPrintf(comm,fp,"</VTKFile>\n");
  PetscFClose(comm,fp);
  return 0;
}

/*@C
   DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer

   Collective

   Input Parameters:
+  odm - DM specifying the grid layout, passed as a PetscObject
-  viewer - viewer of type VTK

   Level: developer

   Notes:
   This function is a callback used by the VTK viewer to actually write the file.
   The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
   Instead, metadata for the entire file needs to be available up-front before you can start writing the file.

   If any fields have been named (see e.g. DMDASetFieldName()), then individual scalar
   fields are written. Otherwise, a single multi-dof (vector) field is written.

.seealso: PETSCVIEWERVTK
@*/
PetscErrorCode DMDAVTKWriteAll(PetscObject odm,PetscViewer viewer)
{
  DM             dm = (DM)odm;
  PetscBool      isvtk;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);
  if (!isvtk) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_INCOMP,"Cannot use viewer type %s",((PetscObject)viewer)->type_name);
  switch (viewer->format) {
  case PETSC_VIEWER_VTK_VTS:
    DMDAVTKWriteAll_VTS(dm,viewer);
    break;
  case PETSC_VIEWER_VTK_VTR:
    DMDAVTKWriteAll_VTR(dm,viewer);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for format '%s'",PetscViewerFormats[viewer->format]);
  }
  return 0;
}
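/*
   A minimal usage sketch (not part of this file) of how this callback is
   reached: the DM attached to a Vec selects DMDAVTKWriteAll when the Vec is
   viewed through a VTK viewer whose filename ends in .vts or .vtr.

     PetscViewer viewer;
     PetscViewerVTKOpen(PETSC_COMM_WORLD,"solution.vts",FILE_MODE_WRITE,&viewer);
     VecView(x,viewer);               // queues x; the file is written when the viewer is flushed/destroyed
     PetscViewerDestroy(&viewer);

   Here x is assumed to be a global vector obtained from a DMDA via
   DMCreateGlobalVector(), so that VecGetDM() can recover the grid layout.
*/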