Actual source code: grvtk.c

  1: #include <petsc/private/dmdaimpl.h>
  2: /*
  3:    Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires
  4:    including the private vtkvimpl.h file. The code should be refactored.
  5: */
  6: #include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>

  8: /* Helper function that determines whether any DMDA fields are named.  This is used
  9:    as a proxy for the user's intention to treat the DMDA fields as distinct
 10:    scalar-valued fields rather than as a single vector-valued field. */
 11: static PetscErrorCode DMDAGetFieldsNamed(DM da,PetscBool *fieldsnamed)
 12: {
 14:   PetscInt       f,bs;

 17:   *fieldsnamed = PETSC_FALSE;
 18:   DMDAGetDof(da,&bs);
 19:   for (f=0; f<bs; ++f) {
 20:     const char * fieldname;
 21:     DMDAGetFieldName(da,f,&fieldname);
 22:     if (fieldname) {
 23:       *fieldsnamed = PETSC_TRUE;
 24:       break;
 25:     }
 26:   }
 27:   return(0);
 28: }
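
/*
   Minimal usage sketch (illustrative, not part of grvtk.c; assumes a 2-dof DMDA "da"
   and a global vector "X" obtained from it, with arbitrary names): naming even one
   component makes DMDAGetFieldsNamed() return PETSC_TRUE, so the writers below emit
   one scalar DataArray per component ("state.u", "state.v") rather than a single
   2-component vector DataArray ("state").

     DMDASetFieldName(da,0,"u");
     DMDASetFieldName(da,1,"v");
     PetscObjectSetName((PetscObject)X,"state");
*/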

 30: static PetscErrorCode DMDAVTKWriteAll_VTS(DM da,PetscViewer viewer)
 31: {
 32:   const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
 33: #if defined(PETSC_USE_REAL_SINGLE)
 34:   const char precision[] = "Float32";
 35: #elif defined(PETSC_USE_REAL_DOUBLE)
 36:   const char precision[] = "Float64";
 37: #else
 38:   const char precision[] = "UnknownPrecision";
 39: #endif
 40:   MPI_Comm                 comm;
 41:   Vec                      Coords;
 42:   PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
 43:   PetscViewerVTKObjectLink link;
 44:   FILE                     *fp;
 45:   PetscMPIInt              rank,size,tag;
 46:   DMDALocalInfo            info;
 47:   PetscInt                 dim,mx,my,mz,cdim,bs,boffset,maxnnodes,maxbs,i,j,k,r;
 48:   PetscInt                 rloc[6],(*grloc)[6] = NULL;
 49:   PetscScalar              *array,*array2;
 50:   PetscErrorCode           ierr;

 53:   PetscObjectGetComm((PetscObject)da,&comm);
 54:   if (PetscDefined(USE_COMPLEX)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Complex values not supported");
 55:   MPI_Comm_size(comm,&size);
 56:   MPI_Comm_rank(comm,&rank);
 57:   DMDAGetInfo(da,&dim,&mx,&my,&mz,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);
 58:   DMDAGetLocalInfo(da,&info);
 59:   DMGetCoordinates(da,&Coords);
 60:   if (Coords) {
 61:     PetscInt csize;
 62:     VecGetSize(Coords,&csize);
 63:     if (csize % (mx*my*mz)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
 64:     cdim = csize/(mx*my*mz);
 65:     if (cdim < dim || cdim > 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate dimension out of range");
 66:   } else {
 67:     cdim = dim;
 68:   }

 70:   PetscFOpen(comm,vtk->filename,"wb",&fp);
 71:   PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");
 72:   PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order);
 73:   PetscFPrintf(comm,fp,"  <StructuredGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);

 75:   if (rank == 0) {PetscMalloc1(size*6,&grloc);}
 76:   rloc[0] = info.xs;
 77:   rloc[1] = info.xm;
 78:   rloc[2] = info.ys;
 79:   rloc[3] = info.ym;
 80:   rloc[4] = info.zs;
 81:   rloc[5] = info.zm;
 82:   MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);

 84:   /* Write XML header */
 85:   maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
 86:   maxbs     = 0;                /* Used for the temporary array size on rank 0 */
 87:   boffset   = 0;                /* Offset into binary file */
 88:   for (r=0; r<size; r++) {
 89:     PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
 90:     if (rank == 0) {
 91:       xs     = grloc[r][0];
 92:       xm     = grloc[r][1];
 93:       ys     = grloc[r][2];
 94:       ym     = grloc[r][3];
 95:       zs     = grloc[r][4];
 96:       zm     = grloc[r][5];
 97:       nnodes = xm*ym*zm;
 98:     }
 99:     maxnnodes = PetscMax(maxnnodes,nnodes);
100:     PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);
101:     PetscFPrintf(comm,fp,"      <Points>\n");
102:     PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);
103:     boffset += 3*nnodes*sizeof(PetscScalar) + sizeof(int);
104:     PetscFPrintf(comm,fp,"      </Points>\n");

106:     PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");
107:     for (link=vtk->link; link; link=link->next) {
108:       Vec        X = (Vec)link->vec;
109:       PetscInt   bs,f;
110:       DM         daCurr;
111:       PetscBool  fieldsnamed;
112:       const char *vecname = "Unnamed";

114:       VecGetDM(X,&daCurr);
115:       DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);
116:       maxbs = PetscMax(maxbs,bs);

118:       if (((PetscObject)X)->name || link != vtk->link) {
119:         PetscObjectGetName((PetscObject)X,&vecname);
120:       }

122:       /* If any fields are named, add scalar fields. Otherwise, add a vector field */
123:       DMDAGetFieldsNamed(daCurr,&fieldsnamed);
124:       if (fieldsnamed) {
125:         for (f=0; f<bs; f++) {
126:           char       buf[256];
127:           const char *fieldname;
128:           DMDAGetFieldName(daCurr,f,&fieldname);
129:           if (!fieldname) {
130:             PetscSNPrintf(buf,sizeof(buf),"%D",f);
131:             fieldname = buf;
132:           }
133:           PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);
134:           boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
135:         }
136:       } else {
137:         PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);
138:         boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
139:       }
140:     }
141:     PetscFPrintf(comm,fp,"      </PointData>\n");
142:     PetscFPrintf(comm,fp,"    </Piece>\n");
143:   }
144:   PetscFPrintf(comm,fp,"  </StructuredGrid>\n");
145:   PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");
146:   PetscFPrintf(comm,fp,"_");

148:   /* Now write the arrays. */
149:   tag  = ((PetscObject)viewer)->tag;
150:   PetscMalloc2(maxnnodes*PetscMax(3,maxbs),&array,maxnnodes*PetscMax(3,maxbs),&array2);
151:   for (r=0; r<size; r++) {
152:     MPI_Status status;
153:     PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
154:     if (rank == 0) {
155:       xs     = grloc[r][0];
156:       xm     = grloc[r][1];
157:       ys     = grloc[r][2];
158:       ym     = grloc[r][3];
159:       zs     = grloc[r][4];
160:       zm     = grloc[r][5];
161:       nnodes = xm*ym*zm;
162:     } else if (r == rank) {
163:       nnodes = info.xm*info.ym*info.zm;
164:     }

166:     /* Write the coordinates */
167:     if (Coords) {
168:       const PetscScalar *coords;
169:       VecGetArrayRead(Coords,&coords);
170:       if (rank == 0) {
171:         if (r) {
172:           PetscMPIInt nn;
173:           MPI_Recv(array,nnodes*cdim,MPIU_SCALAR,r,tag,comm,&status);
174:           MPI_Get_count(&status,MPIU_SCALAR,&nn);
175:           if (nn != nnodes*cdim) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
176:         } else {
177:           PetscArraycpy(array,coords,nnodes*cdim);
178:         }
179:         /* Copy coordinates, padding to the 3 components per node that VTK expects */
180:         for (k=0; k<zm; k++) {
181:           for (j=0; j<ym; j++) {
182:             for (i=0; i<xm; i++) {
183:               PetscInt Iloc = i+xm*(j+ym*k);
184:               array2[Iloc*3+0] = array[Iloc*cdim + 0];
185:               array2[Iloc*3+1] = cdim > 1 ? array[Iloc*cdim + 1] : 0.0;
186:               array2[Iloc*3+2] = cdim > 2 ? array[Iloc*cdim + 2] : 0.0;
187:             }
188:           }
189:         }
190:       } else if (r == rank) {
191:         MPI_Send((void*)coords,nnodes*cdim,MPIU_SCALAR,0,tag,comm);
192:       }
193:       VecRestoreArrayRead(Coords,&coords);
194:     } else {       /* Fabricate some coordinates using grid index */
195:       for (k=0; k<zm; k++) {
196:         for (j=0; j<ym; j++) {
197:           for (i=0; i<xm; i++) {
198:             PetscInt Iloc = i+xm*(j+ym*k);
199:             array2[Iloc*3+0] = xs+i;
200:             array2[Iloc*3+1] = ys+j;
201:             array2[Iloc*3+2] = zs+k;
202:           }
203:         }
204:       }
205:     }
206:     PetscViewerVTKFWrite(viewer,fp,array2,nnodes*3,MPIU_SCALAR);

208:     /* Write each of the objects queued up for this file */
209:     for (link=vtk->link; link; link=link->next) {
210:       Vec               X = (Vec)link->vec;
211:       const PetscScalar *x;
212:       PetscInt          bs,f;
213:       DM                daCurr;
214:       PetscBool         fieldsnamed;
215:       VecGetDM(X,&daCurr);
216:       DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL, NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);
217:       VecGetArrayRead(X,&x);
218:       if (rank == 0) {
219:         if (r) {
220:           PetscMPIInt nn;
221:           MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);
222:           MPI_Get_count(&status,MPIU_SCALAR,&nn);
223:           if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
224:         } else {
225:           PetscArraycpy(array,x,nnodes*bs);
226:         }

228:         /* If any fields are named, add scalar fields. Otherwise, add a vector field */
229:         DMDAGetFieldsNamed(daCurr,&fieldsnamed);
230:         if (fieldsnamed) {
231:           for (f=0; f<bs; f++) {
232:             /* Extract the f'th component from the interleaved array */
233:             for (k=0; k<zm; k++) {
234:               for (j=0; j<ym; j++) {
235:                 for (i=0; i<xm; i++) {
236:                   PetscInt Iloc = i+xm*(j+ym*k);
237:                   array2[Iloc] = array[Iloc*bs + f];
238:                 }
239:               }
240:             }
241:             PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);
242:           }
243:         } else {
244:           PetscViewerVTKFWrite(viewer,fp,array,bs*nnodes,MPIU_SCALAR);
245:         }
246:       } else if (r == rank) {
247:         MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);
248:       }
249:       VecRestoreArrayRead(X,&x);
250:     }
251:   }
252:   PetscFree2(array,array2);
253:   PetscFree(grloc);

255:   PetscFPrintf(comm,fp,"\n </AppendedData>\n");
256:   PetscFPrintf(comm,fp,"</VTKFile>\n");
257:   PetscFClose(comm,fp);
258:   return(0);
259: }
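
/*
   Worked example of the appended-data offsets computed in the header pass above
   (illustrative numbers only; assumes double-precision scalars, a 4-byte int, one
   piece with nnodes = 1000, and a single unnamed 2-dof vector):

     boffset = 0                          -> Points DataArray gets offset="0"
     boffset += 3*1000*8 + 4  (= 24004)      (length prefix written by
                                               PetscViewerVTKFWrite(), then 3*nnodes scalars)
     boffset = 24004                      -> the 2-component field gets offset="24004"
     boffset += 2*1000*8 + 4  (= 16004)   -> the next queued object (if any) starts at 40008
*/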

261: static PetscErrorCode DMDAVTKWriteAll_VTR(DM da,PetscViewer viewer)
262: {
263:   const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
264: #if defined(PETSC_USE_REAL_SINGLE)
265:   const char precision[] = "Float32";
266: #elif defined(PETSC_USE_REAL_DOUBLE)
267:   const char precision[] = "Float64";
268: #else
269:   const char precision[] = "UnknownPrecision";
270: #endif
271:   MPI_Comm                 comm;
272:   PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
273:   PetscViewerVTKObjectLink link;
274:   FILE                     *fp;
275:   PetscMPIInt              rank,size,tag;
276:   DMDALocalInfo            info;
277:   PetscInt                 dim,mx,my,mz,boffset,maxnnodes,maxbs,i,j,k,r;
278:   PetscInt                 rloc[6],(*grloc)[6] = NULL;
279:   PetscScalar              *array,*array2;
280:   PetscErrorCode           ierr;

283:   PetscObjectGetComm((PetscObject)da,&comm);
284:   if (PetscDefined(USE_COMPLEX)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Complex values not supported");
285:   MPI_Comm_size(comm,&size);
286:   MPI_Comm_rank(comm,&rank);
287:   DMDAGetInfo(da,&dim,&mx,&my,&mz,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
288:   DMDAGetLocalInfo(da,&info);
289:   PetscFOpen(comm,vtk->filename,"wb",&fp);
290:   PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");
291:   PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order);
292:   PetscFPrintf(comm,fp,"  <RectilinearGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);

294:   if (rank == 0) {PetscMalloc1(size*6,&grloc);}
295:   rloc[0] = info.xs;
296:   rloc[1] = info.xm;
297:   rloc[2] = info.ys;
298:   rloc[3] = info.ym;
299:   rloc[4] = info.zs;
300:   rloc[5] = info.zm;
301:   MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);

303:   /* Write XML header */
304:   maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
305:   maxbs     = 0;                /* Used for the temporary array size on rank 0 */
306:   boffset   = 0;                /* Offset into binary file */
307:   for (r=0; r<size; r++) {
308:     PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
309:     if (rank == 0) {
310:       xs     = grloc[r][0];
311:       xm     = grloc[r][1];
312:       ys     = grloc[r][2];
313:       ym     = grloc[r][3];
314:       zs     = grloc[r][4];
315:       zm     = grloc[r][5];
316:       nnodes = xm*ym*zm;
317:     }
318:     maxnnodes = PetscMax(maxnnodes,nnodes);
319:     PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);
320:     PetscFPrintf(comm,fp,"      <Coordinates>\n");
321:     PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Xcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);
322:     boffset += xm*sizeof(PetscScalar) + sizeof(int);
323:     PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Ycoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);
324:     boffset += ym*sizeof(PetscScalar) + sizeof(int);
325:     PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Zcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);
326:     boffset += zm*sizeof(PetscScalar) + sizeof(int);
327:     PetscFPrintf(comm,fp,"      </Coordinates>\n");
328:     PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");
329:     for (link=vtk->link; link; link=link->next) {
330:       Vec        X = (Vec)link->vec;
331:       PetscInt   bs,f;
332:       DM         daCurr;
333:       PetscBool  fieldsnamed;
334:       const char *vecname = "Unnamed";

336:       VecGetDM(X,&daCurr);
337:       DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);
338:       maxbs = PetscMax(maxbs,bs);
339:       if (((PetscObject)X)->name || link != vtk->link) {
340:         PetscObjectGetName((PetscObject)X,&vecname);
341:       }

343:       /* If any fields are named, add scalar fields. Otherwise, add a vector field */
344:       DMDAGetFieldsNamed(daCurr,&fieldsnamed);
345:       if (fieldsnamed) {
346:         for (f=0; f<bs; f++) {
347:           char       buf[256];
348:           const char *fieldname;
349:           DMDAGetFieldName(daCurr,f,&fieldname);
350:           if (!fieldname) {
351:             PetscSNPrintf(buf,sizeof(buf),"%D",f);
352:             fieldname = buf;
353:           }
354:           PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);
355:           boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
356:         }
357:       } else {
358:         PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);
359:         boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
360:       }
361:     }
362:     PetscFPrintf(comm,fp,"      </PointData>\n");
363:     PetscFPrintf(comm,fp,"    </Piece>\n");
364:   }
365:   PetscFPrintf(comm,fp,"  </RectilinearGrid>\n");
366:   PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");
367:   PetscFPrintf(comm,fp,"_");

369:   /* Now write the arrays. */
370:   tag  = ((PetscObject)viewer)->tag;
371:   PetscMalloc2(PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array,PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array2);
372:   for (r=0; r<size; r++) {
373:     MPI_Status status;
374:     PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
375:     if (rank == 0) {
376:       xs     = grloc[r][0];
377:       xm     = grloc[r][1];
378:       ys     = grloc[r][2];
379:       ym     = grloc[r][3];
380:       zs     = grloc[r][4];
381:       zm     = grloc[r][5];
382:       nnodes = xm*ym*zm;
383:     } else if (r == rank) {
384:       nnodes = info.xm*info.ym*info.zm;
385:     }
386:     {                           /* Write the coordinates */
387:       Vec Coords;

389:       DMGetCoordinates(da,&Coords);
390:       if (Coords) {
391:         const PetscScalar *coords;
392:         VecGetArrayRead(Coords,&coords);
393:         if (rank == 0) {
394:           if (r) {
395:             PetscMPIInt nn;
396:             MPI_Recv(array,xm+ym+zm,MPIU_SCALAR,r,tag,comm,&status);
397:             MPI_Get_count(&status,MPIU_SCALAR,&nn);
398:             if (nn != xm+ym+zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
399:           } else {
400:             /* x coordinates */
401:             for (j=0, k=0, i=0; i<xm; i++) {
402:               PetscInt Iloc = i+xm*(j+ym*k);
403:               array[i] = coords[Iloc*dim + 0];
404:             }
405:             /* y coordinates */
406:             for (i=0, k=0, j=0; j<ym; j++) {
407:               PetscInt Iloc = i+xm*(j+ym*k);
408:               array[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
409:             }
410:             /* z coordinates */
411:             for (i=0, j=0, k=0; k<zm; k++) {
412:               PetscInt Iloc = i+xm*(j+ym*k);
413:               array[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
414:             }
415:           }
416:         } else if (r == rank) {
417:           xm = info.xm;
418:           ym = info.ym;
419:           zm = info.zm;
420:           for (j=0, k=0, i=0; i<xm; i++) {
421:             PetscInt Iloc = i+xm*(j+ym*k);
422:             array2[i] = coords[Iloc*dim + 0];
423:           }
424:           for (i=0, k=0, j=0; j<ym; j++) {
425:             PetscInt Iloc = i+xm*(j+ym*k);
426:             array2[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
427:           }
428:           for (i=0, j=0, k=0; k<zm; k++) {
429:             PetscInt Iloc = i+xm*(j+ym*k);
430:             array2[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
431:           }
432:           MPI_Send((void*)array2,xm+ym+zm,MPIU_SCALAR,0,tag,comm);
433:         }
434:         VecRestoreArrayRead(Coords,&coords);
435:       } else {       /* Fabricate some coordinates using grid index */
436:         for (i=0; i<xm; i++) {
437:           array[i] = xs+i;
438:         }
439:         for (j=0; j<ym; j++) {
440:           array[j+xm] = ys+j;
441:         }
442:         for (k=0; k<zm; k++) {
443:           array[k+xm+ym] = zs+k;
444:         }
445:       }
446:       if (rank == 0) {
447:         PetscViewerVTKFWrite(viewer,fp,&(array[0]),xm,MPIU_SCALAR);
448:         PetscViewerVTKFWrite(viewer,fp,&(array[xm]),ym,MPIU_SCALAR);
449:         PetscViewerVTKFWrite(viewer,fp,&(array[xm+ym]),zm,MPIU_SCALAR);
450:       }
451:     }

453:     /* Write each of the objects queued up for this file */
454:     for (link=vtk->link; link; link=link->next) {
455:       Vec               X = (Vec)link->vec;
456:       const PetscScalar *x;
457:       PetscInt          bs,f;
458:       DM                daCurr;
459:       PetscBool         fieldsnamed;
460:       VecGetDM(X,&daCurr);
461:       DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);

463:       VecGetArrayRead(X,&x);
464:       if (rank == 0) {
465:         if (r) {
466:           PetscMPIInt nn;
467:           MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);
468:           MPI_Get_count(&status,MPIU_SCALAR,&nn);
469:           if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
470:         } else {
471:           PetscArraycpy(array,x,nnodes*bs);
472:         }
473:         /* If any fields are named, add scalar fields. Otherwise, add a vector field */
474:         DMDAGetFieldsNamed(daCurr,&fieldsnamed);
475:         if (fieldsnamed) {
476:           for (f=0; f<bs; f++) {
477:             /* Extract the f'th component from the interleaved array */
478:             for (k=0; k<zm; k++) {
479:               for (j=0; j<ym; j++) {
480:                 for (i=0; i<xm; i++) {
481:                   PetscInt Iloc = i+xm*(j+ym*k);
482:                   array2[Iloc] = array[Iloc*bs + f];
483:                 }
484:               }
485:             }
486:             PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);
487:           }
488:         } else {
489:           PetscViewerVTKFWrite(viewer,fp,array,nnodes*bs,MPIU_SCALAR);
490:         }
491:       } else if (r == rank) {
492:         MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);
493:       }
494:       VecRestoreArrayRead(X,&x);
495:     }
496:   }
497:   PetscFree2(array,array2);
498:   PetscFree(grloc);

500:   PetscFPrintf(comm,fp,"\n </AppendedData>\n");
501:   PetscFPrintf(comm,fp,"</VTKFile>\n");
502:   PetscFClose(comm,fp);
503:   return(0);
504: }
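
/*
   Usage sketch (illustrative, not part of grvtk.c; assumes a 3d DMDA "da"): the
   rectilinear (.vtr) path above writes only per-axis Xcoord/Ycoord/Zcoord arrays,
   sampling the coordinates along the first row, column, and pillar of each piece,
   so it is only meaningful when the coordinates are axis-aligned, e.g. as set by

     DMDASetUniformCoordinates(da,0.0,1.0,0.0,1.0,0.0,1.0);

   For general curvilinear coordinates use a ".vts" file name (the VTK viewer picks
   the format from the extension) so that DMDAVTKWriteAll_VTS() is selected instead.
*/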

506: /*@C
507:    DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer

509:    Collective

511:    Input Parameters:
512: +  odm - DM specifying the grid layout, passed as a PetscObject
513: -  viewer - viewer of type VTK

515:    Level: developer

517:    Notes:
518:    This function is a callback used by the VTK viewer to actually write the file.
519:    The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
520:    Instead, metadata for the entire file needs to be available up-front before you can start writing the file.

522:    If any fields have been named (see e.g. DMDASetFieldName()), then individual scalar
523:    fields are written. Otherwise, a single multi-dof (vector) field is written.

525: .seealso: PETSCVIEWERVTK
526: @*/
527: PetscErrorCode DMDAVTKWriteAll(PetscObject odm,PetscViewer viewer)
528: {
529:   DM             dm = (DM)odm;
530:   PetscBool      isvtk;

536:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);
537:   if (!isvtk) SETERRQ1(PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_INCOMP,"Cannot use viewer type %s",((PetscObject)viewer)->type_name);
538:   switch (viewer->format) {
539:   case PETSC_VIEWER_VTK_VTS:
540:     DMDAVTKWriteAll_VTS(dm,viewer);
541:     break;
542:   case PETSC_VIEWER_VTK_VTR:
543:     DMDAVTKWriteAll_VTR(dm,viewer);
544:     break;
545:   default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for format '%s'",PetscViewerFormats[viewer->format]);
546:   }
547:   return(0);
548: }
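
/*
   End-to-end sketch of how this callback is reached (illustrative; the grid sizes
   and file name are arbitrary and error checking is omitted for brevity): viewing
   a DMDA vector with a PETSCVIEWERVTK viewer queues the vector on the viewer, and
   DMDAVTKWriteAll() is invoked when the viewer is flushed/destroyed, writing all
   queued fields into one .vts file.

     #include <petscdmda.h>
     int main(int argc,char **argv)
     {
       DM          da;
       Vec         X;
       PetscViewer viewer;

       PetscInitialize(&argc,&argv,NULL,NULL);
       DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,
                    DMDA_STENCIL_STAR,8,8,8,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,
                    2,1,NULL,NULL,NULL,&da);
       DMSetUp(da);
       DMDASetUniformCoordinates(da,0.0,1.0,0.0,1.0,0.0,1.0);
       DMCreateGlobalVector(da,&X);
       PetscObjectSetName((PetscObject)X,"state");
       VecSet(X,1.0);
       PetscViewerVTKOpen(PETSC_COMM_WORLD,"state.vts",FILE_MODE_WRITE,&viewer);
       VecView(X,viewer);                  /* queues X; the file is written at viewer destroy */
       PetscViewerDestroy(&viewer);
       VecDestroy(&X);
       DMDestroy(&da);
       PetscFinalize();
       return 0;
     }
*/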