Actual source code: ex1.c (petsc-3.13.4, 2020-08-01)
1: static char help[] = "Tests various DMPlex routines to construct, refine and distribute a mesh.\n\n";
3: #include <petscdmplex.h>
4: #include <petscsf.h>
6: typedef enum {BOX, CYLINDER, SPHERE, BALL} DomainShape;
7: enum {STAGE_LOAD, STAGE_DISTRIBUTE, STAGE_REFINE, STAGE_OVERLAP};
9: typedef struct {
10: DM dm; /* REQUIRED in order to use SNES evaluation functions */
11: PetscInt debug; /* The debugging level */
12: PetscLogEvent createMeshEvent;
13: PetscLogStage stages[4];
14: /* Domain and mesh definition */
15: PetscInt dim; /* The topological mesh dimension */
16: PetscBool interpolate; /* Generate intermediate mesh elements */
17: PetscReal refinementLimit; /* The largest allowable cell volume */
18: PetscBool cellSimplex; /* Use simplices or hexes */
19: PetscBool cellWedge; /* Use wedges */
20: DomainShape domainShape; /* Shape of the region to be meshed */
21: PetscInt *domainBoxSizes; /* Sizes of the box mesh */
22: PetscReal *domainBoxL,*domainBoxU; /* Lower left, upper right corner of the box mesh */
23: DMBoundaryType periodicity[3]; /* The domain periodicity */
24: char filename[PETSC_MAX_PATH_LEN]; /* Import mesh from file */
25: char bdfilename[PETSC_MAX_PATH_LEN]; /* Import mesh boundary from file */
26: char extfilename[PETSC_MAX_PATH_LEN]; /* Import 2D mesh to be extruded from file */
27: PetscBool testPartition; /* Use a fixed partitioning for testing */
28: PetscInt overlap; /* The cell overlap to use during partitioning */
29: PetscReal extrude_thickness; /* Thickness of extrusion */
30: PetscInt extrude_layers; /* Layers to be extruded */
31: PetscBool testp4est[2];
32: PetscBool redistribute;
33: PetscBool final_ref; /* Run refinement at the end */
34: PetscBool final_diagnostics; /* Run diagnostics on the final mesh */
35: } AppCtx;
37: PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
38: {
39: const char *dShapes[4] = {"box", "cylinder", "sphere", "ball"};
40: PetscInt shape, bd, n;
41: static PetscInt domainBoxSizes[3] = {1,1,1};
42: static PetscReal domainBoxL[3] = {0.,0.,0.};
43: static PetscReal domainBoxU[3] = {1.,1.,1.};
44: PetscBool flg;
45: PetscErrorCode ierr;
48: options->debug = 0;
49: options->dim = 2;
50: options->interpolate = PETSC_FALSE;
51: options->refinementLimit = 0.0;
52: options->cellSimplex = PETSC_TRUE;
53: options->cellWedge = PETSC_FALSE;
54: options->domainShape = BOX;
55: options->domainBoxSizes = NULL;
56: options->domainBoxL = NULL;
57: options->domainBoxU = NULL;
58: options->periodicity[0] = DM_BOUNDARY_NONE;
59: options->periodicity[1] = DM_BOUNDARY_NONE;
60: options->periodicity[2] = DM_BOUNDARY_NONE;
61: options->filename[0] = '\0';
62: options->bdfilename[0] = '\0';
63: options->extfilename[0] = '\0';
64: options->testPartition = PETSC_FALSE;
65: options->overlap = 0;
66: options->extrude_layers = 2;
67: options->extrude_thickness = 0.1;
68: options->testp4est[0] = PETSC_FALSE;
69: options->testp4est[1] = PETSC_FALSE;
70: options->redistribute = PETSC_FALSE;
71: options->final_ref = PETSC_FALSE;
72: options->final_diagnostics = PETSC_TRUE;
74: PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
75: PetscOptionsBoundedInt("-debug", "The debugging level", "ex1.c", options->debug, &options->debug, NULL,0);
76: PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex1.c", options->dim, &options->dim, NULL,1,3);
77: PetscOptionsBool("-interpolate", "Generate intermediate mesh elements", "ex1.c", options->interpolate, &options->interpolate, NULL);
78: PetscOptionsReal("-refinement_limit", "The largest allowable cell volume", "ex1.c", options->refinementLimit, &options->refinementLimit, NULL);
79: PetscOptionsBool("-cell_simplex", "Use simplices if true, otherwise hexes", "ex1.c", options->cellSimplex, &options->cellSimplex, NULL);
80: PetscOptionsBool("-cell_wedge", "Use wedges if true", "ex1.c", options->cellWedge, &options->cellWedge, NULL);
81: shape = options->domainShape;
82: PetscOptionsEList("-domain_shape","The shape of the domain","ex1.c", dShapes, 4, dShapes[options->domainShape], &shape, NULL);
83: options->domainShape = (DomainShape) shape;
84: PetscOptionsIntArray("-domain_box_sizes","The sizes of the box domain","ex1.c", domainBoxSizes, (n=3,&n), &flg);
85: if (flg) { options->domainShape = BOX; options->domainBoxSizes = domainBoxSizes;}
86: PetscOptionsRealArray("-domain_box_ll","Coordinates of the lower left corner of the box domain","ex1.c", domainBoxL, (n=3,&n), &flg);
87: if (flg) { options->domainBoxL = domainBoxL;}
88: PetscOptionsRealArray("-domain_box_ur","Coordinates of the upper right corner of the box domain","ex1.c", domainBoxU, (n=3,&n), &flg);
89: if (flg) { options->domainBoxU = domainBoxU;}
90: bd = options->periodicity[0];
91: PetscOptionsEList("-x_periodicity", "The x-boundary periodicity", "ex1.c", DMBoundaryTypes, 5, DMBoundaryTypes[options->periodicity[0]], &bd, NULL);
92: options->periodicity[0] = (DMBoundaryType) bd;
93: bd = options->periodicity[1];
94: PetscOptionsEList("-y_periodicity", "The y-boundary periodicity", "ex1.c", DMBoundaryTypes, 5, DMBoundaryTypes[options->periodicity[1]], &bd, NULL);
95: options->periodicity[1] = (DMBoundaryType) bd;
96: bd = options->periodicity[2];
97: PetscOptionsEList("-z_periodicity", "The z-boundary periodicity", "ex1.c", DMBoundaryTypes, 5, DMBoundaryTypes[options->periodicity[2]], &bd, NULL);
98: options->periodicity[2] = (DMBoundaryType) bd;
99: PetscOptionsString("-filename", "The mesh file", "ex1.c", options->filename, options->filename, PETSC_MAX_PATH_LEN, NULL);
100: PetscOptionsString("-bd_filename", "The mesh boundary file", "ex1.c", options->bdfilename, options->bdfilename, PETSC_MAX_PATH_LEN, NULL);
101: PetscOptionsString("-ext_filename", "The 2D mesh file to be extruded", "ex1.c", options->extfilename, options->extfilename, PETSC_MAX_PATH_LEN, NULL);
102: PetscOptionsBoundedInt("-ext_layers", "The number of layers to extrude", "ex1.c", options->extrude_layers, &options->extrude_layers, NULL,0);
103: PetscOptionsReal("-ext_thickness", "The thickness of the layer to be extruded", "ex1.c", options->extrude_thickness, &options->extrude_thickness, NULL);
104: PetscOptionsBool("-test_partition", "Use a fixed partition for testing", "ex1.c", options->testPartition, &options->testPartition, NULL);
105: PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex1.c", options->overlap, &options->overlap, NULL,0);
106: PetscOptionsBool("-test_p4est_seq", "Test p4est with sequential base DM", "ex1.c", options->testp4est[0], &options->testp4est[0], NULL);
107: PetscOptionsBool("-test_p4est_par", "Test p4est with parallel base DM", "ex1.c", options->testp4est[1], &options->testp4est[1], NULL);
108: PetscOptionsBool("-test_redistribute", "Test redistribution", "ex1.c", options->redistribute, &options->redistribute, NULL);
109: PetscOptionsBool("-final_ref", "Run uniform refinement on the final mesh", "ex1.c", options->final_ref, &options->final_ref, NULL);
110: PetscOptionsBool("-final_diagnostics", "Run diagnostics on the final mesh", "ex1.c", options->final_diagnostics, &options->final_diagnostics, NULL);
111: PetscOptionsEnd();
113: PetscLogEventRegister("CreateMesh", DM_CLASSID, &options->createMeshEvent);
114: PetscLogStageRegister("MeshLoad", &options->stages[STAGE_LOAD]);
115: PetscLogStageRegister("MeshDistribute", &options->stages[STAGE_DISTRIBUTE]);
116: PetscLogStageRegister("MeshRefine", &options->stages[STAGE_REFINE]);
117: PetscLogStageRegister("MeshOverlap", &options->stages[STAGE_OVERLAP]);
118: return(0);
119: }
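/* Example invocations (illustrative only; the TEST block at the end of this file lists the
   combinations that are actually exercised by the PETSc test harness):
     ./ex1 -dim 2 -interpolate -dm_view                                        # default 2D simplicial box mesh
     ./ex1 -dim 3 -cell_simplex 0 -domain_shape cylinder -interpolate -dm_view # built-in hex cylinder
     ./ex1 -filename ${PETSC_DIR}/share/petsc/datafiles/meshes/square.msh -interpolate -dm_refine 1 -dm_view */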
121: /* Overload time to be the sphere radius */
122: static void snapToSphere(PetscInt dim, PetscInt Nf, PetscInt NfAux,
123: const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
124: const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
125: PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[])
126: {
127: PetscReal norm2 = 0.0, fac;
128: PetscInt n = uOff[1] - uOff[0], d;
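/* Scale the coordinates by t/|u|, projecting the point radially onto the sphere of radius t */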
130: for (d = 0; d < n; ++d) norm2 += PetscSqr(PetscRealPart(u[d]));
131: fac = t/PetscSqrtReal(norm2);
132: for (d = 0; d < n; ++d) f0[d] = u[d]*fac;
133: }
135: PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
136: {
137: PetscInt dim = user->dim;
138: PetscBool interpolate = user->interpolate;
139: PetscReal refinementLimit = user->refinementLimit;
140: PetscBool cellSimplex = user->cellSimplex;
141: PetscBool cellWedge = user->cellWedge;
142: const char *filename = user->filename;
143: const char *bdfilename = user->bdfilename;
144: const char *extfilename = user->extfilename;
145: PetscBool testp4est_seq = user->testp4est[0];
146: PetscBool testp4est_par = user->testp4est[1];
147: PetscInt triSizes_n2[2] = {4, 4};
148: PetscInt triPoints_n2[8] = {3, 5, 6, 7, 0, 1, 2, 4};
149: PetscInt triSizes_n8[8] = {1, 1, 1, 1, 1, 1, 1, 1};
150: PetscInt triPoints_n8[8] = {0, 1, 2, 3, 4, 5, 6, 7};
151: PetscInt quadSizes[2] = {2, 2};
152: PetscInt quadPoints[4] = {2, 3, 0, 1};
153: PetscInt gmshSizes_n3[3] = {14, 14, 14};
154: PetscInt gmshPoints_n3[42] = {1, 2, 4, 5, 9, 10, 11, 15, 16, 20, 21, 27, 28, 29,
155: 3, 8, 12, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
156: 0, 6, 7, 13, 14, 17, 18, 19, 22, 23, 24, 25, 26, 41};
157: PetscInt fluentSizes_n3[3] = {50, 50, 50};
158: PetscInt fluentPoints_n3[150] = { 5, 6, 7, 8, 12, 14, 16, 34, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 48, 50, 51, 80, 81, 89,
159: 91, 93, 94, 95, 96, 97, 98, 99, 100, 101, 104, 121, 122, 124, 125, 126, 127, 128, 129, 131, 133, 143, 144, 145, 147,
160: 1, 3, 4, 9, 10, 17, 18, 19, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 35, 47, 61, 71, 72, 73, 74,
161: 75, 76, 77, 78, 79, 86, 87, 88, 90, 92, 113, 115, 116, 117, 118, 119, 120, 123, 138, 140, 141, 142, 146, 148, 149,
162: 0, 2, 11, 13, 15, 20, 21, 22, 23, 49, 52, 53, 54, 55, 56, 57, 58, 59, 60, 62, 63, 64, 65, 66, 67,
163: 68, 69, 70, 82, 83, 84, 85, 102, 103, 105, 106, 107, 108, 109, 110, 111, 112, 114, 130, 132, 134, 135, 136, 137, 139};
164: size_t len, bdlen, extlen;
165: PetscMPIInt rank, size;
169: PetscLogEventBegin(user->createMeshEvent,0,0,0,0);
170: MPI_Comm_rank(comm, &rank);
171: MPI_Comm_size(comm, &size);
172: PetscStrlen(filename, &len);
173: PetscStrlen(bdfilename, &bdlen);
174: PetscStrlen(extfilename, &extlen);
175: PetscLogStagePush(user->stages[STAGE_LOAD]);
176: if (len) {
177: DMPlexCreateFromFile(comm, filename, interpolate, dm);
178: } else if (bdlen) {
179: DM boundary;
181: DMPlexCreateFromFile(comm, bdfilename, interpolate, &boundary);
182: DMPlexGenerate(boundary, NULL, interpolate, dm);
183: DMDestroy(&boundary);
184: } else if (extlen) {
185: DM edm;
187: DMPlexCreateFromFile(comm, extfilename, interpolate, &edm);
188: DMPlexExtrude(edm, user->extrude_layers, user->extrude_thickness, PETSC_TRUE, interpolate, dm);
189: DMDestroy(&edm);
190: } else {
191: switch (user->domainShape) {
192: case BOX:
193: if (cellWedge) {
194: if (dim != 3) SETERRQ1(comm, PETSC_ERR_ARG_WRONG, "Dimension must be 3 for a wedge mesh, not %D", dim);
195: DMPlexCreateWedgeBoxMesh(comm, user->domainBoxSizes, user->domainBoxL, user->domainBoxU, user->periodicity, PETSC_FALSE, interpolate, dm);
196: } else {
197: DMPlexCreateBoxMesh(comm, dim, cellSimplex, user->domainBoxSizes, user->domainBoxL, user->domainBoxU, user->periodicity, interpolate, dm);
198: }
199: break;
200: case CYLINDER:
201: if (cellSimplex) SETERRQ(comm, PETSC_ERR_ARG_WRONG, "Cannot mesh a cylinder with simplices");
202: if (dim != 3) SETERRQ1(comm, PETSC_ERR_ARG_WRONG, "Dimension must be 3 for a cylinder mesh, not %D", dim);
203: if (cellWedge) {
204: DMPlexCreateWedgeCylinderMesh(comm, 6, interpolate, dm);
205: } else {
206: DMPlexCreateHexCylinderMesh(comm, 1, user->periodicity[2], dm);
207: }
208: break;
209: case SPHERE:
210: DMPlexCreateSphereMesh(comm, dim, cellSimplex, dm);
211: break;
212: case BALL:
213: {
214: DM sdm;
215: PetscInt Nr = 0, r;
217: DMPlexCreateSphereMesh(comm, dim-1, cellSimplex, &sdm);
218: {
219: DM cdm;
220: PetscFE fe;
221: PetscInt dim, dE;
223: DMGetCoordinateDM(sdm, &cdm);
224: DMGetDimension(sdm, &dim);
225: DMGetCoordinateDim(sdm, &dE);
226: PetscFECreateLagrange(PETSC_COMM_SELF, dim, dE, PETSC_TRUE, 1, -1, &fe);
227: DMSetField(cdm, 0, NULL, (PetscObject) fe);
228: PetscFEDestroy(&fe);
229: DMCreateDS(cdm);
230: }
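/* The P1 coordinate discretization set up above is copied to each refined mesh (DMCopyDisc) so that
   DMPlexRemapGeometry can apply snapToSphere pointwise to the refined coordinates */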
231: PetscOptionsGetInt(NULL, "bd_", "-dm_refine", &Nr, NULL);
232: for (r = 0; r < Nr; ++r) {
233: DM rdm, cdm, rcdm;
234: DMRefine(sdm, PETSC_COMM_WORLD, &rdm);
235: DMGetCoordinateDM(sdm, &cdm);
236: DMGetCoordinateDM(rdm, &rcdm);
237: DMCopyDisc(cdm, rcdm);
238: DMPlexRemapGeometry(rdm, 1.0, snapToSphere);
239: DMDestroy(&sdm);
240: sdm = rdm;
241: }
242: DMPlexGenerate(sdm, NULL, interpolate, dm);
243: DMDestroy(&sdm);
244: }
245: break;
246: default: SETERRQ1(comm, PETSC_ERR_ARG_WRONG, "Unknown domain shape %D", user->domainShape);
247: }
248: }
249: DMLocalizeCoordinates(*dm); /* needed for periodic */
250: DMViewFromOptions(*dm,NULL,"-init_dm_view");
251: DMGetDimension(*dm,&dim);
253: if (testp4est_seq) {
254: #if defined(PETSC_HAVE_P4EST)
255: DM dmConv = NULL;
257: DMPlexCheckSymmetry(*dm);
258: DMPlexCheckSkeleton(*dm, 0);
259: DMPlexCheckFaces(*dm, 0);
260: DMPlexCheckGeometry(*dm);
261: DMPlexCheckPointSF(*dm);
262: DMPlexCheckInterfaceCones(*dm);
263: DMPlexSetRefinementUniform(*dm, PETSC_TRUE);
264: DMPlexSetCellRefinerType(*dm, DM_REFINER_TO_BOX);
265: DMRefine(*dm, PETSC_COMM_WORLD, &dmConv);
266: if (dmConv) {
267: DMDestroy(dm);
268: *dm = dmConv;
269: }
270: DMViewFromOptions(*dm,NULL,"-initref_dm_view");
271: DMPlexCheckSymmetry(*dm);
272: DMPlexCheckSkeleton(*dm, 0);
273: DMPlexCheckFaces(*dm, 0);
274: DMPlexCheckGeometry(*dm);
275: DMPlexCheckPointSF(*dm);
276: DMPlexCheckInterfaceCones(*dm);
277: user->cellSimplex = PETSC_FALSE;
279: DMConvert(*dm,dim == 2 ? DMP4EST : DMP8EST,&dmConv);
280: if (dmConv) {
281: PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_seq_1_");
282: DMSetFromOptions(dmConv);
283: DMDestroy(dm);
284: *dm = dmConv;
285: }
286: PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_seq_1_");
287: DMSetUp(*dm);
288: DMViewFromOptions(*dm, NULL, "-dm_view");
289: DMConvert(*dm,DMPLEX,&dmConv);
290: if (dmConv) {
291: PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_seq_2_");
292: DMSetFromOptions(dmConv);
293: DMDestroy(dm);
294: *dm = dmConv;
295: }
296: PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_seq_2_");
297: DMViewFromOptions(*dm, NULL, "-dm_view");
298: PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL);
299: #else
300: SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"Recompile with --download-p4est");
301: #endif
302: }
304: PetscLogStagePop();
305: if (!testp4est_seq) {
306: DM refinedMesh = NULL;
307: DM distributedMesh = NULL;
309: if (user->testPartition) {
310: const PetscInt *sizes = NULL;
311: const PetscInt *points = NULL;
312: PetscPartitioner part;
314: if (!rank) {
315: if (dim == 2 && cellSimplex && size == 2) {
316: sizes = triSizes_n2; points = triPoints_n2;
317: } else if (dim == 2 && cellSimplex && size == 8) {
318: sizes = triSizes_n8; points = triPoints_n8;
319: } else if (dim == 2 && !cellSimplex && size == 2) {
320: sizes = quadSizes; points = quadPoints;
321: } else if (dim == 2 && size == 3) {
322: PetscInt Nc;
324: DMPlexGetHeightStratum(*dm, 0, NULL, &Nc);
325: if (Nc == 42) { /* Gmsh 3 & 4 */
326: sizes = gmshSizes_n3; points = gmshPoints_n3;
327: } else if (Nc == 150) { /* Fluent 1 */
328: sizes = fluentSizes_n3; points = fluentPoints_n3;
329: } else if (Nc == 42) { /* Med 1 */
330: } else if (Nc == 161) { /* Med 3 */
331: }
332: }
333: }
334: DMPlexGetPartitioner(*dm, &part);
335: PetscPartitionerSetType(part, PETSCPARTITIONERSHELL);
336: PetscPartitionerShellSetPartition(part, size, sizes, points);
337: } else {
338: PetscPartitioner part;
340: DMPlexGetPartitioner(*dm,&part);
341: PetscPartitionerSetFromOptions(part);
342: }
343: /* Distribute mesh over processes */
344: PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]);
345: DMViewFromOptions(*dm, NULL, "-dm_pre_dist_view");
346: DMPlexDistribute(*dm, 0, NULL, &distributedMesh);
347: if (distributedMesh) {
348: DMDestroy(dm);
349: *dm = distributedMesh;
350: }
351: PetscLogStagePop();
352: DMViewFromOptions(*dm, NULL, "-distributed_dm_view");
353: /* Refine mesh using a volume constraint */
354: PetscLogStagePush(user->stages[STAGE_REFINE]);
355: DMPlexSetRefinementUniform(*dm, PETSC_FALSE);
356: DMPlexSetRefinementLimit(*dm, refinementLimit);
357: DMRefine(*dm, comm, &refinedMesh);
358: if (refinedMesh) {
359: DMDestroy(dm);
360: *dm = refinedMesh;
361: }
362: PetscLogStagePop();
363: }
364: PetscLogStagePush(user->stages[STAGE_REFINE]);
365: DMSetFromOptions(*dm);
366: PetscLogStagePop();
368: if (testp4est_par) {
369: #if defined(PETSC_HAVE_P4EST)
370: DM dmConv = NULL;
372: DMViewFromOptions(*dm, NULL, "-dm_tobox_view");
373: DMPlexSetRefinementUniform(*dm, PETSC_TRUE);
374: DMPlexSetCellRefinerType(*dm, DM_REFINER_TO_BOX);
375: DMRefine(*dm, PETSC_COMM_WORLD, &dmConv);
376: if (dmConv) {
377: DMDestroy(dm);
378: *dm = dmConv;
379: }
380: user->cellSimplex = PETSC_FALSE;
381: DMViewFromOptions(*dm, NULL, "-dm_tobox_view");
382: DMPlexCheckSymmetry(*dm);
383: DMPlexCheckSkeleton(*dm, 0);
384: DMPlexCheckFaces(*dm, 0);
385: DMPlexCheckGeometry(*dm);
386: DMPlexCheckPointSF(*dm);
387: DMPlexCheckInterfaceCones(*dm);
389: DMConvert(*dm,dim == 2 ? DMP4EST : DMP8EST,&dmConv);
390: if (dmConv) {
391: PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_par_1_");
392: DMSetFromOptions(dmConv);
393: DMDestroy(dm);
394: *dm = dmConv;
395: }
396: PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_par_1_");
397: DMSetUp(*dm);
398: DMViewFromOptions(*dm, NULL, "-dm_view");
399: DMConvert(*dm, DMPLEX, &dmConv);
400: if (dmConv) {
401: PetscObjectSetOptionsPrefix((PetscObject) dmConv, "conv_par_2_");
402: DMSetFromOptions(dmConv);
403: DMDestroy(dm);
404: *dm = dmConv;
405: }
406: PetscObjectSetOptionsPrefix((PetscObject) *dm, "conv_par_2_");
407: DMViewFromOptions(*dm, NULL, "-dm_view");
408: PetscObjectSetOptionsPrefix((PetscObject) *dm, NULL);
409: #else
410: SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"Recompile with --download-p4est");
411: #endif
412: }
414: /* test redistribution of an already distributed mesh */
415: if (user->redistribute) {
416: DM distributedMesh;
417: PetscSF sf;
418: PetscInt nranks;
420: DMViewFromOptions(*dm, NULL, "-dm_pre_redist_view");
421: DMPlexDistribute(*dm, 0, NULL, &distributedMesh);
422: if (distributedMesh) {
423: DMGetPointSF(distributedMesh, &sf);
424: PetscSFSetUp(sf);
425: DMGetNeighbors(distributedMesh, &nranks, NULL);
426: MPI_Allreduce(MPI_IN_PLACE, &nranks, 1, MPIU_INT, MPI_MIN, PetscObjectComm((PetscObject)*dm));
427: PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)*dm)), "Minimum number of neighbors: %D\n", nranks);
428: DMDestroy(dm);
429: *dm = distributedMesh;
430: }
431: DMViewFromOptions(*dm, NULL, "-dm_post_redist_view");
432: }
434: if (user->overlap) {
435: DM overlapMesh = NULL;
437: /* Add the overlap to the refined mesh */
438: PetscLogStagePush(user->stages[STAGE_OVERLAP]);
439: DMViewFromOptions(*dm, NULL, "-dm_pre_overlap_view");
440: DMPlexDistributeOverlap(*dm, user->overlap, NULL, &overlapMesh);
441: if (overlapMesh) {
442: PetscInt overlap;
443: DMPlexGetOverlap(overlapMesh, &overlap);
444: PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD, "Overlap: %D\n", overlap);
445: DMDestroy(dm);
446: *dm = overlapMesh;
447: }
448: DMViewFromOptions(*dm, NULL, "-dm_post_overlap_view");
449: PetscLogStagePop();
450: }
451: if (user->final_ref) {
452: DM refinedMesh = NULL;
454: DMPlexSetRefinementUniform(*dm, PETSC_TRUE);
455: DMRefine(*dm, comm, &refinedMesh);
456: if (refinedMesh) {
457: DMDestroy(dm);
458: *dm = refinedMesh;
459: }
460: }
462: PetscObjectSetName((PetscObject) *dm, "Simplicial Mesh");
463: DMViewFromOptions(*dm, NULL, "-dm_view");
464: if (user->final_diagnostics) {
465: DMPlexInterpolatedFlag interpolated;
466: PetscInt dim, depth;
468: DMGetDimension(*dm, &dim);
469: DMPlexGetDepth(*dm, &depth);
470: DMPlexIsInterpolatedCollective(*dm, &interpolated);
472: DMPlexCheckSymmetry(*dm);
473: if (interpolated == DMPLEX_INTERPOLATED_FULL) {
474: DMPlexCheckFaces(*dm, 0);
475: }
476: DMPlexCheckSkeleton(*dm, 0);
477: DMPlexCheckGeometry(*dm);
478: }
479: PetscLogEventEnd(user->createMeshEvent,0,0,0,0);
480: user->dm = *dm;
481: return(0);
482: }
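/* Overview of CreateMesh: the mesh is built from a file, a boundary file, an extruded 2D mesh, or a built-in
   shape (box, cylinder, sphere, or ball); it may then be round-tripped through p4est, distributed, refined by a
   volume limit or uniformly, redistributed, and given parallel overlap; the DMPlexCheck* diagnostics are run on
   the final mesh when -final_diagnostics is set. */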
484: int main(int argc, char **argv)
485: {
486: AppCtx user; /* user-defined work context */
487: PetscErrorCode ierr;
489: ierr = PetscInitialize(&argc, &argv, NULL, help);if (ierr) return ierr;
490: ProcessOptions(PETSC_COMM_WORLD, &user);
491: CreateMesh(PETSC_COMM_WORLD, &user, &user.dm);
492: DMDestroy(&user.dm);
493: ierr = PetscFinalize();
494: return ierr;
495: }
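/* The TEST block below drives the PETSc test harness: each "test:" entry runs this executable with the given
   args on nsize MPI ranks (default 1), subject to the build-time "requires:" constraints, and compares the
   output with output/ex1_<suffix>.out (or the explicit output_file); a "testset:" shares its args and settings
   among the nested tests. */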
497: /*TEST
499: # CTetGen 0-1
500: test:
501: suffix: 0
502: requires: ctetgen
503: args: -dim 3 -ctetgen_verbose 4 -dm_view ascii::ascii_info_detail -info :~sys
504: test:
505: suffix: 1
506: requires: ctetgen
507: args: -dim 3 -ctetgen_verbose 4 -refinement_limit 0.0625 -dm_view ascii::ascii_info_detail -info :~sys
510: # 2D LaTeX and ASCII output 2-9
511: test:
512: suffix: 2
513: requires: triangle
514: args: -dim 2 -dm_view ascii::ascii_latex
515: test:
516: suffix: 3
517: requires: triangle
518: args: -dim 2 -dm_refine 1 -interpolate 1 -dm_view ascii::ascii_info_detail
519: test:
520: suffix: 4
521: requires: triangle
522: nsize: 2
523: args: -dim 2 -dm_refine 1 -interpolate 1 -test_partition -dm_view ascii::ascii_info_detail
524: test:
525: suffix: 5
526: requires: triangle
527: nsize: 2
528: args: -dim 2 -dm_refine 1 -interpolate 1 -test_partition -dm_view ascii::ascii_latex
529: test:
530: suffix: 6
531: args: -dim 2 -cell_simplex 0 -interpolate -dm_view ascii::ascii_info_detail
532: test:
533: suffix: 7
534: args: -dim 2 -cell_simplex 0 -interpolate -dm_refine 1 -dm_view ascii::ascii_info_detail
535: test:
536: suffix: 8
537: nsize: 2
538: args: -dim 2 -cell_simplex 0 -interpolate -dm_refine 1 -interpolate 1 -test_partition -dm_view ascii::ascii_latex
540: # 1D ASCII output
541: test:
542: suffix: 1d_0
543: args: -dim 1 -domain_shape box -dm_view ascii::ascii_info_detail
544: test:
545: suffix: 1d_1
546: args: -dim 1 -domain_shape box -dm_refine 2 -dm_view ascii::ascii_info_detail
547: test:
548: suffix: 1d_2
549: args: -dim 1 -domain_box_sizes 5 -x_periodicity periodic -dm_view ascii::ascii_info_detail -dm_plex_check_all
551: # Parallel refinement tests with overlap
552: test:
553: suffix: refine_overlap_1d
554: nsize: 2
555: args: -dim 1 -domain_box_sizes 4 -dm_refine 1 -overlap {{0 1 2}separate output} -petscpartitioner_type simple -dm_view ascii::ascii_info
556: test:
557: suffix: refine_overlap_2d
558: requires: triangle
559: nsize: {{2 8}separate output}
560: args: -dim 2 -cell_simplex 1 -dm_refine 1 -interpolate 1 -test_partition -overlap {{0 1 2}separate output} -dm_view ascii::ascii_info
562: # Parallel simple partitioner tests
563: test:
564: suffix: part_simple_0
565: requires: triangle
566: nsize: 2
567: args: -dim 2 -cell_simplex 1 -dm_refine 0 -interpolate 0 -petscpartitioner_type simple -partition_view -dm_view ascii::ascii_info_detail
568: test:
569: suffix: part_simple_1
570: requires: triangle
571: nsize: 8
572: args: -dim 2 -cell_simplex 1 -dm_refine 1 -interpolate 1 -petscpartitioner_type simple -partition_view -dm_view ascii::ascii_info_detail
574: # Parallel partitioner tests
575: test:
576: suffix: part_parmetis_0
577: requires: parmetis
578: nsize: 2
579: args: -dim 2 -cell_simplex 0 -dm_refine 1 -interpolate 1 -petscpartitioner_type parmetis -dm_view -petscpartitioner_view -test_redistribute -dm_plex_csr_via_mat {{0 1}} -dm_pre_redist_view ::load_balance -dm_post_redist_view ::load_balance -petscpartitioner_view_graph
580: test:
581: suffix: part_ptscotch_0
582: requires: ptscotch
583: nsize: 2
584: args: -dim 2 -cell_simplex 0 -dm_refine 0 -interpolate 1 -petscpartitioner_type ptscotch -petscpartitioner_view -petscpartitioner_ptscotch_strategy quality -test_redistribute -dm_plex_csr_via_mat {{0 1}} -dm_pre_redist_view ::load_balance -dm_post_redist_view ::load_balance -petscpartitioner_view_graph
585: test:
586: suffix: part_ptscotch_1
587: requires: ptscotch
588: nsize: 8
589: args: -dim 2 -cell_simplex 0 -dm_refine 1 -interpolate 1 -petscpartitioner_type ptscotch -petscpartitioner_view -petscpartitioner_ptscotch_imbalance 0.1
591: # CGNS reader tests 10-11 (need to find smaller test meshes)
592: test:
593: suffix: cgns_0
594: requires: cgns
595: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/tut21.cgns -interpolate 1 -dm_view
597: # Gmsh mesh reader tests
598: test:
599: suffix: gmsh_0
600: requires: !single
601: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh -interpolate 1 -dm_view
602: test:
603: suffix: gmsh_1
604: requires: !single
605: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.msh -interpolate 1 -dm_view
606: test:
607: suffix: gmsh_2
608: requires: !single
609: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -interpolate 1 -dm_view
610: test:
611: suffix: gmsh_3
612: nsize: 3
613: requires: !single
614: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.msh -test_partition -interpolate 1 -dm_view
615: test:
616: suffix: gmsh_4
617: nsize: 3
618: requires: !single
619: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -test_partition -interpolate 1 -dm_view
620: test:
621: suffix: gmsh_5
622: requires: !single
623: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_quad.msh -interpolate 1 -dm_view
624: # TODO: it seems the mesh is not a valid Gmsh file (inverted cell)
625: test:
626: suffix: gmsh_6
627: requires: !single
628: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin_physnames.msh -interpolate 1 -dm_view -final_diagnostics 0
629: test:
630: suffix: gmsh_7
631: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all
632: test:
633: suffix: gmsh_8
634: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all
635: testset:
636: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic_bin.msh -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all
637: test:
638: suffix: gmsh_9
639: test:
640: suffix: gmsh_9_periodic_0
641: args: -dm_plex_gmsh_periodic 0
642: testset:
643: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all
644: test:
645: suffix: gmsh_10
646: test:
647: suffix: gmsh_10_periodic_0
648: args: -dm_plex_gmsh_periodic 0
649: testset:
650: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all -dm_refine 1
651: test:
652: suffix: gmsh_11
653: test:
654: suffix: gmsh_11_periodic_0
655: args: -dm_plex_gmsh_periodic 0
656: # TODO: it seems the mesh is not a valid Gmsh file (inverted cell)
657: test:
658: suffix: gmsh_12
659: nsize: 4
660: requires: !single mpiio
661: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin_physnames.msh -viewer_binary_mpiio -petscpartitioner_type simple -interpolate 1 -dm_view -final_diagnostics 0
662: test:
663: suffix: gmsh_13_hybs2t
664: nsize: 4
665: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh -petscpartitioner_type simple -interpolate 1 -dm_view -dm_refine 1 -dm_plex_cell_refiner tobox -dm_plex_check_all
666: test:
667: suffix: gmsh_14_ext
668: requires: !single
669: args: -ext_layers 2 -ext_thickness 1.5 -ext_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -dm_view -interpolate -dm_plex_check_all
670: test:
671: suffix: gmsh_14_ext_s2t
672: requires: !single
673: args: -ext_layers 2 -ext_thickness 1.5 -ext_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_bin.msh -dm_view -interpolate -dm_plex_check_all -dm_refine 1 -dm_plex_cell_refiner tobox
674: test:
675: suffix: gmsh_15_hyb3d
676: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view -interpolate -dm_plex_check_all
677: test:
678: suffix: gmsh_15_hyb3d_vtk
679: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view vtk: -interpolate -dm_plex_gmsh_hybrid -dm_plex_check_all
680: test:
681: suffix: gmsh_15_hyb3d_s2t
682: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -dm_view -interpolate -dm_plex_check_all -dm_refine 1 -dm_plex_cell_refiner tobox
683: test:
684: suffix: gmsh_16_spheresurface
685: nsize: 4
686: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_plex_check_all -dm_view -interpolate -petscpartitioner_type simple
687: test:
688: suffix: gmsh_16_spheresurface_s2t
689: nsize: 4
690: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_refine 1 -dm_plex_cell_refiner tobox -dm_plex_check_all -dm_view -interpolate -petscpartitioner_type simple
691: test:
692: suffix: gmsh_16_spheresurface_extruded
693: nsize: 4
694: args: -ext_layers 3 -ext_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_plex_check_all -dm_view -interpolate -petscpartitioner_type simple
695: test:
696: suffix: gmsh_16_spheresurface_extruded_s2t
697: nsize: 4
698: args: -ext_layers 3 -ext_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -dm_refine 1 -dm_plex_cell_refiner tobox -dm_plex_check_all -dm_view -interpolate -petscpartitioner_type simple
699: test:
700: suffix: gmsh_17_hyb3d_interp_ascii
701: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_hexwedge.msh -dm_view -interpolate -dm_plex_check_all
702: test:
703: suffix: exodus_17_hyb3d_interp_ascii
704: requires: exodusii
705: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_hexwedge.exo -dm_view -interpolate -dm_plex_check_all
707: # Legacy Gmsh v22/v40 ascii/binary reader tests
708: testset:
709: output_file: output/ex1_gmsh_3d_legacy.out
710: args: -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all
711: test:
712: suffix: gmsh_3d_ascii_v22
713: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii.msh2
714: test:
715: suffix: gmsh_3d_ascii_v40
716: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii.msh4
717: test:
718: suffix: gmsh_3d_binary_v22
719: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary.msh2
720: test:
721: suffix: gmsh_3d_binary_v40
722: requires: long64
723: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary.msh4
725: # Gmsh v41 ascii/binary reader tests
726: testset: # 32bit mesh, sequential
727: args: -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all
728: output_file: output/ex1_gmsh_3d_32.out
729: test:
730: suffix: gmsh_3d_ascii_v41_32
731: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-32.msh
732: test:
733: suffix: gmsh_3d_binary_v41_32
734: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh
735: test:
736: suffix: gmsh_3d_binary_v41_32_mpiio
737: requires: define(PETSC_HAVE_MPIIO)
738: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh -viewer_binary_mpiio
739: testset: # 32bit mesh, parallel
740: args: -petscpartitioner_type simple -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all
741: nsize: 2
742: output_file: output/ex1_gmsh_3d_32_np2.out
743: test:
744: suffix: gmsh_3d_ascii_v41_32_np2
745: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-32.msh
746: test:
747: suffix: gmsh_3d_binary_v41_32_np2
748: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh
749: test:
750: suffix: gmsh_3d_binary_v41_32_np2_mpiio
751: requires: define(PETSC_HAVE_MPIIO)
752: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-32.msh -viewer_binary_mpiio
753: testset: # 64bit mesh, sequential
754: args: -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all
755: output_file: output/ex1_gmsh_3d_64.out
756: test:
757: suffix: gmsh_3d_ascii_v41_64
758: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-64.msh
759: test:
760: suffix: gmsh_3d_binary_v41_64
761: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh
762: test:
763: suffix: gmsh_3d_binary_v41_64_mpiio
764: requires: define(PETSC_HAVE_MPIIO)
765: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh -viewer_binary_mpiio
766: testset: # 64bit mesh, parallel
767: args: -petscpartitioner_type simple -dm_view ::ascii_info_detail -interpolate -dm_plex_check_all
768: nsize: 2
769: output_file: output/ex1_gmsh_3d_64_np2.out
770: test:
771: suffix: gmsh_3d_ascii_v41_64_np2
772: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-ascii-64.msh
773: test:
774: suffix: gmsh_3d_binary_v41_64_np2
775: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh
776: test:
777: suffix: gmsh_3d_binary_v41_64_np2_mpiio
778: requires: define(PETSC_HAVE_MPIIO)
779: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/gmsh-3d-binary-64.msh -viewer_binary_mpiio
781: # Fluent mesh reader tests
782: # TODO: Geometry checks fail
783: test:
784: suffix: fluent_0
785: requires: !complex
786: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.cas -interpolate 1 -dm_view -final_diagnostics 0
787: test:
788: suffix: fluent_1
789: nsize: 3
790: requires: !complex
791: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.cas -interpolate 1 -test_partition -dm_view -final_diagnostics 0
792: test:
793: suffix: fluent_2
794: requires: !complex
795: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_5tets_ascii.cas -interpolate 1 -dm_view -final_diagnostics 0
796: test:
797: suffix: fluent_3
798: requires: !complex
799: TODO: Fails on non-linux: fseek(), fileno() ? https://gitlab.com/petsc/petsc/merge_requests/2206#note_238166382
800: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cube_5tets.cas -interpolate 1 -dm_view -final_diagnostics 0
802: # Med mesh reader tests, including parallel file reads
803: test:
804: suffix: med_0
805: requires: med
806: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.med -interpolate 1 -dm_view
807: test:
808: suffix: med_1
809: requires: med
810: nsize: 3
811: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square.med -interpolate 1 -petscpartitioner_type simple -dm_view
812: test:
813: suffix: med_2
814: requires: med
815: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cylinder.med -interpolate 1 -dm_view
816: test:
817: suffix: med_3
818: requires: med
819: TODO: MED
820: nsize: 3
821: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/cylinder.med -interpolate 1 -petscpartitioner_type simple -dm_view
823: # Test shape quality
824: test:
825: suffix: test_shape
826: requires: ctetgen
827: args: -dim 3 -interpolate -dm_refine_hierarchy 3 -dm_plex_check_all -dm_plex_check_cell_shape
829: # Test simplex to tensor conversion
830: test:
831: suffix: s2t2
832: requires: triangle
833: args: -dim 2 -dm_refine 1 -interpolate -dm_plex_cell_refiner tobox -refinement_limit 0.0625 -dm_view ascii::ascii_info_detail
835: test:
836: suffix: s2t3
837: requires: ctetgen
838: args: -dim 3 -dm_refine 1 -interpolate -dm_plex_cell_refiner tobox -refinement_limit 0.0625 -dm_view ascii::ascii_info_detail
840: # Test domain shapes
841: test:
842: suffix: cylinder
843: args: -dim 3 -cell_simplex 0 -interpolate -domain_shape cylinder -dm_plex_check_all -dm_view
845: test:
846: suffix: cylinder_per
847: args: -dim 3 -cell_simplex 0 -interpolate -domain_shape cylinder -z_periodicity periodic -dm_plex_check_all -dm_view
849: test:
850: suffix: cylinder_wedge
851: args: -dim 3 -cell_simplex 0 -interpolate -cell_wedge -domain_shape cylinder -dm_view vtk: -dm_plex_check_all
853: test:
854: suffix: cylinder_wedge_int
855: output_file: output/ex1_cylinder_wedge.out
856: args: -dim 3 -cell_simplex 0 -interpolate -cell_wedge -domain_shape cylinder -dm_view vtk: -dm_plex_check_all
858: test:
859: suffix: box_2d
860: args: -dim 2 -cell_simplex 0 -interpolate -domain_shape box -dm_refine 2 -dm_plex_check_all -dm_view
862: test:
863: suffix: box_2d_per
864: args: -dim 2 -cell_simplex 0 -interpolate -domain_shape box -dm_refine 2 -dm_plex_check_all -dm_view
866: test:
867: suffix: box_2d_per_unint
868: args: -dim 2 -cell_simplex 0 -interpolate 0 -domain_shape box -domain_box_sizes 3,3 -dm_plex_check_all -dm_view ::ascii_info_detail
870: test:
871: suffix: box_3d
872: args: -dim 3 -cell_simplex 0 -interpolate -domain_shape box -dm_refine 3 -dm_plex_check_all -dm_view
874: test:
875: requires: triangle
876: suffix: box_wedge
877: args: -dim 3 -cell_simplex 0 -interpolate -cell_wedge -domain_shape box -dm_view vtk: -dm_plex_check_all
879: testset:
880: requires: triangle
881: args: -dim 3 -cell_simplex 0 -interpolate -cell_wedge -domain_shape box -domain_box_sizes 2,3,1 -dm_view -dm_plex_check_all -dm_refine 1 -dm_plex_cell_refiner tobox
882: test:
883: suffix: box_wedge_s2t
884: test:
885: nsize: 3
886: args: -petscpartitioner_type simple
887: suffix: box_wedge_s2t_parallel
889: # Test GLVis output
890: test:
891: suffix: glvis_2d_tet
892: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_plex_gmsh_periodic 0 -dm_view glvis:
894: test:
895: suffix: glvis_2d_tet_per
896: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary 0
898: test:
899: suffix: glvis_2d_tet_per_mfem
900: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem -dm_view glvis: -interpolate
902: test:
903: suffix: glvis_2d_quad
904: args: -dim 2 -cell_simplex 0 -interpolate -domain_shape box -domain_box_sizes 3,3 -dm_view glvis:
906: test:
907: suffix: glvis_2d_quad_per
908: args: -dim 2 -cell_simplex 0 -interpolate -domain_shape box -domain_box_sizes 3,3 -x_periodicity periodic -y_periodicity periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary
910: test:
911: suffix: glvis_2d_quad_per_mfem
912: args: -dim 2 -cell_simplex 0 -interpolate -domain_shape box -domain_box_sizes 3,3 -x_periodicity periodic -y_periodicity periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem
914: test:
915: suffix: glvis_3d_tet
916: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_plex_gmsh_periodic 0 -dm_view glvis:
918: test:
919: suffix: glvis_3d_tet_per
920: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -dm_view glvis: -interpolate -viewer_glvis_dm_plex_enable_boundary
922: test:
923: suffix: glvis_3d_tet_per_mfem
924: TODO: broken
925: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere_bin.msh -viewer_glvis_dm_plex_enable_mfem -dm_view glvis: -interpolate
927: test:
928: suffix: glvis_3d_hex
929: args: -dim 3 -cell_simplex 0 -interpolate -domain_shape box -domain_box_sizes 3,3,3 -dm_view glvis:
931: test:
932: suffix: glvis_3d_hex_per
933: args: -dim 3 -cell_simplex 0 -interpolate -domain_shape box -domain_box_sizes 3,3,3 -x_periodicity periodic -y_periodicity periodic -z_periodicity periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary 0
935: test:
936: suffix: glvis_3d_hex_per_mfem
937: args: -dim 3 -cell_simplex 0 -domain_shape box -domain_box_sizes 3,3,3 -x_periodicity periodic -y_periodicity periodic -z_periodicity periodic -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -viewer_glvis_dm_plex_enable_mfem -interpolate
939: # Test P4EST
940: testset:
941: requires: p4est
942: args: -interpolate -dm_view -test_p4est_seq -conv_seq_2_dm_plex_check_all -conv_seq_1_dm_forest_minimum_refinement 1
943: test:
944: suffix: p4est_periodic
945: args: -dim 2 -domain_shape box -cell_simplex 0 -x_periodicity periodic -y_periodicity periodic -domain_box_sizes 3,5 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash
946: test:
947: suffix: p4est_periodic_3d
948: args: -dim 3 -domain_shape box -cell_simplex 0 -x_periodicity periodic -y_periodicity periodic -z_periodicity periodic -domain_box_sizes 3,5,4 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash
949: test:
950: suffix: p4est_gmsh_periodic
951: args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh
952: test:
953: suffix: p4est_gmsh_surface
954: args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3
955: test:
956: suffix: p4est_gmsh_surface_parallel
957: nsize: 2
958: args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3 -petscpartitioner_type simple -dm_view ::load_balance
959: test:
960: suffix: p4est_hyb_2d
961: args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh
962: test:
963: suffix: p4est_hyb_3d
964: args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh
965: test:
966: requires: ctetgen
967: suffix: p4est_s2t_bugfaces_3d
968: args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 0 -dim 3 -domain_box_sizes 1,1 -cell_simplex
969: test:
970: suffix: p4est_bug_overlapsf
971: nsize: 3
972: args: -dim 3 -cell_simplex 0 -domain_box_sizes 2,2,1 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple
973: test:
974: suffix: p4est_redistribute
975: nsize: 3
976: args: -dim 3 -cell_simplex 0 -domain_box_sizes 2,2,1 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple -test_redistribute -dm_plex_csr_via_mat {{0 1}} -dm_view ::load_balance
977: test:
978: suffix: p4est_gmsh_s2t_3d
979: args: -conv_seq_1_dm_forest_initial_refinement 1 -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
980: test:
981: suffix: p4est_gmsh_s2t_3d_hash
982: args: -conv_seq_1_dm_forest_initial_refinement 1 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
983: test:
984: requires: long_runtime
985: suffix: p4est_gmsh_periodic_3d
986: args: -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 1 -conv_seq_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh
988: testset:
989: requires: p4est
990: nsize: 6
991: args: -interpolate -test_p4est_par -conv_par_2_dm_plex_check_all -conv_par_1_dm_forest_minimum_refinement 1 -conv_par_1_dm_forest_partition_overlap 0
992: test:
993: TODO: interface cones do not conform
994: suffix: p4est_par_periodic
995: args: -dim 2 -domain_shape box -cell_simplex 0 -x_periodicity periodic -y_periodicity periodic -domain_box_sizes 3,5 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
996: test:
997: TODO: interface cones do not conform
998: suffix: p4est_par_periodic_3d
999: args: -dim 3 -domain_shape box -cell_simplex 0 -x_periodicity periodic -y_periodicity periodic -z_periodicity periodic -domain_box_sizes 3,5,4 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
1000: test:
1001: TODO: interface cones do not conform
1002: suffix: p4est_par_gmsh_periodic
1003: args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh
1004: test:
1005: suffix: p4est_par_gmsh_surface
1006: args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3
1007: test:
1008: suffix: p4est_par_gmsh_s2t_3d
1009: args: -conv_par_1_dm_forest_initial_refinement 1 -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
1010: test:
1011: TODO: interface cones do not conform
1012: suffix: p4est_par_gmsh_s2t_3d_hash
1013: args: -conv_par_1_dm_forest_initial_refinement 1 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
1014: test:
1015: requires: long_runtime
1016: suffix: p4est_par_gmsh_periodic_3d
1017: args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh
1019: testset:
1020: requires: p4est
1021: nsize: 6
1022: args: -interpolate -test_p4est_par -conv_par_2_dm_plex_check_all -conv_par_1_dm_forest_minimum_refinement 1 -conv_par_1_dm_forest_partition_overlap 1 -petscpartitioner_type simple
1023: test:
1024: suffix: p4est_par_ovl_periodic
1025: args: -dim 2 -domain_shape box -cell_simplex 0 -x_periodicity periodic -y_periodicity periodic -domain_box_sizes 3,5 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash
1026: # TODO: Mesh cell 201 is inverted, FVM volume = 0 (is that check correct?) -> diagnostics disabled
1027: test:
1028: suffix: p4est_par_ovl_periodic_3d
1029: args: -dim 3 -domain_shape box -cell_simplex 0 -x_periodicity periodic -y_periodicity periodic -z_periodicity periodic -domain_box_sizes 3,5,4 -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -final_diagnostics 0
1030: test:
1031: suffix: p4est_par_ovl_gmsh_periodic
1032: args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/square_periodic.msh
1033: test:
1034: suffix: p4est_par_ovl_gmsh_surface
1035: args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/surfacesphere_bin.msh -dm_plex_gmsh_spacedim 3
1036: test:
1037: suffix: p4est_par_ovl_gmsh_s2t_3d
1038: args: -conv_par_1_dm_forest_initial_refinement 1 -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
1039: test:
1040: suffix: p4est_par_ovl_gmsh_s2t_3d_hash
1041: args: -conv_par_1_dm_forest_initial_refinement 1 -conv_par_1_dm_forest_maximum_refinement 2 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
1042: test:
1043: requires: long_runtime
1044: suffix: p4est_par_ovl_gmsh_periodic_3d
1045: args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/mesh-3d-box-innersphere.msh
1046: test:
1047: suffix: p4est_par_ovl_hyb_2d
1048: args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh
1049: test:
1050: suffix: p4est_par_ovl_hyb_3d
1051: args: -conv_par_1_dm_forest_initial_refinement 0 -conv_par_1_dm_forest_maximum_refinement 1 -conv_par_1_dm_p4est_refine_pattern hash -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh
1053: test:
1054: TODO: broken
1055: requires: p4est
1056: nsize: 2
1057: suffix: p4est_bug_labels_noovl
1058: args: -interpolate -test_p4est_seq -dm_plex_check_all -dm_forest_minimum_refinement 0 -dm_forest_partition_overlap 1 -dim 2 -domain_shape box -cell_simplex 0 -domain_box_sizes 3,3 -dm_forest_initial_refinement 0 -dm_forest_maximum_refinement 2 -dm_p4est_refine_pattern hash -petscpartitioner_type simple -dm_forest_print_label_error
1060: test:
1061: requires: p4est
1062: nsize: 2
1063: suffix: p4est_bug_distribute_overlap
1064: args: -interpolate -test_p4est_seq -conv_seq_2_dm_plex_check_all -conv_seq_1_dm_forest_minimum_refinement 0 -conv_seq_1_dm_forest_partition_overlap 0 -dim 2 -domain_shape box -cell_simplex 0 -domain_box_sizes 3,3 -conv_seq_1_dm_forest_initial_refinement 0 -conv_seq_1_dm_forest_maximum_refinement 2 -conv_seq_1_dm_p4est_refine_pattern hash -petscpartitioner_type simple -overlap 1 -dm_view ::load_balance
1065: args: -dm_post_overlap_view
1067: test:
1068: suffix: glvis_2d_hyb
1069: args: -dim 2 -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_triquad.msh -interpolate -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -petscpartitioner_type simple
1071: test:
1072: suffix: glvis_3d_hyb
1073: args: -dim 3 -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_tetwedge.msh -interpolate -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -petscpartitioner_type simple
1075: test:
1076: suffix: glvis_3d_hyb_s2t
1077: args: -dim 3 -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/hybrid_3d_cube.msh -interpolate -dm_view glvis: -viewer_glvis_dm_plex_enable_boundary -petscpartitioner_type simple -dm_refine 1 -dm_plex_cell_refiner tobox -dm_plex_check_all
1078: TEST*/