Actual source code: ex5f90.F
1: ! "$Id: ex5f90.F,v 1.44 2001/09/11 18:47:20 bsmith Exp $";
2: !
3: ! Description: Solves a nonlinear system in parallel with SNES.
4: ! We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
5: ! domain, using distributed arrays (DAs) to partition the parallel grid.
6: ! The command line options include:
7: ! -par <parameter>, where <parameter> indicates the nonlinearity of the problem
8: ! problem SFI: <parameter> = Bratu parameter (0 <= par <= 6.81)
9: !
10: !/*T
11: ! Concepts: SNES^parallel Bratu example
12: ! Concepts: DA^using distributed arrays;
13: ! Processors: n
14: !T*/
15: !
16: ! --------------------------------------------------------------------------
17: !
18: ! Solid Fuel Ignition (SFI) problem. This problem is modeled by
19: ! the partial differential equation
20: !
21: ! -Laplacian u - lambda*exp(u) = 0, 0 < x,y < 1,
22: !
23: ! with boundary conditions
24: !
25: ! u = 0 for x = 0, x = 1, y = 0, y = 1.
26: !
27: ! A finite difference approximation with the usual 5-point stencil
28: ! is used to discretize the boundary value problem to obtain a nonlinear
29: ! system of equations.
30: !
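! Discretization note: with hx = 1/(mx-1) and hy = 1/(my-1), the 5-point
! stencil used in FormFunctionLocal() below gives, at each interior node (i,j),
!
!    f(i,j) = (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j))
!           + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1))
!           - hx*hy*lambda*exp(u(i,j)) = 0,
!
! while boundary nodes simply enforce f(i,j) = u(i,j) (with u = 0 there).
!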
31: ! The uniprocessor version of this code is snes/examples/tutorials/ex4f.F
32: !
33: ! --------------------------------------------------------------------------
34: ! The following define must be used before including any PETSc include files
35: ! in a module or interface, because those constructs cannot contain the
36: ! variable declarations that the include files would otherwise introduce.
37: !
39: module f90module
40: type userctx
41: #define PETSC_AVOID_DECLARATIONS
42: #include "include/finclude/petsc.h"
43: #include "include/finclude/petscvec.h"
44: #include "include/finclude/petscda.h"
45: #undef PETSC_AVOID_DECLARATIONS
46: DA da
47: integer xs,xe,xm,gxs,gxe,gxm
48: integer ys,ye,ym,gys,gye,gym
49: integer mx,my,rank
50: double precision lambda
51: end type userctx
52: contains
53: ! ---------------------------------------------------------------------
54: !
55: ! FormFunction - Evaluates nonlinear function, F(x).
56: !
57: ! Input Parameters:
58: ! snes - the SNES context
59: ! X - input vector
60: ! dummy - optional user-defined context, as set by SNESSetFunction()
61: ! (not used here)
62: !
63: ! Output Parameter:
64: ! F - function vector
65: !
66: ! Notes:
67: ! This routine serves as a wrapper for the lower-level routine
68: ! "FormFunctionLocal", where the actual computations are
69: ! done using the standard Fortran style of treating the local
70: ! vector data as a multidimensional array over the local mesh.
71: ! This routine merely handles ghost point scatters and accesses
72: ! the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
73: !
74: subroutine FormFunction(snes,X,F,user,ierr)
75: implicit none
77: #include "include/finclude/petsc.h"
78: #include "include/finclude/petscvec.h"
79: #include "include/finclude/petscda.h"
80: #include "include/finclude/petscis.h"
81: #include "include/finclude/petscmat.h"
82: #include "include/finclude/petscksp.h"
83: #include "include/finclude/petscpc.h"
84: #include "include/finclude/petscsnes.h"
86: #include "include/finclude/petscvec.h90"
89: ! Input/output variables:
90: SNES snes
91: Vec X,F
92: integer ierr
93: type (userctx) user
95: ! Declarations for use with local arrays:
96: PetscScalar,pointer :: lx_v(:),lf_v(:)
97: Vec localX
99: ! Scatter ghost points to local vector, using the 2-step process
100: ! DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
101: ! By placing code between these two statements, computations can
102: ! be done while messages are in transition.
104: call DAGetLocalVector(user%da,localX,ierr)
105: call DAGlobalToLocalBegin(user%da,X,INSERT_VALUES, &
106: & localX,ierr)
107: call DAGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr)
109: ! Get a pointer to vector data.
110: ! - For default PETSc vectors, VecGetArrayF90() returns a pointer to
111: ! the data array. Otherwise, the routine is implementation dependent.
112: ! - You MUST call VecRestoreArrayF90() when you no longer need access to
113: ! the array.
114: ! - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
115: ! and is usable from Fortran 90 only.
117: call VecGetArrayF90(localX,lx_v,ierr)
118: call VecGetArrayF90(F,lf_v,ierr)
120: ! Compute function over the locally owned part of the grid
122: call FormFunctionLocal(lx_v,lf_v,user,ierr)
124: ! Restore vectors
126: call VecRestoreArrayF90(localX,lx_v,ierr)
127: call VecRestoreArrayF90(F,lf_v,ierr)
129: ! Return the local work vector to the DA
131: call DARestoreLocalVector(user%da,localX,ierr)
132: call PetscLogFlops(11*user%ym*user%xm,ierr)
134: ! call VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr)
135: ! call VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr)
137: return
138: end subroutine FormFunction
139: end module f90module
143: program main
144: use f90module
145: implicit none
146: !
147: !
148: #include "include/finclude/petsc.h"
149: #include "include/finclude/petscvec.h"
150: #include "include/finclude/petscda.h"
151: #include "include/finclude/petscis.h"
152: #include "include/finclude/petscmat.h"
153: #include "include/finclude/petscksp.h"
154: #include "include/finclude/petscpc.h"
155: #include "include/finclude/petscsnes.h"
156: #include "include/finclude/petscvec.h90"
157: #include "include/finclude/petscda.h90"
159: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
160: ! Variable declarations
161: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
162: !
163: ! Variables:
164: ! snes - nonlinear solver
165: ! x, r - solution, residual vectors
166: ! J - Jacobian matrix
167: ! its - iterations for convergence
168: ! Nx, Ny - number of processors in x- and y-directions
169: ! matrix_free - flag: 1 indicates the matrix-free version (-snes_mf)
170: !
171: !
172: SNES snes
173: Vec x,r
174: Mat J
175: integer its,matrix_free,flg,ierr
176: double precision lambda_max,lambda_min
177: type (userctx) user
179: ! Note: Any user-defined Fortran routines (such as FormJacobian)
180: ! MUST be declared as external.
182: external FormInitialGuess,FormJacobian
184: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
185: ! Initialize program
186: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
188: call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
189: call MPI_Comm_rank(PETSC_COMM_WORLD,user%rank,ierr)
191: ! Initialize problem parameters
193: lambda_max = 6.81
194: lambda_min = 0.0
195: user%lambda = 6.0
196: call PetscOptionsGetReal(PETSC_NULL_CHARACTER,'-par', &
197: & user%lambda,flg,ierr)
198: if (user%lambda .ge. lambda_max .or. user%lambda .le. lambda_min) &
199: & then
200: if (user%rank .eq. 0) write(6,*) 'Lambda is out of range'
201: SETERRQ(1,' ',ierr)
202: endif
205: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
206: ! Create nonlinear solver context
207: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
209: call SNESCreate(PETSC_COMM_WORLD,snes,ierr)
211: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
212: ! Create vector data structures; set function evaluation routine
213: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
215: ! Create distributed array (DA) to manage parallel grid and vectors
217: ! This really needs only the star-type stencil, but we use the box
218: ! stencil temporarily.
219: call DACreate2d(PETSC_COMM_WORLD,DA_NONPERIODIC,DA_STENCIL_BOX, &
220: & -4,-4,PETSC_DECIDE,PETSC_DECIDE,1,1, &
221: & PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,user%da,ierr)
222: call DAGetInfo(user%da,PETSC_NULL_INTEGER,user%mx,user%my, &
223: & PETSC_NULL_INTEGER, &
224: & PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
225: & PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
226: & PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
227: & PETSC_NULL_INTEGER,ierr)
228:
229: !
230: ! Visualize the distribution of the array across the processors
231: !
232: ! call DAView(user%da,PETSC_VIEWER_DRAW_WORLD,ierr)
234: ! Extract a global vector from the DA; then duplicate for the remaining
235: ! vectors that are of the same type
237: call DACreateGlobalVector(user%da,x,ierr)
238: call VecDuplicate(x,r,ierr)
240: ! Get local grid boundaries (for 2-dimensional DA)
242: call DAGetCorners(user%da,user%xs,user%ys,PETSC_NULL_INTEGER, &
243: & user%xm,user%ym,PETSC_NULL_INTEGER,ierr)
244: call DAGetGhostCorners(user%da,user%gxs,user%gys, &
245: & PETSC_NULL_INTEGER,user%gxm,user%gym, &
246: & PETSC_NULL_INTEGER,ierr)
248: ! Here we shift the starting indices up by one so that we can easily
249: ! use the Fortran convention of 1-based indices (rather than 0-based indices).
251: user%xs = user%xs+1
252: user%ys = user%ys+1
253: user%gxs = user%gxs+1
254: user%gys = user%gys+1
256: user%ye = user%ys+user%ym-1
257: user%xe = user%xs+user%xm-1
258: user%gye = user%gys+user%gym-1
259: user%gxe = user%gxs+user%gxm-1
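! After this shift, the locally owned nodes on this process are
! xs..xe by ys..ye, and the ghosted (local) region is gxs..gxe by gys..gye,
! all as 1-based inclusive index ranges.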
261: ! Set function evaluation routine and vector
263: call SNESSetFunction(snes,r,FormFunction,user,ierr)
265: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
266: ! Create matrix data structure; set Jacobian evaluation routine
267: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
269: ! Set Jacobian matrix data structure and default Jacobian evaluation
270: ! routine. User can override with:
271: ! -snes_fd : default finite differencing approximation of Jacobian
272: ! -snes_mf : matrix-free Newton-Krylov method with no preconditioning
273: ! (unless user explicitly sets preconditioner)
274: ! -snes_mf_operator : form preconditioning matrix as set by the user,
275: ! but use matrix-free approx for Jacobian-vector
276: ! products within Newton-Krylov method
277: !
278: ! Note: For the parallel case, vectors and matrices MUST be partitioned
279: ! accordingly. When using distributed arrays (DAs) to create vectors,
280: ! the DAs determine the problem partitioning. We must explicitly
281: ! specify the local matrix dimensions upon its creation for compatibility
282: ! with the vector distribution. Thus, the generic MatCreate() routine
283: ! is NOT sufficient when working with distributed arrays.
284: !
285: ! Note: Here we only approximately preallocate storage space for the
286: ! Jacobian. See the users manual for a discussion of better techniques
287: ! for preallocating matrix memory.
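!
! For example, a typical parallel run exercising these options might look
! like the following (hypothetical command line; adjust the MPI launcher
! and executable name for your installation):
!     mpirun -np 4 ex5f90 -par 5.0 -snes_mf -snes_monitor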
289: call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-snes_mf', &
290: & matrix_free,ierr)
291: if (matrix_free .eq. 0) then
292: call DAGetMatrix(user%da,MATAIJ,J,ierr)
293: call SNESSetJacobian(snes,J,J,FormJacobian,user,ierr)
294: endif
296: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
297: ! Customize nonlinear solver; set runtime options
298: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
300: ! Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
302: call SNESSetFromOptions(snes,ierr)
304: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
305: ! Evaluate initial guess; then solve nonlinear system.
306: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
308: ! Note: The user should initialize the vector, x, with the initial guess
309: ! for the nonlinear solver prior to calling SNESSolve(). In particular,
310: ! to employ an initial guess of zero, the user should explicitly set
311: ! this vector to zero by calling VecSet().
313: call FormInitialGuess(user,x,ierr)
314: call SNESSolve(snes,x,ierr)
315: call SNESGetIterationNumber(snes,its,ierr)
316: if (user%rank .eq. 0) then
317: write(6,100) its
318: endif
319: 100 format('Number of Newton iterations = ',i5)
321: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
322: ! Free work space. All PETSc objects should be destroyed when they
323: ! are no longer needed.
324: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
326: if (matrix_free .eq. 0) call MatDestroy(J,ierr)
327: call VecDestroy(x,ierr)
328: call VecDestroy(r,ierr)
329: call SNESDestroy(snes,ierr)
330: call DADestroy(user%da,ierr)
331: call PetscFinalize(ierr)
332: end
334: ! ---------------------------------------------------------------------
335: !
336: ! FormInitialGuess - Forms initial approximation.
337: !
338: ! Input Parameters:
339: ! X - vector
340: !
341: ! Output Parameter:
342: ! X - vector
343: !
344: ! Notes:
345: ! This routine serves as a wrapper for the lower-level routine
346: ! "InitialGuessLocal", where the actual computations are
347: ! done using the standard Fortran style of treating the local
348: ! vector data as a multidimensional array over the local mesh.
349: ! This routine merely handles ghost point scatters and accesses
350: ! the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
351: !
352: subroutine FormInitialGuess(user,X,ierr)
353: use f90module
354: implicit none
356: #include "include/finclude/petscvec.h90"
357: #include "include/finclude/petsc.h"
358: #include "include/finclude/petscvec.h"
359: #include "include/finclude/petscda.h"
360: #include "include/finclude/petscis.h"
361: #include "include/finclude/petscmat.h"
362: #include "include/finclude/petscksp.h"
363: #include "include/finclude/petscpc.h"
364: #include "include/finclude/petscsnes.h"
366: ! Input/output variables:
367: type (userctx) user
368: Vec X
369: integer ierr
370:
371: ! Declarations for use with local arrays:
372: PetscScalar,pointer :: lx_v(:)
373: Vec localX
375: ierr = 0
377: ! Get a pointer to vector data.
378: ! - For default PETSc vectors, VecGetArrayF90() returns a pointer to
379: ! the data array. Otherwise, the routine is implementation dependent.
380: ! - You MUST call VecRestoreArrayF90() when you no longer need access to
381: ! the array.
382: ! - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
383: ! and is usable from Fortran 90 only.
385: call DAGetLocalVector(user%da,localX,ierr)
386: call VecGetArrayF90(localX,lx_v,ierr)
388: ! Compute initial guess over the locally owned part of the grid
390: call InitialGuessLocal(user,lx_v,ierr)
392: ! Restore vector
394: call VecRestoreArrayF90(localX,lx_v,ierr)
396: ! Insert values into global vector
398: call DALocalToGlobal(user%da,localX,INSERT_VALUES,X,ierr)
399: call DARestoreLocalVector(user%da,localX,ierr)
401: return
402: end
404: ! ---------------------------------------------------------------------
405: !
406: ! InitialGuessLocal - Computes initial approximation, called by
407: ! the higher level routine FormInitialGuess().
408: !
409: ! Input Parameter:
410: ! x - local vector data
411: !
412: ! Output Parameters:
413: ! x - local vector data
414: ! ierr - error code
415: !
416: ! Notes:
417: ! This routine uses standard Fortran-style computations over a 2-dim array.
418: !
419: subroutine InitialGuessLocal(user,x,ierr)
420: use f90module
421: implicit none
423: #include "include/finclude/petsc.h"
424: #include "include/finclude/petscvec.h"
425: #include "include/finclude/petscda.h"
426: #include "include/finclude/petscis.h"
427: #include "include/finclude/petscmat.h"
428: #include "include/finclude/petscksp.h"
429: #include "include/finclude/petscpc.h"
430: #include "include/finclude/petscsnes.h"
432: ! Input/output variables:
433: type (userctx) user
434: PetscScalar x(user%gxs:user%gxe,user%gys:user%gye)
435: integer ierr
437: ! Local variables:
438: integer i,j
439: PetscScalar temp1,temp,hx,hy,sc,one,hxdhy,hydhx
441: ! Set parameters
443: ierr = 0
444: one = 1.0
445: hx = one/(dble(user%mx-1))
446: hy = one/(dble(user%my-1))
447: sc = hx*hy*user%lambda
448: hxdhy = hx/hy
449: hydhx = hy/hx
450: temp1 = user%lambda/(user%lambda + one)
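! The initial guess computed below is
!    u(i,j) = (lambda/(lambda+1)) *
!             sqrt( min( min(i-1,mx-i)*hx, min(j-1,my-j)*hy ) )
! i.e., proportional to the square root of the distance to the nearest
! boundary, with u = 0 on the boundary itself.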
452: do 20 j=user%ys,user%ye
453: temp = dble(min(j-1,user%my-j))*hy
454: do 10 i=user%xs,user%xe
455: if (i .eq. 1 .or. j .eq. 1 &
456: & .or. i .eq. user%mx .or. j .eq. user%my) then
457: x(i,j) = 0.0
458: else
459: x(i,j) = temp1 * &
460: & sqrt(min(dble(min(i-1,user%mx-i)*hx),dble(temp)))
461: endif
462: 10 continue
463: 20 continue
465: return
466: end
468: ! ---------------------------------------------------------------------
469: !
470: ! FormFunctionLocal - Computes nonlinear function, called by
471: ! the higher level routine FormFunction().
472: !
473: ! Input Parameter:
474: ! x - local vector data
475: !
476: ! Output Parameters:
477: ! f - local vector data, f(x)
478: ! ierr - error code
479: !
480: ! Notes:
481: ! This routine uses standard Fortran-style computations over a 2-dim array.
482: !
483: subroutine FormFunctionLocal(x,f,user,ierr)
484: use f90module
486: implicit none
488: ! Input/output variables:
489: type (userctx) user
490: PetscScalar x(user%gxs:user%gxe,user%gys:user%gye)
491: PetscScalar f(user%xs:user%xe,user%ys:user%ye)
492: integer ierr
494: ! Local variables:
495: PetscScalar two,one,hx,hy,hxdhy,hydhx,sc
496: PetscScalar u,uxx,uyy
497: integer i,j
499: one = 1.0
500: two = 2.0
501: hx = one/dble(user%mx-1)
502: hy = one/dble(user%my-1)
503: sc = hx*hy*user%lambda
504: hxdhy = hx/hy
505: hydhx = hy/hx
507: ! Compute function over the locally owned part of the grid
509: do 20 j=user%ys,user%ye
510: do 10 i=user%xs,user%xe
511: if (i .eq. 1 .or. j .eq. 1 &
512: & .or. i .eq. user%mx .or. j .eq. user%my) then
513: f(i,j) = x(i,j)
514: else
515: u = x(i,j)
516: uxx = hydhx * (two*u &
517: & - x(i-1,j) - x(i+1,j))
518: uyy = hxdhy * (two*u - x(i,j-1) - x(i,j+1))
519: f(i,j) = uxx + uyy - sc*exp(u)
520: endif
521: 10 continue
522: 20 continue
524: return
525: end
527: ! ---------------------------------------------------------------------
528: !
529: ! FormJacobian - Evaluates Jacobian matrix.
530: !
531: ! Input Parameters:
532: ! snes - the SNES context
533: ! x - input vector
534: ! dummy - optional user-defined context, as set by SNESSetJacobian()
535: ! (not used here)
536: !
537: ! Output Parameters:
538: ! jac - Jacobian matrix
539: ! jac_prec - optionally different preconditioning matrix (not used here)
540: ! flag - flag indicating matrix structure
541: !
542: ! Notes:
543: ! This routine serves as a wrapper for the lower-level routine
544: ! "FormJacobianLocal", where the actual computations are
545: ! done using the standard Fortran style of treating the local
546: ! vector data as a multidimensional array over the local mesh.
547: ! This routine merely accesses the local vector data via
548: ! VecGetArrayF90() and VecRestoreArrayF90().
549: !
550: ! Notes:
551: ! Due to grid point reordering with DAs, we must always work
552: ! with the local grid points, and then transform them to the new
553: ! global numbering with the "ltog" mapping (via DAGetGlobalIndicesF90()).
554: ! We cannot work directly with the global numbers for the original
555: ! uniprocessor grid!
556: !
557: ! Two methods are available for imposing this transformation
558: ! when setting matrix entries:
559: ! (A) MatSetValuesLocal(), using the local ordering (including
560: ! ghost points!)
561: ! - Use DAGetGlobalIndicesF90() to extract the local-to-global map
562: ! - Associate this map with the matrix by calling
563: ! MatSetLocalToGlobalMapping() once
564: ! - Set matrix entries using the local ordering
565: ! by calling MatSetValuesLocal()
566: ! (B) MatSetValues(), using the global ordering
567: ! - Use DAGetGlobalIndicesF90() to extract the local-to-global map
568: ! - Then apply this map explicitly yourself
569: ! - Set matrix entries using the global ordering by calling
570: ! MatSetValues()
571: ! Option (A) seems cleaner/easier in many cases, and is the procedure
572: ! used in this example.
573: !
574: subroutine FormJacobian(snes,X,jac,jac_prec,flag,user,ierr)
575: use f90module
576: implicit none
578: #include "include/finclude/petsc.h"
579: #include "include/finclude/petscvec.h"
580: #include "include/finclude/petscda.h"
581: #include "include/finclude/petscis.h"
582: #include "include/finclude/petscmat.h"
583: #include "include/finclude/petscksp.h"
584: #include "include/finclude/petscpc.h"
585: #include "include/finclude/petscsnes.h"
587: #include "include/finclude/petscvec.h90"
589: ! Input/output variables:
590: SNES snes
591: Vec X
592: Mat jac,jac_prec
593: MatStructure flag
594: type(userctx) user
595: integer ierr
597: ! Declarations for use with local arrays:
598: PetscScalar,pointer :: lx_v(:)
599: Vec localX
601: ! Scatter ghost points to local vector, using the 2-step process
602: ! DAGlobalToLocalBegin(), DAGlobalToLocalEnd()
603: ! Computations can be done while messages are in transition,
604: ! by placing code between these two statements.
606: call DAGetLocalVector(user%da,localX,ierr)
607: call DAGlobalToLocalBegin(user%da,X,INSERT_VALUES,localX, &
608: & ierr)
609: call DAGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr)
611: ! Get a pointer to vector data
613: call VecGetArrayF90(localX,lx_v,ierr)
615: ! Compute entries for the locally owned part of the Jacobian.
617: call FormJacobianLocal(lx_v,jac,jac_prec,user,ierr)
619: ! Assemble matrix, using the 2-step process:
620: ! MatAssemblyBegin(), MatAssemblyEnd()
621: ! Computations can be done while messages are in transition,
622: ! by placing code between these two statements.
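! Here the VecRestoreArrayF90() and DARestoreLocalVector() calls are
! placed between MatAssemblyBegin() and MatAssemblyEnd() so that this
! local cleanup can overlap with the assembly communication.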
624: call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)
625: call VecRestoreArrayF90(localX,lx_v,ierr)
626: call DARestoreLocalVector(user%da,localX,ierr)
627: call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)
629: ! Set flag to indicate that the Jacobian matrix retains an identical
630: ! nonzero structure throughout all nonlinear iterations (although the
631: ! values of the entries change). Thus, we can save some work in setting
632: ! up the preconditioner (e.g., no need to redo symbolic factorization for
633: ! ILU/ICC preconditioners).
634: ! - If the nonzero structure of the matrix is different during
635: ! successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
636: ! must be used instead. If you are unsure whether the matrix
637: ! structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
638: ! - Caution: If you specify SAME_NONZERO_PATTERN, PETSc
639: ! believes your assertion and does not check the structure
640: ! of the matrix. If you erroneously claim that the structure
641: ! is the same when it actually is not, the new preconditioner
642: ! will not function correctly. Thus, use this optimization
643: ! feature with caution!
645: flag = SAME_NONZERO_PATTERN
647: ! Tell the matrix that we will never add a new nonzero location;
648: ! if we do, it will generate an error.
650: call MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,ierr)
652: return
653: end
655: ! ---------------------------------------------------------------------
656: !
657: ! FormJacobianLocal - Computes Jacobian matrix, called by
658: ! the higher level routine FormJacobian().
659: !
660: ! Input Parameters:
661: ! x - local vector data
662: !
663: ! Output Parameters:
664: ! jac - Jacobian matrix
665: ! jac_prec - optionally different preconditioning matrix (not used here)
666: ! ierr - error code
667: !
668: ! Notes:
669: ! This routine uses standard Fortran-style computations over a 2-dim array.
670: !
671: ! Notes:
672: ! Due to grid point reordering with DAs, we must always work
673: ! with the local grid points, and then transform them to the new
674: ! global numbering with the "ltog" mapping (via DAGetGlobalIndicesF90()).
675: ! We cannot work directly with the global numbers for the original
676: ! uniprocessor grid!
677: !
678: ! Two methods are available for imposing this transformation
679: ! when setting matrix entries:
680: ! (A) MatSetValuesLocal(), using the local ordering (including
681: ! ghost points!)
682: ! - Use DAGetGlobalIndicesF90() to extract the local-to-global map
683: ! - Associate this map with the matrix by calling
684: ! MatSetLocalToGlobalMapping() once
685: ! - Set matrix entries using the local ordering
686: ! by calling MatSetValuesLocal()
687: ! (B) MatSetValues(), using the global ordering
688: ! - Use DAGetGlobalIndicesF90() to extract the local-to-global map
689: ! - Then apply this map explicitly yourself
690: ! - Set matrix entries using the global ordering by calling
691: ! MatSetValues()
692: ! Option (A) seems cleaner/easier in many cases, and is the procedure
693: ! used in this example.
694: !
695: subroutine FormJacobianLocal(x,jac,jac_prec,user,ierr)
696: use f90module
697: implicit none
699: #include "include/finclude/petsc.h"
700: #include "include/finclude/petscvec.h"
701: #include "include/finclude/petscda.h"
702: #include "include/finclude/petscis.h"
703: #include "include/finclude/petscmat.h"
704: #include "include/finclude/petscksp.h"
705: #include "include/finclude/petscpc.h"
706: #include "include/finclude/petscsnes.h"
708: ! Input/output variables:
709: type (userctx) user
710: PetscScalar x(user%gxs:user%gxe,user%gys:user%gye)
711: Mat jac,jac_prec
712: integer ierr
714: ! Local variables:
715: integer row,col(5),i,j
716: PetscScalar two,one,hx,hy,hxdhy,hydhx,sc,v(5)
718: ! Set parameters
720: one = 1.0
721: two = 2.0
722: hx = one/dble(user%mx-1)
723: hy = one/dble(user%my-1)
724: sc = hx*hy
725: hxdhy = hx/hy
726: hydhx = hy/hx
728: ! Compute entries for the locally owned part of the Jacobian.
729: ! - Currently, all PETSc parallel matrix formats are partitioned by
730: ! contiguous chunks of rows across the processors.
731: ! - Each processor needs to insert only elements that it owns
732: ! locally (but any non-local elements will be sent to the
733: ! appropriate processor during matrix assembly).
734: ! - Here, we set all entries for a particular row at once.
735: ! - We can set matrix entries using either
736: ! MatSetValuesLocal() or MatSetValues(), as discussed above.
737: ! - Note that MatSetValues() uses 0-based row and column numbers
738: ! in Fortran as well as in C.
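! In the loop below, row is the 0-based index of node (i,j) in the ghosted
! local ordering, row = (j-gys)*gxm + (i-gxs), as required by
! MatSetValuesLocal(); the x-neighbors sit at row-1 and row+1, and the
! y-neighbors at row-gxm and row+gxm.  Each interior row holds
!    -hx/hy for the y-neighbors, -hy/hx for the x-neighbors, and
!     2*(hy/hx + hx/hy) - hx*hy*lambda*exp(x(i,j)) on the diagonal.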
740: do 20 j=user%ys,user%ye
741: row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
742: do 10 i=user%xs,user%xe
743: row = row + 1
744: ! boundary points
745: if (i .eq. 1 .or. j .eq. 1 &
746: & .or. i .eq. user%mx .or. j .eq. user%my) then
747: col(1) = row
748: v(1) = one
749: call MatSetValuesLocal(jac,1,row,1,col,v, &
750: & INSERT_VALUES,ierr)
751: ! interior grid points
752: else
753: v(1) = -hxdhy
754: v(2) = -hydhx
755: v(3) = two*(hydhx + hxdhy) &
756: & - sc*user%lambda*exp(x(i,j))
757: v(4) = -hydhx
758: v(5) = -hxdhy
759: col(1) = row - user%gxm
760: col(2) = row - 1
761: col(3) = row
762: col(4) = row + 1
763: col(5) = row + user%gxm
764: call MatSetValuesLocal(jac,1,row,5,col,v, &
765: & INSERT_VALUES,ierr)
766: endif
767: 10 continue
768: 20 continue
770: return
771: end