Actual source code: ex21.c

slepc-3.16.2 2022-02-01
/*
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   SLEPc - Scalable Library for Eigenvalue Problem Computations
   Copyright (c) 2002-2021, Universitat Politecnica de Valencia, Spain

   This file is part of SLEPc.
   SLEPc is distributed under a 2-clause BSD license (see LICENSE).
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
*/

static char help[] = "Simple 1-D nonlinear eigenproblem (matrix-free version).\n\n"
  "The command line options are:\n"
  "  -n <n>, where <n> = number of grid subdivisions\n\n";

/*
   Solve 1-D PDE
            -u'' = lambda*u
   on [0,1] subject to
            u(0)=0, u'(1)=u(1)*lambda*kappa/(kappa-lambda)
*/
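/*
   Discretization (consistent with the matrix entries used in MatMult_Fun
   below): linear finite elements on a uniform mesh of n elements (h=1/n)
   give T(lambda) = K - lambda*M plus a boundary term, with stiffness matrix
   K = (1/h)*tridiag(-1,2,-1) and mass matrix M = (h/6)*tridiag(1,4,1).
   Hence the diagonal entries 2*(n - lambda*h/3), the off-diagonal entries
   -(n + lambda*h/6), and a last diagonal entry that carries the nonlinear
   boundary contribution lambda*kappa/(lambda-kappa).
*/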

#include <slepcnep.h>

/*
   User-defined routines
*/
PetscErrorCode FormFunction(NEP,PetscScalar,Mat,Mat,void*);
PetscErrorCode FormJacobian(NEP,PetscScalar,Mat,void*);

/*
   Matrix operations and context
*/
PetscErrorCode MatMult_Fun(Mat,Vec,Vec);
PetscErrorCode MatGetDiagonal_Fun(Mat,Vec);
PetscErrorCode MatDestroy_Fun(Mat);
PetscErrorCode MatDuplicate_Fun(Mat,MatDuplicateOption,Mat*);
PetscErrorCode MatMult_Jac(Mat,Vec,Vec);
PetscErrorCode MatGetDiagonal_Jac(Mat,Vec);
PetscErrorCode MatDestroy_Jac(Mat);

typedef struct {
  PetscScalar lambda,kappa;   /* scalar at which T is evaluated; stiffness/mass ratio */
  PetscReal   h;              /* mesh spacing */
  PetscMPIInt next,prev;      /* ranks of neighboring processes (MPI_PROC_NULL at ends) */
} MatCtx;

/*
   User-defined application context
*/
typedef struct {
  PetscScalar kappa;   /* ratio between stiffness of spring and attached mass */
  PetscReal   h;       /* mesh spacing */
} ApplicationCtx;

int main(int argc,char **argv)
{
  NEP            nep;             /* nonlinear eigensolver context */
  Mat            F,J;             /* Function and Jacobian matrices */
  ApplicationCtx ctx;             /* user-defined context */
  MatCtx         *ctxF,*ctxJ;     /* contexts for shell matrices */
  PetscInt       n=128,nev;
  KSP            ksp;
  PC             pc;
  PetscMPIInt    rank,size;
  PetscBool      terse;
  PetscErrorCode ierr;
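  /* most PETSc/SLEPc calls below return a PetscErrorCode; the usual
     CHKERRQ error checks are elided in this listing for brevity */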

  ierr = SlepcInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
  PetscPrintf(PETSC_COMM_WORLD,"\n1-D Nonlinear Eigenproblem, n=%D\n\n",n);
  ctx.h = 1.0/(PetscReal)n;
  ctx.kappa = 1.0;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear eigensolver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  NEPCreate(PETSC_COMM_WORLD,&nep);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set Function evaluation routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscNew(&ctxF);
  ctxF->h = ctx.h;
  ctxF->kappa = ctx.kappa;
  ctxF->next = rank==size-1? MPI_PROC_NULL: rank+1;
  ctxF->prev = rank==0? MPI_PROC_NULL: rank-1;

  MatCreateShell(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,n,n,(void*)ctxF,&F);
  MatShellSetOperation(F,MATOP_MULT,(void(*)(void))MatMult_Fun);
  MatShellSetOperation(F,MATOP_GET_DIAGONAL,(void(*)(void))MatGetDiagonal_Fun);
  MatShellSetOperation(F,MATOP_DESTROY,(void(*)(void))MatDestroy_Fun);
  MatShellSetOperation(F,MATOP_DUPLICATE,(void(*)(void))MatDuplicate_Fun);
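  /*
     F is a shell (matrix-free) matrix: no entries are stored. The solver only
     needs the operations registered above: MatMult applies T(lambda),
     MatGetDiagonal supports the Jacobi preconditioner selected below, and
     MatDuplicate/MatDestroy let the library manage internal copies.
  */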

  /*
     Set Function matrix data structure and Function evaluation routine
  */
  NEPSetFunction(nep,F,F,FormFunction,NULL);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set Jacobian evaluation routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscNew(&ctxJ);
  ctxJ->h = ctx.h;
  ctxJ->kappa = ctx.kappa;
  ctxJ->next = rank==size-1? MPI_PROC_NULL: rank+1;
  ctxJ->prev = rank==0? MPI_PROC_NULL: rank-1;

  MatCreateShell(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,n,n,(void*)ctxJ,&J);
  MatShellSetOperation(J,MATOP_MULT,(void(*)(void))MatMult_Jac);
  MatShellSetOperation(J,MATOP_GET_DIAGONAL,(void(*)(void))MatGetDiagonal_Jac);
  MatShellSetOperation(J,MATOP_DESTROY,(void(*)(void))MatDestroy_Jac);

  /*
     Set Jacobian matrix data structure and Jacobian evaluation routine
  */
  NEPSetJacobian(nep,J,FormJacobian,NULL);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize nonlinear solver; set runtime options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  NEPSetType(nep,NEPRII);
  NEPRIISetLagPreconditioner(nep,0);
  NEPRIIGetKSP(nep,&ksp);
  KSPSetType(ksp,KSPBCGS);
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCJACOBI);
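  /*
     The same setup can be chosen at run time; assuming the usual SLEPc
     option-prefix conventions (compare the SLP options in the test section
     at the end of this file), this would read:
       -nep_type rii -nep_rii_ksp_type bcgs -nep_rii_pc_type jacobi
  */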

  /*
     Set solver parameters at runtime
  */
  NEPSetFromOptions(nep);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the eigensystem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  NEPSolve(nep);
  NEPGetDimensions(nep,&nev,NULL,NULL);
  PetscPrintf(PETSC_COMM_WORLD," Number of requested eigenvalues: %D\n",nev);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                    Display solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /* show detailed info unless -terse option is given by user */
  PetscOptionsHasName(NULL,NULL,"-terse",&terse);
  if (terse) {
    NEPErrorView(nep,NEP_ERROR_RELATIVE,NULL);
  } else {
    PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL);
    NEPConvergedReasonView(nep,PETSC_VIEWER_STDOUT_WORLD);
    NEPErrorView(nep,NEP_ERROR_RELATIVE,PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);
  }
  NEPDestroy(&nep);
  MatDestroy(&F);
  MatDestroy(&J);
  ierr = SlepcFinalize();
  return ierr;
}

/* ------------------------------------------------------------------- */
/*
   FormFunction - Computes Function matrix  T(lambda)

   Input Parameters:
.  nep    - the NEP context
.  lambda - the scalar argument
.  ctx    - optional user-defined context, as set by NEPSetFunction()

   Output Parameters:
.  fun - Function matrix
.  B   - optionally different preconditioning matrix
*/
PetscErrorCode FormFunction(NEP nep,PetscScalar lambda,Mat fun,Mat B,void *ctx)
{
  MatCtx *ctxF;

  PetscFunctionBeginUser;
  MatShellGetContext(fun,&ctxF);
  ctxF->lambda = lambda;
  PetscFunctionReturn(0);
}
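/*
   With shell matrices, evaluating the Function (and, below, the Jacobian) is
   trivial: storing the current lambda in the matrix context is enough, since
   the MatMult/MatGetDiagonal callbacks read it from there whenever the
   solver applies T(lambda) or T'(lambda).
*/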

/* ------------------------------------------------------------------- */
/*
   FormJacobian - Computes Jacobian matrix  T'(lambda)

   Input Parameters:
.  nep    - the NEP context
.  lambda - the scalar argument
.  ctx    - optional user-defined context, as set by NEPSetJacobian()

   Output Parameter:
.  jac - Jacobian matrix
*/
PetscErrorCode FormJacobian(NEP nep,PetscScalar lambda,Mat jac,void *ctx)
{
  MatCtx *ctxJ;

  PetscFunctionBeginUser;
  MatShellGetContext(jac,&ctxJ);
  ctxJ->lambda = lambda;
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
PetscErrorCode MatMult_Fun(Mat A,Vec x,Vec y)
{
  MatCtx            *ctx;
  PetscInt          i,n,N;
  const PetscScalar *px;
  PetscScalar       *py,c,d,de,oe,upper=0.0,lower=0.0;
  PetscReal         h;
  MPI_Comm          comm;

  PetscFunctionBeginUser;
  MatShellGetContext(A,&ctx);
  VecGetArrayRead(x,&px);
  VecGetArray(y,&py);
  VecGetSize(x,&N);
  VecGetLocalSize(x,&n);

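  /* halo exchange: send the first/last local entry to the neighboring
     processes and receive the values adjacent to the local block
     (MPI_PROC_NULL at the domain ends turns these into no-ops) */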
  PetscObjectGetComm((PetscObject)A,&comm);
  MPI_Sendrecv(px,1,MPIU_SCALAR,ctx->prev,0,&lower,1,MPIU_SCALAR,ctx->next,0,comm,MPI_STATUS_IGNORE);
  MPI_Sendrecv(px+n-1,1,MPIU_SCALAR,ctx->next,0,&upper,1,MPIU_SCALAR,ctx->prev,0,comm,MPI_STATUS_IGNORE);

  h = ctx->h;
  c = ctx->kappa/(ctx->lambda-ctx->kappa);
  d = N;
  de = 2.0*(d-ctx->lambda*h/3.0);   /* diagonal entry */
  oe = -d-ctx->lambda*h/6.0;        /* offdiagonal entry */
  py[0] = oe*upper + de*px[0] + oe*px[1];
  for (i=1;i<n-1;i++) py[i] = oe*px[i-1] + de*px[i] + oe*px[i+1];
  if (ctx->next==MPI_PROC_NULL) de = d-ctx->lambda*h/3.0+ctx->lambda*c;   /* diagonal entry of last row */
  py[n-1] = oe*px[n-2] + de*px[n-1] + oe*lower;

  VecRestoreArrayRead(x,&px);
  VecRestoreArray(y,&py);
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
PetscErrorCode MatGetDiagonal_Fun(Mat A,Vec diag)
{
  MatCtx         *ctx;
  PetscInt       n,N;
  PetscScalar    *pd,c,d;
  PetscReal      h;

  PetscFunctionBeginUser;
  MatShellGetContext(A,&ctx);
  VecGetSize(diag,&N);
  VecGetLocalSize(diag,&n);
  h = ctx->h;
  c = ctx->kappa/(ctx->lambda-ctx->kappa);
  d = N;
  VecSet(diag,2.0*(d-ctx->lambda*h/3.0));
  VecGetArray(diag,&pd);
  if (ctx->next==MPI_PROC_NULL) pd[n-1] = d-ctx->lambda*h/3.0+ctx->lambda*c;  /* boundary row, owned by the last process only */
  VecRestoreArray(diag,&pd);
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
PetscErrorCode MatDestroy_Fun(Mat A)
{
  MatCtx         *ctx;

  PetscFunctionBeginUser;
  MatShellGetContext(A,&ctx);
  PetscFree(ctx);
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
PetscErrorCode MatDuplicate_Fun(Mat A,MatDuplicateOption op,Mat *B)
{
  MatCtx         *actx,*bctx;
  PetscInt       m,n,M,N;
  MPI_Comm       comm;

  PetscFunctionBeginUser;
  MatShellGetContext(A,&actx);
  MatGetSize(A,&M,&N);
  MatGetLocalSize(A,&m,&n);

  PetscNew(&bctx);
  bctx->h      = actx->h;
  bctx->kappa  = actx->kappa;
  bctx->lambda = actx->lambda;
  bctx->next   = actx->next;
  bctx->prev   = actx->prev;

  PetscObjectGetComm((PetscObject)A,&comm);
  MatCreateShell(comm,m,n,M,N,(void*)bctx,B);
  MatShellSetOperation(*B,MATOP_MULT,(void(*)(void))MatMult_Fun);
  MatShellSetOperation(*B,MATOP_GET_DIAGONAL,(void(*)(void))MatGetDiagonal_Fun);
  MatShellSetOperation(*B,MATOP_DESTROY,(void(*)(void))MatDestroy_Fun);
  MatShellSetOperation(*B,MATOP_DUPLICATE,(void(*)(void))MatDuplicate_Fun);
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
PetscErrorCode MatMult_Jac(Mat A,Vec x,Vec y)
{
  MatCtx            *ctx;
  PetscInt          i,n;
  const PetscScalar *px;
  PetscScalar       *py,c,de,oe,upper=0.0,lower=0.0;
  PetscReal         h;
  MPI_Comm          comm;

  PetscFunctionBeginUser;
  MatShellGetContext(A,&ctx);
  VecGetArrayRead(x,&px);
  VecGetArray(y,&py);
  VecGetLocalSize(x,&n);

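  /* same halo exchange as in MatMult_Fun */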
  PetscObjectGetComm((PetscObject)A,&comm);
  MPI_Sendrecv(px,1,MPIU_SCALAR,ctx->prev,0,&lower,1,MPIU_SCALAR,ctx->next,0,comm,MPI_STATUS_IGNORE);
  MPI_Sendrecv(px+n-1,1,MPIU_SCALAR,ctx->next,0,&upper,1,MPIU_SCALAR,ctx->prev,0,comm,MPI_STATUS_IGNORE);

  h = ctx->h;
  c = ctx->kappa/(ctx->lambda-ctx->kappa);
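  /* the entries of T'(lambda) are the lambda-derivatives of those in
     MatMult_Fun: d/dlambda[2*(d-lambda*h/3)] = -2*h/3,
     d/dlambda[-d-lambda*h/6] = -h/6, and for the boundary row
     d/dlambda[d-lambda*h/3+lambda*kappa/(lambda-kappa)] = -h/3-c*c */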
  de = -2.0*h/3.0;    /* diagonal entry */
  oe = -h/6.0;        /* offdiagonal entry */
  py[0] = oe*upper + de*px[0] + oe*px[1];
  for (i=1;i<n-1;i++) py[i] = oe*px[i-1] + de*px[i] + oe*px[i+1];
  if (ctx->next==MPI_PROC_NULL) de = -h/3.0-c*c;    /* diagonal entry of last row */
  py[n-1] = oe*px[n-2] + de*px[n-1] + oe*lower;

  VecRestoreArrayRead(x,&px);
  VecRestoreArray(y,&py);
  PetscFunctionReturn(0);
}

353: /* ------------------------------------------------------------------- */
354: PetscErrorCode MatGetDiagonal_Jac(Mat A,Vec diag)
355: {
357:   MatCtx         *ctx;
358:   PetscInt       n;
359:   PetscScalar    *pd,c;
360:   PetscReal      h;

363:   MatShellGetContext(A,&ctx);
364:   VecGetLocalSize(diag,&n);
365:   h = ctx->h;
366:   c = ctx->kappa/(ctx->lambda-ctx->kappa);
367:   VecSet(diag,-2.0*h/3.0);
368:   VecGetArray(diag,&pd);
369:   pd[n-1] = -h/3.0-c*c;
370:   VecRestoreArray(diag,&pd);
371:   return(0);
372: }

/* ------------------------------------------------------------------- */
PetscErrorCode MatDestroy_Jac(Mat A)
{
  MatCtx         *ctx;

  PetscFunctionBeginUser;
  MatShellGetContext(A,&ctx);
  PetscFree(ctx);
  PetscFunctionReturn(0);
}

/*TEST

   testset:
      nsize: {{1 2}}
      args: -terse
      requires: !single
      output_file: output/ex21_1.out
      filter: sed -e "s/[+-]0\.0*i//g" -e "s/+0i//g"
      test:
         suffix: 1_rii
         args: -nep_type rii -nep_target 4
      test:
         suffix: 1_slp
         args: -nep_type slp -nep_slp_pc_type jacobi -nep_slp_ksp_type bcgs -nep_target 10

TEST*/