[cig-commits] [commit] rajesh-petsc, rajesh-petsc-schur: Added PETSc-related initialization to general_stokes_solver_setup; moved MATSHELL and PCSHELL contexts from petsc_citcoms.h to global_defs.h (df559c0)

cig_noreply at geodynamics.org
Wed Nov 5 19:02:30 PST 2014


Repository : https://github.com/geodynamics/citcoms

On branches: rajesh-petsc,rajesh-petsc-schur
Link       : https://github.com/geodynamics/citcoms/compare/464e1b32299b15819f93efd98d969cddb84dfe51...f97ae655a50bdbd6dac1923a3471ee4dae178fbd

>---------------------------------------------------------------

commit df559c06d1a6f8fcf08d580055c51396b57f6be0
Author: Rajesh Kommu <rajesh.kommu at gmail.com>
Date:   Wed Aug 20 12:30:48 2014 -0700

    Added PETSc-related initialization to general_stokes_solver_setup; moved MATSHELL and PCSHELL contexts from petsc_citcoms.h to global_defs.h


>---------------------------------------------------------------

df559c06d1a6f8fcf08d580055c51396b57f6be0
 bin/Citcom.c        |   4 +-
 lib/Drive_solvers.c | 104 +++++++++++++++++++++++++++++++++++++++++++++++++---
 lib/Instructions.c  |   9 +----
 lib/Petsc_citcoms.c |  64 +++++++++++++++-----------------
 lib/global_defs.h   |  36 ++++++++++++++++++
 lib/petsc_citcoms.h |  62 -------------------------------
 6 files changed, 168 insertions(+), 111 deletions(-)
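
For readers new to PETSc shell matrices: the commit wraps CitcomS's matrix-free
operators (assemble_del2_u and friends) in MATSHELL objects so that KSP can
treat them as ordinary matrices. The pattern is to create a shell that carries
a user context, register a MATOP_MULT callback, and let MatMult dispatch to it.
A minimal, self-contained sketch of that pattern (DemoCtx, DemoMult, and the
sizes are invented for illustration; this is not CitcomS code):

    #include <petscksp.h>

    typedef struct { PetscScalar scale; } DemoCtx;

    /* MATOP_MULT callback: y = scale * x, standing in for the
     * assemble_*() operations that CitcomS plugs in */
    PetscErrorCode DemoMult( Mat A, Vec x, Vec y )
    {
      DemoCtx *ctx;
      PetscErrorCode ierr;
      ierr = MatShellGetContext( A, (void **)&ctx ); CHKERRQ( ierr );
      ierr = VecCopy( x, y ); CHKERRQ( ierr );
      ierr = VecScale( y, ctx->scale ); CHKERRQ( ierr );
      return 0;
    }

    int main( int argc, char **argv )
    {
      Mat A; Vec x, y; DemoCtx ctx; PetscErrorCode ierr;
      PetscInt n = 8;   /* local size; stands in for neq+1 */
      ierr = PetscInitialize( &argc, &argv, NULL, NULL ); CHKERRQ( ierr );
      ctx.scale = 2.0;
      /* shell matrix: no entries stored, only the context pointer */
      ierr = MatCreateShell( PETSC_COMM_WORLD, n, n, PETSC_DETERMINE,
                             PETSC_DETERMINE, (void *)&ctx, &A ); CHKERRQ( ierr );
      ierr = MatShellSetOperation( A, MATOP_MULT,
                                   (void(*)(void))DemoMult ); CHKERRQ( ierr );
      ierr = VecCreateMPI( PETSC_COMM_WORLD, n, PETSC_DETERMINE, &x ); CHKERRQ( ierr );
      ierr = VecDuplicate( x, &y ); CHKERRQ( ierr );
      ierr = VecSet( x, 1.0 ); CHKERRQ( ierr );
      ierr = MatMult( A, x, y ); CHKERRQ( ierr );   /* dispatches to DemoMult */
      ierr = VecView( y, PETSC_VIEWER_STDOUT_WORLD ); CHKERRQ( ierr );
      ierr = MatDestroy( &A ); CHKERRQ( ierr );
      ierr = VecDestroy( &x ); CHKERRQ( ierr );
      ierr = VecDestroy( &y ); CHKERRQ( ierr );
      return PetscFinalize();
    }

The four MatCreateShell calls in lib/Drive_solvers.c below follow exactly this
shape, with struct MatMultShell as the context and the MatShellMult_* wrappers
as the callbacks.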

diff --git a/bin/Citcom.c b/bin/Citcom.c
index b136d67..4dd3e90 100644
--- a/bin/Citcom.c
+++ b/bin/Citcom.c
@@ -103,7 +103,9 @@ int main(argc,argv)
   read_instructions(E, argv[1]);
 
   /* create mesh, setup solvers etc. */
-  /* also initialize E->UVec, E->PVec, E->FVec, E->K, E->G, E->D */
+  /* initial_setup calls general_stokes_solver_setup, which also performs
+   * all of the PETSc-related initialization
+   */
   initial_setup(E);
 
   cpu_time_on_vp_it = CPU_time0();
diff --git a/lib/Drive_solvers.c b/lib/Drive_solvers.c
index 2d3e5ad..016b481 100644
--- a/lib/Drive_solvers.c
+++ b/lib/Drive_solvers.c
@@ -30,6 +30,7 @@
 #include "element_definitions.h"
 #include "global_defs.h"
 #include "drive_solvers.h"
+#include "petsc_citcoms.h"
 
 double global_vdot();
 double vnorm_nonnewt();
@@ -41,7 +42,7 @@ int need_to_iterate(struct All_variables *);
 
 void general_stokes_solver_setup(struct All_variables *E)
 {
-  int i, m;
+  int i, m, lev, npno, neq, nel;
   void construct_node_maps();
 
   if (E->control.NMULTIGRID || E->control.NASSEMBLE)
@@ -51,11 +52,107 @@ void general_stokes_solver_setup(struct All_variables *E)
       for (m=1;m<=E->sphere.caps_per_proc;m++)
 	E->elt_k[i][m]=(struct EK *)malloc((E->lmesh.NEL[i]+1)*sizeof(struct EK));
 
-  return;
-}
-
-
+  /*
+   * PETSc-related initialization
+   */
+  lev = E->mesh.levmax;
+  npno = E->lmesh.npno;
+  neq = E->lmesh.neq;
+  nel = E->lmesh.nel;
+
+  E->mtx_del2_u.E = E;
+  E->mtx_del2_u.level = E->mesh.levmax;
+  E->mtx_del2_u.iSize = E->lmesh.neq;
+  E->mtx_del2_u.oSize = E->lmesh.nel;
+
+  E->mtx_grad_p.E = E;
+  E->mtx_grad_p.level = E->mesh.levmax;
+  E->mtx_grad_p.iSize = E->lmesh.neq;
+  E->mtx_grad_p.oSize = E->lmesh.nel;
+  
+  E->mtx_div_u.E = E;
+  E->mtx_div_u.level = E->mesh.levmax;
+  E->mtx_div_u.iSize = E->lmesh.neq;
+  E->mtx_div_u.oSize = E->lmesh.nel; 
+
+  E->mtx_div_rho_u.E = E;
+  E->mtx_div_rho_u.level = E->mesh.levmax;
+  E->mtx_div_rho_u.iSize = E->lmesh.neq;
+  E->mtx_div_rho_u.oSize = E->lmesh.npno;
+
+  E->pcshell_ctx.E = E;
+  E->pcshell_ctx.nno = E->lmesh.NEQ[lev]; /* vector length per cap */
+  E->pcshell_ctx.acc = 1e-6;
+  E->pcshell_ctx.level = lev;
+
+  /* per-cap work arrays for the shell contexts and the PC shell */
+  for (m = 1; m <= E->sphere.caps_per_proc; m++) {
+    PetscMalloc( (neq+1)*sizeof(double), &E->mtx_del2_u.iData[m] );
+    PetscMalloc( (neq+1)*sizeof(double), &E->mtx_del2_u.oData[m] );
+    PetscMalloc( (nel+1)*sizeof(double), &E->mtx_grad_p.iData[m] );
+    PetscMalloc( (neq+1)*sizeof(double), &E->mtx_grad_p.oData[m] );
+    PetscMalloc( (neq+1)*sizeof(double), &E->mtx_div_u.iData[m] );
+    PetscMalloc( (nel+1)*sizeof(double), &E->mtx_div_u.oData[m] );
+    PetscMalloc( (neq+1)*sizeof(double), &E->mtx_div_rho_u.iData[m] );
+    PetscMalloc( (npno+1)*sizeof(double), &E->mtx_div_rho_u.oData[m] );
+    PetscMalloc( (E->lmesh.NEQ[lev])*sizeof(double), &E->pcshell_ctx.V[m] );
+    PetscMalloc( (E->lmesh.NEQ[lev])*sizeof(double), &E->pcshell_ctx.RR[m] );
+  }
+
+  // Create a matrix shell for the K matrix
+  MatCreateShell( PETSC_COMM_WORLD,
+                  neq+1, neq+1,
+                  PETSC_DETERMINE, PETSC_DETERMINE,
+                  (void *)&E->mtx_del2_u,
+                  &E->K );
+  // associate a matrix-vector multiplication operation with this shell
+  MatShellSetOperation(E->K, MATOP_MULT, (void(*)(void))MatShellMult_del2_u);
+
+  // Create a matrix shell for the G matrix
+  MatCreateShell( PETSC_COMM_WORLD,
+                  neq+1, nel,
+                  PETSC_DETERMINE, PETSC_DETERMINE,
+                  (void *)&E->mtx_grad_p,
+                  &E->G );
+  // associate a matrix-vector multiplication operation with this shell
+  MatShellSetOperation(E->G, MATOP_MULT, (void(*)(void))MatShellMult_grad_p);
+
+  // Create a matrix shell for the D matrix
+  MatCreateShell( PETSC_COMM_WORLD,
+                  nel, neq+1,
+                  PETSC_DETERMINE, PETSC_DETERMINE,
+                  (void *)&E->mtx_div_u,
+                  &E->D );
+  // associate a matrix-vector multiplication operation with this shell
+  MatShellSetOperation(E->D, MATOP_MULT, (void(*)(void))MatShellMult_div_u);
+
+  // Create a matrix shell for the DC matrix
+  MatCreateShell( PETSC_COMM_WORLD,
+                  npno, neq+1,
+                  PETSC_DETERMINE, PETSC_DETERMINE,
+                  (void *)&E->mtx_div_rho_u,
+                  &E->DC );
+  // associate a matrix-vector multiplication operation with this shell
+  MatShellSetOperation(E->DC,MATOP_MULT,(void(*)(void))MatShellMult_div_rho_u);
+
+  // Create and set up the KSP solver
+  KSPCreate( PETSC_COMM_WORLD, &E->ksp );
+  KSPSetOperators( E->ksp, E->K, E->K ); /* MatStructure arg (SAME_NONZERO_PATTERN) dropped since PETSc 3.5 */
+  KSPSetFromOptions( E->ksp );
+  
+  if(strcmp(E->control.SOLVER_TYPE, "multigrid") == 0)
+  {
+    KSPGetPC( E->ksp, &E->pc );
+    PCSetType( E->pc, PCSHELL );
+    E->pcshell_ctx.self = E->pc;  /* valid only after KSPGetPC */
+    PCShellSetContext( E->pc, (void *)&E->pcshell_ctx );
+    PCShellSetApply( E->pc, PC_Apply_MultiGrid );
+  }
 
+  /************************************************************
+   * End PETSc Initialization
+   ************************************************************/
+}
 
 void general_stokes_solver(struct All_variables *E)
 {
diff --git a/lib/Instructions.c b/lib/Instructions.c
index 2879584..8809f48 100644
--- a/lib/Instructions.c
+++ b/lib/Instructions.c
@@ -223,19 +223,11 @@ void read_instructions(struct All_variables *E, char *filename)
 /* This function is replaced by CitcomS.Solver.initial_setup() in Pyre. */
 void initial_setup(struct All_variables *E)
 {
-    void general_stokes_solver_setup();
-    void initial_mesh_solver_setup();
-
     initial_mesh_solver_setup(E);
-
     general_stokes_solver_setup(E);
-
     (E->next_buoyancy_field_init)(E);
-
-    return;
 }
 
-
 void initialize_material(struct All_variables *E)
 {
     void construct_mat_group();
@@ -790,6 +782,7 @@ void read_initial_settings(struct All_variables *E)
   input_boolean("use_petsc",&E->control.use_petsc,"on",m);
   input_boolean("petsc_linear",&E->control.petsc_linear,"on",m);
   input_boolean("petsc_nonlinear",&E->control.petsc_nonlinear,"off",m);
+  input_boolean("petsc_schur",&E->control.petsc_schur,"off",m);
 
   check_settings_consistency(E);
   return;
diff --git a/lib/Petsc_citcoms.c b/lib/Petsc_citcoms.c
index 42b205d..560d03a 100644
--- a/lib/Petsc_citcoms.c
+++ b/lib/Petsc_citcoms.c
@@ -201,11 +201,8 @@ PetscErrorCode PC_Apply_MultiGrid( PC pc, Vec x, Vec y )
   }
 
   /* initialize the space for the solution */
-  for( m = 1; m <= ctx->caps_per_proc; m++ ) {
-    for( i = 0; i < ctx->nno; i++ ) {
-      ctx->V[m][i] = 0.0;
-    }
-  }
+  for( i = 0; i < ctx->nno; i++ )
+      ctx->V[1][i] = 0.0;
 
   count = 0;
 
@@ -217,11 +214,9 @@ PetscErrorCode PC_Apply_MultiGrid( PC pc, Vec x, Vec y )
   ctx->status = residual;
 
   VecGetOwnershipRange( y, &low, &high );
-  for( m = 1; m <= ctx->caps_per_proc; m++ ) {
-    for( i = 0; i < ctx->nno; i++ ) {
-      ierr = VecSetValue( y, i+low, ctx->V[m][i], INSERT_VALUES ); 
-      CHKERRQ( ierr );
-    }
+  for( i = 0; i < ctx->nno; i++ ) {
+    ierr = VecSetValue( y, i+low, ctx->V[1][i], INSERT_VALUES ); 
+    CHKERRQ( ierr );
   }
   VecAssemblyBegin(y);
   VecAssemblyEnd(y);
@@ -236,10 +231,10 @@ PetscErrorCode MatShellMult_del2_u( Mat K, Vec U, Vec KU )
   // KU is (neq+1)
   int i, j, neq, nel;
   PetscErrorCode ierr;
-  struct MatMultShell_del2_u *ctx;
+  struct MatMultShell *ctx;
   MatShellGetContext( K, (void **)&ctx );
-  neq = ctx->neq;
-  nel = ctx->nel;
+  neq = ctx->iSize;
+  nel = ctx->oSize;
   int low, high;
 
   VecAssemblyBegin(U);
@@ -248,18 +243,18 @@ PetscErrorCode MatShellMult_del2_u( Mat K, Vec U, Vec KU )
   for( i = 1; i <= ctx->E->sphere.caps_per_proc; i++ ) {
     for( j = 0; j <= neq; j++ ) {
       PetscInt ix[] = {j+low};
-      ierr = VecGetValues( U, 1, ix, &ctx->u[i][j] );
+      ierr = VecGetValues( U, 1, ix, &ctx->iData[i][j] );
       CHKERRQ( ierr );
     }
   }
 
   // actual CitcomS operation
-  assemble_del2_u( ctx->E, ctx->u, ctx->Ku, ctx->level, 1 );
+  assemble_del2_u( ctx->E, ctx->iData, ctx->oData, ctx->level, 1 );
   
   VecGetOwnershipRange( KU, &low, &high );
   for( i=1; i <= ctx->E->sphere.caps_per_proc; ++i ) {
     for( j = 0; j <= neq; j++ ) {
-      ierr = VecSetValue( KU, j+low, ctx->Ku[i][j], INSERT_VALUES );
+      ierr = VecSetValue( KU, j+low, ctx->oData[i][j], INSERT_VALUES );
       CHKERRQ( ierr );
     }
   }
@@ -276,10 +271,10 @@ PetscErrorCode MatShellMult_grad_p( Mat G, Vec P, Vec GP )
   // GP is (neq+1) x 1 of which first neq entries (0:neq-1) are actual values
   int i, j, neq, nel;
   PetscErrorCode ierr;
-  struct MatMultShell_grad_p *ctx;
+  struct MatMultShell *ctx;
   MatShellGetContext( G, (void **)&ctx );
-  neq = ctx->neq;
-  nel = ctx->nel;
+  neq = ctx->iSize;
+  nel = ctx->oSize;
 
   int low, high;
   VecGetOwnershipRange( P, &low, &high );
@@ -289,18 +284,18 @@ PetscErrorCode MatShellMult_grad_p( Mat G, Vec P, Vec GP )
   for( i = 1; i <= ctx->E->sphere.caps_per_proc; i++ ) {
     for( j = 0; j < nel; j++ ) {
       PetscInt ix[] = {j+low};
-      ierr = VecGetValues( P, 1, ix, &ctx->p[i][j+1] );
+      ierr = VecGetValues( P, 1, ix, &ctx->iData[i][j+1] );
       CHKERRQ( ierr );
     }
   }
 
   // actual CitcomS operation
-  assemble_grad_p( ctx->E, ctx->p, ctx->Gp, ctx->level );
+  assemble_grad_p( ctx->E, ctx->iData, ctx->oData, ctx->level );
 
   VecGetOwnershipRange( GP, &low, &high );
   for( i = 1; i <= ctx->E->sphere.caps_per_proc; i++ ) {
     for( j = 0; j < neq; j++ ) {
-      ierr = VecSetValue( GP, j+low, ctx->Gp[i][j], INSERT_VALUES );
+      ierr = VecSetValue( GP, j+low, ctx->oData[i][j], INSERT_VALUES );
       CHKERRQ( ierr );
     }
   }
@@ -317,10 +312,10 @@ PetscErrorCode MatShellMult_div_u( Mat D, Vec U, Vec DU )
   // DU is nel x 1
   int i, j, neq, nel;
   PetscErrorCode ierr;
-  struct MatMultShell_div_u *ctx;
+  struct MatMultShell *ctx;
   MatShellGetContext( D, (void **)&ctx );
-  neq = ctx->neq;
-  nel = ctx->nel;
+  neq = ctx->iSize;
+  nel = ctx->oSize;
 
   int low, high;
   VecGetOwnershipRange( U, &low, &high );
@@ -330,18 +325,18 @@ PetscErrorCode MatShellMult_div_u( Mat D, Vec U, Vec DU )
   for( i = 1; i <= ctx->E->sphere.caps_per_proc; i++ ) {
     for( j = 0; j < neq; j++ ) {
       PetscInt ix[] = {j+low};
-      ierr = VecGetValues( U, 1, ix, &ctx->u[i][j] );
+      ierr = VecGetValues( U, 1, ix, &ctx->iData[i][j] );
       CHKERRQ( ierr );
     }
   }
 
   // actual CitcomS operation
-  assemble_div_u( ctx->E, ctx->u, ctx->Du, ctx->level );
+  assemble_div_u( ctx->E, ctx->iData, ctx->oData, ctx->level );
 
   VecGetOwnershipRange( DU, &low, &high );
   for( i = 1; i <= ctx->E->sphere.caps_per_proc; i++ ) {
     for( j = 0; j < nel; j++ ) {
-      ierr = VecSetValue( DU, j+low, ctx->Du[i][j+1], INSERT_VALUES );
+      ierr = VecSetValue( DU, j+low, ctx->oData[i][j+1], INSERT_VALUES );
       CHKERRQ( ierr );
     }
   }
@@ -358,10 +353,10 @@ PetscErrorCode MatShellMult_div_rho_u( Mat DC, Vec U, Vec DU )
   // DU is nel x 1
   int i, j, neq, nel;
   PetscErrorCode ierr;
-  struct MatMultShell_div_u *ctx;
+  struct MatMultShell *ctx;
   MatShellGetContext( DC, (void **)&ctx );
-  neq = ctx->neq;
-  nel = ctx->nel;
+  neq = ctx->iSize;
+  nel = ctx->oSize;
 
   int low, high;
   VecGetOwnershipRange( U, &low, &high );
@@ -371,18 +366,18 @@ PetscErrorCode MatShellMult_div_rho_u( Mat DC, Vec U, Vec DU )
   for( i = 1; i <= ctx->E->sphere.caps_per_proc; i++ ) {
     for( j = 0; j < neq; j++ ) {
       PetscInt ix[] = {j+low};
-      ierr = VecGetValues( U, 1, ix, &ctx->u[i][j] );
+      ierr = VecGetValues( U, 1, ix, &ctx->iData[i][j] );
       CHKERRQ( ierr );
     }
   }
 
   // actual CitcomS operation
-  assemble_div_rho_u( ctx->E, ctx->u, ctx->Du, ctx->level );
+  assemble_div_rho_u( ctx->E, ctx->iData, ctx->oData, ctx->level );
 
   VecGetOwnershipRange( DU, &low, &high );
   for( i = 1; i <= ctx->E->sphere.caps_per_proc; i++ ) {
     for( j = 0; j < nel; j++ ) {
-      ierr = VecSetValue( DU, j+low, ctx->Du[i][j+1], INSERT_VALUES );
+      ierr = VecSetValue( DU, j+low, ctx->oData[i][j+1], INSERT_VALUES );
       CHKERRQ( ierr );
     }
   }
@@ -391,4 +386,3 @@ PetscErrorCode MatShellMult_div_rho_u( Mat DC, Vec U, Vec DU )
 
   PetscFunctionReturn(0);
 }
-
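
All four MatShellMult_* callbacks above share the same copy-in / copy-out
structure: read the locally owned Vec entries into the context's plain C
arrays, call the CitcomS assemble_*() routine, and write the result back with
VecSetValue followed by vector assembly. A self-contained sketch of just that
round trip (the pass-through "operation" and the variable names are
illustrative, not CitcomS code):

    #include <petscvec.h>

    int main( int argc, char **argv )
    {
      Vec x, y; PetscInt i, lo, hi, n = 8; PetscErrorCode ierr;
      double *work;   /* stands in for ctx->iData[m] / ctx->oData[m] */
      ierr = PetscInitialize( &argc, &argv, NULL, NULL ); CHKERRQ( ierr );
      ierr = VecCreateMPI( PETSC_COMM_WORLD, PETSC_DECIDE, n, &x ); CHKERRQ( ierr );
      ierr = VecDuplicate( x, &y ); CHKERRQ( ierr );
      ierr = VecSet( x, 3.0 ); CHKERRQ( ierr );

      /* copy-in: locally owned PETSc entries -> plain C array, one
       * value at a time, as the callbacks do when filling ctx->iData */
      ierr = VecGetOwnershipRange( x, &lo, &hi ); CHKERRQ( ierr );
      ierr = PetscMalloc( (hi-lo)*sizeof(double), &work ); CHKERRQ( ierr );
      for( i = lo; i < hi; i++ ) {
        PetscScalar v;
        ierr = VecGetValues( x, 1, &i, &v ); CHKERRQ( ierr );
        work[i-lo] = (double)v;
      }

      /* ... operate on "work" here (the callbacks call assemble_*()) ... */

      /* copy-out: C array -> PETSc vector, then assemble */
      ierr = VecGetOwnershipRange( y, &lo, &hi ); CHKERRQ( ierr );
      for( i = lo; i < hi; i++ ) {
        ierr = VecSetValue( y, i, (PetscScalar)work[i-lo], INSERT_VALUES ); CHKERRQ( ierr );
      }
      ierr = VecAssemblyBegin( y ); CHKERRQ( ierr );
      ierr = VecAssemblyEnd( y ); CHKERRQ( ierr );

      ierr = PetscFree( work ); CHKERRQ( ierr );
      ierr = VecDestroy( &x ); CHKERRQ( ierr );
      ierr = VecDestroy( &y ); CHKERRQ( ierr );
      return PetscFinalize();
    }

Note that a VecGetArray/VecRestoreArray pair would do the copy-in without the
per-entry VecGetValues/VecSetValue calls; the element-wise form shown here
simply mirrors what the commit does.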
diff --git a/lib/global_defs.h b/lib/global_defs.h
index 89d5b86..55c8b4f 100644
--- a/lib/global_defs.h
+++ b/lib/global_defs.h
@@ -563,6 +563,7 @@ struct CONTROL {
     int use_petsc;
     int petsc_linear;
     int petsc_nonlinear;
+    int petsc_schur;
 };
 
 
@@ -710,6 +711,39 @@ struct CITCOM_GNOMONIC {
 #endif
 #include "tracer_defs.h"
 
+/* PETSc MultiGrid Shell preconditioner */  
+struct MultiGrid_PC
+{
+  PC self;
+  struct All_variables *E;
+  // multigrid stuff
+  double acc;
+  int smooth_up;
+  int smooth_down;
+  int smooth_coarse;
+  int smooth_fine;
+  int max_vel_iterations;
+  int cycle_type;
+  double status;
+  int level;
+  int levmax;
+  PetscBool mg_monitor;
+  int nno;
+  double *V[NCS];
+  double *RR[NCS];
+};
+
+/* PETSc Matrix shell contexts */
+struct MatMultShell
+{
+  struct All_variables *E;
+  int level;
+  int iSize;
+  int oSize;
+  double *iData[NCS];
+  double *oData[NCS];
+};
+
 struct All_variables {
 
 #include "solver.h"
@@ -723,6 +757,8 @@ struct All_variables {
     PC    pc;
     SNES  snes;
     Vec   PVec, NPVec, UVec, FVec;
+    struct MatMultShell mtx_del2_u, mtx_grad_p, mtx_div_u, mtx_div_rho_u;
+    struct MultiGrid_PC pcshell_ctx;
 
     FILE *fp;
     FILE *fptime;
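
The payoff of the consolidation is visible in the struct itself: the four
removed per-operator contexts differed only in the names of their size and
buffer fields, so a single struct carrying iSize/oSize and iData/oData serves
K, G, D, and DC alike. A small plain-C sketch of the idea (struct Ctx and
apply() are invented for illustration; the real context also carries the
per-cap arrays and an All_variables pointer, as above):

    #include <stdio.h>
    #include <stdlib.h>

    /* trimmed-down analogue of struct MatMultShell */
    struct Ctx { int iSize, oSize; double *iData, *oData; };

    /* one generic apply skeleton instead of four specialized ones:
     * the loop bounds come from the context, not from the struct type */
    void apply( struct Ctx *c )
    {
      int i, j;
      for( i = 0; i < c->oSize; i++ ) {
        c->oData[i] = 0.0;
        for( j = 0; j < c->iSize; j++ )
          c->oData[i] += c->iData[j];  /* a real operator applies K, G, D or DC */
      }
    }

    int main( void )
    {
      struct Ctx K = { 10, 10, NULL, NULL };  /* square, like E->K */
      struct Ctx D = { 10,  4, NULL, NULL };  /* rectangular, like E->D */
      int j;
      K.iData = malloc( K.iSize * sizeof(double) );
      K.oData = malloc( K.oSize * sizeof(double) );
      D.iData = malloc( D.iSize * sizeof(double) );
      D.oData = malloc( D.oSize * sizeof(double) );
      for( j = 0; j < K.iSize; j++ ) K.iData[j] = 1.0;
      for( j = 0; j < D.iSize; j++ ) D.iData[j] = 1.0;
      apply( &K );
      apply( &D );                            /* same code path, different shape */
      printf( "K: %d->%d (%g), D: %d->%d (%g)\n",
              K.iSize, K.oSize, K.oData[0], D.iSize, D.oSize, D.oData[0] );
      free( K.iData ); free( K.oData );
      free( D.iData ); free( D.oData );
      return 0;
    }
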
diff --git a/lib/petsc_citcoms.h b/lib/petsc_citcoms.h
index 3c6e95c..a90233b 100644
--- a/lib/petsc_citcoms.h
+++ b/lib/petsc_citcoms.h
@@ -8,68 +8,6 @@
 extern "C" {
 #endif
 
-struct MultiGrid_PC
-{
-  PC self;
-  struct All_variables *E;
-  // multigrid stuff
-  double acc;
-  int smooth_up;
-  int smooth_down;
-  int smooth_coarse;
-  int smooth_fine;
-  int max_vel_iterations;
-  int cycle_type;
-  double status;
-  int level;
-  int levmax;
-  PetscBool mg_monitor;
-  int nno;
-  int caps_per_proc;
-  double *V[NCS];
-  double *RR[NCS];
-};
-
-struct MatMultShell_del2_u
-{
-  struct All_variables *E;
-  int level;
-  int neq;
-  int nel;
-  double *u[NCS];
-  double *Ku[NCS];
-};
-
-struct MatMultShell_grad_p
-{
-  struct All_variables *E;
-  int level;
-  int neq;
-  int nel;
-  double *p[NCS];
-  double *Gp[NCS];
-};
-
-struct MatMultShell_div_u
-{
-  struct All_variables *E;
-  int level;
-  int neq;
-  int nel;
-  double *u[NCS];
-  double *Du[NCS];
-};
-
-struct MatMultShell_div_rho_u
-{
-  struct All_variables *E;
-  int level;
-  int neq;
-  int npno;
-  double *u[NCS];
-  double *Du[NCS];
-};
-
 void strip_bcs_from_residual_PETSc( 
     struct All_variables *E, Vec Res, int level );
 PetscErrorCode initial_vel_residual_PETSc( struct All_variables *E,


