
PETSc hands-on tutorial: Solving a linear system in serial

In this tutorial, we will solve a tridiagonal linear system Ax = b on a single processor. We express the system of linear equations as PETSc vector (Vec) and matrix (Mat) objects. We precondition (PC) the system and find the solution using a linear solver (KSP). The tutorial demonstrates how to:

  • Create PETSc vector Vec and matrix Mat objects
  • Precondition and solve a linear system
  • View solver results and error

Setting up a PETSc program

We will write this program in one main function, but more complex programs can be split across multiple functions and files.

   static char help[] = "Solves a tridiagonal linear system with KSP.\n\n";

   /*T
      Concepts: KSP^solving a system of linear equations
      Processors: 1
   T*/

   /*
     Include "petscksp.h" so that we can use KSP solvers.  Note that this file
     automatically includes:
        petscsys.h    - base PETSc routines   petscvec.h - vectors
        petscmat.h    - matrices              petscpc.h  - preconditioners
        petscis.h     - index sets
        petscviewer.h - viewers

     Note:  The corresponding parallel example is ex23.c
   */
   #include <petscksp.h>

   int main(int argc,char **args)
   {
     Vec            x, b, u;      /* approx solution, RHS, exact solution */
     Mat            A;            /* linear system matrix */
     KSP            ksp;          /* linear solver context */
     PC             pc;           /* preconditioner context */
     PetscReal      norm;         /* norm of solution error */
     PetscErrorCode ierr;
     PetscInt       i,n = 10,col[3],its;
     PetscMPIInt    size;
     PetscScalar    value[3];

Now we set up MPI using PetscInitialize, which invokes MPI_Init and reads the command line for PETSc runtime options. We want to run this tutorial in serial, so we check the size of the MPI communicator and return an error if more than one rank is used.

   ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
   ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
   if (size != 1) SETERRQ(PETSC_COMM_WORLD,1,"This is a uniprocessor example only!");
   ierr = PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);CHKERRQ(ierr);
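
Because of the PetscOptionsGetInt() call above, the system size n can be changed at run time. Assuming the compiled executable is named ex1 (a name used here purely for illustration), a 100-unknown system is requested with:

   ./ex1 -n 100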

Creating PETSc vectors and matrices

Now we will create the vector objects we need: the right-hand side b, space for the approximate solution x, and the exact solution u. Here, we form one vector from scratch and duplicate it as needed.

   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
          Compute the matrix and right-hand-side vector that define
          the linear system, Ax = b.
      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

   /*
      Create vectors.  Note that we form 1 vector from scratch and
      then duplicate as needed.
   */
   ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
   ierr = PetscObjectSetName((PetscObject) x, "Solution");CHKERRQ(ierr);
   ierr = VecSetSizes(x,PETSC_DECIDE,n);CHKERRQ(ierr);
   ierr = VecSetFromOptions(x);CHKERRQ(ierr);
   ierr = VecDuplicate(x,&b);CHKERRQ(ierr);
   ierr = VecDuplicate(x,&u);CHKERRQ(ierr);
   /*
      Create matrix.  When using MatCreate(), the matrix format can
      be specified at runtime.

      Performance tuning note:  For problems of substantial size,
      preallocation of matrix memory is crucial for attaining good
      performance. See the matrix chapter of the users manual for details.
   */
   ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
   ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,n,n);CHKERRQ(ierr);
   ierr = MatSetFromOptions(A);CHKERRQ(ierr);
   ierr = MatSetUp(A);CHKERRQ(ierr);
   /*
      Assemble matrix
   */
   value[0] = -1.0; value[1] = 2.0; value[2] = -1.0;
   for (i=1; i<n-1; i++) {
     col[0] = i-1; col[1] = i; col[2] = i+1;
     ierr   = MatSetValues(A,1,&i,3,col,value,INSERT_VALUES);CHKERRQ(ierr);
   }
   i    = n - 1; col[0] = n - 2; col[1] = n - 1;
   ierr = MatSetValues(A,1,&i,2,col,value,INSERT_VALUES);CHKERRQ(ierr);
   i    = 0; col[0] = 0; col[1] = 1; value[0] = 2.0; value[1] = -1.0;
   ierr = MatSetValues(A,1,&i,2,col,value,INSERT_VALUES);CHKERRQ(ierr);
   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

   /*
      Set exact solution; then compute right-hand-side vector.
   */
   ierr = VecSet(u,1.0);CHKERRQ(ierr);
   ierr = MatMult(A,u,b);CHKERRQ(ierr);
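
To see concretely what was just assembled, take n = 5. The loop and the two boundary rows produce the standard tridiagonal stencil, and since the exact solution u is the vector of all ones, the right-hand side b = Au is zero in the interior rows (each sums to -1 + 2 - 1 = 0) and one at the two ends:

   A = [  2 -1  0  0  0 ]         b = A u = [ 1 ]
       [ -1  2 -1  0  0 ]                   [ 0 ]
       [  0 -1  2 -1  0 ]                   [ 0 ]
       [  0  0 -1  2 -1 ]                   [ 0 ]
       [  0  0  0 -1  2 ]                   [ 1 ]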

Create a linear solver and preconditioner

   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                 Create the linear solver and set various options
      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
   ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);

   /*
      Set operators. Here the matrix that defines the linear system
      also serves as the matrix that defines the preconditioner.
   */
   ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);

   /*
      Set linear solver defaults for this problem (optional).
      - By extracting the KSP and PC contexts from the KSP context,
        we can then directly call any KSP and PC routines to set
        various options.
      - The following four statements are optional; all of these
        parameters could alternatively be specified at runtime via
        KSPSetFromOptions();
   */
   ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
   ierr = PCSetType(pc,PCJACOBI);CHKERRQ(ierr);
   ierr = KSPSetTolerances(ksp,1.e-5,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);CHKERRQ(ierr);

   /*
     Set runtime options, e.g.,
         -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization
     routines.
   */
   ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
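
For example, the Jacobi preconditioner and tolerance chosen above can be overridden from the command line (again assuming an executable named ex1), with no recompilation needed:

   ./ex1 -ksp_type gmres -pc_type none -ksp_rtol 1.e-8 -ksp_monitor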

Solve a linear system and check the solution

We are ready to solve the system! One call to KSPSolve will solve the system with the options set above or from the command line.

   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                       Solve the linear system
      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
   ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);

   /*
      View solver info; we could instead use the option -ksp_view to
      print this info to the screen at the conclusion of KSPSolve().
   */
   ierr = KSPView(ksp,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                       Check the solution and clean up
      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
   ierr = VecAXPY(x,-1.0,u);CHKERRQ(ierr);
   ierr = VecNorm(x,NORM_2,&norm);CHKERRQ(ierr);
   ierr = KSPGetIterationNumber(ksp,&its);CHKERRQ(ierr);
   ierr = PetscPrintf(PETSC_COMM_WORLD,
          "Norm of error %g, Iterations %D\n",(double)norm,its);CHKERRQ(ierr);

End a PETSc program

It's good practice to free the memory used during the program. Finally, we call PetscFinalize, which finalizes the PETSc libraries and MPI. It also provides summary information when certain runtime options, such as -log_view, are used.

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  ierr = VecDestroy(&x);CHKERRQ(ierr); ierr = VecDestroy(&u);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr); ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = KSPDestroy(&ksp);CHKERRQ(ierr);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  ierr = PetscFinalize();
  return ierr;
}
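
To build and run the program, PETSc codes typically use a small makefile that includes PETSc's compilation rules. Below is a minimal sketch, assuming the source file is named ex1.c and that PETSC_DIR (and PETSC_ARCH, for in-place builds) point to a working PETSc installation; recipe lines must be indented with a tab:

   include ${PETSC_DIR}/lib/petsc/conf/variables
   include ${PETSC_DIR}/lib/petsc/conf/rules

   ex1: ex1.o
   	-${CLINKER} -o ex1 ex1.o ${PETSC_LIB}
   	${RM} ex1.o

After make ex1, running the executable with -log_view prints the summary information mentioned above:

   ./ex1 -log_view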

Source

This document is adapted from the PETSc development documentation.