<PREV> <INDEX> <NEXT>

Scientific Libraries: PETSc

very popular in engineering: uses MPI, BLAS, LAPACK

Parallel solvers and routines for:

env: PETSC_ARCH=linux
PETSC_DIR=/usr/local/petsc-2.1.3

Options file: .petscrc

ierr = PetscInitialize(int *argc, char ***argv, char *file_name, char *help_mess);
ierr = PetscFinalize();

PETSC_COMM_WORLD         PETSC_COMM_SELF

VECTORS:

ierr = VecCreateSeq(PETSC_COMM_SELF, int m, Vec *x);
ierr = VecCreateMPI(MPI_Comm comm, int m, int M, Vec *x);
ierr = VecCreate(MPI_Comm comm, Vec *x);

ierr = VecSetSizes(Vec vec, int m, int M);
ierr = VecSet(Vec x, PetscScalar value);
ierr = VecSetValues(Vec x, int n, int *indices, Scalar *values, INSERT_VALUES);
ierr = VecAssemblyBegin(Vec x);
ierr = VecAssemblyEnd(Vec x);
ierr = VecDuplicate(Vec old, Vec *new);
ierr = VecDuplicateVecs(Vec old, int n, Vec **new);
ierr = VecDestroy(Vec x);
ierr = VecDestroyVecs(Vec *x, int n);
operations:
ierr = VecAXPY(Scalar *a, Vec x, Vec y);    y = y + a * x
ierr = VecMax(Vec x, int *idx, double *r);    r = max(xi)

MATRICES:

ierr = MatCreate(MPI_Comm comm, Mat *A);
ierr = MatSetSizes(Mat A, int m, int n, int M, int N);
ierr = MatSetValues(Mat A, int m, int *im, int n, int *in, Scalar *values, INSERT_VALUES);
ierr = MatAssemblyBegin(Mat A, MAT_FINAL_ASSEMBLY);
ierr = MatAssemblyEnd(Mat A, MAT_FINAL_ASSEMBLY);
operations:
ierr = MatMult(Mat A, Vec x, Vec y);    y = A * x
ierr = MatScale(Mat A, PetscScalar a);    A = a * A


SOLVERS:

ierr = KSPCreate(MPI_Comm comm, KSP *ksp);
ierr = KSPSetOperators(KSP ksp, Mat A, Mat PrecA, MatStructure flag);
ierr = KSPSetFromOptions(KSP ksp);
ierr = KSPSolve(KSP ksp, Vec b, Vec x, int *its);       Ax = b
ierr = KSPDestroy(KSP ksp);

option database:
  • file
  • env
  • command line:  -optionname value

ERROR CHECKING:

CHKERRQ(ierr)
CHKMEMQ


<PREV> <INDEX> <NEXT>