Actual source code: ex41.c

  1: /*$Id: ex41.c,v 1.25 2001/08/07 03:03:07 balay Exp $*/

  3: static char help[] = "Tests MatIncreaseOverlap() - the parallel case. This example\n\
  4: is similar to ex40.c; here the index sets used are random. Input arguments are:\n\
  5:   -f <input_file> : file to load.  For a 5X5 example of the 5-pt. stencil,\n\
  6:                        use the file petsc/src/mat/examples/matbinary.ex\n\
  7:   -nd <size>      : > 0  no of domains per processor \n\
  8:   -ov <overlap>   : >=0  amount of overlap between domains\n\n";

#include "petscsles.h"

 14: int main(int argc,char **args)
 15: {
 16:   int         ierr,nd = 2,ov=1,i,j,size,m,n,rank,*idx;
 17:   PetscTruth  flg;
 18:   Mat         A,B;
 19:   char        file[128];
 20:   PetscViewer fd;
 21:   IS          *is1,*is2;
 22:   PetscRandom r;
 23:   PetscScalar rand;

 25:   PetscInitialize(&argc,&args,(char *)0,help);
 26: #if defined(PETSC_USE_COMPLEX)
 27:   SETERRQ(1,"This example does not work with complex numbers");
 28: #else
 29: 
 30:   MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
 31:   PetscOptionsGetString(PETSC_NULL,"-f",file,127,PETSC_NULL);
 32:   PetscOptionsGetInt(PETSC_NULL,"-nd",&nd,PETSC_NULL);
 33:   PetscOptionsGetInt(PETSC_NULL,"-ov",&ov,PETSC_NULL);

 35:   /* Read matrix and RHS */
 36:   PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,PETSC_BINARY_RDONLY,&fd);
 37:   MatLoad(fd,MATMPIAIJ,&A);
 38:   PetscViewerDestroy(fd);

 40:   /* Read the matrix again as a seq matrix */
 41:   PetscViewerBinaryOpen(PETSC_COMM_SELF,file,PETSC_BINARY_RDONLY,&fd);
 42:   MatLoad(fd,MATSEQAIJ,&B);
 43:   PetscViewerDestroy(fd);
 44: 
 45:   /* Create the Random no generator */
 46:   MatGetSize(A,&m,&n);
 47:   PetscRandomCreate(PETSC_COMM_SELF,RANDOM_DEFAULT,&r);
 48: 
 49:   /* Create the IS corresponding to subdomains */
 50:   PetscMalloc(nd*sizeof(IS **),&is1);
 51:   PetscMalloc(nd*sizeof(IS **),&is2);
 52:   PetscMalloc(m *sizeof(int),&idx);

 54:   /* Create the random Index Sets */
 55:   for (i=0; i<nd; i++) {
 56:     for (j=0; j<rank; j++) {
 57:       PetscRandomGetValue(r,&rand);
 58:     }
 59:     PetscRandomGetValue(r,&rand);
 60:     size   = (int)(rand*m);
 61:     for (j=0; j<size; j++) {
 62:       PetscRandomGetValue(r,&rand);
 63:       idx[j] = (int)(rand*m);
 64:     }
 65:     ISCreateGeneral(PETSC_COMM_SELF,size,idx,is1+i);
 66:     ISCreateGeneral(PETSC_COMM_SELF,size,idx,is2+i);
 67:   }
 68: 
 69:   MatIncreaseOverlap(A,nd,is1,ov);
 70:   MatIncreaseOverlap(B,nd,is2,ov);
 71: 
 72:   /* Now see if the serial and parallel case have the same answers */
 73:   for (i=0; i<nd; ++i) {
 74:     int sz1,sz2;
 75:     ISEqual(is1[i],is2[i],&flg);
 76:     ISGetSize(is1[i],&sz1);
 77:     ISGetSize(is2[i],&sz2);
 78:     PetscPrintf(PETSC_COMM_SELF,"proc:[%d], i=%d, flg =%d sz1 = %d sz2 = %d\n",rank,i,flg,sz1,sz2);
 79:     /* ISView(is1[i],PETSC_VIEWER_STDOUT_SELF);
 80:     ISView(is2[i],PETSC_VIEWER_STDOUT_SELF); */
 81:   }

 83:   /* Free Allocated Memory */
 84:   for (i=0; i<nd; ++i) {
 85:     ISDestroy(is1[i]);
 86:     ISDestroy(is2[i]);
 87:   }
 88:   PetscRandomDestroy(r);
 89:   PetscFree(is1);
 90:   PetscFree(is2);
 91:   MatDestroy(A);
 92:   MatDestroy(B);
 93:   PetscFree(idx);

 95:   PetscFinalize();
 96: #endif
 97:   return 0;
 98: }