loaded modules

Currently Loaded Modulefiles:
  1) intel/11.1.069(default)     5) libnuma/2.0.7
  2) openmpi/1.5.4(default)      6) sbank/1.2
  3) git/1.8.3.4                 7) cmake/2.8.6
  4) acml/5.0.0_fma4(default)

limits

cputime      unlimited
filesize     unlimited
datasize     unlimited
stacksize    unlimited
coredumpsize unlimited
memoryuse    unlimited
vmemoryuse   unlimited
descriptors  64000
memorylocked unlimited
maxproc      16000

Environment Variables

USER=d3m956
LOGNAME=d3m956
HOME=/people/d3m956
PATH=/share/apps/cmake/2.8.6//bin:/share/apps/sbank/1.2//bin:/share/apps/libnuma/2.0.7//bin:/share/apps/git/1.8.3.4//bin:/share/apps/intel/11.1/069/bin/intel64:/pic/people/scicons/bin:/usr/java/latest/bin:/share/apps/openmpi/1.5.4/intel/11.1.069/bin:/usr/lib64/qt-3.3/bin:/pic/people/scicons/bin:/usr/NX/bin:/usr/kerberos/bin:/usr/java/latest/bin:/usr/local/bin:/bin:/usr/bin:/usr/X11R6/bin:/opt/ganglia/bin:/opt/ganglia/sbin:/opt/hpss/bin:/opt/pdsh/bin:/opt/rocks/bin:/opt/rocks/sbin:/opt/ganglia/bin:/opt/ganglia/sbin:/opt/hpss/bin:/opt/rocks/bin:/opt/rocks/sbin
MAIL=/var/spool/mail/d3m956
SHELL=/bin/tcsh
SSH_CLIENT=130.20.35.206 50070 22
SSH_CONNECTION=130.20.35.206 50070 130.20.68.15 22
SSH_TTY=/dev/pts/132
TERM=xterm
KRB5CCNAME=FILE:/tmp/krb5cc_22956_of6fD6
HOSTTYPE=x86_64-linux
VENDOR=unknown
OSTYPE=linux
MACHTYPE=x86_64
SHLVL=2
PWD=/pic/projects/ds/DS_PETSc/ss16
GROUP=users
HOST=node0256.local
REMOTEHOST=we19351.pnl.gov
HOSTNAME=node0256
INPUTRC=/etc/inputrc
ANT_HOME=/opt/rocks
LS_COLORS=no=00:fi=00:di=00;34:ln=00;36:pi=40;33:so=00;35:bd=40;33;01:cd=40;33;01:or=01;05;37;41:mi=01;05;37;41:ex=00;32:*.cmd=00;32:*.exe=00;32:*.com=00;32:*.btm=00;32:*.bat=00;32:*.sh=00;32:*.csh=00;32:*.tar=00;31:*.tgz=00;31:*.arj=00;31:*.taz=00;31:*.lzh=00;31:*.zip=00;31:*.z=00;31:*.Z=00;31:*.gz=00;31:*.bz2=00;31:*.bz=00;31:*.tz=00;31:*.rpm=00;31:*.cpio=00;31:*.jpg=00;35:*.gif=00;35:*.bmp=00;35:*.xbm=00;35:*.xpm=00;35:*.png=00;35:*.tif=00;35:
G_BROKEN_FILENAMES=1
SSH_ASKPASS=/usr/libexec/openssh/gnome-ssh-askpass
HPSS_PRIMARY_AUTHN_MECH=krb5
JAVA_HOME=/usr/java/latest
LANG=en_US.UTF-8
LESSOPEN=|/usr/bin/lesspipe.sh %s
MODULE_VERSION=3.2.8
MODULE_VERSION_STACK=3.2.8
MODULESHOME=/share/apps/modules/Modules/3.2.8
MODULEPATH=$MODULESHOME/modulefiles/environment:$MODULESHOME/modulefiles/development/compilers:$MODULESHOME/modulefiles/development/mpi:$MODULESHOME/modulefiles/development/mlib:$MODULESHOME/modulefiles/development/tools:$MODULESHOME/modulefiles/apps:$MODULESHOME/modulefiles/libs:/share/apps/modules/Modules/versions
NXDIR=/usr/NX
PDSHROOT=/opt/pdsh
MANPATH=/share/apps/sbank/1.2//share/man:/share/apps/libnuma/2.0.7//share/man:/share/apps/git/1.8.3.4//share/man:/share/apps/intel/11.1/069/man/en_US/:/share/apps/openmpi/1.5.4/intel/11.1.069/man:/usr/share/man::
SBATCH_NO_REQUEUE=1
SRUN_NO_REQUEUE=1
SLURM_NO_REQUEUE=1
QTDIR=/usr/lib64/qt-3.3
QTINC=/usr/lib64/qt-3.3/include
QTLIB=/usr/lib64/qt-3.3/lib
ROCKS_ROOT=/opt/rocks
LD_LIBRARY_PATH=/share/apps/libnuma/2.0.7//lib64:/share/apps/intel/11.1/069/lib/intel64:/share/apps/acml/5.0.0/ifort64_fma4/lib:/share/apps/openmpi/1.5.4/intel/11.1.069/lib:/pic/projects/ds/petsc-dev.6.06.13/arch-complex-opt/lib
PETSC_DIR=/pic/projects/ds/petsc-dev.6.06.13
PETSC_ARCH=arch-complex-opt
SLURM_JOB_NAME=DSmpi
SLURM_PRIO_PROCESS=0
SLURM_SUBMIT_DIR=/pic/projects/ds/DS_PETSc/ss16
SLURM_JOB_ID=3717091
SLURM_JOB_NUM_NODES=1
SLURM_JOB_NODELIST=node0256
SLURM_NODE_ALIASES=(null)
SLURM_JOB_CPUS_PER_NODE=32
ENVIRONMENT=BATCH
SLURM_JOBID=3717091
SLURM_NNODES=1
SLURM_NODELIST=node0256
SLURM_NTASKS=32
SLURM_NPROCS=32
SLURM_TASKS_PER_NODE=32
SLURM_TOPOLOGY_ADDR=root.ql01.node0256
SLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.node
TMPDIR=/tmp
SLURM_TASK_PID=1237
SLURM_CPUS_ON_NODE=32
SBATCH_CPU_BIND_VERBOSE=quiet
SBATCH_CPU_BIND_TYPE=mask_cpu:
SBATCH_CPU_BIND_LIST=0xFFFFFFFF
SBATCH_CPU_BIND=quiet,mask_cpu:0xFFFFFFFF
SLURM_NODEID=0
SLURM_PROCID=0
SLURM_LOCALID=0
SLURM_GTIDS=0
SLURM_CHECKPOINT_IMAGE_DIR=/pic/projects/ds/DS_PETSc/ss16
SLURMD_NODENAME=node0256
LOADEDMODULES=intel/11.1.069:openmpi/1.5.4:git/1.8.3.4:acml/5.0.0_fma4:libnuma/2.0.7:sbank/1.2:cmake/2.8.6
PNNL_COMPILER=intel
PNNL_COMPILER_VERSION=11.1.069
_LMFILES_=/share/apps/modules/Modules/3.2.8/modulefiles/development/compilers/intel/11.1.069:/share/apps/modules/Modules/3.2.8/modulefiles/development/mpi/openmpi/1.5.4:/share/apps/modules/Modules/3.2.8/modulefiles/development/tools/git/1.8.3.4:/share/apps/modules/Modules/3.2.8/modulefiles/development/mlib/acml/5.0.0_fma4:/share/apps/modules/Modules/3.2.8/modulefiles/development/tools/libnuma/2.0.7:/share/apps/modules/Modules/3.2.8/modulefiles/development/tools/sbank/1.2:/share/apps/modules/Modules/3.2.8/modulefiles/development/tools/cmake/2.8.6
IBV_FORK_SAFE=1
MPI_ROOT=/share/apps/openmpi/1.5.4/intel/11.1.069
PNNL_MPI=openmpi
PNNL_MPI_VERSION=1.5.4
MLIB_CFLAGS=-I/share/apps/acml/5.0.0/ifort64_fma4/include
MLIB_FFLAGS=-I/share/apps/acml/5.0.0/ifort64_fma4/include
MLIB_LDFLAGS=/share/apps/acml/5.0.0/ifort64_fma4/lib/libacml.a
CMAKE=/share/apps/cmake/2.8.6/

ldd output

Number of buses: 1081
Number of branches: 1689
Number of swing buses: 1
Number of PQ buses: 793
Number of PV buses: 287
Number of generators: 288
Number of switches: 4
Initialization time: 0.0110772
Alloc main data time: 0.031729
Read input data time: 0.0377419
Ext2int_gen time: 0.000801086
1.1.------preparing matric for prefy11 time: 0.0269499
solvingAXB using 1 level of parallelization...
SUPERLU_DIST LU:
0-the LU numfactorization
1-the LU numfactorization
1.2.------solvingAXB for prefy11 time: 0.225076
1.3.------fill in prefy11 time: 0.00296402
1.------computing prefy11 time: 0.255291
solvingAXB using 1 level of parallelization...
SUPERLU_DIST LU:
0-the LU numfactorization
1-the LU numfactorization
1.2.------solvingAXB for fy11 time: 0.110318
1.3.------fill in fy11 time: 0.00349498
1.------computing fy11 time: 0.113967
solvingAXB using 1 level of parallelization...
SUPERLU_DIST LU:
0-the LU numfactorization
1-the LU numfactorization
1.2.------solvingAXB for fy11 time: 0.107989
1.3.------fill in posfy11 time: 0.00276303
1.------computing posfy11 time: 0.110897
Build admittance matrix time: 0.558165
Scattering prefy11 time: 0.0989711
20 steps, ftime 0.1
Scattering fy11 time: 0.060595
9 steps, ftime 0.05
Scattering posfy11 time: 0.062969
571 steps, ftime 2.855
Run simulation time: 353.815
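The three "solvingAXB ... SUPERLU_DIST LU" blocks above are parallel direct solves done while building the admittance matrices; they line up with the 3 MatLUFactorSym and 3 MatMatSolve calls reported in the event table further down. The dynSim source is not shown in this log, so the following is only a hedged sketch of the standard 2013-era PETSc calls that produce that event pattern; SolveAXB and its arguments are illustrative placeholders, not symbols from the application.

  #include <petscmat.h>

  /* Hedged sketch, not dynSim source: factor A with SuperLU_DIST and solve
   * A X = B for a dense block of right-hand sides, the sequence that shows
   * up as MatLUFactorSym/MatLUFactorNum/MatMatSolve in -log_summary. */
  PetscErrorCode SolveAXB(Mat A, Mat B, Mat X)
  {
    Mat            F;
    IS             rowperm, colperm;
    MatFactorInfo  info;
    PetscErrorCode ierr;

    PetscFunctionBeginUser;
    ierr = MatGetFactor(A, MATSOLVERSUPERLU_DIST, MAT_FACTOR_LU, &F);CHKERRQ(ierr);
    ierr = MatGetOrdering(A, MATORDERINGNATURAL, &rowperm, &colperm);CHKERRQ(ierr);
    ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
    ierr = MatLUFactorSymbolic(F, A, rowperm, colperm, &info);CHKERRQ(ierr);
    ierr = MatLUFactorNumeric(F, A, &info);CHKERRQ(ierr);  /* the "LU numfactorization" step above */
    ierr = MatMatSolve(F, B, X);CHKERRQ(ierr);             /* solve for all right-hand sides at once */
    ierr = ISDestroy(&rowperm);CHKERRQ(ierr);
    ierr = ISDestroy(&colperm);CHKERRQ(ierr);
    ierr = MatDestroy(&F);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }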
************************************************************************************************************************
***            WIDEN YOUR WINDOW TO 120 CHARACTERS.  Use 'enscript -r -fCourier9' to print this document            ***
************************************************************************************************************************

---------------------------------------------- PETSc Performance Summary: ----------------------------------------------

dynSim on a arch-complex-opt named node0256.local with 32 processors, by d3m956 Thu Aug 15 11:48:22 2013
Using Petsc Development GIT revision: a0a914e661bf6402b8edabe0f5a2dad46323f69f  GIT Date: 2013-06-05 14:18:39 -0500

                         Max       Max/Min        Avg      Total
Time (sec):           3.545e+02      1.00000   3.545e+02
Objects:              2.740e+04      1.00022   2.739e+04
Flops:                3.761e+09      1.00286   3.756e+09  1.202e+11
Flops/sec:            1.061e+07      1.00286   1.060e+07  3.391e+08
MPI Messages:         3.383e+05      1.00074   3.382e+05  1.082e+07
MPI Message Lengths:  1.951e+08      1.00275   5.757e+02  6.230e+09
MPI Reductions:       7.824e+04      1.00008

Flop counting convention: 1 flop = 1 real number operation of type (multiply/divide/add/subtract)
                            e.g., VecAXPY() for real vectors of length N --> 2N flops
                            and VecAXPY() for complex vectors of length N --> 8N flops

Summary of Stages:   ----- Time ------  ----- Flops -----  --- Messages ---  -- Message Lengths --  -- Reductions --
                        Avg     %Total     Avg     %Total   counts   %Total     Avg         %Total   counts   %Total
 0:          Main Stage: 4.0670e+01  11.5%  1.2019e+11 100.0%  1.082e+07 100.0%  5.757e+02      100.0%  6.386e+04  81.6%
 1: My IJacobian stage: 3.1379e+02  88.5%  0.0000e+00   0.0%  1.984e+03   0.0%  1.357e-02        0.0%  1.438e+04  18.4%

------------------------------------------------------------------------------------------------------------------------
See the 'Profiling' chapter of the users' manual for details on interpreting output.
Phase summary info:
   Count: number of times phase was executed
   Time and Flops: Max - maximum over all processors
                   Ratio - ratio of maximum to minimum over all processors
   Mess: number of messages sent
   Avg. len: average message length (bytes)
   Reduct: number of global reductions
   Global: entire computation
   Stage: stages of a computation. Set stages with PetscLogStagePush() and PetscLogStagePop().
      %T - percent time in this phase         %f - percent flops in this phase
      %M - percent messages in this phase     %L - percent message lengths in this phase
      %R - percent reductions in this phase
   Total Mflop/s: 10e-6 * (sum of flops over all processors)/(max time over all processors)
------------------------------------------------------------------------------------------------------------------------
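The PetscLogStagePush()/PetscLogStagePop() mechanism described above is what produces the user-defined "My IJacobian stage" in this log. As a hedged illustration of that idiom only (this is the standard PETSc usage, not an excerpt from dynSim; TimeJacobianAssembly and assemble() are placeholder names):

  #include <petscsys.h>

  /* Hedged sketch: register a named stage, then push/pop it around the work
   * that should be charged to that stage in the -log_summary tables below. */
  PetscErrorCode TimeJacobianAssembly(PetscErrorCode (*assemble)(void))
  {
    PetscLogStage  stage;
    PetscErrorCode ierr;

    PetscFunctionBeginUser;
    ierr = PetscLogStageRegister("My IJacobian stage", &stage);CHKERRQ(ierr);
    ierr = PetscLogStagePush(stage);CHKERRQ(ierr);
    ierr = assemble();CHKERRQ(ierr);   /* time, messages, and reductions land in Event Stage 1 */
    ierr = PetscLogStagePop();CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }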
      ##########################################################
      #                                                        #
      #                       WARNING!!!                       #
      #                                                        #
      #  The code for various complex numbers numerical        #
      #  kernels uses C++, which generally is not well         #
      #  optimized. For performance that is about 4-5 times    #
      #  faster, specify --with-fortran-kernels=1              #
      #  when running ./configure.py.                          #
      #                                                        #
      ##########################################################

Event                Count      Time (sec)     Flops                             --- Global ---  --- Stage ---   Total
                   Max Ratio  Max     Ratio   Max  Ratio  Mess   Avg len Reduct  %T %f %M %L %R  %T %f %M %L %R Mflop/s
------------------------------------------------------------------------------------------------------------------------

--- Event Stage 0: Main Stage

VecDot              1796 1.0 3.1030e-01 2.9 5.10e+05 1.0 0.0e+00 0.0e+00 1.8e+03  0  0  0  0  2   1  0  0  0  3    53
VecMDot             9090 1.0 1.8859e+00 2.6 7.96e+06 1.0 0.0e+00 0.0e+00 9.1e+03  0  0  0  0 12   3  0  0  0 14   135
VecNorm            14478 1.0 1.7498e+0119.0 4.17e+06 1.0 0.0e+00 0.0e+00 1.4e+04  3  0  0  0 19  23  0  0  0 23     8
VecScale           11486 1.0 2.4124e-02 1.7 1.65e+06 1.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0  2194
VecCopy             7188 1.0 1.0141e-02 1.5 0.00e+00 0.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0     0
VecSet             22877 1.0 1.2533e-01 1.1 0.00e+00 0.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0     0
VecAXPY             4192 1.0 1.0326e-02 1.2 1.21e+06 1.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0  3741
VecAXPBYCZ          2396 1.0 6.9904e-03 1.4 1.04e+06 1.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0  4738
VecWAXPY            1796 1.0 2.6858e-03 1.3 2.59e+05 1.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0  3081
VecMAXPY           10886 1.0 1.5635e-02 1.1 1.07e+07 1.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0 21890
VecAssemblyBegin    4010.0 3.2108e-03 1.5 0.00e+00 0.0 0.0e+00 0.0e+00 1.2e+01  0  0  0  0  0   0  0  0  0  0     0
VecAssemblyEnd      4010.0 1.7643e-05 3.7 0.00e+00 0.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0     0
VecScatterBegin    17746 1.0 2.6188e+00 1.5 0.00e+00 0.0 1.1e+07 5.8e+02 6.9e+03  1  0100100  9   6  0100100 11     0
VecScatterEnd      10886 1.0 1.9374e+00 1.8 0.00e+00 0.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   4  0  0  0  0     0
VecReduceArith      4192 1.0 1.3658e-02 2.9 1.19e+06 1.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0  2789
VecReduceComm       2396 1.0 5.6600e+0029.9 0.00e+00 0.0 0.0e+00 0.0e+00 2.4e+03  1  0  0  0  3   7  0  0  0  4     0
VecNormalize       10886 1.0 6.1851e-01 1.3 4.70e+06 1.0 0.0e+00 0.0e+00 1.1e+04  0  0  0  0 14   1  0  0  0 17   243
MatMult            10886 1.0 6.5511e+00 1.3 3.61e+09 1.0 1.1e+07 5.8e+02 0.0e+00  2 96100100  0  15 96100100  0 17634
MatSolve           10886 1.0 8.2049e-01 1.0 1.11e+08 1.0 0.0e+00 0.0e+00 0.0e+00  0  3  0  0  0   2  3  0  0  0  4341
MatLUFactorSym         3 1.0 3.1352e-03 1.2 0.00e+00 0.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0     0
MatLUFactorNum      1802 1.0 3.5002e-01 1.2 1.10e+0742.4 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   1  0  0  0  0   514
MatILUFactorSym        3 1.0 1.9660e-03 2.0 0.00e+00 0.0 0.0e+00 0.0e+00 3.0e+00  0  0  0  0  0   0  0  0  0  0     0
MatCopy                3 1.0 2.4970e-03 1.5 0.00e+00 0.0 0.0e+00 0.0e+00 2.4e+01  0  0  0  0  0   0  0  0  0  0     0
MatConvert            14 1.0 2.2999e-01 1.0 0.00e+00 0.0 7.5e+03 1.8e+01 1.4e+02  0  0  0  0  0   1  0  0  0  0     0
MatScale               1 1.0 9.4900e-03 1.1 1.36e+02 0.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0     0
MatAssemblyBegin      75 1.0 5.9553e-02 3.2 0.00e+00 0.0 2.3e+02 5.9e+01 1.1e+02  0  0  0  0  0   0  0  0  0  0     0
MatAssemblyEnd        75 1.0 4.4107e-02 1.1 0.00e+00 0.0 1.7e+04 1.8e+01 2.8e+02  0  0  0  0  0   0  0  0  0  0     0
MatGetRow           8542 1.0 2.1618e-02 1.1 0.00e+00 0.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0     0
MatGetRowIJ            6 1.0 1.6670e-03 2.7 0.00e+00 0.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0     0
MatGetOrdering         6 1.0 3.2930e-03 1.5 0.00e+00 0.0 0.0e+00 0.0e+00 1.8e+01  0  0  0  0  0   0  0  0  0  0     0
MatAXPY                3 1.0 6.8130e-03 1.2 0.00e+00 0.0 1.4e+03 1.1e+01 4.8e+01  0  0  0  0  0   0  0  0  0  0     0
MatTranspose           2 1.0 9.7950e-03 1.0 0.00e+00 0.0 3.1e+02 3.4e+01 3.4e+01  0  0  0  0  0   0  0  0  0  0     0
MatMatMult             9 1.0 2.3024e-02 1.0 6.52e+04 1.0 9.5e+03 9.5e+02 2.2e+02  0  0  0  0  0   0  0  0  0  0    89
MatMatSolve            3 1.0 8.4599e-02 1.0 0.00e+00 0.0 0.0e+00 0.0e+00 2.4e+01  0  0  0  0  0   0  0  0  0  0     0
MatMatMultSym          9 1.0 1.8894e-02 1.1 0.00e+00 0.0 8.8e+03 5.8e+02 2.0e+02  0  0  0  0  0   0  0  0  0  0     0
MatMatMultNum          9 1.0 4.1387e-03 1.2 6.52e+04 1.0 7.4e+02 5.4e+03 1.8e+01  0  0  0  0  0   0  0  0  0  0   495
MatGetLocalMat        18 1.0 1.9829e-03 1.7 0.00e+00 0.0 0.0e+00 0.0e+00 1.8e+01  0  0  0  0  0   0  0  0  0  0     0
MatGetBrAoCol         18 1.0 6.5045e-03 1.5 0.00e+00 0.0 2.9e+03 3.0e+03 1.8e+01  0  0  0  0  0   0  0  0  0  0     0
Warning -- total time of even greater than time of entire stage -- something is wrong with the timer
TSStep               600 1.0 3.5351e+02 1.0 3.76e+09 1.0 1.1e+07 5.8e+02 7.2e+04100100100100 92 869100100100113   340
TSFunctionEval      2996 1.0 2.9446e+01 3.9 0.00e+00 0.0 0.0e+00 0.0e+00 3.0e+04  5  0  0  0 38  45  0  0  0 47     0
Warning -- total time of even greater than time of entire stage -- something is wrong with the timer
TSJacobianEval      1796 1.0 3.1384e+02 1.0 0.00e+00 0.0 2.0e+03 7.4e+01 1.4e+04 89  0  0  0 18 772  0  0  0 23     0
Warning -- total time of even greater than time of entire stage -- something is wrong with the timer
SNESSolve            600 1.0 3.5310e+02 1.0 3.76e+09 1.0 1.1e+07 5.8e+02 6.6e+04 99100100100 85 862100100100104   340
SNESFunctionEval    2396 1.0 2.3621e+01 3.3 1.04e+06 1.0 0.0e+00 0.0e+00 2.4e+04  4  0  0  0 31  38  0  0  0 38     1
Warning -- total time of even greater than time of entire stage -- something is wrong with the timer
SNESJacobianEval    1796 1.0 3.1385e+02 1.0 0.00e+00 0.0 2.0e+03 7.4e+01 1.4e+04 89  0  0  0 18 772  0  0  0 23     0
SNESLineSearch      1796 1.0 1.9440e+01 1.0 6.00e+08 1.0 1.8e+06 5.8e+02 2.5e+04  5 16 16 16 32  48 16 16 16 39   987
KSPGMRESOrthog      9090 1.0 1.9159e+00 2.5 1.60e+07 1.0 0.0e+00 0.0e+00 9.1e+03  0  0  0  0 12   3  0  0  0 14   268
KSPSetUp            3592 1.0 1.3601e-02 1.1 0.00e+00 0.0 0.0e+00 0.0e+00 3.0e+01  0  0  0  0  0   0  0  0  0  0     0
KSPSolve            1796 1.0 8.2506e+00 1.0 3.16e+09 1.0 9.0e+06 5.8e+02 2.0e+04  2 84 83 83 26  20 84 83 83 31 12238
PCSetUp             3592 1.0 2.5937e-01 1.3 1.10e+0742.4 0.0e+00 0.0e+00 2.1e+01  0  0  0  0  0   1  0  0  0  0   693
PCSetUpOnBlocks     1796 1.0 2.4673e-01 1.3 1.10e+0742.4 0.0e+00 0.0e+00 1.5e+01  0  0  0  0  0   1  0  0  0  0   729
PCApply            10886 1.0 1.0384e+00 1.1 1.11e+08 1.0 0.0e+00 0.0e+00 0.0e+00  0  3  0  0  0   2  3  0  0  0  3430

--- Event Stage 1: My IJacobian stage

VecSet              1797 1.0 4.7467e-02 1.9 0.00e+00 0.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0     0
VecScatterBegin     1796 1.0 4.3967e-01 1.1 0.00e+00 0.0 0.0e+00 0.0e+00 1.8e+03  0  0  0  0  2   0  0  0  0 12     0
MatAssemblyBegin    1796 1.0 7.2787e+00 1.8 0.00e+00 0.0 0.0e+00 0.0e+00 3.6e+03  2  0  0  0  5   2  0  0  0 25     0
MatAssemblyEnd      1796 1.0 2.1605e-01 1.1 0.00e+00 0.0 2.0e+03 7.4e+01 1.8e+03  0  0  0  0  2   0  0100100 13     0
------------------------------------------------------------------------------------------------------------------------

Memory usage is given in bytes:

Object Type          Creations   Destructions     Memory  Descendants' Mem.
Reports information only for process 0.
--- Event Stage 0: Main Stage

              Vector      7921            144       750128     0
      Vector Scatter      6901           6897      4455060     0
              Matrix       160            154      1658100     0
           Index Set      6959           6953      5294912     0
   IS L to G Mapping         1              0            0     0
     Bipartite Graph        16             14        11424     0
    Distributed Mesh         7              6        26304     0
             TSAdapt         6              4         4784     0
                  TS         3              1         1224     0
                DMTS         3              2         1424     0
                SNES         3              1         1316     0
      SNESLineSearch         3              1          864     0
              DMSNES         4              3         2016     0
       Krylov Solver         6              2        35896     0
     DMKSP interface         1              0            0     0
      Preconditioner         6              2         1864     0
              Viewer         1              0            0     0

--- Event Stage 1: My IJacobian stage

              Vector      1798              1         1552     0
      Vector Scatter      1797           1796      1156624     0
           Index Set      1798           1798      1370952     0

========================================================================================================================
Average time to get PetscTime(): 2.14577e-07
Average time for MPI_Barrier(): 3.66211e-05
Average time for zero size MPI_Send(): 8.59797e-06
#PETSc Option Table entries:
-i /pic/projects/ds/DS_PETSc/txt/d288gen.txt
-log_summary
-ts_theta_endpoint
#End of PETSc Option Table entries
Compiled without FORTRAN kernels
Compiled with full precision matrices (default)
sizeof(short) 2 sizeof(int) 4 sizeof(long) 8 sizeof(void*) 8 sizeof(PetscScalar) 16 sizeof(PetscInt) 4
Configure run at: Mon Aug 5 13:50:31 2013
Configure options: --with-scalar-type=complex --with-clanguage=C++ PETSC_ARCH=arch-complex-opt --with-fortran-kernels=generic --download-superlu_dist --download-mumps --download-scalapack --download-parmetis --download-metis --download-elemental --with-debugging=0
-----------------------------------------
Libraries compiled on Mon Aug 5 13:50:31 2013 on olympus.local
Machine characteristics: Linux-2.6.32-131.17.1.el6.x86_64-x86_64-with-redhat-5.7-Tikanga
Using PETSc directory: /pic/projects/ds/petsc-dev.6.06.13
Using PETSc arch: arch-complex-opt
-----------------------------------------
Using C compiler: mpicxx -wd1572 -O3 -fPIC ${COPTFLAGS} ${CFLAGS}
Using Fortran compiler: mpif90 -fPIC -O3 ${FOPTFLAGS} ${FFLAGS}
-----------------------------------------
Using include paths: -I/pic/projects/ds/petsc-dev.6.06.13/arch-complex-opt/include -I/pic/projects/ds/petsc-dev.6.06.13/include -I/pic/projects/ds/petsc-dev.6.06.13/include -I/pic/projects/ds/petsc-dev.6.06.13/arch-complex-opt/include -I/share/apps/openmpi/1.5.4/intel/11.1/include
-----------------------------------------
Using C linker: mpicxx
Using Fortran linker: mpif90
Using libraries: -Wl,-rpath,/pic/projects/ds/petsc-dev.6.06.13/arch-complex-opt/lib -L/pic/projects/ds/petsc-dev.6.06.13/arch-complex-opt/lib -lpetsc -Wl,-rpath,/pic/projects/ds/petsc-dev.6.06.13/arch-complex-opt/lib -L/pic/projects/ds/petsc-dev.6.06.13/arch-complex-opt/lib -lcmumps -ldmumps -lsmumps -lzmumps -lmumps_common -lpord -lscalapack -lsuperlu_dist_3.3 -lelemental -lpmrrr -llapack -lblas -lX11 -lparmetis -lmetis -lpthread -Wl,-rpath,/share/apps/openmpi/1.5.4/intel/11.1/lib -L/share/apps/openmpi/1.5.4/intel/11.1/lib -Wl,-rpath,/share/apps/intel/11.1/069/lib/intel64 -L/share/apps/intel/11.1/069/lib/intel64 -Wl,-rpath,/usr/lib/gcc/x86_64-redhat-linux/4.1.2 -L/usr/lib/gcc/x86_64-redhat-linux/4.1.2 -lmpi_f90 -lmpi_f77 -lifport -lifcore -lm -lm -lmpi_cxx -lstdc++ -lmpi_cxx -lstdc++ -ldl -lmpi -lnsl -lutil -limf -lsvml -lipgo -ldecimal -lgcc_s -lirc -lpthread -lirc_s -ldl
-----------------------------------------
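For reference, the -ts_theta_endpoint entry in the option table selects the endpoint variant of the TSTHETA integrator used for the 600 TSStep calls above. The following is only a hedged sketch of how that choice could be made programmatically with the TS API of this PETSc vintage; ConfigureIntegrator is an illustrative name, not a dynSim function, and the theta value shown is simply the default rather than something taken from this run.

  #include <petscts.h>

  /* Hedged sketch: pick the theta method and its endpoint variant in code;
   * passing -ts_theta_endpoint on the command line (as in the option table
   * above) has the same effect once TSSetFromOptions() runs. */
  PetscErrorCode ConfigureIntegrator(TS ts)
  {
    PetscErrorCode ierr;

    PetscFunctionBeginUser;
    ierr = TSSetType(ts, TSTHETA);CHKERRQ(ierr);
    ierr = TSThetaSetTheta(ts, 0.5);CHKERRQ(ierr);      /* default theta; with endpoint set this is the trapezoid rule */
    ierr = TSThetaSetEndpoint(ts, PETSC_TRUE);CHKERRQ(ierr);
    ierr = TSSetFromOptions(ts);CHKERRQ(ierr);          /* run-time options such as -ts_theta_endpoint still apply */
    PetscFunctionReturn(0);
  }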