+ module load intel-knl-workshop/env python/3.5.2
++ /usr/Modules/3.2.10/bin/modulecmd bash load intel-knl-workshop/env python/3.5.2
+ eval [modulecmd output elided: the DYLD_LIBRARY_PATH, LD_LIBRARY_PATH, LIBRARY_PATH, LM_LICENSE_FILE, LOADEDMODULES, PATH, PYTHONHOME, PYTHONPATH and _LMFILES_ assignments it performs are traced verbatim below]
++ DYLD_LIBRARY_PATH=/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/compiler/lib/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mkl/lib/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/tbb/lib/intel64
++ export DYLD_LIBRARY_PATH
++ LD_LIBRARY_PATH=/global/opt/jsc/Python-3.5.2/lib:/global/opt/jsc/intel/impi/2017.1.132/lib64:/global/opt/jsc/intel/inspector_2017.1.1.484836/lib64:/global/opt/jsc/intel/advisor_2017.1.2.500157/lib64:/global/opt/jsc/intel/vtune_amplifier_xe_2017.1.0.486011/lib64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/tbb/lib/intel64/gcc4.7:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mkl/lib/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/ipp/lib/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mpirt/lib/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/compiler/lib/intel64:/opt/cray/lib64:/usr/lib64:/opt/cray/lib64::/global/opt/slurm/default/lib64
++ export LD_LIBRARY_PATH
++ LIBRARY_PATH=/global/opt/jsc/Python-3.5.2/lib:/global/opt/jsc/intel/impi/2017.1.132/lib64:/global/opt/jsc/intel/inspector_2017.1.1.484836/lib64:/global/opt/jsc/intel/advisor_2017.1.2.500157/lib64:/global/opt/jsc/intel/vtune_amplifier_xe_2017.1.0.486011/lib64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mkl/lib/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/ipp/lib/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/tbb/lib/intel64/gcc4.7:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/compiler/lib/intel64
++ export LIBRARY_PATH
++ LM_LICENSE_FILE=/global/modules/intel-knl-workshop/COM_L___H9TX-75MVHF5J.lic:/global/modules/intel-knl-workshop/EVAL_L___VFMV-67S58RFH.lic
++ export LM_LICENSE_FILE
++ LOADEDMODULES=intel-ics/2017.1.132:intel-vtune/2017.1.0.486011:intel-advisor/2017.1.2.500157:intel-inspector/2017.1.1.484836:intel-impi/2017.1.132:intel-knl-workshop/env:python/3.5.2
++ export LOADEDMODULES
++ PATH=/global/opt/jsc/Python-3.5.2/bin:/global/opt/jsc/intel/impi/2017.1.132/bin64:/global/opt/jsc/intel/inspector_2017.1.1.484836/bin64:/global/opt/jsc/intel/advisor_2017.1.2.500157/bin64:/global/opt/jsc/intel/vtune_amplifier_xe_2017.1.0.486011/bin64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/bin/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mpirt/bin64:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/opt/munge/default/bin:/global/opt/slurm/default/bin
++ export PATH
++ PYTHONHOME=/global/opt/jsc/Python-3.5.2
++ export PYTHONHOME
++ PYTHONPATH=/global/opt/jsc/Python-3.5.2/lib
++ export PYTHONPATH
++ _LMFILES_=/global/modules/intel-ics/2017.1.132:/global/modules/intel-vtune/2017.1.0.486011:/global/modules/intel-advisor/2017.1.2.500157:/global/modules/intel-inspector/2017.1.1.484836:/global/modules/intel-impi/2017.1.132:/global/modules/intel-knl-workshop/env:/global/modules/python/3.5.2
++ export _LMFILES_
+ source /gpfs/homeb/pcp0/pcp0050/eocoe/petsc_knl/bin/activate
++ deactivate nondestructive
++ unset -f pydoc
++ '[' -z '' ']'
++ '[' -z '' ']'
++ '[' -n /bin/bash ']'
++ hash -r
++ '[' -z '' ']'
++ unset VIRTUAL_ENV
++ '[' '!' nondestructive = nondestructive ']'
++ VIRTUAL_ENV=/gpfs/homeb/pcp0/pcp0050/eocoe/petsc_knl
++ export VIRTUAL_ENV
++ _OLD_VIRTUAL_PATH=/global/opt/jsc/Python-3.5.2/bin:/global/opt/jsc/intel/impi/2017.1.132/bin64:/global/opt/jsc/intel/inspector_2017.1.1.484836/bin64:/global/opt/jsc/intel/advisor_2017.1.2.500157/bin64:/global/opt/jsc/intel/vtune_amplifier_xe_2017.1.0.486011/bin64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/bin/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mpirt/bin64:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/opt/munge/default/bin:/global/opt/slurm/default/bin
++ PATH=/gpfs/homeb/pcp0/pcp0050/eocoe/petsc_knl/bin:/global/opt/jsc/Python-3.5.2/bin:/global/opt/jsc/intel/impi/2017.1.132/bin64:/global/opt/jsc/intel/inspector_2017.1.1.484836/bin64:/global/opt/jsc/intel/advisor_2017.1.2.500157/bin64:/global/opt/jsc/intel/vtune_amplifier_xe_2017.1.0.486011/bin64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/bin/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mpirt/bin64:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/opt/munge/default/bin:/global/opt/slurm/default/bin
++ export PATH
++ '[' -z _ ']'
++ _OLD_VIRTUAL_PYTHONHOME=/global/opt/jsc/Python-3.5.2
++ unset PYTHONHOME
++ '[' -z '' ']'
++ _OLD_VIRTUAL_PS1=
++ '[' x '!=' x ']'
+++ basename /gpfs/homeb/pcp0/pcp0050/eocoe/petsc_knl
++ PS1='(petsc_knl) '
++ export PS1
++ alias pydoc
++ '[' -n /bin/bash ']'
++ hash -r
+ export PYTHONPATH=/gpfs/homeb/pcp0/pcp0050/eocoe/petsc_knl/lib/python3.5/site-packages
+ PYTHONPATH=/gpfs/homeb/pcp0/pcp0050/eocoe/petsc_knl/lib/python3.5/site-packages
+ export LD_LIBRARY_PATH=/lib64:/global/opt/jsc/Python-3.5.2/lib:/global/opt/jsc/intel/impi/2017.1.132/lib64:/global/opt/jsc/intel/inspector_2017.1.1.484836/lib64:/global/opt/jsc/intel/advisor_2017.1.2.500157/lib64:/global/opt/jsc/intel/vtune_amplifier_xe_2017.1.0.486011/lib64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/tbb/lib/intel64/gcc4.7:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mkl/lib/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/ipp/lib/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mpirt/lib/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/compiler/lib/intel64:/opt/cray/lib64:/usr/lib64:/opt/cray/lib64::/global/opt/slurm/default/lib64
+ LD_LIBRARY_PATH=/lib64:/global/opt/jsc/Python-3.5.2/lib:/global/opt/jsc/intel/impi/2017.1.132/lib64:/global/opt/jsc/intel/inspector_2017.1.1.484836/lib64:/global/opt/jsc/intel/advisor_2017.1.2.500157/lib64:/global/opt/jsc/intel/vtune_amplifier_xe_2017.1.0.486011/lib64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/tbb/lib/intel64/gcc4.7:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mkl/lib/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/ipp/lib/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mpirt/lib/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/compiler/lib/intel64:/opt/cray/lib64:/usr/lib64:/opt/cray/lib64::/global/opt/slurm/default/lib64
+ export OMP_NUM_THREADS=1
+ OMP_NUM_THREADS=1
+ export I_MPI_DEBUG=5
+ I_MPI_DEBUG=5
+ export OMP_PLACES=cores
+ OMP_PLACES=cores
+ export OMP_PROC_BIND=spread
+ OMP_PROC_BIND=spread
+ SCRIPT_NAME=FD_3D_TwoPointFluxApproximation_MiniApp.py
+ SCRIPT_PATH=/gpfs/homeb/pcp0/pcp0050/eocoe/TPFA/
+ SCRIPT_PATH+=FD_3D_TwoPointFluxApproximation_MiniApp.py
+ echo /gpfs/homeb/pcp0/pcp0050/eocoe/TPFA/FD_3D_TwoPointFluxApproximation_MiniApp.py
/gpfs/homeb/pcp0/pcp0050/eocoe/TPFA/FD_3D_TwoPointFluxApproximation_MiniApp.py
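At this point the script has only prepared the environment: the workshop modules and Python 3.5.2 are loaded, the petsc_knl virtualenv is activated, and the run is pinned to one OpenMP thread per MPI rank (OMP_NUM_THREADS=1) with core-granular placement (OMP_PLACES=cores, OMP_PROC_BIND=spread). A minimal, Linux-only sketch of how a rank could confirm the CPU binding it ends up with (illustrative only, not part of FD_3D_TwoPointFluxApproximation_MiniApp.py; assumes mpi4py is available in the virtualenv):

    import os
    from mpi4py import MPI  # assumed available in the petsc_knl virtualenv

    # Report the CPU set this rank may run on; with the pinning above it
    # should agree with the "Pin cpu" column Intel MPI prints under
    # I_MPI_DEBUG=5 later in this log.
    rank = MPI.COMM_WORLD.Get_rank()
    cpus = sorted(os.sched_getaffinity(0))  # Linux-only affinity query
    print("rank", rank, "on", MPI.Get_processor_name(), "bound to cpus", cpus)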
+ TMPDIR=/gpfs/work/pcp0/pcp0050/23321
+ mkdir /gpfs/work/pcp0/pcp0050/23321
+ echo /gpfs/work/pcp0/pcp0050/23321
/gpfs/work/pcp0/pcp0050/23321
+ export
+ grep LD_LIBRARY_PATH
declare -x DYLD_LIBRARY_PATH="/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/compiler/lib/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mkl/lib/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/tbb/lib/intel64"
declare -x LD_LIBRARY_PATH="/lib64:/global/opt/jsc/Python-3.5.2/lib:/global/opt/jsc/intel/impi/2017.1.132/lib64:/global/opt/jsc/intel/inspector_2017.1.1.484836/lib64:/global/opt/jsc/intel/advisor_2017.1.2.500157/lib64:/global/opt/jsc/intel/vtune_amplifier_xe_2017.1.0.486011/lib64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/tbb/lib/intel64/gcc4.7:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mkl/lib/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/ipp/lib/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mpirt/lib/intel64:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/compiler/lib/intel64:/opt/cray/lib64:/usr/lib64:/opt/cray/lib64::/global/opt/slurm/default/lib64"
declare -x MIC_LD_LIBRARY_PATH="/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mkl/lib/mic:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/compiler/lib/mic:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mpirt/lib/mic:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/ipp/lib/lib/mic:/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/tbb/lib/mic"
+ cp /gpfs/homeb/pcp0/pcp0050/eocoe/TPFA/FD_3D_TwoPointFluxApproximation_MiniApp.py /gpfs/work/pcp0/pcp0050/23321
++ echo /gpfs/work/pcp0/pcp0050/23321/FD_3D_TwoPointFluxApproximation_MiniApp.py
+ DEST=/gpfs/work/pcp0/pcp0050/23321/FD_3D_TwoPointFluxApproximation_MiniApp.py
+ echo /gpfs/work/pcp0/pcp0050/23321/FD_3D_TwoPointFluxApproximation_MiniApp.py
/gpfs/work/pcp0/pcp0050/23321/FD_3D_TwoPointFluxApproximation_MiniApp.py
+ NX=10
+ NY=10
+ NZ=10
+ NP=128
++ echo /gpfs/work/pcp0/pcp0050/TPFA/matrix-A-10x10x10.dat
+ AMAT=/gpfs/work/pcp0/pcp0050/TPFA/matrix-A-10x10x10.dat
++ echo /gpfs/work/pcp0/pcp0050/TPFA/vector-b-10x10x10.dat
+ BVEC=/gpfs/work/pcp0/pcp0050/TPFA/vector-b-10x10x10.dat
++ echo /gpfs/work/pcp0/pcp0050/TPFA/vector-x-10x10x10.dat
+ XVEC=/gpfs/work/pcp0/pcp0050/TPFA/vector-x-10x10x10.dat
+ sleep 3
+ '[' -r == -w ']'
+ '[' -r == -r ']'
+ /usr/bin/time -f %e,%M,%W,%I,%O,%r,%s,%x srun python /gpfs/work/pcp0/pcp0050/23321/FD_3D_TwoPointFluxApproximation_MiniApp.py -s 1.0 10 10 10 2 1e-15 1e-06 1e+05 100000 PETSC CPU N -r BIN /gpfs/work/pcp0/pcp0050/TPFA/matrix-A-10x10x10.dat /gpfs/work/pcp0/pcp0050/TPFA/vector-b-10x10x10.dat /gpfs/work/pcp0/pcp0050/TPFA/vector-x-10x10x10.dat -log_view -ksp_view -on_error_attach_debugger
[-1] MPI startup(): Imported environment partly inaccesible. Map=0 Info=1d4f100
[... the same "Imported environment partly inaccesible" warning is printed 64 times in all, once per MPI rank, differing only in the Info handle; 63 repetitions omitted ...]
[0] MPI startup(): Multi-threaded optimized library
[2] MPI startup(): shm data transfer mode
[... all 64 ranks report "shm data transfer mode"; 63 similar lines omitted ...]
[0] MPI startup(): Rank    Pid      Node name   Pin cpu
[0] MPI startup(): 0       281677   prod-0060   +1
[0] MPI startup(): 1       281678   prod-0060   +1
[... ranks 2-62 (Pids 281679-281739 in sequence), all on prod-0060 with pin "+1", omitted ...]
[0] MPI startup(): 63      281740   prod-0060   +1
[0] MPI startup(): I_MPI_DEBUG=5
KSP Object: 64 MPI processes
  type: gmres
    GMRES: restart=30, using Classical (unmodified) Gram-Schmidt Orthogonalization with no iterative refinement
    GMRES: happy breakdown tolerance 1e-30
  maximum iterations=100000, initial guess is zero
  tolerances:  relative=1e-06, absolute=1e-15, divergence=100000.
  left preconditioning
  using PRECONDITIONED norm type for convergence test
PC Object: 64 MPI processes
  type: bjacobi
    block Jacobi: number of blocks = 64
    Local solve is same for all blocks, in the following KSP and PC objects:
  KSP Object: (sub_) 1 MPI processes
    type: preonly
    maximum iterations=10000, initial guess is zero
    tolerances:  relative=1e-05, absolute=1e-50, divergence=10000.
    left preconditioning
    using NONE norm type for convergence test
  PC Object: (sub_) 1 MPI processes
    type: ilu
      out-of-place factorization
      0 levels of fill
      tolerance for zero pivot 2.22045e-14
      matrix ordering: natural
      factor fill ratio given 1., needed 1.
        Factored matrix follows:
          Mat Object: 1 MPI processes
            type: seqaij
            rows=16, cols=16
            package used to perform factorization: petsc
            total: nonzeros=56, allocated nonzeros=56
            total number of mallocs used during MatSetValues calls =0
              not using I-node routines
    linear system matrix = precond matrix:
    Mat Object: 1 MPI processes
      type: seqaij
      rows=16, cols=16
      total: nonzeros=56, allocated nonzeros=56
      total number of mallocs used during MatSetValues calls =0
        not using I-node routines
  linear system matrix = precond matrix:
  Mat Object: 64 MPI processes
    type: mpiaij
    rows=1000, cols=1000
    total: nonzeros=6400, allocated nonzeros=6400
    total number of mallocs used during MatSetValues calls =0
      not using I-node (on process 0) routines
Initializing PETSC ...
Before bcast
After bcast
USING PETSC SOLVER LIB
Loading matrix /gpfs/work/pcp0/pcp0050/TPFA/matrix-A-10x10x10.dat
Size=1000x1000
Loading vector /gpfs/work/pcp0/pcp0050/TPFA/vector-b-10x10x10.dat
Creating vector x...
Created
Solving...
Saving results /gpfs/work/pcp0/pcp0050/TPFA/vector-x-10x10x10.dat
Solved...
No of steps: 511
Net time: 0.1681058406829834
Gross time: 0.21166682243347168
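The -ksp_view output above fully determines the solver: GMRES(30) with block Jacobi (one block per rank, ILU(0) on each local block) at rtol=1e-06, atol=1e-15, divtol=1e+05 and max_it=100000, which matches the tolerance values that appear on the command line. A minimal petsc4py sketch that reproduces this configuration and the load/solve/save cycle the log reports (an illustration under those assumptions, not the mini-app's actual source; file names taken from the run):

    # Minimal petsc4py sketch of the solve reported above; illustrative only.
    from petsc4py import PETSc

    # Load A and b from PETSc binary files (the "-r BIN" paths on the command line).
    A = PETSc.Mat().load(PETSc.Viewer().createBinary(
        '/gpfs/work/pcp0/pcp0050/TPFA/matrix-A-10x10x10.dat', 'r'))
    b = PETSc.Vec().load(PETSc.Viewer().createBinary(
        '/gpfs/work/pcp0/pcp0050/TPFA/vector-b-10x10x10.dat', 'r'))
    x = b.duplicate()

    # GMRES(30) + block Jacobi; the per-block KSP (preonly) and PC (ILU(0))
    # shown in the -ksp_view output are PETSc's defaults for bjacobi.
    ksp = PETSc.KSP().create(PETSc.COMM_WORLD)
    ksp.setOperators(A)
    ksp.setType(PETSc.KSP.Type.GMRES)
    ksp.setGMRESRestart(30)
    ksp.setTolerances(rtol=1e-06, atol=1e-15, divtol=1e+05, max_it=100000)
    ksp.getPC().setType(PETSc.PC.Type.BJACOBI)
    ksp.setFromOptions()   # honors -ksp_view, -log_view, etc.
    ksp.solve(b, x)

    # Save x, as in "Saving results .../vector-x-10x10x10.dat".
    x.view(PETSc.Viewer().createBinary(
        '/gpfs/work/pcp0/pcp0050/TPFA/vector-x-10x10x10.dat', 'w'))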
************************************************************************************************************************
***             WIDEN YOUR WINDOW TO 120 CHARACTERS.  Use 'enscript -r -fCourier9' to print this document            ***
************************************************************************************************************************

---------------------------------------------- PETSc Performance Summary: ----------------------------------------------

/gpfs/work/pcp0/pcp0050/23321/FD_3D_TwoPointFluxApproximation_MiniApp.py on a arch-linux2-c-opt named prod-0060 with 64 processors, by pcp0050 Fri Jun 16 13:29:57 2017
Using Petsc Development GIT revision: v3.7.6-4104-g1acf903  GIT Date: 2017-06-11 15:19:20 -0500

                         Max       Max/Min        Avg      Total
Time (sec):           4.138e-01      1.02659   4.096e-01
Objects:              5.900e+01      1.00000   5.900e+01
Flop:                 6.995e+05      1.13648   6.729e+05  4.306e+07
Flop/sec:             1.735e+06      1.15687   1.643e+06  1.051e+08
MPI Messages:         3.184e+03      1.99780   2.922e+03  1.870e+05
MPI Message Lengths:  2.207e+05      2.07569   6.738e+01  1.260e+07
MPI Reductions:       1.059e+03      1.00000

Flop counting convention: 1 flop = 1 real number operation of type (multiply/divide/add/subtract)
                          e.g., VecAXPY() for real vectors of length N --> 2N flop
                          and VecAXPY() for complex vectors of length N --> 8N flop

Summary of Stages:   ----- Time ------  ----- Flop -----  --- Messages ---  -- Message Lengths --  -- Reductions --
                        Avg     %Total     Avg     %Total   counts   %Total     Avg         %Total   counts   %Total
 0:      Main Stage: 4.0961e-01 100.0%  4.3063e+07 100.0%  1.870e+05 100.0%  6.738e+01      100.0%  1.058e+03  99.9%

------------------------------------------------------------------------------------------------------------------------
See the 'Profiling' chapter of the users' manual for details on interpreting output.
Phase summary info:
   Count: number of times phase was executed
   Time and Flop: Max - maximum over all processors
                  Ratio - ratio of maximum to minimum over all processors
   Mess: number of messages sent
   Avg. len: average message length (bytes)
   Reduct: number of global reductions
   Global: entire computation
   Stage: stages of a computation. Set stages with PetscLogStagePush() and PetscLogStagePop().
      %T - percent time in this phase         %F - percent flop in this phase
      %M - percent messages in this phase     %L - percent message lengths in this phase
      %R - percent reductions in this phase
   Total Mflop/s: 10e-6 * (sum of flop over all processors)/(max time over all processors)
------------------------------------------------------------------------------------------------------------------------
Event                Count      Time (sec)      Flop                              --- Global ---  --- Stage ---   Total
                   Max Ratio  Max      Ratio   Max  Ratio  Mess   Avg len Reduct  %T %F %M %L %R  %T %F %M %L %R Mflop/s
------------------------------------------------------------------------------------------------------------------------

--- Event Stage 0: Main Stage

VecView                1 1.0 1.0927e-02  1.3 0.00e+00 0.0 6.3e+01 1.3e+02 0.0e+00  2  0  0  0  0   2  0  0  0  0     0
VecMDot              511 1.0 2.3635e-02  1.6 2.45e+05 1.1 0.0e+00 0.0e+00 5.1e+02  5 36  0  0 48   5 36  0  0 48   648
VecNorm              529 1.0 6.2158e-02  1.1 1.69e+04 1.1 0.0e+00 0.0e+00 5.3e+02 15  2  0  0 50  15  2  0  0 50    17
VecScale             529 1.0 1.8744e-03  1.1 8.46e+03 1.1 0.0e+00 0.0e+00 0.0e+00  0  1  0  0  0   0  1  0  0  0   282
VecCopy               18 1.0 1.1659e-04  2.7 0.00e+00 0.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0     0
VecSet               549 1.0 9.3055e-04  1.2 0.00e+00 0.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0     0
VecAXPY               35 1.0 1.6806e-03  5.1 1.12e+03 1.1 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0    42
VecMAXPY             529 1.0 3.0921e-03  1.1 2.69e+05 1.1 0.0e+00 0.0e+00 0.0e+00  1 39  0  0  0   1 39  0  0  0  5444
VecAssemblyBegin       1 1.0 4.3581e-03  1.8 0.00e+00 0.0 0.0e+00 0.0e+00 3.0e+00  1  0  0  0  0   1  0  0  0  0     0
VecAssemblyEnd         1 1.0 1.0967e-05  2.2 0.00e+00 0.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0     0
VecLoad                1 1.0 9.7179e-03  1.5 0.00e+00 0.0 6.3e+01 1.2e+02 4.0e+00  2  0  0  0  0   2  0  0  0  0     0
VecScatterBegin      528 1.0 7.6351e-03  1.4 0.00e+00 0.0 1.9e+05 6.7e+01 0.0e+00  2  0 99 99  0   2  0 99 99  0     0
VecScatterEnd        528 1.0 1.0414e-02  1.8 0.00e+00 0.0 0.0e+00 0.0e+00 0.0e+00  2  0  0  0  0   2  0  0  0  0     0
VecNormalize         529 1.0 6.5737e-02  1.0 2.54e+04 1.1 0.0e+00 0.0e+00 5.3e+02 16  4  0  0 50  16  4  0  0 50    24
MatMult              528 1.0 2.2577e-02  1.4 1.08e+05 1.5 1.9e+05 6.7e+01 0.0e+00  5 14 99 99  0   5 14 99 99  0   276
MatSolve             529 1.0 2.2182e-03  1.2 5.08e+04 1.4 0.0e+00 0.0e+00 0.0e+00  1  7  0  0  0   1  7  0  0  0  1366
MatLUFactorNum         1 1.0 1.8120e-04  5.5 1.00e+02 1.9 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0    32
MatILUFactorSym        1 1.0 2.2411e-04  2.4 0.00e+00 0.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0     0
MatAssemblyBegin       1 1.0 5.0130e-03  3.1 0.00e+00 0.0 0.0e+00 0.0e+00 2.0e+00  1  0  0  0  0   1  0  0  0  0     0
MatAssemblyEnd         1 1.0 7.9231e-03  1.2 0.00e+00 0.0 7.0e+02 1.9e+01 8.0e+00  2  0  0  0  1   2  0  0  0  1     0
MatGetRowIJ            1 1.0 8.4877e-05 22.2 0.00e+00 0.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0     0
MatGetOrdering         1 1.0 2.0099e-04  2.3 0.00e+00 0.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0     0
MatLoad                1 1.0 2.2202e-02  1.1 0.00e+00 0.0 8.9e+02 1.0e+02 1.3e+01  5  0  0  1  1   5  0  0  1  1     0
MatView                3 3.0 3.3309e-03  2.1 0.00e+00 0.0 0.0e+00 0.0e+00 1.0e+00  1  0  0  0  0   1  0  0  0  0     0
PCSetUp                2 1.0 1.3149e-03  1.4 1.00e+02 1.9 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0     4
PCSetUpOnBlocks        1 1.0 8.2898e-04  2.1 1.00e+02 1.9 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0     7
PCApply              529 1.0 3.0347e-02  1.1 5.08e+04 1.4 0.0e+00 0.0e+00 0.0e+00  7  7  0  0  0   7  7  0  0  0   100
KSPGMRESOrthog       511 1.0 3.0744e-02  1.4 4.98e+05 1.1 0.0e+00 0.0e+00 5.1e+02  6 72  0  0 48   6 72  0  0 48  1012
KSPSetUp               2 1.0 2.7108e-04  2.2 0.00e+00 0.0 0.0e+00 0.0e+00 0.0e+00  0  0  0  0  0   0  0  0  0  0     0
KSPSolve               1 1.0 1.5059e-01  1.0 7.00e+05 1.1 1.9e+05 6.7e+01 1.0e+03 37 100 99 99 98  37 100 99 99 98   286
------------------------------------------------------------------------------------------------------------------------

Memory usage is given in bytes:

Object Type          Creations   Destructions     Memory  Descendants' Mem.
Reports information only for process 0.

--- Event Stage 0: Main Stage

              Viewer     5              4         3280     0.
           Index Set     5              5         4152     0.
              Vector    40             40        70376     0.
      Vector Scatter     1              1         1072     0.
              Matrix     4              4        14928     0.
      Preconditioner     2              2         1896     0.
       Krylov Solver     2              2        19576     0.
========================================================================================================================
Average time to get PetscTime(): 2.14577e-07
Average time for MPI_Barrier(): 3.9196e-05
Average time for zero size MPI_Send(): 5.45382e-06
#PETSc Option Table entries:
-ksp_view
-log_view
-on_error_attach_debugger
#End of PETSc Option Table entries
Compiled without FORTRAN kernels
Compiled with full precision matrices (default)
sizeof(short) 2 sizeof(int) 4 sizeof(long) 8 sizeof(void*) 8 sizeof(PetscScalar) 8 sizeof(PetscInt) 4
Configure options: --prefix=/gpfs/homeb/pcp0/pcp0050/eocoe/petsc_knl --COPTFLAGS="-O3 -qopenmp -axCORE-AVX2,AVX -xCOMMON-AVX512" --CXXFLAGS="-O3 -qopenmp -axCORE-AVX2,AVX -xCOMMON-AVX512" --FOPTFLAGS="-O3 -qopenmp -axCORE-AVX2,AVX -xCOMMON-AVX512" --with-mpi=1 --with-openmp=1 --with-cc=mpicc --with-fc=mpif90 --with-cuda=0 --with-debugging=0 --with-lapack-lib=/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mkl/lib/intel64/libmkl_rt.so --with-blas-lib=/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mkl/lib/intel64/libmkl_rt.so --with-sundials=1 --with-sundials-dir=/gpfs/homeb/pcp0/pcp0050/eocoe/sundials --with-x=0 --with-sowing=1 --with-sowing-dir=/gpfs/homeb/pcp0/pcp0050/eocoe/sowing --with-sowing-include=/gpfs/homeb/pcp0/pcp0050/eocoe/sowing/include --with-sowing-lib=/gpfs/homeb/pcp0/pcp0050/eocoe/sowing/lib/libsowing.a --download-sowing=no --with-x=0
-----------------------------------------
Libraries compiled on Mon Jun 12 13:46:35 2017 on login2
Machine characteristics: Linux-3.10.0-327.el7.x86_64-x86_64-with-centos-7.2.1511-Core
Using PETSc directory: /gpfs/homeb/pcp0/pcp0050/src/petsc_bitbucket
Using PETSc arch: arch-linux2-c-opt
-----------------------------------------
Using C compiler: mpicc -fPIC -Wall -Wwrite-strings -Wno-strict-aliasing -Wno-unknown-pragmas -fstack-protector -fvisibility=hidden -fopenmp ${COPTFLAGS} ${CFLAGS}
Using Fortran compiler: mpif90 -fPIC -Wall -ffree-line-length-0 -Wno-unused-dummy-argument -fopenmp ${FOPTFLAGS} ${FFLAGS}
-----------------------------------------
Using include paths: -I/gpfs/homeb/pcp0/pcp0050/src/petsc_bitbucket/arch-linux2-c-opt/include -I/gpfs/homeb/pcp0/pcp0050/src/petsc_bitbucket/include -I/gpfs/homeb/pcp0/pcp0050/src/petsc_bitbucket/include -I/gpfs/homeb/pcp0/pcp0050/src/petsc_bitbucket/arch-linux2-c-opt/include -I/gpfs/homeb/pcp0/pcp0050/eocoe/sundials/include
-----------------------------------------
Using C linker: mpicc
Using Fortran linker: mpif90
Using libraries: -Wl,-rpath,/gpfs/homeb/pcp0/pcp0050/src/petsc_bitbucket/arch-linux2-c-opt/lib -L/gpfs/homeb/pcp0/pcp0050/src/petsc_bitbucket/arch-linux2-c-opt/lib -lpetsc -Wl,-rpath,/gpfs/homeb/pcp0/pcp0050/eocoe/sundials/lib -L/gpfs/homeb/pcp0/pcp0050/eocoe/sundials/lib -Wl,-rpath,/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mkl/lib/intel64 -L/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mkl/lib/intel64 -Wl,-rpath,/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mpi/intel64/lib/release_mt -L/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mpi/intel64/lib/release_mt -Wl,-rpath,/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mpi/intel64/lib -L/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/mpi/intel64/lib -Wl,-rpath,/global/opt/jsc/intel/inspector_2017.1.1.484836/lib64 -L/global/opt/jsc/intel/inspector_2017.1.1.484836/lib64 -Wl,-rpath,/global/opt/jsc/intel/advisor_2017.1.2.500157/lib64 -L/global/opt/jsc/intel/advisor_2017.1.2.500157/lib64 -Wl,-rpath,/global/opt/jsc/intel/vtune_amplifier_xe_2017.1.0.486011/lib64 -L/global/opt/jsc/intel/vtune_amplifier_xe_2017.1.0.486011/lib64 -Wl,-rpath,/usr/lib/gcc/x86_64-redhat-linux/4.8.5 -L/usr/lib/gcc/x86_64-redhat-linux/4.8.5 -Wl,-rpath,/global/opt/jsc/Python-2.7.12/lib -L/global/opt/jsc/Python-2.7.12/lib -Wl,-rpath,/global/opt/jsc/intel/impi/2017.1.132/lib64 -L/global/opt/jsc/intel/impi/2017.1.132/lib64 -Wl,-rpath,/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/ipp/lib/intel64 -L/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/ipp/lib/intel64 -Wl,-rpath,/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/tbb/lib/intel64/gcc4.7 -L/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/tbb/lib/intel64/gcc4.7 -Wl,-rpath,/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/compiler/lib/intel64 -L/global/opt/jsc/intel/compilers_and_libraries_2017.1.132/linux/compiler/lib/intel64 -Wl,-rpath,/opt/intel/mpi-rt/2107.0.0/intel64/lib/release_mt -Wl,-rpath,/opt/intel/mpi-rt/2017.0.0/intel64/lib -Wl,-rpath,/opt/intel/mpi-rt/2017.0.0/intel64/lib/release_mt -lsundials_cvode -lsundials_nvecserial -lsundials_nvecparallel -lmkl_rt -lm -lgfortran -lm -lgfortran -lm -lquadmath -ldl -lmpifort -lmpi -lmpigi -lrt -lpthread -lgcc_s -ldl
-----------------------------------------
10.93,4852,0,192,0,0,0,0
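The final comma-separated line is the /usr/bin/time summary in the -f "%e,%M,%W,%I,%O,%r,%s,%x" format requested earlier in the script: elapsed wall-clock seconds, maximum resident set size (KB), swaps, file-system inputs and outputs, socket messages received and sent, and exit status. A small sketch (hypothetical helper, not part of the job script) for turning that line into labeled values:

    # Parse the /usr/bin/time -f "%e,%M,%W,%I,%O,%r,%s,%x" line above.
    FIELDS = ("elapsed_s", "max_rss_kb", "swaps", "fs_inputs",
              "fs_outputs", "sock_recv", "sock_sent", "exit_status")

    def parse_time_line(line):
        """Map the eight comma-separated counters onto their field names."""
        return dict(zip(FIELDS, (float(v) for v in line.strip().split(","))))

    stats = parse_time_line("10.93,4852,0,192,0,0,0,0")
    print(stats["elapsed_s"], stats["max_rss_kb"], stats["exit_status"])
    # -> 10.93 4852.0 0.0

The 10.93 s elapsed time covers the whole srun step, so it is dominated by launch and I/O overhead around the sub-second solve (Net time 0.168 s, Gross time 0.212 s above); the exit status of 0 confirms the step completed cleanly.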