Warning: no access to tty (Bad file descriptor).
Thus no job control in this shell.
host: node43-038

==================================================================================================
mpiexec options:
----------------
  Base path: /cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/intel64/bin/
  Launcher: ssh
  Debug level: 1
  Enable X: -1

  Global environment:
  -------------------
    I_MPI_PERHOST=allcores
    LD_LIBRARY_PATH=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/intel64/lib:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/mic/lib:/cm/shared/apps/moab/7.2.9/lib:/cm/shared/apps/torque/4.2.4.1/lib:/cm/shared/tools/subversion-1.8.4/lib:/cm/shared/tools/cmake-3.1.0/lib:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/ipp/lib/intel64:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/compiler/lib/intel64:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mkl/lib/intel64:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/tbb/lib/intel64/gcc4.4:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/daal/lib/intel64_lin:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/daal/../tbb/lib/intel64_lin/gcc4.4:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/daal/../compiler/lib/intel64_lin:/cm/shared/languages/Intel-Compiler-XE-16-U2/debugger_2016/libipt/intel64/lib:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/intel64/lib:/cm/shared/libraries/intel_build/papi-5.3.0/lib:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/intel64/lib:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/mic/lib
    MODULE_VERSION_STACK=3.2.6
    MKLROOT=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mkl
    MANPATH=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/man::ignore:/cm/local/apps/environment-modules/3.2.6/man:/cm/shared/apps/torque/4.2.4.1/share/man:/cm/shared/tools/subversion-1.8.4/share/man:/cm/shared/tools/cmake-3.1.0/share:/cm/shared/languages/Intel-Compiler-XE-16-U2/man/common:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/man/en_US:/cm/shared/languages/Intel-Compiler-XE-16-U2/documentation_2016/en/debugger/gdb-ia/man:/cm/shared/languages/Intel-Compiler-XE-16-U2/documentation_2016/en/debugger/gdb-mic/man:/cm/shared/languages/Intel-Compiler-XE-16-U2/documentation_2016/en/debugger/gdb-igfx/man
    HOSTNAME=node43-038
    PBS_VERSION=TORQUE-4.2.4.1
    INTEL_LICENSE_FILE=/cm/shared/licenses/intel
    SHELL=/bin/tcsh
    HOST=node43-038
    PBS_JOBNAME=9lap
    GDBSERVER_MIC=/cm/shared/languages/Intel-Compiler-XE-16-U2/debugger_2016/gdb/targets/mic/bin/gdbserver
    INTEL_FC_HOME=/cm/shared/languages/Intel-Compiler-XE-16-U2
    PBS_ENVIRONMENT=PBS_BATCH
    QTDIR=/usr/lib64/qt-3.3
    QTINC=/usr/lib64/qt-3.3/include
    PBS_HOME=/cm/shared/apps/torque/4.2.4.1/spool
    PBS_O_WORKDIR=/panfs/panasas01/mech/mexas/nobkp/coarrays/head/examples/prof/tau/9laplace
    MIC_LD_LIBRARY_PATH=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/mic/lib:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/compiler/lib/mic:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mkl/lib/mic:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/tbb/lib/mic
    TORQUE_HOME=/cm/shared/apps/torque/4.2.4.1/spool
    USER=mexas
    PBS_TASKNUM=1
    GROUP=mech
    LS_COLORS=
    PBS_O_HOME=/panfs/panasas01/mech/mexas
    I_MPI_MPIRUN=mpirun
    HOSTTYPE=x86_64-linux
    PBS_WALLTIME=3600
    PBS_MOMPORT=15003
    PBS_GPUFILE=/cm/local/apps/torque/4.2.4.1/spool/aux//4579303.master.cm.clustergpu
    INTEL_CC_HOME=/cm/shared/languages/Intel-Compiler-XE-16-U2
    PBS_O_QUEUE=testq
    NLSPATH=/cm/shared/languages/Intel-Compiler-XE-16-U2/cm/shared/languages/Intel-Compiler-XE-16/compilers_and_libraries_2016.2.181/linux/compiler/lib/intel64/locale/%l_%t/%N:/cm/shared/languages/Intel-Compiler-XE-16/compilers_and_libraries_2016.2.181/linux/mkl/lib/intel64/locale/%l_%t/%N:/cm/shared/languages/Intel-Compiler-XE-16-U2/debugger_2016/gdb/intel64_mic/share/locale/%l_%t/%N:/cm/shared/languages/Intel-Compiler-XE-16/debugger_2016/gdb/intel64/share/locale/%l_%t/%N
    PATH=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/intel64/bin:/panfs/panasas01/mech/mexas/pdtoolkit-3.22/x86_64/bin:/panfs/panasas01/mech/mexas/tau-2.25.1/x86_64/bin:/cm/shared/apps/moab/7.2.9/sbin:/cm/shared/apps/moab/7.2.9/bin:/usr/lib64/qt-3.3/bin:/bin:/usr/bin:/sbin:/usr/sbin:/opt/dell/srvadmin/bin:.:/cm/shared/apps/torque/4.2.4.1/bin:/cm/shared/apps/torque/4.2.4.1/sbin:/cm/shared/tools/subversion-1.8.4/bin:/cm/shared/tools/cmake-3.1.0/bin:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/bin/intel64:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/intel64/bin:/cm/shared/languages/Intel-Compiler-XE-16-U2/debugger_2016/gdb/intel64_mic/bin:/cm/shared/libraries/intel_build/papi-5.3.0/bin
    PBS_O_LOGNAME=mexas
    MAIL=/var/spool/mail/mexas
    MODULE_VERSION=3.2.6
    PBS_O_LANG=en_US
    PBS_JOBCOOKIE=E1BCE09429324FD4CB2B728C9A2522E8
    BLOCKSIZE=512
    C_INCLUDE_PATH=/cm/shared/libraries/intel_build/papi-5.3.0/include
    TBBROOT=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/tbb
    PWD=/panfs/panasas01/mech/mexas/nobkp/coarrays/head/examples/prof/tau/9laplace
    _LMFILES_=/cm/local/modulefiles/shared:/cm/local/modulefiles/dot:/cm/shared/modulefiles/torque/4.2.4.1:/cm/shared/modulefiles/moab/7.2.9:/cm/shared/modulefiles/default-environment:/cm/shared/modulefiles/tools/subversion-1.8.4:/cm/shared/modulefiles/tools/cmake-3.1.0:/cm/shared/modulefiles/languages/intel-compiler-16-u2:/cm/shared/modulefiles/libraries/intel_builds/papi-5.3.0
    EDITOR=vi
    LANG=en_US.UTF-8
    PBS_NODENUM=0
    GDB_CROSS=/cm/shared/languages/Intel-Compiler-XE-16-U2/debugger_2016/gdb/intel64_mic/bin/gdb-mic
    MODULEPATH=/cm/local/modulefiles:/cm/shared/modulefiles
    MOABHOMEDIR=/cm/shared/apps/moab/7.2.9
    I_MPI_HYDRA_RMK=pbs
    PBS_NUM_NODES=1
    LOADEDMODULES=shared:dot:torque/4.2.4.1:moab/7.2.9:default-environment:tools/subversion-1.8.4:tools/cmake-3.1.0:languages/intel-compiler-16-u2:libraries/intel_builds/papi-5.3.0
    confile=nodes
    PBS_O_SHELL=/bin/tcsh
    PBS_JOBID=4579303.master.cm.cluster
    DAALROOT=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/daal
    ENVIRONMENT=BATCH
    INTEL_PYTHONHOME=/cm/shared/languages/Intel-Compiler-XE-16-U2/debugger_2016/python/intel64
    HOME=/panfs/panasas01/mech/mexas
    SHLVL=3
    OSTYPE=linux
    PBS_O_HOST=newblue2.cm.cluster
    PBS_VNODENUM=0
    VENDOR=unknown
    LOGNAME=mexas
    MACHTYPE=x86_64
    CVS_RSH=ssh
    QTLIB=/usr/lib64/qt-3.3/lib
    PBS_QUEUE=testq
    CLASSPATH=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/daal/lib/daal.jar
    MODULESHOME=/cm/local/apps/environment-modules/3.2.6//Modules/3.2.6
    PBS_O_MAIL=/var/spool/mail/mexas
    PBS_MICFILE=/cm/local/apps/torque/4.2.4.1/spool/aux//4579303.master.cm.clustermic
    LESSOPEN=|/usr/bin/lesspipe.sh %s
    PBS_NP=16
    PBS_O_SERVER=master.cm.cluster
    PBS_NUM_PPN=16
    PBS_NODEFILE=/cm/local/apps/torque/4.2.4.1/spool/aux//4579303.master.cm.cluster
    G_BROKEN_FILENAMES=1
    I_MPI_ROOT=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi
    PBS_O_PATH=/panfs/panasas01/mech/mexas/pdtoolkit-3.22/x86_64/bin:/panfs/panasas01/mech/mexas/tau-2.25.1/x86_64/bin:/cm/shared/apps/moab/7.2.9/sbin:/cm/shared/apps/moab/7.2.9/bin:/usr/lib64/qt-3.3/bin:/usr/local/bin:/bin:/usr/bin:/sbin:/usr/sbin:/opt/dell/srvadmin/bin:.:/cm/shared/apps/torque/4.2.4.1/bin:/cm/shared/apps/torque/4.2.4.1/sbin:/cm/shared/tools/subversion-1.8.4/bin:/cm/shared/tools/cmake-3.1.0/bin:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/bin/intel64:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/intel64/bin:/cm/shared/languages/Intel-Compiler-XE-16-U2/debugger_2016/gdb/intel64_mic/bin:/cm/shared/libraries/intel_build/papi-5.3.0/bin
    _=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/intel64/bin/mpiexec.hydra

  Hydra internal environment:
  ---------------------------
    MPIR_CVAR_NEMESIS_ENABLE_CKPOINT=1
    GFORTRAN_UNBUFFERED_PRECONNECTED=y
    I_MPI_HYDRA_UUID=7ea90000-8c80-52b6-a032-050001620a83
    IPATH_NO_BACKTRACE=1
    DAPL_NETWORK_PROCESS_NUM=16

  User set environment:
  ---------------------
    I_MPI_FABRICS=shm:dapl
    I_MPI_HYDRA_DEBUG=1

    Proxy information:
    *********************
      [1] proxy: node43-038 (16 cores)
      Exec list: IMB-MPI1 (16 processes);

==================================================================================================

[mpiexec@node43-038] Timeout set to -1 (-1 means infinite)
[mpiexec@node43-038] Got a control port string of node43-038.cm.cluster:34903

Proxy launch args: /cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/intel64/bin/pmi_proxy --control-port node43-038.cm.cluster:34903 --debug --pmi-connect lazy-cache --pmi-aggregate -s 0 --rmk user --launcher ssh --demux poll --pgid 0 --enable-stdin 1 --retries 10 --control-code 1080312432 --usize -2 --proxy-id

Arguments being passed to proxy 0:
--version 3.1.2 --iface-ip-env-name MPIR_CVAR_CH3_INTERFACE_HOSTNAME --hostname node43-038 --global-core-map 0,16,16 --pmi-id-map 0,0 --global-process-count 16 --auto-cleanup 1 --pmi-kvsname kvs_43390_0 --pmi-process-mapping (vector,(0,1,16)) --topolib ipl --ckpointlib blcr --ckpoint-prefix /tmp --ckpoint-preserve 1 --ckpoint off --ckpoint-num -1 --global-inherited-env 83 'I_MPI_PERHOST=allcores'
'LD_LIBRARY_PATH=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/intel64/lib:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/mic/lib:/cm/shared/apps/moab/7.2.9/lib:/cm/shared/apps/torque/4.2.4.1/lib:/cm/shared/tools/subversion-1.8.4/lib:/cm/shared/tools/cmake-3.1.0/lib:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/ipp/lib/intel64:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/compiler/lib/intel64:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mkl/lib/intel64:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/tbb/lib/intel64/gcc4.4:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/daal/lib/intel64_lin:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/daal/../tbb/lib/intel64_lin/gcc4.4:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/daal/../compiler/lib/intel64_lin:/cm/shared/languages/Intel-Compiler-XE-16-U2/debugger_2016/libipt/intel64/lib:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/intel64/lib:/cm/shared/libraries/intel_build/papi-5.3.0/lib:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/intel64/lib:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/mic/lib' 'MODULE_VERSION_STACK=3.2.6' 'MKLROOT=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mkl' 'MANPATH=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/man::ignore:/cm/local/apps/environment-modules/3.2.6/man:/cm/shared/apps/torque/4.2.4.1/share/man:/cm/shared/tools/subversion-1.8.4/share/man:/cm/shared/tools/cmake-3.1.0/share:/cm/shared/languages/Intel-Compiler-XE-16-U2/man/common:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/man/en_US:/cm/shared/languages/Intel-Compiler-XE-16-U2/documentation_2016/en/debugger/gdb-ia/man:/cm/shared/languages/Intel-Compiler-XE-16-U2/documentation_2016/en/debugger/gdb-mic/man:/cm/shared/languages/Intel-Compiler-XE-16-U2/documentation_2016/en/debugger/gdb-igfx/man' 'HOSTNAME=node43-038' 'PBS_VERSION=TORQUE-4.2.4.1' 'INTEL_LICENSE_FILE=/cm/shared/licenses/intel' 'SHELL=/bin/tcsh' 'HOST=node43-038' 'PBS_JOBNAME=9lap' 'GDBSERVER_MIC=/cm/shared/languages/Intel-Compiler-XE-16-U2/debugger_2016/gdb/targets/mic/bin/gdbserver' 'INTEL_FC_HOME=/cm/shared/languages/Intel-Compiler-XE-16-U2' 'PBS_ENVIRONMENT=PBS_BATCH' 'QTDIR=/usr/lib64/qt-3.3' 'QTINC=/usr/lib64/qt-3.3/include' 'PBS_HOME=/cm/shared/apps/torque/4.2.4.1/spool' 'PBS_O_WORKDIR=/panfs/panasas01/mech/mexas/nobkp/coarrays/head/examples/prof/tau/9laplace' 'MIC_LD_LIBRARY_PATH=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/mic/lib:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/compiler/lib/mic:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mkl/lib/mic:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/tbb/lib/mic' 'TORQUE_HOME=/cm/shared/apps/torque/4.2.4.1/spool' 'USER=mexas' 'PBS_TASKNUM=1' 'GROUP=mech' 'LS_COLORS=' 'PBS_O_HOME=/panfs/panasas01/mech/mexas' 'I_MPI_MPIRUN=mpirun' 'HOSTTYPE=x86_64-linux' 'PBS_WALLTIME=3600' 
'PBS_MOMPORT=15003' 'PBS_GPUFILE=/cm/local/apps/torque/4.2.4.1/spool/aux//4579303.master.cm.clustergpu' 'INTEL_CC_HOME=/cm/shared/languages/Intel-Compiler-XE-16-U2' 'PBS_O_QUEUE=testq' 'NLSPATH=/cm/shared/languages/Intel-Compiler-XE-16-U2/cm/shared/languages/Intel-Compiler-XE-16/compilers_and_libraries_2016.2.181/linux/compiler/lib/intel64/locale/%l_%t/%N:/cm/shared/languages/Intel-Compiler-XE-16/compilers_and_libraries_2016.2.181/linux/mkl/lib/intel64/locale/%l_%t/%N:/cm/shared/languages/Intel-Compiler-XE-16-U2/debugger_2016/gdb/intel64_mic/share/locale/%l_%t/%N:/cm/shared/languages/Intel-Compiler-XE-16/debugger_2016/gdb/intel64/share/locale/%l_%t/%N' 'PATH=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/intel64/bin:/panfs/panasas01/mech/mexas/pdtoolkit-3.22/x86_64/bin:/panfs/panasas01/mech/mexas/tau-2.25.1/x86_64/bin:/cm/shared/apps/moab/7.2.9/sbin:/cm/shared/apps/moab/7.2.9/bin:/usr/lib64/qt-3.3/bin:/bin:/usr/bin:/sbin:/usr/sbin:/opt/dell/srvadmin/bin:.:/cm/shared/apps/torque/4.2.4.1/bin:/cm/shared/apps/torque/4.2.4.1/sbin:/cm/shared/tools/subversion-1.8.4/bin:/cm/shared/tools/cmake-3.1.0/bin:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/bin/intel64:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/intel64/bin:/cm/shared/languages/Intel-Compiler-XE-16-U2/debugger_2016/gdb/intel64_mic/bin:/cm/shared/libraries/intel_build/papi-5.3.0/bin' 'PBS_O_LOGNAME=mexas' 'MAIL=/var/spool/mail/mexas' 'MODULE_VERSION=3.2.6' 'PBS_O_LANG=en_US' 'PBS_JOBCOOKIE=E1BCE09429324FD4CB2B728C9A2522E8' 'BLOCKSIZE=512' 'C_INCLUDE_PATH=/cm/shared/libraries/intel_build/papi-5.3.0/include' 'TBBROOT=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/tbb' 'PWD=/panfs/panasas01/mech/mexas/nobkp/coarrays/head/examples/prof/tau/9laplace' '_LMFILES_=/cm/local/modulefiles/shared:/cm/local/modulefiles/dot:/cm/shared/modulefiles/torque/4.2.4.1:/cm/shared/modulefiles/moab/7.2.9:/cm/shared/modulefiles/default-environment:/cm/shared/modulefiles/tools/subversion-1.8.4:/cm/shared/modulefiles/tools/cmake-3.1.0:/cm/shared/modulefiles/languages/intel-compiler-16-u2:/cm/shared/modulefiles/libraries/intel_builds/papi-5.3.0' 'EDITOR=vi' 'LANG=en_US.UTF-8' 'PBS_NODENUM=0' 'GDB_CROSS=/cm/shared/languages/Intel-Compiler-XE-16-U2/debugger_2016/gdb/intel64_mic/bin/gdb-mic' 'MODULEPATH=/cm/local/modulefiles:/cm/shared/modulefiles' 'MOABHOMEDIR=/cm/shared/apps/moab/7.2.9' 'I_MPI_HYDRA_RMK=pbs' 'PBS_NUM_NODES=1' 'LOADEDMODULES=shared:dot:torque/4.2.4.1:moab/7.2.9:default-environment:tools/subversion-1.8.4:tools/cmake-3.1.0:languages/intel-compiler-16-u2:libraries/intel_builds/papi-5.3.0' 'confile=nodes' 'PBS_O_SHELL=/bin/tcsh' 'PBS_JOBID=4579303.master.cm.cluster' 'DAALROOT=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/daal' 'ENVIRONMENT=BATCH' 'INTEL_PYTHONHOME=/cm/shared/languages/Intel-Compiler-XE-16-U2/debugger_2016/python/intel64' 'HOME=/panfs/panasas01/mech/mexas' 'SHLVL=3' 'OSTYPE=linux' 'PBS_O_HOST=newblue2.cm.cluster' 'PBS_VNODENUM=0' 'VENDOR=unknown' 'LOGNAME=mexas' 'MACHTYPE=x86_64' 'CVS_RSH=ssh' 'QTLIB=/usr/lib64/qt-3.3/lib' 'PBS_QUEUE=testq' 'CLASSPATH=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/daal/lib/daal.jar' 'MODULESHOME=/cm/local/apps/environment-modules/3.2.6//Modules/3.2.6' 'PBS_O_MAIL=/var/spool/mail/mexas' 
'PBS_MICFILE=/cm/local/apps/torque/4.2.4.1/spool/aux//4579303.master.cm.clustermic' 'LESSOPEN=|/usr/bin/lesspipe.sh %s' 'PBS_NP=16' 'PBS_O_SERVER=master.cm.cluster' 'PBS_NUM_PPN=16' 'PBS_NODEFILE=/cm/local/apps/torque/4.2.4.1/spool/aux//4579303.master.cm.cluster' 'G_BROKEN_FILENAMES=1' 'I_MPI_ROOT=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi' 'PBS_O_PATH=/panfs/panasas01/mech/mexas/pdtoolkit-3.22/x86_64/bin:/panfs/panasas01/mech/mexas/tau-2.25.1/x86_64/bin:/cm/shared/apps/moab/7.2.9/sbin:/cm/shared/apps/moab/7.2.9/bin:/usr/lib64/qt-3.3/bin:/usr/local/bin:/bin:/usr/bin:/sbin:/usr/sbin:/opt/dell/srvadmin/bin:.:/cm/shared/apps/torque/4.2.4.1/bin:/cm/shared/apps/torque/4.2.4.1/sbin:/cm/shared/tools/subversion-1.8.4/bin:/cm/shared/tools/cmake-3.1.0/bin:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/bin/intel64:/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/intel64/bin:/cm/shared/languages/Intel-Compiler-XE-16-U2/debugger_2016/gdb/intel64_mic/bin:/cm/shared/libraries/intel_build/papi-5.3.0/bin' '_=/cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/intel64/bin/mpiexec.hydra' --global-user-env 2 'I_MPI_FABRICS=shm:dapl' 'I_MPI_HYDRA_DEBUG=1' --global-system-env 5 'MPIR_CVAR_NEMESIS_ENABLE_CKPOINT=1' 'GFORTRAN_UNBUFFERED_PRECONNECTED=y' 'I_MPI_HYDRA_UUID=7ea90000-8c80-52b6-a032-050001620a83' 'IPATH_NO_BACKTRACE=1' 'DAPL_NETWORK_PROCESS_NUM=16' --genv-prop all --proxy-core-count 16 --mpi-cmd-env mpirun -genvall -genv I_MPI_FABRICS shm:dapl -genv I_MPI_HYDRA_DEBUG 1 -n 16 -machinefile ./nodes IMB-MPI1 --exec --exec-appnum 0 --exec-proc-count 16 --exec-local-env 0 --exec-wdir /panfs/panasas01/mech/mexas/nobkp/coarrays/head/examples/prof/tau/9laplace --exec-args 1 IMB-MPI1 [mpiexec@node43-038] Launch arguments: /cm/shared/languages/Intel-Compiler-XE-16-U2/compilers_and_libraries_2016.2.181/linux/mpi/intel64/bin/pmi_proxy --control-port node43-038.cm.cluster:34903 --debug --pmi-connect lazy-cache --pmi-aggregate -s 0 --rmk user --launcher ssh --demux poll --pgid 0 --enable-stdin 1 --retries 10 --control-code 1080312432 --usize -2 --proxy-id 0 [mpiexec@node43-038] STDIN will be redirected to 1 fd(s): 11 [proxy:0:0@node43-038] Start PMI_proxy 0 [proxy:0:0@node43-038] STDIN will be redirected to 1 fd(s): 17 [proxy:0:0@node43-038] got pmi command (from 24): init pmi_version=1 pmi_subversion=1 [proxy:0:0@node43-038] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0 [proxy:0:0@node43-038] got pmi command (from 24): get_maxes [proxy:0:0@node43-038] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024 [proxy:0:0@node43-038] got pmi command (from 24): barrier_in [proxy:0:0@node43-038] got pmi command (from 16): init pmi_version=1 pmi_subversion=1 [proxy:0:0@node43-038] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0 [proxy:0:0@node43-038] got pmi command (from 16): get_maxes [proxy:0:0@node43-038] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024 [proxy:0:0@node43-038] got pmi command (from 27): init pmi_version=1 pmi_subversion=1 [proxy:0:0@node43-038] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0 [proxy:0:0@node43-038] got pmi command (from 16): barrier_in [proxy:0:0@node43-038] got pmi command (from 27): get_maxes [proxy:0:0@node43-038] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024 [proxy:0:0@node43-038] got pmi 
command (from 33): init pmi_version=1 pmi_subversion=1 [proxy:0:0@node43-038] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0 [proxy:0:0@node43-038] got pmi command (from 27): barrier_in [proxy:0:0@node43-038] got pmi command (from 36): init pmi_version=1 pmi_subversion=1 [proxy:0:0@node43-038] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0 [proxy:0:0@node43-038] got pmi command (from 57): init pmi_version=1 pmi_subversion=1 [proxy:0:0@node43-038] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0 [proxy:0:0@node43-038] got pmi command (from 14): init pmi_version=1 pmi_subversion=1 [proxy:0:0@node43-038] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0 [proxy:0:0@node43-038] got pmi command (from 21): init pmi_version=1 pmi_subversion=1 [proxy:0:0@node43-038] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0 [proxy:0:0@node43-038] got pmi command (from 30): init pmi_version=1 pmi_subversion=1 [proxy:0:0@node43-038] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0 [proxy:0:0@node43-038] got pmi command (from 33): get_maxes [proxy:0:0@node43-038] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024 [proxy:0:0@node43-038] got pmi command (from 36): get_maxes [proxy:0:0@node43-038] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024 [proxy:0:0@node43-038] got pmi command (from 12): init pmi_version=1 pmi_subversion=1 [proxy:0:0@node43-038] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0 [proxy:0:0@node43-038] got pmi command (from 14): get_maxes [proxy:0:0@node43-038] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024 [proxy:0:0@node43-038] got pmi command (from 21): get_maxes [proxy:0:0@node43-038] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024 [proxy:0:0@node43-038] got pmi command (from 30): get_maxes [proxy:0:0@node43-038] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024 [proxy:0:0@node43-038] got pmi command (from 33): barrier_in [proxy:0:0@node43-038] got pmi command (from 39): init pmi_version=1 pmi_subversion=1 [proxy:0:0@node43-038] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0 [proxy:0:0@node43-038] got pmi command (from 42): init pmi_version=1 pmi_subversion=1 [proxy:0:0@node43-038] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0 [proxy:0:0@node43-038] got pmi command (from 45): init pmi_version=1 pmi_subversion=1 [proxy:0:0@node43-038] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0 [proxy:0:0@node43-038] got pmi command (from 48): init pmi_version=1 pmi_subversion=1 [proxy:0:0@node43-038] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0 [proxy:0:0@node43-038] got pmi command (from 51): init pmi_version=1 pmi_subversion=1 [proxy:0:0@node43-038] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0 [proxy:0:0@node43-038] got pmi command (from 54): init pmi_version=1 pmi_subversion=1 [proxy:0:0@node43-038] PMI response: cmd=response_to_init pmi_version=1 pmi_subversion=1 rc=0 [proxy:0:0@node43-038] got pmi command (from 57): get_maxes [proxy:0:0@node43-038] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024 [proxy:0:0@node43-038] got pmi command (from 12): get_maxes [proxy:0:0@node43-038] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024 [proxy:0:0@node43-038] got pmi command (from 14): 
barrier_in [proxy:0:0@node43-038] got pmi command (from 21): barrier_in [proxy:0:0@node43-038] got pmi command (from 30): barrier_in [proxy:0:0@node43-038] got pmi command (from 36): barrier_in [proxy:0:0@node43-038] got pmi command (from 39): get_maxes [proxy:0:0@node43-038] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024 [proxy:0:0@node43-038] got pmi command (from 42): get_maxes [proxy:0:0@node43-038] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024 [proxy:0:0@node43-038] got pmi command (from 45): get_maxes [proxy:0:0@node43-038] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024 [proxy:0:0@node43-038] got pmi command (from 48): get_maxes [proxy:0:0@node43-038] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024 [proxy:0:0@node43-038] got pmi command (from 51): get_maxes [proxy:0:0@node43-038] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024 [proxy:0:0@node43-038] got pmi command (from 54): get_maxes [proxy:0:0@node43-038] PMI response: cmd=maxes kvsname_max=256 keylen_max=64 vallen_max=1024 [proxy:0:0@node43-038] got pmi command (from 12): barrier_in [proxy:0:0@node43-038] got pmi command (from 39): barrier_in [proxy:0:0@node43-038] got pmi command (from 42): barrier_in [proxy:0:0@node43-038] got pmi command (from 45): barrier_in [proxy:0:0@node43-038] got pmi command (from 48): barrier_in [proxy:0:0@node43-038] got pmi command (from 51): barrier_in [proxy:0:0@node43-038] got pmi command (from 57): barrier_in [proxy:0:0@node43-038] got pmi command (from 54): barrier_in [proxy:0:0@node43-038] forwarding command (cmd=barrier_in) upstream [mpiexec@node43-038] [pgid: 0] got PMI command: cmd=barrier_in [mpiexec@node43-038] PMI response to fd 8 pid 54: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] got pmi command (from 12): get_ranks2hosts [proxy:0:0@node43-038] PMI response: put_ranks2hosts 55 1 10 node43-038 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15, [proxy:0:0@node43-038] got pmi command (from 16): get_ranks2hosts [proxy:0:0@node43-038] PMI response: put_ranks2hosts 55 1 10 node43-038 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15, [proxy:0:0@node43-038] got pmi command (from 21): get_ranks2hosts [proxy:0:0@node43-038] PMI response: put_ranks2hosts 55 1 10 node43-038 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15, [proxy:0:0@node43-038] got pmi command (from 24): get_ranks2hosts [proxy:0:0@node43-038] PMI response: put_ranks2hosts 55 1 10 node43-038 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15, [proxy:0:0@node43-038] got pmi command (from 27): get_ranks2hosts [proxy:0:0@node43-038] PMI response: put_ranks2hosts 55 1 10 node43-038 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15, [proxy:0:0@node43-038] 
got pmi command (from 36): get_ranks2hosts [proxy:0:0@node43-038] PMI response: put_ranks2hosts 55 1 10 node43-038 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15, [proxy:0:0@node43-038] got pmi command (from 39): get_ranks2hosts [proxy:0:0@node43-038] PMI response: put_ranks2hosts 55 1 10 node43-038 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15, [proxy:0:0@node43-038] got pmi command (from 12): get_appnum [proxy:0:0@node43-038] PMI response: cmd=appnum appnum=0 [proxy:0:0@node43-038] got pmi command (from 16): get_appnum [proxy:0:0@node43-038] PMI response: cmd=appnum appnum=0 [proxy:0:0@node43-038] got pmi command (from 21): get_appnum [proxy:0:0@node43-038] PMI response: cmd=appnum appnum=0 [proxy:0:0@node43-038] got pmi command (from 24): get_appnum [proxy:0:0@node43-038] PMI response: cmd=appnum appnum=0 [proxy:0:0@node43-038] got pmi command (from 27): get_appnum [proxy:0:0@node43-038] PMI response: cmd=appnum appnum=0 [proxy:0:0@node43-038] got pmi command (from 30): get_ranks2hosts [proxy:0:0@node43-038] PMI response: put_ranks2hosts 55 1 10 node43-038 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15, [proxy:0:0@node43-038] got pmi command (from 33): get_ranks2hosts [proxy:0:0@node43-038] PMI response: put_ranks2hosts 55 1 10 node43-038 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15, [proxy:0:0@node43-038] got pmi command (from 42): get_ranks2hosts [proxy:0:0@node43-038] PMI response: put_ranks2hosts 55 1 10 node43-038 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15, [proxy:0:0@node43-038] got pmi command (from 45): get_ranks2hosts [proxy:0:0@node43-038] PMI response: put_ranks2hosts 55 1 10 node43-038 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15, [proxy:0:0@node43-038] got pmi command (from 48): get_ranks2hosts [proxy:0:0@node43-038] PMI response: put_ranks2hosts 55 1 10 node43-038 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15, [proxy:0:0@node43-038] got pmi command (from 51): get_ranks2hosts [proxy:0:0@node43-038] PMI response: put_ranks2hosts 55 1 10 node43-038 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15, [proxy:0:0@node43-038] got pmi command (from 54): get_ranks2hosts [proxy:0:0@node43-038] PMI response: put_ranks2hosts 55 1 10 node43-038 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15, [proxy:0:0@node43-038] got pmi command (from 57): get_ranks2hosts [proxy:0:0@node43-038] PMI response: put_ranks2hosts 55 1 10 node43-038 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15, [proxy:0:0@node43-038] got pmi command (from 12): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 16): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 21): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 24): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 27): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 30): get_appnum [proxy:0:0@node43-038] PMI response: cmd=appnum appnum=0 [proxy:0:0@node43-038] got pmi command (from 33): get_appnum [proxy:0:0@node43-038] PMI response: cmd=appnum appnum=0 [proxy:0:0@node43-038] got pmi command (from 36): get_appnum [proxy:0:0@node43-038] PMI response: cmd=appnum appnum=0 [proxy:0:0@node43-038] got pmi command (from 39): get_appnum [proxy:0:0@node43-038] PMI response: cmd=appnum appnum=0 [proxy:0:0@node43-038] got pmi command (from 42): get_appnum 
[proxy:0:0@node43-038] PMI response: cmd=appnum appnum=0 [proxy:0:0@node43-038] got pmi command (from 45): get_appnum [proxy:0:0@node43-038] PMI response: cmd=appnum appnum=0 [proxy:0:0@node43-038] got pmi command (from 48): get_appnum [proxy:0:0@node43-038] PMI response: cmd=appnum appnum=0 [proxy:0:0@node43-038] got pmi command (from 51): get_appnum [proxy:0:0@node43-038] PMI response: cmd=appnum appnum=0 [proxy:0:0@node43-038] got pmi command (from 54): get_appnum [proxy:0:0@node43-038] PMI response: cmd=appnum appnum=0 [proxy:0:0@node43-038] got pmi command (from 12): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 16): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 21): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 24): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 27): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 30): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 33): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 36): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 39): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 42): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 45): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 48): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 51): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 57): get_appnum [proxy:0:0@node43-038] PMI response: cmd=appnum appnum=0 [proxy:0:0@node43-038] got pmi command (from 30): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 33): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 36): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 39): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 42): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 45): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 48): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 51): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi 
command (from 54): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 12): put kvsname=kvs_43390_0 key=sharedFilename[0] value=/dev/shm/Intel_MPI_IJgRoE [proxy:0:0@node43-038] forwarding command (cmd=put kvsname=kvs_43390_0 key=sharedFilename[0] value=/dev/shm/Intel_MPI_IJgRoE) upstream [mpiexec@node43-038] [pgid: 0] got PMI command: cmd=put kvsname=kvs_43390_0 key=sharedFilename[0] value=/dev/shm/Intel_MPI_IJgRoE [mpiexec@node43-038] PMI response to fd 8 pid 12: cmd=put_result rc=0 msg=success [proxy:0:0@node43-038] got pmi command (from 16): barrier_in [proxy:0:0@node43-038] got pmi command (from 21): barrier_in [proxy:0:0@node43-038] got pmi command (from 24): barrier_in [proxy:0:0@node43-038] got pmi command (from 27): barrier_in [proxy:0:0@node43-038] got pmi command (from 57): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] we don't understand the response put_result; forwarding downstream [proxy:0:0@node43-038] got pmi command (from 30): barrier_in [proxy:0:0@node43-038] got pmi command (from 54): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 12): barrier_in [proxy:0:0@node43-038] got pmi command (from 57): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 33): barrier_in [proxy:0:0@node43-038] got pmi command (from 36): barrier_in [proxy:0:0@node43-038] got pmi command (from 39): barrier_in [proxy:0:0@node43-038] got pmi command (from 42): barrier_in [proxy:0:0@node43-038] got pmi command (from 45): barrier_in [proxy:0:0@node43-038] got pmi command (from 48): barrier_in [proxy:0:0@node43-038] got pmi command (from 51): barrier_in [proxy:0:0@node43-038] got pmi command (from 14): get_ranks2hosts [proxy:0:0@node43-038] PMI response: put_ranks2hosts 55 1 10 node43-038 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15, [proxy:0:0@node43-038] got pmi command (from 14): get_appnum [proxy:0:0@node43-038] PMI response: cmd=appnum appnum=0 [proxy:0:0@node43-038] got pmi command (from 14): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 54): barrier_in [proxy:0:0@node43-038] got pmi command (from 14): get_my_kvsname [proxy:0:0@node43-038] PMI response: cmd=my_kvsname kvsname=kvs_43390_0 [proxy:0:0@node43-038] got pmi command (from 57): barrier_in [proxy:0:0@node43-038] got pmi command (from 14): barrier_in [proxy:0:0@node43-038] forwarding command (cmd=barrier_in) upstream [mpiexec@node43-038] [pgid: 0] got PMI command: cmd=barrier_in [mpiexec@node43-038] PMI response to fd 8 pid 14: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: 
cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] PMI response: cmd=barrier_out [proxy:0:0@node43-038] got pmi command (from 14): get kvsname=kvs_43390_0 key=sharedFilename[0] [proxy:0:0@node43-038] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_IJgRoE [proxy:0:0@node43-038] got pmi command (from 36): get kvsname=kvs_43390_0 key=sharedFilename[0] [proxy:0:0@node43-038] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_IJgRoE [proxy:0:0@node43-038] got pmi command (from 54): get kvsname=kvs_43390_0 key=sharedFilename[0] [proxy:0:0@node43-038] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_IJgRoE [proxy:0:0@node43-038] got pmi command (from 16): get kvsname=kvs_43390_0 key=sharedFilename[0] [proxy:0:0@node43-038] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_IJgRoE [proxy:0:0@node43-038] got pmi command (from 21): get kvsname=kvs_43390_0 key=sharedFilename[0] [proxy:0:0@node43-038] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_IJgRoE [proxy:0:0@node43-038] got pmi command (from 24): get kvsname=kvs_43390_0 key=sharedFilename[0] [proxy:0:0@node43-038] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_IJgRoE [proxy:0:0@node43-038] got pmi command (from 27): get kvsname=kvs_43390_0 key=sharedFilename[0] [proxy:0:0@node43-038] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_IJgRoE [proxy:0:0@node43-038] got pmi command (from 30): get kvsname=kvs_43390_0 key=sharedFilename[0] [proxy:0:0@node43-038] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_IJgRoE [proxy:0:0@node43-038] got pmi command (from 33): get kvsname=kvs_43390_0 key=sharedFilename[0] [proxy:0:0@node43-038] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_IJgRoE [proxy:0:0@node43-038] got pmi command (from 39): get kvsname=kvs_43390_0 key=sharedFilename[0] [proxy:0:0@node43-038] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_IJgRoE [proxy:0:0@node43-038] got pmi command (from 42): get kvsname=kvs_43390_0 key=sharedFilename[0] [proxy:0:0@node43-038] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_IJgRoE [proxy:0:0@node43-038] got pmi command (from 45): get kvsname=kvs_43390_0 key=sharedFilename[0] [proxy:0:0@node43-038] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_IJgRoE [proxy:0:0@node43-038] got pmi command (from 48): get kvsname=kvs_43390_0 key=sharedFilename[0] [proxy:0:0@node43-038] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_IJgRoE [proxy:0:0@node43-038] got pmi command (from 51): get kvsname=kvs_43390_0 key=sharedFilename[0] [proxy:0:0@node43-038] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_IJgRoE [proxy:0:0@node43-038] got pmi command (from 57): get kvsname=kvs_43390_0 key=sharedFilename[0] [proxy:0:0@node43-038] PMI response: cmd=get_result rc=0 msg=success value=/dev/shm/Intel_MPI_IJgRoE [proxy:0:0@node43-038] got pmi command (from 12): put kvsname=kvs_43390_0 key=P0-businesscard-0 value=rdma_port0#43395$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [proxy:0:0@node43-038] got pmi command (from 14): put kvsname=kvs_43390_0 key=P1-businesscard-0 value=rdma_port0#43396$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [proxy:0:0@node43-038] got pmi command (from 16): put kvsname=kvs_43390_0 
key=P2-businesscard-0 value=rdma_port0#43397$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [proxy:0:0@node43-038] got pmi command (from 21): put kvsname=kvs_43390_0 key=P3-businesscard-0 value=rdma_port0#43398$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [proxy:0:0@node43-038] got pmi command (from 24): put kvsname=kvs_43390_0 key=P4-businesscard-0 value=rdma_port0#43399$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [proxy:0:0@node43-038] got pmi command (from 27): put kvsname=kvs_43390_0 key=P5-businesscard-0 value=rdma_port0#43400$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [proxy:0:0@node43-038] got pmi command (from 30): put kvsname=kvs_43390_0 key=P6-businesscard-0 value=rdma_port0#43401$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [proxy:0:0@node43-038] got pmi command (from 33): put kvsname=kvs_43390_0 key=P7-businesscard-0 value=rdma_port0#43402$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [proxy:0:0@node43-038] got pmi command (from 36): put kvsname=kvs_43390_0 key=P8-businesscard-0 value=rdma_port0#43403$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [proxy:0:0@node43-038] got pmi command (from 39): put kvsname=kvs_43390_0 key=P9-businesscard-0 value=rdma_port0#43404$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [proxy:0:0@node43-038] got pmi command (from 42): put kvsname=kvs_43390_0 key=P10-businesscard-0 value=rdma_port0#43405$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [proxy:0:0@node43-038] got pmi command (from 45): put kvsname=kvs_43390_0 key=P11-businesscard-0 value=rdma_port0#43406$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [proxy:0:0@node43-038] got pmi command (from 48): put kvsname=kvs_43390_0 key=P12-businesscard-0 value=rdma_port0#43407$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [proxy:0:0@node43-038] got pmi command (from 51): put kvsname=kvs_43390_0 key=P13-businesscard-0 value=rdma_port0#43408$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [proxy:0:0@node43-038] got pmi command (from 54): put kvsname=kvs_43390_0 key=P14-businesscard-0 value=rdma_port0#43409$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [proxy:0:0@node43-038] got pmi command (from 57): put kvsname=kvs_43390_0 key=P15-businesscard-0 value=rdma_port0#43410$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [mpiexec@node43-038] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_43390_0 key=P0-businesscard-0 value=rdma_port0#43395$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [mpiexec@node43-038] reply: cmd=put_result rc=0 msg=success [mpiexec@node43-038] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_43390_0 key=P1-businesscard-0 value=rdma_port0#43396$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [mpiexec@node43-038] reply: cmd=put_result rc=0 msg=success [mpiexec@node43-038] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_43390_0 key=P2-businesscard-0 
value=rdma_port0#43397$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [mpiexec@node43-038] reply: cmd=put_result rc=0 msg=success [mpiexec@node43-038] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_43390_0 key=P3-businesscard-0 value=rdma_port0#43398$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [mpiexec@node43-038] reply: cmd=put_result rc=0 msg=success [mpiexec@node43-038] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_43390_0 key=P4-businesscard-0 value=rdma_port0#43399$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [mpiexec@node43-038] reply: cmd=put_result rc=0 msg=success [mpiexec@node43-038] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_43390_0 key=P5-businesscard-0 value=rdma_port0#43400$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [mpiexec@node43-038] reply: cmd=put_result rc=0 msg=success [mpiexec@node43-038] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_43390_0 key=P6-businesscard-0 value=rdma_port0#43401$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [mpiexec@node43-038] reply: cmd=put_result rc=0 msg=success [mpiexec@node43-038] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_43390_0 key=P7-businesscard-0 value=rdma_port0#43402$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [mpiexec@node43-038] reply: cmd=put_result rc=0 msg=success [mpiexec@node43-038] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_43390_0 key=P8-businesscard-0 value=rdma_port0#43403$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [mpiexec@node43-038] reply: cmd=put_result rc=0 msg=success [mpiexec@node43-038] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_43390_0 key=P9-businesscard-0 value=rdma_port0#43404$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [mpiexec@node43-038] reply: cmd=put_result rc=0 msg=success [mpiexec@node43-038] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_43390_0 key=P10-businesscard-0 value=rdma_port0#43405$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [mpiexec@node43-038] reply: cmd=put_result rc=0 msg=success [mpiexec@node43-038] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_43390_0 key=P11-businesscard-0 value=rdma_port0#43406$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [mpiexec@node43-038] reply: cmd=put_result rc=0 msg=success [mpiexec@node43-038] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_43390_0 key=P12-businesscard-0 value=rdma_port0#43407$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [mpiexec@node43-038] reply: cmd=put_result rc=0 msg=success [mpiexec@node43-038] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_43390_0 key=P13-businesscard-0 value=rdma_port0#43408$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$ [mpiexec@node43-038] reply: cmd=put_result rc=0 msg=success [mpiexec@node43-038] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_43390_0 key=P14-businesscard-0 
value=rdma_port0#43409$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$
[mpiexec@node43-038] reply: cmd=put_result rc=0 msg=success
[mpiexec@node43-038] [pgid: 0] got aggregated PMI command (part of it): cmd=put kvsname=kvs_43390_0 key=P15-businesscard-0 value=rdma_port0#43410$rdma_host0#020000000A8B01620000000000000000$arch_code#6$fabrics_list#shm_and_dapl$
[mpiexec@node43-038] reply: cmd=put_result rc=0 msg=success
[proxy:0:0@node43-038] got pmi command (from 14): barrier_in
[proxy:0:0@node43-038] got pmi command (from 36): barrier_in
[proxy:0:0@node43-038] got pmi command (from 42): barrier_in
[proxy:0:0@node43-038] got pmi command (from 16): barrier_in
[proxy:0:0@node43-038] got pmi command (from 21): barrier_in
[proxy:0:0@node43-038] got pmi command (from 24): barrier_in
[proxy:0:0@node43-038] got pmi command (from 27): barrier_in
[proxy:0:0@node43-038] got pmi command (from 30): barrier_in
[proxy:0:0@node43-038] got pmi command (from 33): barrier_in
[proxy:0:0@node43-038] got pmi command (from 39): barrier_in
[proxy:0:0@node43-038] got pmi command (from 45): barrier_in
[proxy:0:0@node43-038] got pmi command (from 48): barrier_in
[proxy:0:0@node43-038] got pmi command (from 51): barrier_in
[proxy:0:0@node43-038] got pmi command (from 54): barrier_in
[proxy:0:0@node43-038] got pmi command (from 57): barrier_in
[proxy:0:0@node43-038] got pmi command (from 12): barrier_in
[proxy:0:0@node43-038] forwarding command (cmd=barrier_in) upstream
[mpiexec@node43-038] [pgid: 0] got PMI command: cmd=barrier_in
[mpiexec@node43-038] PMI response to fd 8 pid 12: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out

#------------------------------------------------------------
#    Intel (R) MPI Benchmarks 4.1 Update 1, MPI-1 part
#------------------------------------------------------------
# Date                  : Thu May 12 09:11:58 2016
# Machine               : x86_64
# System                : Linux
# Release               : 2.6.32-220.23.1.el6.x86_64
# Version               : #1 SMP Mon Jun 18 09:58:09 CDT 2012
# MPI Version           : 3.0
# MPI Thread Environment:

# New default behavior from Version 3.2 on:
# the number of iterations per message size is cut down
# dynamically when a certain run time (per message size sample)
# is expected to be exceeded. Time limit is defined by variable
# "SECS_PER_SAMPLE" (=> IMB_settings.h)
# or through the flag => -time

# Calling sequence was:

# IMB-MPI1

# Minimum message length in bytes:   0
# Maximum message length in bytes:   4194304
#
# MPI_Datatype                   :   MPI_BYTE
# MPI_Datatype for reductions    :   MPI_FLOAT
# MPI_Op                         :   MPI_SUM
#
#

# List of Benchmarks to run:

# PingPong
# PingPing
# Sendrecv
# Exchange
# Allreduce
# Reduce
# Reduce_scatter
# Allgather
# Allgatherv
# Gather
# Gatherv
# Scatter
# Scatterv
# Alltoall
# Alltoallv
# Bcast
# Barrier

#---------------------------------------------------
# Benchmarking PingPong
# #processes = 2
# ( 14 additional processes waiting in MPI_Barrier)
#---------------------------------------------------
       #bytes #repetitions      t[usec]   Mbytes/sec
            0         1000         0.26         0.00
            1         1000         0.28         3.38
            2         1000         0.29         6.55
            4         1000         0.29        13.20
            8         1000         0.29        26.40
           16         1000         0.26        58.69
           32         1000         0.31        99.57
           64         1000         0.31       196.85
          128         1000         0.34       355.31
          256         1000         0.38       650.16
          512         1000         0.44      1109.73
         1024         1000         0.52      1881.49
         2048         1000         0.66      2957.40
         4096         1000         1.04      3772.07
         8192         1000         1.73      4510.70
        16384         1000         3.04      5139.88
        32768         1000         5.24      5967.72
        65536          640         8.64      7235.93
       131072          320        15.38      8125.94
       262144          160        29.04      8607.67
       524288           80        53.07      9420.64
      1048576           40       100.82      9918.25
      2097152           20       196.75     10165.24
      4194304           10       387.14     10332.07

#---------------------------------------------------
# Benchmarking PingPing
# #processes = 2
# ( 14 additional processes waiting in MPI_Barrier)
#---------------------------------------------------
       #bytes #repetitions      t[usec]   Mbytes/sec
            0         1000         0.40         0.00
            1         1000         0.40         2.36
            2         1000         0.41         4.60
            4         1000         0.42         9.13
            8         1000         0.43        17.87
           16         1000         0.42        36.16
           32         1000         0.43        70.33
           64         1000         0.42       144.23
          128         1000         0.44       275.57
          256         1000         0.48       506.43
          512         1000         0.54       900.62
         1024         1000         0.64      1533.51
         2048         1000         0.90      2167.77
         4096         1000         1.15      3385.12
         8192         1000         1.81      4325.81
        16384         1000         3.19      4903.92
        32768         1000         6.15      5081.29
        65536          640        16.98      3680.18
       131072          320        31.04      4026.60
       262144          160        57.95      4314.02
       524288           80       106.00      4716.94
      1048576           40       201.20      4970.29
      2097152           20       392.15      5100.08
      4194304           10       773.69      5170.02

#-----------------------------------------------------------------------------
# Benchmarking Sendrecv
# #processes = 2
# ( 14 additional processes waiting in MPI_Barrier)
#-----------------------------------------------------------------------------
       #bytes #repetitions  t_min[usec]  t_max[usec]  t_avg[usec]   Mbytes/sec
            0         1000         0.37         0.37         0.37         0.00
            1         1000         0.38         0.38         0.38         4.96
            2         1000         0.38         0.38         0.38         9.96
            4         1000         0.38         0.38         0.38        20.13
            8         1000         0.38         0.38         0.38        40.05
           16         1000         0.39         0.39         0.39        78.62
           32         1000         0.40         0.40         0.40       153.02
           64         1000         0.40         0.40         0.40       303.68
          128         1000         0.42         0.42         0.42       581.49
          256         1000         0.45         0.45         0.45      1087.63
          512         1000         0.52         0.52         0.52      1884.95
         1024         1000         0.60         0.60         0.60      3234.11
         2048         1000         0.76         0.76         0.76      5160.31
         4096         1000         1.11         1.11         1.11      7031.76
         8192         1000         1.79         1.79         1.79      8743.96
        16384         1000         3.20         3.20         3.20      9753.83
        32768         1000         5.80         5.80         5.80     10768.32
        65536          640        16.63        16.63        16.63      7516.00
       131072          320        30.12        30.13        30.13      8297.13
       262144          160        57.06        57.08        57.07      8759.35
       524288           80       105.15       105.21       105.18      9504.43
      1048576           40       200.50       200.58       200.54      9971.30
      2097152           20       390.76       390.90       390.83     10232.82
      4194304           10       773.22       773.60       773.41     10341.30

#-----------------------------------------------------------------------------
# Benchmarking Sendrecv
# #processes = 4
# ( 12 additional processes waiting in MPI_Barrier)
#-----------------------------------------------------------------------------
       #bytes #repetitions  t_min[usec]  t_max[usec]  t_avg[usec]   Mbytes/sec
            0         1000         0.51         0.51         0.51         0.00
            1         1000         0.40         0.40         0.40         4.77
            2         1000         0.40         0.40         0.40         9.44
            4         1000         0.39         0.39         0.39        19.66
            8         1000         0.39         0.39         0.39        39.34
           16         1000         0.39         0.39         0.39        78.82
           32         1000         0.40         0.40         0.40       152.20
           64         1000         0.43         0.43         0.43       283.19
          128         1000         0.44         0.44         0.44       551.13
          256         1000         0.49         0.50         0.49       984.62
          512         1000         0.61         0.61         0.61      1598.13
         1024         1000         0.81         0.81         0.81      2408.00
         2048         1000         0.85         0.85         0.85      4600.95
         4096         1000         1.30         1.30         1.30      5994.88
         8192         1000         1.82         1.82         1.82      8580.26
        16384         1000         3.57         3.57         3.57      8765.60
        32768         1000         5.76         5.76         5.76     10856.62
        65536          640        16.45        16.45        16.45      7596.66
       131072          320        30.46        30.47        30.47      8203.62
       262144          160        57.24        57.38        57.28      8713.63
       524288           80       107.43       107.68       107.50      9287.14
      1048576           40       204.45       204.92       204.59      9759.87
      2097152           20       404.44       405.49       404.89      9864.60
      4194304           10      1194.10      1202.30      1198.17      6653.93

#-----------------------------------------------------------------------------
# Benchmarking Sendrecv
# #processes = 8
# ( 8 additional processes waiting in MPI_Barrier)
#-----------------------------------------------------------------------------
       #bytes #repetitions  t_min[usec]  t_max[usec]  t_avg[usec]   Mbytes/sec
            0         1000         0.43         0.43         0.43         0.00
            1         1000         0.56         0.56         0.56         3.38
            2         1000         0.46         0.46         0.46         8.26
            4         1000         0.47         0.47         0.47        16.30
            8         1000         0.47         0.47         0.47        32.60
           16         1000         0.44         0.44         0.44        69.68
           32         1000         0.46         0.46         0.46       133.26
           64         1000         0.48         0.49         0.49       249.15
          128         1000         0.51         0.51         0.51       481.66
          256         1000         0.53         0.53         0.53       917.97
          512         1000         0.60         0.60         0.60      1633.17
         1024         1000         1.05         1.06         1.05      1849.63
         2048         1000         0.83         0.83         0.83      4683.82
         4096         1000         1.17         1.17         1.17      6660.16
         8192         1000         1.96         1.96         1.96      7963.06
        16384         1000         3.28         3.28         3.28      9513.14
        32768         1000         6.98         6.99         6.99      8940.18
        65536          640        15.75        15.77        15.76      7926.31
       131072          320        31.59        31.67        31.63      7894.23
       262144          160        58.56        58.68        58.62      8520.68
       524288           80       110.46       111.04       110.82      9005.97
      1048576           40       225.60       226.42       226.04      8833.14
      2097152           20       986.80      1002.60       997.46      3989.64
      4194304           10      2465.70      2545.60      2506.02      3142.68

#-----------------------------------------------------------------------------
# Benchmarking Sendrecv
# #processes = 16
#-----------------------------------------------------------------------------
       #bytes #repetitions  t_min[usec]  t_max[usec]  t_avg[usec]   Mbytes/sec
            0         1000         0.81         0.82         0.81         0.00
            1         1000         0.72         0.72         0.72         2.63
            2         1000         0.77         0.78         0.77         4.91
            4         1000         0.80         0.81         0.81         9.37
            8         1000         0.72         0.73         0.73        20.93
           16         1000         0.71         0.71         0.71        42.74
           32         1000         0.73         0.74         0.74        82.79
           64         1000         0.75         0.76         0.76       161.26
          128         1000         0.72         0.72         0.72       337.73
          256         1000         0.76         0.76         0.76       639.20
          512         1000         0.90         0.91         0.90      1077.89
         1024         1000         1.04         1.05         1.04      1865.21
         2048         1000         1.27         1.28         1.28      3054.44
         4096         1000         1.77         1.77         1.77      4409.04
         8192         1000         2.87         2.88         2.88      5423.82
        16384         1000         4.81         4.83         4.82      6476.53
        32768         1000         9.02         9.06         9.04      6898.53
        65536          640        20.52        20.63        20.57      6059.71
       131072          320        39.35        39.59        39.48      6314.58
       262144          160        71.44        72.31        71.88      6914.45
       524288           80       132.34       135.55       134.00      7377.36
      1048576           40       281.05       295.67       289.17      6764.19
      2097152           20      1116.60      1162.79      1138.45      3440.00
      4194304           10      2427.70      2523.90      2474.16      3169.70

#-----------------------------------------------------------------------------
# Benchmarking Exchange
# #processes = 2
# ( 14 additional processes waiting in MPI_Barrier)
#-----------------------------------------------------------------------------
       #bytes #repetitions  t_min[usec]  t_max[usec]  t_avg[usec]   Mbytes/sec
            0         1000         0.84         0.84         0.84         0.00
            1         1000         0.90         0.90         0.90         4.23
            2         1000         0.91         0.91         0.91         8.37
            4         1000         0.90         0.90         0.90        17.03
            8         1000         0.91         0.91         0.91        33.36
           16         1000         0.91         0.91         0.91        66.77
           32         1000         0.93         0.93         0.93       131.11
           64         1000         0.97         0.97         0.97       251.47
          128         1000         1.01         1.01         1.01       484.39
          256         1000         1.06         1.06         1.06       924.60
          512         1000         1.20         1.20         1.20      1627.66
         1024         1000         1.37         1.37         1.37      2846.92
         2048         1000         1.70         1.70         1.70      4600.95
         4096         1000         2.32         2.32         2.32      6732.00
         8192         1000         3.51         3.51         3.51      8895.28
        16384         1000         6.02         6.02         6.02     10377.01
        32768         1000        10.97        10.97        10.97     11392.61
        65536          640        35.15        35.15        35.15      7112.68
       131072          320        62.50        62.51        62.51      7998.86
       262144          160       114.94       114.96       114.95      8698.94
       524288           80       210.81       210.85       210.83      9485.49
      1048576           40       401.85       401.95       401.90      9951.49
      2097152           20       782.69       782.90       782.79     10218.48
      4194304           10      1851.61      1851.89      1851.75      8639.81

#-----------------------------------------------------------------------------
# Benchmarking Exchange
# #processes = 4
# ( 12 additional processes waiting in MPI_Barrier)
#-----------------------------------------------------------------------------
       #bytes #repetitions  t_min[usec]  t_max[usec]  t_avg[usec]   Mbytes/sec
            0         1000         0.87         0.87         0.87         0.00
            1         1000         0.82         0.82         0.82         4.63
            2         1000         0.81         0.81         0.81         9.40
            4         1000         0.82         0.82         0.82        18.65
            8         1000         0.81         0.81         0.81        37.90
           16         1000         0.82         0.82         0.82        74.53
           32         1000         0.84         0.84         0.84       146.16
           64         1000         0.91         0.91         0.91       267.08
          128         1000         0.91         0.91         0.91       535.42
          256         1000         1.00         1.00         1.00       974.54
          512         1000         1.16         1.16         1.16      1686.64
         1024         1000         1.40         1.40         1.40      2794.00
         2048         1000         1.72         1.72         1.72      4544.80
         4096         1000         2.21         2.21         2.21      7083.44
         8192         1000         3.36         3.36         3.36      9289.30
        16384         1000         6.18         6.18         6.18     10106.56
        32768         1000        11.06        11.06        11.06     11299.07
        65536          640        33.21        33.22        33.21      7526.28
       131072          320        61.97        61.99        61.98      8066.16
       262144          160       113.07       113.20       113.11      8833.95
       524288           80       214.01       214.19       214.08      9337.66
      1048576           40       408.55       408.67       408.62      9787.77
      2097152           20       991.89       992.85       992.36      8057.64
      4194304           10      2658.89      2664.30      2661.53      6005.32

#-----------------------------------------------------------------------------
# Benchmarking Exchange
# #processes = 8
# ( 8 additional processes waiting in MPI_Barrier)
#-----------------------------------------------------------------------------
       #bytes #repetitions  t_min[usec]  t_max[usec]  t_avg[usec]   Mbytes/sec
            0         1000         0.86         0.87         0.86         0.00
            1         1000         0.87         0.88         0.88         4.35
            2         1000         0.88         0.88         0.88         8.69
            4         1000         0.88         0.88         0.88        17.30
            8         1000         0.88         0.88         0.88        34.72
           16         1000         0.88         0.88         0.88        69.51
           32         1000         0.91         0.92         0.91       133.40
           64         1000         0.92         0.92         0.92       266.25
          128         1000         0.96         0.96         0.96       508.57
          256         1000         1.02         1.02         1.02       953.67
          512         1000         1.20         1.20         1.20      1620.89
         1024         1000         1.41         1.41         1.41      2764.30
         2048         1000         1.86         1.86         1.86      4204.80
         4096         1000         2.58         2.58         2.58      6046.87
         8192         1000         3.74         3.74         3.74      8346.41
        16384         1000         6.30         6.31         6.30      9911.30
        32768         1000        11.43        11.44        11.43     10929.50
        65536          640        34.03        34.05        34.04      7342.49
       131072          320        63.61        63.64        63.63      7856.25
       262144          160       118.54       118.76       118.64      8420.18
       524288           80       243.69       244.76       244.24      8171.15
      1048576           40       616.72       617.05       616.86      6482.45
      2097152           20      2125.31      2159.70      2137.04      3704.21
      4194304           10      4750.11      4842.00      4816.39      3304.42

#-----------------------------------------------------------------------------
# Benchmarking Exchange
# #processes = 16
#-----------------------------------------------------------------------------
       #bytes #repetitions  t_min[usec]  t_max[usec]  t_avg[usec]   Mbytes/sec
            0         1000         1.31         1.32         1.32         0.00
            1         1000         1.36         1.37         1.37         2.79
            2         1000         1.37         1.38         1.37         5.55
            4         1000         1.36         1.37         1.37        11.14
            8         1000         1.37         1.38         1.38        22.10
           16         1000         1.38         1.39         1.39        43.88
           32         1000         1.39         1.40         1.40        87.13
           64         1000         1.46         1.47         1.46       166.42
          128         1000         1.50         1.50         1.50       325.34
          256         1000         1.53         1.53         1.53       637.01
          512         1000         1.85         1.86         1.86      1051.20
         1024         1000         2.15         2.15         2.15      1813.39
         2048         1000         2.66         2.67         2.66      2928.33
         4096         1000         3.62         3.63         3.62      4304.50
         8192         1000         5.63         5.64         5.63      5539.81
        16384         1000         9.93         9.96         9.94      6273.79
        32768         1000        16.66        16.72        16.69      7475.62
        65536          640        39.04        39.13        39.08      6389.56
       131072          320        76.25        76.61        76.39      6526.64
       262144          160       129.88       131.01       130.31      7633.21
       524288           80       310.58       315.87       312.96      6331.62
      1048576           40       815.67       866.57       843.28      4615.88
      2097152           20      2299.11      2906.66      2702.04      2752.30
      4194304           10      4892.11      5345.32      5139.06      2993.27

#----------------------------------------------------------------
# Benchmarking Allreduce
# #processes = 2
# ( 14 additional processes waiting in MPI_Barrier)
#----------------------------------------------------------------
       #bytes #repetitions  t_min[usec]  t_max[usec]  t_avg[usec]
            0         1000         0.08         0.08         0.08
            4         1000         0.67         0.68         0.68
            8         1000         0.69         0.70         0.69
           16         1000         0.67         0.68         0.68
           32         1000         0.75         0.75         0.75
           64         1000         0.76         0.76         0.76
          128         1000         0.75         0.77         0.76
          256         1000         0.82         0.82         0.82
          512         1000         0.98         0.99         0.99
         1024         1000         1.13         1.13         1.13
         2048         1000         1.33         1.34         1.33
         4096         1000         2.38         2.38         2.38
         8192         1000         3.41         3.42         3.42
        16384         1000         5.51         5.53         5.52
        32768         1000         9.15         9.16         9.16
        65536          640        17.09        17.10        17.09
       131072          320        49.13        49.15        49.14
       262144          160        92.51        92.51        92.51
       524288           80       175.31       175.33       175.32
      1048576           40       329.25       329.36       329.30
      2097152           20       635.10       635.25       635.18
      4194304           10      1466.51      1466.77      1466.64

#----------------------------------------------------------------
# Benchmarking Allreduce
# #processes = 4
# ( 12 additional processes waiting in MPI_Barrier)
#----------------------------------------------------------------
       #bytes #repetitions  t_min[usec]  t_max[usec]  t_avg[usec]
            0         1000         0.08         0.08         0.08
            4         1000         1.09         1.10         1.10
            8         1000         1.13         1.16         1.14
           16         1000         1.09         1.10         1.10
           32         1000         1.21         1.24         1.23
           64         1000         1.18         1.20         1.19
          128         1000         1.24         1.25         1.25
          256         1000         1.31         1.32         1.31
          512         1000         1.59         1.60         1.59
         1024         1000         1.81         1.83         1.82
         2048         1000         2.20         2.21         2.21
         4096         1000         3.72         3.73         3.73
         8192         1000         5.12         5.12         5.12
        16384         1000         8.01         8.02         8.01
        32768         1000        13.27        13.28        13.27
        65536          640        24.39        24.41        24.40
       131072          320        65.17        65.20        65.19
       262144          160       135.38       135.45       135.43
       524288           80       253.74       253.82       253.78
      1048576           40       477.49       477.61       477.55
      2097152           20       995.67       995.84       995.72
      4194304           10      3160.52      3164.34      3162.34

#----------------------------------------------------------------
# Benchmarking Allreduce
# #processes = 8
# ( 8 additional processes waiting in MPI_Barrier)
#----------------------------------------------------------------
       #bytes #repetitions  t_min[usec]  t_max[usec]  t_avg[usec]
            0         1000         0.08         0.09         0.08
            4         1000         1.53         1.54         1.54
            8         1000         1.53         1.55         1.54
           16         1000         1.57         1.58         1.58
           32         1000         1.64         1.65         1.65
           64         1000         1.64         1.66         1.65
          128         1000         1.73         1.75         1.74
          256         1000         1.84         1.85         1.85
          512         1000         2.16         2.17         2.17
         1024         1000         2.48         2.49         2.49
         2048         1000         3.15         3.17         3.16
         4096         1000         4.97         4.98         4.98
         8192         1000         6.62         6.63         6.62
        16384         1000        10.11        10.12        10.12
        32768         1000        16.22        16.24        16.23
        65536          640        29.33        29.37        29.35
       131072          320        74.48        74.53        74.51
       262144          160       154.42       154.53       154.48
       524288           80       301.71       301.92       301.82
      1048576           40       777.14       777.32       777.22
      2097152           20      2449.86      2454.51      2452.11
      4194304           10      6058.12      6069.64      6063.36

#----------------------------------------------------------------
# Benchmarking Allreduce
# #processes = 16
#----------------------------------------------------------------
       #bytes #repetitions  t_min[usec]  t_max[usec]  t_avg[usec]
            0         1000         0.07         0.09         0.08
            4         1000         2.55         2.58         2.57
            8         1000         2.55         2.57         2.57
           16         1000         2.54         2.57         2.55
           32         1000         2.63         2.66         2.64
           64         1000         2.80         2.82         2.82
          128         1000         2.90         2.93         2.91
          256         1000         3.08         3.10         3.09
          512         1000         3.68         3.70         3.69
         1024         1000         4.44         4.46         4.45
         2048         1000         5.82         5.85         5.84
         4096         1000         7.91         7.94         7.93
         8192         1000         9.74         9.76         9.75
        16384         1000        14.28        14.32        14.30
        32768         1000        21.78        21.80        21.78
        65536          640        37.90        37.93        37.92
       131072          320        90.26        90.36        90.31
       262144          160       177.52       177.64       177.57
       524288           80       349.79       350.02       349.96
      1048576           40       904.58       905.13       904.85
2097152 20 2565.59 2569.25 2567.19 4194304 10 6504.30 6537.03 6522.00 #---------------------------------------------------------------- # Benchmarking Reduce # #processes = 2 # ( 14 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.08 0.09 0.09 4 1000 0.61 0.61 0.61 8 1000 0.61 0.62 0.61 16 1000 0.65 0.65 0.65 32 1000 0.61 0.61 0.61 64 1000 0.61 0.61 0.61 128 1000 0.66 0.68 0.67 256 1000 0.69 0.70 0.69 512 1000 0.80 0.80 0.80 1024 1000 0.90 0.91 0.91 2048 1000 1.10 1.10 1.10 4096 1000 1.53 1.53 1.53 8192 1000 2.34 2.34 2.34 16384 1000 4.29 4.29 4.29 32768 1000 6.62 6.62 6.62 65536 640 11.30 11.31 11.30 131072 320 23.24 23.28 23.26 262144 160 45.82 45.90 45.86 524288 80 90.01 90.46 90.23 1048576 40 174.93 176.70 175.81 2097152 20 342.31 349.26 345.78 4194304 10 761.39 795.08 778.23 #---------------------------------------------------------------- # Benchmarking Reduce # #processes = 4 # ( 12 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.08 0.09 0.08 4 1000 1.14 1.16 1.15 8 1000 1.20 1.21 1.21 16 1000 1.15 1.17 1.16 32 1000 1.15 1.16 1.16 64 1000 1.18 1.19 1.19 128 1000 1.29 1.30 1.30 256 1000 1.32 1.34 1.33 512 1000 1.47 1.48 1.47 1024 1000 1.71 1.72 1.72 2048 1000 2.11 2.12 2.12 4096 1000 3.05 3.06 3.05 8192 1000 4.88 4.88 4.88 16384 1000 8.18 8.19 8.18 32768 1000 11.95 11.97 11.96 65536 640 19.81 19.85 19.83 131072 320 36.43 36.52 36.48 262144 160 67.96 68.23 68.15 524288 80 130.49 131.55 131.26 1048576 40 252.88 256.60 255.61 2097152 20 503.75 515.45 512.36 4194304 10 1136.42 1186.70 1173.99 #---------------------------------------------------------------- # Benchmarking Reduce # #processes = 8 # ( 8 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.08 0.09 0.09 4 1000 1.11 1.13 1.12 8 1000 1.15 1.16 1.16 16 1000 1.12 1.14 1.13 32 1000 1.15 1.16 1.15 64 1000 1.15 1.17 1.16 128 1000 1.29 1.30 1.30 256 1000 1.37 1.40 1.39 512 1000 1.48 1.50 1.49 1024 1000 1.66 1.68 1.67 2048 1000 2.23 2.25 2.24 4096 1000 3.21 3.23 3.22 8192 1000 4.96 4.97 4.96 16384 1000 8.48 8.50 8.49 32768 1000 12.20 12.25 12.23 65536 640 19.87 19.94 19.91 131072 320 36.67 36.81 36.75 262144 160 68.76 69.20 69.05 524288 80 132.52 133.55 133.31 1048576 40 265.20 268.00 267.44 2097152 20 634.01 648.02 645.78 4194304 10 1330.52 1386.88 1378.79 #---------------------------------------------------------------- # Benchmarking Reduce # #processes = 16 #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.07 0.09 0.08 4 1000 1.38 1.41 1.40 8 1000 1.39 1.41 1.40 16 1000 1.41 1.44 1.43 32 1000 1.40 1.43 1.42 64 1000 1.46 1.49 1.48 128 1000 1.56 1.59 1.57 256 1000 1.67 1.69 1.67 512 1000 1.96 1.98 1.97 1024 1000 2.24 2.27 2.25 2048 1000 2.71 2.74 2.73 4096 1000 3.72 3.76 3.74 8192 1000 5.87 5.92 5.90 16384 1000 10.57 10.63 10.60 32768 1000 16.33 16.40 16.36 65536 640 27.56 27.69 27.64 131072 320 51.55 51.86 51.74 262144 160 98.16 99.07 98.73 524288 80 189.60 191.84 191.05 1048576 40 375.88 383.32 381.04 2097152 20 830.41 897.65 887.18 4194304 10 1748.80 1930.57 1892.86 #---------------------------------------------------------------- # Benchmarking Reduce_scatter # #processes = 2 # 
( 14 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.17 0.19 0.18 4 1000 0.88 1.02 0.95 8 1000 1.23 1.23 1.23 16 1000 1.05 1.05 1.05 32 1000 1.06 1.06 1.06 64 1000 1.07 1.07 1.07 128 1000 1.29 1.33 1.31 256 1000 1.35 1.35 1.35 512 1000 1.27 1.27 1.27 1024 1000 1.32 1.32 1.32 2048 1000 1.49 1.49 1.49 4096 1000 1.79 1.80 1.80 8192 1000 2.58 2.60 2.59 16384 1000 4.02 4.02 4.02 32768 1000 6.92 6.92 6.92 65536 640 12.86 12.86 12.86 131072 320 36.58 36.59 36.59 262144 160 51.18 51.18 51.18 524288 80 97.04 97.05 97.05 1048576 40 185.61 185.63 185.62 2097152 20 362.16 362.21 362.18 4194304 10 715.92 715.92 715.92 #---------------------------------------------------------------- # Benchmarking Reduce_scatter # #processes = 4 # ( 12 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.17 0.19 0.18 4 1000 0.79 1.04 0.91 8 1000 1.34 1.56 1.45 16 1000 1.54 1.55 1.55 32 1000 1.50 1.51 1.50 64 1000 1.59 1.60 1.60 128 1000 1.78 1.79 1.79 256 1000 1.78 1.80 1.79 512 1000 1.72 1.74 1.73 1024 1000 1.93 1.94 1.93 2048 1000 2.15 2.18 2.17 4096 1000 2.51 2.52 2.51 8192 1000 3.45 3.46 3.45 16384 1000 5.37 5.39 5.38 32768 1000 9.06 9.08 9.07 65536 640 17.10 17.12 17.11 131072 320 46.29 46.32 46.31 262144 160 76.96 77.00 76.97 524288 80 144.55 144.65 144.60 1048576 40 270.67 270.75 270.71 2097152 20 501.81 501.99 501.91 4194304 10 1182.89 1183.46 1183.10 #---------------------------------------------------------------- # Benchmarking Reduce_scatter # #processes = 8 # ( 8 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.18 0.19 0.18 4 1000 0.80 1.26 0.98 8 1000 1.86 2.50 2.14 16 1000 1.80 2.33 2.07 32 1000 1.97 1.98 1.98 64 1000 3.21 3.23 3.22 128 1000 2.38 2.42 2.41 256 1000 2.27 2.31 2.29 512 1000 2.32 2.34 2.33 1024 1000 2.50 2.53 2.51 2048 1000 2.79 2.80 2.79 4096 1000 3.25 3.27 3.26 8192 1000 4.25 4.27 4.26 16384 1000 6.13 6.16 6.14 32768 1000 10.28 10.29 10.28 65536 640 18.91 18.93 18.92 131072 320 49.74 49.79 49.77 262144 160 58.23 58.36 58.31 524288 80 174.37 174.50 174.44 1048576 40 334.91 335.35 335.09 2097152 20 870.66 871.78 871.14 4194304 10 1849.70 1851.08 1850.38 #---------------------------------------------------------------- # Benchmarking Reduce_scatter # #processes = 16 #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.19 0.20 0.19 4 1000 0.82 1.29 1.05 8 1000 3.29 5.54 4.30 16 1000 2.61 3.51 3.03 32 1000 2.66 7.41 5.17 64 1000 3.08 3.10 3.09 128 1000 3.31 3.33 3.32 256 1000 3.43 3.46 3.45 512 1000 3.59 3.62 3.60 1024 1000 3.94 3.96 3.95 2048 1000 4.21 4.24 4.22 4096 1000 4.78 4.81 4.80 8192 1000 6.42 6.44 6.43 16384 1000 8.31 8.34 8.32 32768 1000 12.97 12.99 12.98 65536 640 23.18 23.21 23.19 131072 320 59.53 59.60 59.58 262144 160 124.79 124.91 124.86 524288 80 238.16 238.34 238.23 1048576 40 489.70 490.19 489.93 2097152 20 1211.30 1212.39 1211.89 4194304 10 2771.16 2775.03 2773.10 #---------------------------------------------------------------- # Benchmarking Allgather # #processes = 2 # ( 14 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] 
t_max[usec] t_avg[usec] 0 1000 0.08 0.09 0.09 1 1000 0.52 0.52 0.52 2 1000 0.51 0.52 0.52 4 1000 0.50 0.51 0.51 8 1000 0.51 0.52 0.52 16 1000 0.52 0.52 0.52 32 1000 0.53 0.53 0.53 64 1000 0.57 0.57 0.57 128 1000 0.54 0.54 0.54 256 1000 0.58 0.58 0.58 512 1000 0.64 0.64 0.64 1024 1000 0.76 0.76 0.76 2048 1000 0.92 0.93 0.92 4096 1000 1.34 1.34 1.34 8192 1000 2.25 2.27 2.26 16384 1000 3.88 3.88 3.88 32768 1000 7.16 7.16 7.16 65536 640 21.49 21.50 21.49 131072 320 39.89 39.90 39.89 262144 160 77.28 77.35 77.32 524288 80 143.71 143.72 143.71 1048576 40 275.24 275.28 275.26 2097152 20 627.60 627.70 627.65 4194304 10 1876.14 1879.67 1877.90 #---------------------------------------------------------------- # Benchmarking Allgather # #processes = 4 # ( 12 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.08 0.09 0.09 1 1000 0.97 0.99 0.98 2 1000 1.01 1.02 1.02 4 1000 0.99 0.99 0.99 8 1000 0.95 0.97 0.96 16 1000 0.97 0.99 0.98 32 1000 0.96 0.97 0.97 64 1000 1.02 1.02 1.02 128 1000 1.40 1.41 1.41 256 1000 1.54 1.54 1.54 512 1000 1.84 1.86 1.85 1024 1000 2.10 2.11 2.11 2048 1000 2.62 2.64 2.63 4096 1000 3.75 3.78 3.77 8192 1000 5.85 5.86 5.86 16384 1000 10.02 10.03 10.02 32768 1000 19.02 19.03 19.03 65536 640 59.62 59.65 59.64 131072 320 104.76 104.78 104.77 262144 160 194.21 194.34 194.27 524288 80 360.97 361.20 361.03 1048576 40 914.37 915.25 914.83 2097152 20 3043.71 3047.01 3045.25 4194304 10 6024.74 6034.40 6029.70 #---------------------------------------------------------------- # Benchmarking Allgather # #processes = 8 # ( 8 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.08 0.09 0.09 1 1000 1.40 1.41 1.41 2 1000 1.44 1.45 1.45 4 1000 1.41 1.43 1.42 8 1000 1.43 1.45 1.44 16 1000 1.42 1.46 1.45 32 1000 1.43 1.45 1.44 64 1000 1.54 1.55 1.54 128 1000 3.40 3.41 3.40 256 1000 3.78 3.81 3.80 512 1000 4.16 4.18 4.17 1024 1000 4.81 4.83 4.82 2048 1000 6.00 6.02 6.01 4096 1000 8.42 8.43 8.42 8192 1000 13.18 13.20 13.19 16384 1000 23.18 23.20 23.19 32768 1000 53.56 53.58 53.57 65536 640 127.46 127.54 127.51 131072 320 232.15 232.21 232.18 262144 160 554.60 554.97 554.80 524288 80 1734.87 1737.20 1736.11 1048576 40 4063.43 4068.95 4066.49 2097152 20 9468.11 9474.27 9471.21 4194304 10 19671.85 19686.20 19679.03 #---------------------------------------------------------------- # Benchmarking Allgather # #processes = 16 #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.08 0.09 0.08 1 1000 2.36 2.38 2.37 2 1000 2.36 2.39 2.37 4 1000 2.42 2.43 2.42 8 1000 2.54 2.56 2.55 16 1000 2.60 2.62 2.61 32 1000 2.70 2.72 2.71 64 1000 3.04 3.06 3.05 128 1000 10.93 10.97 10.95 256 1000 11.13 11.16 11.14 512 1000 13.18 13.21 13.19 1024 1000 15.56 15.59 15.58 2048 1000 19.26 19.28 19.27 4096 1000 27.10 27.12 27.11 8192 1000 43.65 43.67 43.66 16384 1000 80.33 80.38 80.36 32768 1000 149.26 149.30 149.28 65536 640 334.59 334.75 334.67 131072 320 640.41 640.76 640.56 262144 160 1870.58 1872.36 1871.35 524288 80 3702.32 3706.32 3704.23 1048576 40 8793.10 8800.35 8796.72 2097152 20 19615.46 19724.64 19683.48 4194304 10 41117.76 41302.47 41203.22 #---------------------------------------------------------------- # Benchmarking Allgatherv # #processes = 2 # ( 14 additional processes waiting in 
MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.09 0.10 0.10 1 1000 0.73 0.74 0.74 2 1000 0.73 0.73 0.73 4 1000 0.77 0.77 0.77 8 1000 0.72 0.73 0.73 16 1000 0.73 0.73 0.73 32 1000 0.79 0.80 0.80 64 1000 0.78 0.78 0.78 128 1000 0.81 0.82 0.81 256 1000 0.88 0.88 0.88 512 1000 0.95 0.96 0.96 1024 1000 1.08 1.08 1.08 2048 1000 0.99 0.99 0.99 4096 1000 1.38 1.38 1.38 8192 1000 2.31 2.31 2.31 16384 1000 3.95 3.96 3.96 32768 1000 7.19 7.20 7.20 65536 640 21.66 21.67 21.67 131072 320 39.97 39.98 39.97 262144 160 77.08 77.09 77.08 524288 80 143.65 143.69 143.67 1048576 40 275.15 275.27 275.21 2097152 20 537.68 537.88 537.78 4194304 10 1859.50 1863.53 1861.51 #---------------------------------------------------------------- # Benchmarking Allgatherv # #processes = 4 # ( 12 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.09 0.10 0.10 1 1000 1.21 1.22 1.21 2 1000 1.19 1.20 1.20 4 1000 1.21 1.22 1.22 8 1000 1.19 1.20 1.19 16 1000 1.25 1.26 1.26 32 1000 1.24 1.25 1.24 64 1000 1.27 1.28 1.28 128 1000 1.34 1.36 1.35 256 1000 1.46 1.48 1.47 512 1000 1.62 1.63 1.63 1024 1000 1.93 1.94 1.94 2048 1000 2.69 2.69 2.69 4096 1000 3.85 3.86 3.85 8192 1000 5.91 5.91 5.91 16384 1000 10.11 10.12 10.12 32768 1000 19.10 19.12 19.11 65536 640 60.36 60.40 60.38 131072 320 104.52 104.56 104.55 262144 160 194.17 194.21 194.19 524288 80 360.48 360.72 360.55 1048576 40 915.55 916.45 916.04 2097152 20 3054.39 3057.96 3056.14 4194304 10 6102.99 6108.26 6105.46 #---------------------------------------------------------------- # Benchmarking Allgatherv # #processes = 8 # ( 8 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.11 0.12 0.11 1 1000 1.75 1.76 1.76 2 1000 1.74 1.75 1.75 4 1000 1.79 1.80 1.79 8 1000 1.82 1.84 1.83 16 1000 1.84 1.85 1.85 32 1000 1.84 1.86 1.85 64 1000 1.89 1.91 1.90 128 1000 1.98 1.99 1.99 256 1000 2.26 2.28 2.27 512 1000 2.64 2.66 2.65 1024 1000 3.42 3.44 3.43 2048 1000 6.14 6.16 6.15 4096 1000 8.62 8.64 8.63 8192 1000 13.30 13.32 13.31 16384 1000 23.33 23.35 23.34 32768 1000 52.78 52.79 52.79 65536 640 127.66 127.70 127.69 131072 320 233.83 233.92 233.87 262144 160 558.72 559.42 559.06 524288 80 1710.25 1711.03 1710.63 1048576 40 4086.35 4094.27 4089.98 2097152 20 9679.47 9685.95 9683.75 4194304 10 19740.72 19760.54 19751.10 #---------------------------------------------------------------- # Benchmarking Allgatherv # #processes = 16 #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.13 0.13 0.13 1 1000 2.99 3.01 3.00 2 1000 2.91 2.93 2.92 4 1000 2.99 3.00 2.99 8 1000 3.17 3.19 3.18 16 1000 3.24 3.25 3.24 32 1000 3.30 3.32 3.31 64 1000 3.58 3.60 3.59 128 1000 4.12 4.16 4.14 256 1000 5.06 5.09 5.08 512 1000 6.86 6.89 6.88 1024 1000 10.42 10.45 10.43 2048 1000 19.48 19.50 19.49 4096 1000 27.53 27.56 27.55 8192 1000 45.11 45.14 45.12 16384 1000 81.83 81.86 81.85 32768 1000 149.49 149.53 149.51 65536 640 334.50 334.67 334.59 131072 320 665.72 666.02 665.86 262144 160 1833.76 1835.28 1834.48 524288 80 3694.44 3696.18 3695.16 1048576 40 8730.63 8754.83 8746.56 2097152 20 19763.93 19801.64 19780.60 4194304 10 41581.18 42228.22 41922.32 
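
For orientation before the next tables: the Allgather and Allgatherv rows above time collective calls of roughly the shape sketched below. This is a minimal illustrative C program, not taken from the benchmark's own source; the 1024-byte per-process contribution and the use of MPI_BYTE are arbitrary choices made only for the example.

#include <mpi.h>
#include <stdlib.h>

int main(int argc, char **argv)
{
    int nprocs;
    const int count = 1024;     /* bytes contributed per process; arbitrary here */

    MPI_Init(&argc, &argv);
    MPI_Comm_size(MPI_COMM_WORLD, &nprocs);

    /* Buffer contents are irrelevant for a timing sketch. */
    char *sendbuf = malloc(count);
    char *recvbuf = malloc((size_t)count * nprocs);

    /* Every rank contributes `count` bytes and receives the
       concatenation of all ranks' contributions.            */
    MPI_Allgather(sendbuf, count, MPI_BYTE,
                  recvbuf, count, MPI_BYTE, MPI_COMM_WORLD);

    free(sendbuf);
    free(recvbuf);
    MPI_Finalize();
    return 0;
}

In the collective tables only timings are reported (there is no Mbytes/sec column), and each #bytes value is the per-process message size used for that row.
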
#---------------------------------------------------------------- # Benchmarking Gather # #processes = 2 # ( 14 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.08 0.09 0.09 1 1000 0.55 0.55 0.55 2 1000 0.55 0.56 0.56 4 1000 0.56 0.57 0.57 8 1000 0.62 0.64 0.63 16 1000 0.55 0.56 0.56 32 1000 0.59 0.60 0.59 64 1000 0.61 0.61 0.61 128 1000 0.62 0.63 0.63 256 1000 0.68 0.69 0.68 512 1000 0.74 0.75 0.75 1024 1000 0.69 0.69 0.69 2048 1000 0.84 0.84 0.84 4096 1000 1.20 1.21 1.20 8192 1000 1.84 1.84 1.84 16384 1000 3.09 3.09 3.09 32768 1000 5.47 5.48 5.48 65536 640 10.82 10.82 10.82 131072 320 20.91 20.92 20.91 262144 160 41.73 41.77 41.75 524288 80 83.71 83.82 83.77 1048576 40 167.67 167.88 167.78 2097152 20 335.38 335.50 335.44 4194304 10 1146.29 1149.68 1147.99 #---------------------------------------------------------------- # Benchmarking Gather # #processes = 4 # ( 12 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.09 0.10 0.09 1 1000 0.94 0.95 0.94 2 1000 0.93 0.94 0.93 4 1000 0.93 0.93 0.93 8 1000 0.94 0.95 0.94 16 1000 0.99 1.01 0.99 32 1000 0.98 0.99 0.99 64 1000 1.03 1.04 1.03 128 1000 1.04 1.05 1.05 256 1000 1.12 1.14 1.13 512 1000 1.28 1.30 1.29 1024 1000 0.67 0.69 0.68 2048 1000 0.85 0.87 0.85 4096 1000 1.22 1.25 1.24 8192 1000 2.11 2.12 2.12 16384 1000 3.15 3.17 3.16 32768 1000 5.83 5.84 5.84 65536 640 16.65 16.69 16.67 131072 320 29.35 29.48 29.43 262144 160 54.90 55.27 55.13 524288 80 103.16 104.44 103.92 1048576 40 277.52 285.50 282.31 2097152 20 748.50 787.64 772.12 4194304 10 1555.23 1708.89 1649.43 #---------------------------------------------------------------- # Benchmarking Gather # #processes = 8 # ( 8 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.08 0.09 0.09 1 1000 1.30 1.31 1.31 2 1000 1.34 1.36 1.35 4 1000 1.30 1.32 1.31 8 1000 1.34 1.37 1.36 16 1000 1.42 1.44 1.43 32 1000 1.38 1.39 1.38 64 1000 1.44 1.45 1.45 128 1000 1.51 1.53 1.52 256 1000 1.70 1.72 1.71 512 1000 2.06 2.09 2.08 1024 1000 0.67 0.69 0.68 2048 1000 0.89 0.92 0.90 4096 1000 1.26 1.28 1.27 8192 1000 2.13 2.16 2.15 16384 1000 3.45 3.49 3.47 32768 1000 6.91 6.95 6.93 65536 640 17.05 17.16 17.12 131072 320 30.33 30.66 30.51 262144 160 106.82 109.13 108.20 524288 80 283.15 293.89 289.37 1048576 40 588.28 624.13 608.92 2097152 20 1124.47 1251.61 1196.98 4194304 10 2596.19 3079.41 2875.24 #---------------------------------------------------------------- # Benchmarking Gather # #processes = 16 #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.08 0.09 0.09 1 1000 2.52 2.55 2.53 2 1000 2.45 2.48 2.46 4 1000 2.49 2.52 2.50 8 1000 2.58 2.62 2.60 16 1000 2.66 2.70 2.69 32 1000 2.69 2.72 2.70 64 1000 2.89 2.91 2.91 128 1000 3.22 3.24 3.23 256 1000 3.80 3.84 3.82 512 1000 5.04 5.07 5.05 1024 1000 0.95 0.98 0.96 2048 1000 1.31 1.37 1.34 4096 1000 2.14 2.18 2.16 8192 1000 3.77 3.81 3.79 16384 1000 7.58 7.66 7.62 32768 1000 15.18 15.30 15.24 65536 640 29.96 30.32 30.17 131072 320 105.68 108.11 107.22 262144 160 244.56 255.45 251.61 524288 80 512.02 548.24 535.13 1048576 40 1173.73 1299.55 1254.17 2097152 20 2592.69 2972.65 2807.84 4194304 10 4739.43 5820.70 
5316.60 #---------------------------------------------------------------- # Benchmarking Gatherv # #processes = 2 # ( 14 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.16 0.16 0.16 1 1000 0.45 0.45 0.45 2 1000 0.45 0.46 0.46 4 1000 0.45 0.45 0.45 8 1000 0.46 0.47 0.47 16 1000 0.45 0.47 0.46 32 1000 0.47 0.48 0.48 64 1000 0.48 0.49 0.48 128 1000 0.51 0.52 0.52 256 1000 0.55 0.57 0.56 512 1000 0.62 0.62 0.62 1024 1000 0.68 0.69 0.69 2048 1000 0.89 0.89 0.89 4096 1000 1.22 1.22 1.22 8192 1000 1.86 1.88 1.87 16384 1000 3.13 3.13 3.13 32768 1000 5.50 5.51 5.51 65536 640 10.83 10.84 10.83 131072 320 20.98 20.99 20.98 262144 160 41.86 41.90 41.88 524288 80 83.89 83.90 83.90 1048576 40 167.69 167.90 167.80 2097152 20 335.24 335.55 335.40 4194304 10 1143.81 1146.77 1145.29 #---------------------------------------------------------------- # Benchmarking Gatherv # #processes = 4 # ( 12 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.11 0.18 0.14 1 1000 0.49 0.50 0.49 2 1000 0.49 0.50 0.49 4 1000 0.48 0.50 0.49 8 1000 0.48 0.49 0.48 16 1000 0.48 0.50 0.49 32 1000 0.49 0.50 0.50 64 1000 0.52 0.54 0.53 128 1000 0.57 0.59 0.58 256 1000 0.56 0.56 0.56 512 1000 0.63 0.65 0.64 1024 1000 0.72 0.75 0.73 2048 1000 0.88 0.91 0.90 4096 1000 1.29 1.30 1.29 8192 1000 2.20 2.21 2.20 16384 1000 3.25 3.27 3.26 32768 1000 5.86 5.89 5.87 65536 640 16.61 16.62 16.61 131072 320 29.36 29.48 29.43 262144 160 54.74 55.10 54.96 524288 80 103.26 104.70 104.13 1048576 40 274.40 282.26 279.13 2097152 20 733.90 773.22 757.70 4194304 10 1555.92 1709.60 1650.24 #---------------------------------------------------------------- # Benchmarking Gatherv # #processes = 8 # ( 8 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.11 0.12 0.11 1 1000 0.56 0.57 0.56 2 1000 0.56 0.58 0.57 4 1000 0.55 0.58 0.56 8 1000 0.54 0.56 0.55 16 1000 0.56 0.57 0.57 32 1000 0.56 0.59 0.58 64 1000 0.57 0.60 0.58 128 1000 0.61 0.63 0.62 256 1000 0.63 0.65 0.64 512 1000 0.70 0.73 0.71 1024 1000 0.77 0.80 0.79 2048 1000 0.97 0.99 0.98 4096 1000 1.41 1.44 1.42 8192 1000 2.30 2.32 2.31 16384 1000 3.65 3.69 3.67 32768 1000 7.05 7.08 7.06 65536 640 17.06 17.19 17.13 131072 320 30.36 30.71 30.56 262144 160 106.70 109.17 108.14 524288 80 275.50 286.21 281.70 1048576 40 544.68 580.33 565.18 2097152 20 1060.25 1187.79 1133.33 4194304 10 2827.12 3335.00 3124.06 #---------------------------------------------------------------- # Benchmarking Gatherv # #processes = 16 #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.10 0.13 0.11 1 1000 0.73 0.75 0.74 2 1000 0.72 0.75 0.74 4 1000 0.80 0.96 0.94 8 1000 0.72 0.74 0.73 16 1000 0.72 0.75 0.74 32 1000 0.66 0.71 0.69 64 1000 0.72 0.76 0.74 128 1000 0.75 0.77 0.76 256 1000 0.79 0.83 0.81 512 1000 0.86 0.89 0.87 1024 1000 1.03 1.09 1.06 2048 1000 1.36 1.41 1.39 4096 1000 2.19 2.23 2.20 8192 1000 3.84 3.90 3.87 16384 1000 7.61 7.69 7.64 32768 1000 15.18 15.33 15.25 65536 640 28.38 28.74 28.60 131072 320 105.55 108.01 107.11 262144 160 274.14 284.98 281.12 524288 80 479.71 516.54 502.98 1048576 40 1083.55 1209.90 1164.17 2097152 20 2404.31 2797.68 2631.87 4194304 10 4346.90 
5419.61 4920.10 #---------------------------------------------------------------- # Benchmarking Scatter # #processes = 2 # ( 14 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.09 0.09 0.09 1 1000 0.47 0.47 0.47 2 1000 0.46 0.46 0.46 4 1000 0.46 0.46 0.46 8 1000 0.50 0.51 0.50 16 1000 0.46 0.46 0.46 32 1000 0.49 0.49 0.49 64 1000 0.49 0.50 0.50 128 1000 0.51 0.52 0.51 256 1000 0.55 0.56 0.55 512 1000 0.65 0.66 0.66 1024 1000 0.74 0.74 0.74 2048 1000 0.95 0.95 0.95 4096 1000 1.37 1.38 1.37 8192 1000 2.22 2.24 2.23 16384 1000 3.93 3.94 3.94 32768 1000 7.06 7.07 7.07 65536 640 13.21 13.21 13.21 131072 320 24.39 24.43 24.41 262144 160 47.28 47.28 47.28 524288 80 89.43 89.44 89.44 1048576 40 173.18 173.33 173.25 2097152 20 340.60 340.75 340.68 4194304 10 988.77 992.39 990.58 #---------------------------------------------------------------- # Benchmarking Scatter # #processes = 4 # ( 12 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.08 0.09 0.08 1 1000 0.73 0.75 0.74 2 1000 0.73 0.75 0.74 4 1000 0.72 0.74 0.73 8 1000 0.73 0.74 0.73 16 1000 0.78 0.79 0.79 32 1000 0.81 0.81 0.81 64 1000 0.80 0.83 0.82 128 1000 0.89 0.91 0.90 256 1000 0.92 0.93 0.92 512 1000 1.08 1.10 1.09 1024 1000 1.26 1.27 1.27 2048 1000 1.01 1.03 1.02 4096 1000 1.49 1.49 1.49 8192 1000 2.38 2.39 2.38 16384 1000 3.97 3.98 3.97 32768 1000 7.35 7.37 7.36 65536 640 20.18 20.22 20.20 131072 320 35.68 35.74 35.71 262144 160 65.81 66.08 65.96 524288 80 127.93 129.14 128.65 1048576 40 288.77 294.20 292.11 2097152 20 776.55 802.55 791.87 4194304 10 1722.79 1858.93 1811.25 #---------------------------------------------------------------- # Benchmarking Scatter # #processes = 8 # ( 8 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.08 0.09 0.09 1 1000 0.92 0.94 0.93 2 1000 1.02 1.04 1.03 4 1000 0.94 0.95 0.95 8 1000 0.90 0.95 0.93 16 1000 0.94 0.95 0.95 32 1000 0.95 0.96 0.95 64 1000 0.97 1.02 1.01 128 1000 1.06 1.12 1.09 256 1000 1.18 1.19 1.18 512 1000 1.47 1.49 1.48 1024 1000 2.11 2.14 2.12 2048 1000 1.05 1.07 1.06 4096 1000 1.52 1.55 1.54 8192 1000 2.42 2.43 2.42 16384 1000 4.29 4.30 4.30 32768 1000 8.03 8.06 8.05 65536 640 22.52 22.59 22.56 131072 320 40.43 40.68 40.58 262144 160 116.25 117.41 116.89 524288 80 246.10 251.21 249.00 1048576 40 556.78 578.50 568.71 2097152 20 1027.86 1137.41 1092.88 4194304 10 2245.81 2694.58 2511.54 #---------------------------------------------------------------- # Benchmarking Scatter # #processes = 16 #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.08 0.09 0.09 1 1000 1.54 1.56 1.55 2 1000 1.53 1.56 1.55 4 1000 1.55 1.59 1.58 8 1000 1.57 1.60 1.59 16 1000 1.56 1.60 1.59 32 1000 1.63 1.68 1.66 64 1000 1.81 1.85 1.82 128 1000 2.07 2.10 2.09 256 1000 2.35 2.39 2.38 512 1000 3.20 3.25 3.23 1024 1000 4.90 4.92 4.90 2048 1000 1.63 1.66 1.65 4096 1000 2.52 2.55 2.54 8192 1000 4.15 4.19 4.18 16384 1000 14.90 14.95 14.93 32768 1000 15.68 15.72 15.69 65536 640 44.13 44.32 44.24 131072 320 88.31 89.06 88.73 262144 160 223.94 227.74 226.03 524288 80 406.68 420.25 413.99 1048576 40 728.95 808.15 778.23 2097152 20 1406.36 1724.10 1586.31 4194304 10 
2745.06 4244.30 3635.83 #---------------------------------------------------------------- # Benchmarking Scatterv # #processes = 2 # ( 14 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.16 0.16 0.16 1 1000 0.52 0.53 0.53 2 1000 0.51 0.53 0.52 4 1000 0.51 0.52 0.52 8 1000 0.52 0.53 0.52 16 1000 0.52 0.52 0.52 32 1000 0.51 0.53 0.52 64 1000 0.55 0.56 0.55 128 1000 0.56 0.57 0.56 256 1000 0.59 0.60 0.59 512 1000 0.66 0.67 0.67 1024 1000 0.79 0.79 0.79 2048 1000 0.98 0.98 0.98 4096 1000 1.37 1.39 1.38 8192 1000 2.09 2.09 2.09 16384 1000 3.56 3.57 3.57 32768 1000 6.29 6.32 6.31 65536 640 13.18 13.19 13.19 131072 320 24.34 24.34 24.34 262144 160 47.08 47.09 47.08 524288 80 89.11 89.17 89.14 1048576 40 172.78 172.92 172.85 2097152 20 340.10 340.25 340.18 4194304 10 994.30 997.26 995.78 #---------------------------------------------------------------- # Benchmarking Scatterv # #processes = 4 # ( 12 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.12 0.14 0.13 1 1000 0.67 0.68 0.68 2 1000 0.68 0.70 0.69 4 1000 0.67 0.68 0.68 8 1000 0.69 0.69 0.69 16 1000 0.67 0.68 0.68 32 1000 0.72 0.74 0.73 64 1000 0.71 0.71 0.71 128 1000 0.72 0.74 0.73 256 1000 0.80 0.81 0.81 512 1000 0.87 0.88 0.88 1024 1000 0.98 1.00 0.99 2048 1000 1.25 1.27 1.26 4096 1000 1.88 1.90 1.89 8192 1000 2.96 2.98 2.97 16384 1000 4.89 4.91 4.90 32768 1000 9.15 9.16 9.15 65536 640 20.19 20.24 20.22 131072 320 37.32 37.39 37.37 262144 160 66.27 66.59 66.46 524288 80 132.98 134.16 133.71 1048576 40 285.29 290.78 288.73 2097152 20 777.40 803.51 792.65 4194304 10 1717.31 1855.61 1807.06 #---------------------------------------------------------------- # Benchmarking Scatterv # #processes = 8 # ( 8 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.11 0.13 0.12 1 1000 1.09 1.11 1.10 2 1000 1.17 1.19 1.18 4 1000 1.09 1.10 1.10 8 1000 1.12 1.15 1.14 16 1000 1.09 1.11 1.10 32 1000 1.15 1.17 1.16 64 1000 1.13 1.15 1.14 128 1000 1.21 1.23 1.22 256 1000 1.21 1.24 1.23 512 1000 1.41 1.45 1.44 1024 1000 1.59 1.61 1.60 2048 1000 2.01 2.03 2.02 4096 1000 2.92 2.94 2.93 8192 1000 4.70 4.73 4.71 16384 1000 8.40 8.43 8.42 32768 1000 16.14 16.16 16.15 65536 640 22.61 22.68 22.64 131072 320 40.35 40.59 40.48 262144 160 101.29 102.44 101.94 524288 80 244.02 249.03 246.84 1048576 40 518.15 539.48 529.66 2097152 20 1028.00 1136.35 1091.85 4194304 10 2245.38 2765.20 2550.71 #---------------------------------------------------------------- # Benchmarking Scatterv # #processes = 16 #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.09 0.13 0.11 1 1000 2.58 2.62 2.60 2 1000 2.52 2.55 2.53 4 1000 2.53 2.56 2.54 8 1000 2.50 2.54 2.52 16 1000 2.50 2.53 2.51 32 1000 2.54 2.57 2.56 64 1000 2.56 2.60 2.58 128 1000 2.64 2.67 2.66 256 1000 2.74 2.78 2.76 512 1000 3.51 3.53 3.52 1024 1000 4.00 4.03 4.02 2048 1000 5.18 5.21 5.19 4096 1000 7.26 7.29 7.28 8192 1000 12.36 12.40 12.38 16384 1000 22.07 22.12 22.10 32768 1000 41.05 41.13 41.09 65536 640 45.52 45.66 45.59 131072 320 88.95 89.66 89.35 262144 160 228.72 232.04 230.55 524288 80 407.05 420.62 414.37 1048576 40 727.92 807.20 777.07 2097152 20 1402.62 1721.95 1584.62 
4194304 10 2867.89 4245.33 3659.10 #---------------------------------------------------------------- # Benchmarking Alltoall # #processes = 2 # ( 14 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.08 0.09 0.08 1 1000 1.59 1.60 1.60 2 1000 1.58 1.59 1.58 4 1000 1.67 1.69 1.68 8 1000 1.66 1.66 1.66 16 1000 0.55 0.55 0.55 32 1000 1.05 1.07 1.06 64 1000 0.55 0.55 0.55 128 1000 1.92 1.92 1.92 256 1000 1.15 1.15 1.15 512 1000 1.19 1.21 1.20 1024 1000 1.29 1.29 1.29 2048 1000 1.52 1.52 1.52 4096 1000 2.03 2.03 2.03 8192 1000 2.92 2.93 2.92 16384 1000 4.67 4.68 4.68 32768 1000 7.89 7.90 7.89 65536 640 24.50 24.50 24.50 131072 320 41.68 41.70 41.69 262144 160 77.46 77.47 77.46 524288 80 142.20 142.22 142.21 1048576 40 275.15 275.30 275.23 2097152 20 539.11 539.15 539.13 4194304 10 2174.66 2179.00 2176.83 #---------------------------------------------------------------- # Benchmarking Alltoall # #processes = 4 # ( 12 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.08 0.08 0.08 1 1000 3.38 3.41 3.39 2 1000 3.40 3.41 3.41 4 1000 3.39 3.41 3.40 8 1000 3.41 3.42 3.41 16 1000 1.28 1.30 1.29 32 1000 1.90 1.91 1.90 64 1000 1.38 1.40 1.39 128 1000 3.67 3.68 3.67 256 1000 2.07 2.08 2.08 512 1000 2.27 2.29 2.28 1024 1000 2.56 2.58 2.57 2048 1000 3.16 3.17 3.17 4096 1000 4.18 4.18 4.18 8192 1000 6.17 6.19 6.18 16384 1000 10.75 10.77 10.76 32768 1000 21.95 21.99 21.97 65536 640 58.90 58.92 58.91 131072 320 103.96 104.07 104.00 262144 160 191.83 191.93 191.88 524288 80 393.45 393.60 393.55 1048576 40 1410.76 1413.81 1412.55 2097152 20 3091.42 3094.29 3092.88 4194304 10 6228.52 6240.89 6234.79 #---------------------------------------------------------------- # Benchmarking Alltoall # #processes = 8 # ( 8 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.08 0.08 0.08 1 1000 5.17 5.19 5.18 2 1000 5.19 5.21 5.20 4 1000 5.22 5.24 5.23 8 1000 5.32 5.35 5.34 16 1000 2.87 2.89 2.88 32 1000 3.58 3.60 3.59 64 1000 3.01 3.01 3.01 128 1000 6.07 6.09 6.08 256 1000 4.05 4.07 4.06 512 1000 4.49 4.50 4.50 1024 1000 5.09 5.11 5.10 2048 1000 6.15 6.17 6.16 4096 1000 8.43 8.46 8.45 8192 1000 13.56 13.58 13.57 16384 1000 26.89 26.92 26.91 32768 1000 52.50 52.53 52.51 65536 640 123.30 123.37 123.34 131072 320 276.72 276.98 276.90 262144 160 1377.94 1378.87 1378.44 524288 80 2617.51 2618.12 2617.79 1048576 40 5339.66 5347.45 5344.19 2097152 20 10624.35 10635.24 10629.98 4194304 10 20520.09 20608.23 20572.18 #---------------------------------------------------------------- # Benchmarking Alltoall # #processes = 16 #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.08 0.09 0.08 1 1000 8.71 8.74 8.72 2 1000 8.68 8.71 8.70 4 1000 8.77 8.79 8.78 8 1000 9.14 9.17 9.16 16 1000 10.37 10.40 10.39 32 1000 10.95 10.98 10.96 64 1000 11.07 11.09 11.07 128 1000 10.80 10.82 10.81 256 1000 12.70 12.73 12.72 512 1000 15.07 15.11 15.09 1024 1000 18.41 18.44 18.42 2048 1000 23.35 23.38 23.36 4096 1000 33.15 33.19 33.17 8192 1000 56.13 56.18 56.15 16384 1000 104.17 104.27 104.23 32768 1000 202.38 202.50 202.45 65536 640 493.54 493.86 493.71 131072 320 1788.92 1790.00 1789.52 262144 160 3992.60 
3998.69 3996.58 524288 80 6950.83 6952.50 6951.75 1048576 40 14875.73 14949.27 14929.59 2097152 20 29076.92 29419.06 29281.82 4194304 10 56718.64 57763.62 57364.33 #---------------------------------------------------------------- # Benchmarking Alltoallv # #processes = 2 # ( 14 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.30 0.30 0.30 1 1000 1.09 1.09 1.09 2 1000 1.08 1.11 1.09 4 1000 1.08 1.08 1.08 8 1000 1.11 1.11 1.11 16 1000 1.07 1.08 1.08 32 1000 1.13 1.13 1.13 64 1000 1.08 1.08 1.08 128 1000 1.14 1.14 1.14 256 1000 1.17 1.17 1.17 512 1000 1.24 1.26 1.25 1024 1000 1.31 1.33 1.32 2048 1000 1.55 1.56 1.55 4096 1000 2.03 2.03 2.03 8192 1000 2.94 2.94 2.94 16384 1000 4.59 4.61 4.60 32768 1000 7.93 7.93 7.93 65536 640 24.61 24.64 24.62 131072 320 41.78 41.81 41.80 262144 160 77.47 77.50 77.48 524288 80 143.51 143.54 143.53 1048576 40 275.08 275.15 275.11 2097152 20 538.50 538.58 538.54 4194304 10 2193.93 2198.31 2196.12 #---------------------------------------------------------------- # Benchmarking Alltoallv # #processes = 4 # ( 12 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.31 0.34 0.33 1 1000 1.96 1.97 1.97 2 1000 1.91 1.92 1.91 4 1000 1.87 1.88 1.88 8 1000 1.89 1.90 1.89 16 1000 1.87 1.88 1.88 32 1000 1.89 1.94 1.93 64 1000 1.95 2.01 1.98 128 1000 2.02 2.03 2.03 256 1000 2.11 2.12 2.11 512 1000 2.31 2.33 2.32 1024 1000 2.64 2.66 2.65 2048 1000 3.19 3.20 3.19 4096 1000 4.23 4.25 4.24 8192 1000 6.23 6.25 6.24 16384 1000 10.93 10.95 10.93 32768 1000 22.00 22.03 22.02 65536 640 58.87 58.92 58.90 131072 320 103.78 103.84 103.80 262144 160 191.88 192.09 192.00 524288 80 384.88 385.04 384.98 1048576 40 1422.55 1424.03 1423.30 2097152 20 3095.34 3099.91 3097.52 4194304 10 6204.51 6209.66 6207.09 #---------------------------------------------------------------- # Benchmarking Alltoallv # #processes = 8 # ( 8 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.36 0.40 0.38 1 1000 3.60 3.60 3.60 2 1000 3.59 3.60 3.59 4 1000 3.60 3.63 3.62 8 1000 3.66 3.67 3.67 16 1000 3.62 3.65 3.64 32 1000 3.68 3.71 3.69 64 1000 3.82 3.82 3.82 128 1000 3.90 3.92 3.92 256 1000 4.13 4.16 4.15 512 1000 4.62 4.64 4.63 1024 1000 5.23 5.25 5.24 2048 1000 6.30 6.32 6.31 4096 1000 8.59 8.60 8.60 8192 1000 13.70 13.71 13.70 16384 1000 26.90 26.94 26.92 32768 1000 52.32 52.37 52.33 65536 640 123.82 123.89 123.86 131072 320 279.86 280.24 280.12 262144 160 1379.14 1380.44 1379.77 524288 80 2743.09 2748.28 2745.95 1048576 40 5345.45 5371.27 5359.21 2097152 20 10597.48 10623.28 10614.62 4194304 10 20470.62 20550.16 20512.51 #---------------------------------------------------------------- # Benchmarking Alltoallv # #processes = 16 #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.48 0.53 0.50 1 1000 10.77 10.81 10.79 2 1000 10.85 10.87 10.86 4 1000 10.90 10.93 10.91 8 1000 10.85 10.88 10.87 16 1000 10.89 10.92 10.90 32 1000 10.96 10.98 10.97 64 1000 11.94 11.98 11.96 128 1000 12.32 12.35 12.34 256 1000 12.93 12.96 12.94 512 1000 15.14 15.16 15.15 1024 1000 17.96 17.99 17.98 2048 1000 22.50 22.54 22.52 4096 1000 32.71 32.75 32.73 8192 1000 55.54 55.59 55.56 16384 1000 
104.93 105.04 104.99 32768 1000 202.95 203.08 203.02 65536 640 503.46 503.67 503.56 131072 320 1788.15 1789.07 1788.56 262144 160 4010.83 4013.25 4011.83 524288 80 7577.78 7594.85 7589.67 1048576 40 14824.43 14903.85 14876.11 2097152 20 29174.14 29391.92 29289.00 4194304 10 56697.39 57729.01 57254.66 #---------------------------------------------------------------- # Benchmarking Bcast # #processes = 2 # ( 14 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.07 0.07 0.07 1 1000 0.39 0.39 0.39 2 1000 0.39 0.39 0.39 4 1000 0.38 0.38 0.38 8 1000 0.46 0.47 0.47 16 1000 0.46 0.46 0.46 32 1000 0.48 0.48 0.48 64 1000 0.54 0.54 0.54 128 1000 0.50 0.50 0.50 256 1000 0.54 0.54 0.54 512 1000 0.61 0.62 0.62 1024 1000 0.76 0.76 0.76 2048 1000 0.88 0.89 0.88 4096 1000 1.25 1.26 1.25 8192 1000 1.90 1.91 1.90 16384 1000 3.19 3.20 3.20 32768 1000 5.49 5.50 5.50 65536 640 8.96 8.99 8.98 131072 320 16.03 16.03 16.03 262144 160 29.69 29.69 29.69 524288 80 53.69 53.74 53.71 1048576 40 101.58 101.66 101.62 2097152 20 196.75 196.99 196.87 4194304 10 392.60 392.89 392.75 #---------------------------------------------------------------- # Benchmarking Bcast # #processes = 4 # ( 12 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.06 0.07 0.07 1 1000 0.64 0.64 0.64 2 1000 0.64 0.66 0.65 4 1000 0.64 0.65 0.64 8 1000 0.71 0.73 0.72 16 1000 0.72 0.73 0.73 32 1000 0.79 0.80 0.80 64 1000 0.80 0.82 0.81 128 1000 0.84 0.85 0.85 256 1000 0.91 0.92 0.91 512 1000 1.04 1.07 1.05 1024 1000 1.15 1.17 1.16 2048 1000 1.46 1.47 1.47 4096 1000 1.99 2.01 2.01 8192 1000 3.05 3.06 3.06 16384 1000 4.70 4.72 4.71 32768 1000 8.28 8.29 8.29 65536 640 18.68 18.70 18.69 131072 320 34.06 34.10 34.08 262144 160 59.66 59.71 59.68 524288 80 108.36 108.40 108.38 1048576 40 205.52 205.74 205.59 2097152 20 402.27 402.63 402.53 4194304 10 1173.69 1188.40 1182.80 #---------------------------------------------------------------- # Benchmarking Bcast # #processes = 8 # ( 8 additional processes waiting in MPI_Barrier) #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.07 0.08 0.07 1 1000 0.75 0.77 0.76 2 1000 0.77 0.80 0.79 4 1000 0.81 0.83 0.82 8 1000 0.80 0.81 0.81 16 1000 0.79 0.81 0.80 32 1000 0.83 0.86 0.85 64 1000 0.90 0.92 0.91 128 1000 0.92 0.94 0.92 256 1000 1.00 1.02 1.01 512 1000 1.19 1.21 1.20 1024 1000 1.28 1.30 1.28 2048 1000 1.64 1.66 1.65 4096 1000 2.46 2.49 2.48 8192 1000 3.73 3.78 3.75 16384 1000 6.86 6.89 6.88 32768 1000 10.12 10.16 10.14 65536 640 16.90 16.97 16.95 131072 320 30.75 30.84 30.81 262144 160 61.81 62.11 62.04 524288 80 124.38 125.24 125.01 1048576 40 253.36 255.03 254.66 2097152 20 809.49 821.82 819.93 4194304 10 1888.61 1916.60 1906.56 #---------------------------------------------------------------- # Benchmarking Bcast # #processes = 16 #---------------------------------------------------------------- #bytes #repetitions t_min[usec] t_max[usec] t_avg[usec] 0 1000 0.07 0.08 0.07 1 1000 1.32 1.35 1.34 2 1000 1.31 1.33 1.32 4 1000 1.30 1.34 1.32 8 1000 1.09 1.11 1.10 16 1000 1.08 1.12 1.10 32 1000 1.02 1.13 1.10 64 1000 1.09 1.19 1.17 128 1000 1.16 1.22 1.21 256 1000 1.21 1.27 1.25 512 1000 1.35 1.57 1.53 1024 1000 2.21 2.30 2.28 2048 1000 2.78 2.86 2.83 4096 1000 3.04 3.16 3.13 8192 1000 4.81 
4.93 4.89
        16384         1000         9.12         9.23         9.19
        32768         1000        14.27        14.35        14.31
        65536          640        24.89        25.02        24.99
       131072          320        46.60        46.82        46.72
       262144          160        92.51        93.08        92.86
       524288           80       180.26       181.67       181.20
      1048576           40       361.85       365.88       364.55
      2097152           20       863.27       923.00       911.16
      4194304           10      2254.77      2295.02      2280.12

#---------------------------------------------------
# Benchmarking Barrier
# #processes = 2
# ( 14 additional processes waiting in MPI_Barrier)
#---------------------------------------------------
 #repetitions  t_min[usec]  t_max[usec]  t_avg[usec]
         1000         0.38         0.38         0.38

#---------------------------------------------------
# Benchmarking Barrier
# #processes = 4
# ( 12 additional processes waiting in MPI_Barrier)
#---------------------------------------------------
 #repetitions  t_min[usec]  t_max[usec]  t_avg[usec]
         1000         0.75         0.75         0.75

#---------------------------------------------------
# Benchmarking Barrier
# #processes = 8
# ( 8 additional processes waiting in MPI_Barrier)
#---------------------------------------------------
 #repetitions  t_min[usec]  t_max[usec]  t_avg[usec]
         1000         1.22         1.22         1.22

#---------------------------------------------------
# Benchmarking Barrier
# #processes = 16
#---------------------------------------------------
 #repetitions  t_min[usec]  t_max[usec]  t_avg[usec]
         1000         2.02         2.02         2.02

# All processes entering MPI_Finalize

[proxy:0:0@node43-038] got pmi command (from 12): barrier_in
[proxy:0:0@node43-038] got pmi command (from 14): barrier_in
[proxy:0:0@node43-038] got pmi command (from 16): barrier_in
[proxy:0:0@node43-038] got pmi command (from 21): barrier_in
[proxy:0:0@node43-038] got pmi command (from 24): barrier_in
[proxy:0:0@node43-038] got pmi command (from 27): barrier_in
[proxy:0:0@node43-038] got pmi command (from 30): barrier_in
[proxy:0:0@node43-038] got pmi command (from 33): barrier_in
[proxy:0:0@node43-038] got pmi command (from 36): barrier_in
[proxy:0:0@node43-038] got pmi command (from 39): barrier_in
[proxy:0:0@node43-038] got pmi command (from 42): barrier_in
[proxy:0:0@node43-038] got pmi command (from 45): barrier_in
[proxy:0:0@node43-038] got pmi command (from 48): barrier_in
[proxy:0:0@node43-038] got pmi command (from 51): barrier_in
[proxy:0:0@node43-038] got pmi command (from 54): barrier_in
[proxy:0:0@node43-038] got pmi command (from 57): barrier_in
[mpiexec@node43-038] [pgid: 0] got PMI command: cmd=barrier_in
[mpiexec@node43-038] PMI response to fd 8 pid 57: cmd=barrier_out
[proxy:0:0@node43-038] forwarding command (cmd=barrier_in) upstream
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] PMI response: cmd=barrier_out
[proxy:0:0@node43-038] got pmi command (from 12): finalize
[proxy:0:0@node43-038] PMI response: cmd=finalize_ack
[proxy:0:0@node43-038] got pmi command (from 42): finalize
[proxy:0:0@node43-038] PMI response: cmd=finalize_ack
[proxy:0:0@node43-038] got pmi command (from 45): finalize
[proxy:0:0@node43-038] PMI response: cmd=finalize_ack
[proxy:0:0@node43-038] got pmi command (from 51): finalize
[proxy:0:0@node43-038] PMI response: cmd=finalize_ack
[proxy:0:0@node43-038] got pmi command (from 54): finalize
[proxy:0:0@node43-038] PMI response: cmd=finalize_ack
[proxy:0:0@node43-038] got pmi command (from 14): finalize
[proxy:0:0@node43-038] PMI response: cmd=finalize_ack
[proxy:0:0@node43-038] got pmi command (from 16): finalize
[proxy:0:0@node43-038] PMI response: cmd=finalize_ack
[proxy:0:0@node43-038] got pmi command (from 27): finalize
[proxy:0:0@node43-038] PMI response: cmd=finalize_ack
[proxy:0:0@node43-038] got pmi command (from 33): finalize
[proxy:0:0@node43-038] PMI response: cmd=finalize_ack
[proxy:0:0@node43-038] got pmi command (from 36): finalize
[proxy:0:0@node43-038] PMI response: cmd=finalize_ack
[proxy:0:0@node43-038] got pmi command (from 39): finalize
[proxy:0:0@node43-038] PMI response: cmd=finalize_ack
[proxy:0:0@node43-038] got pmi command (from 48): finalize
[proxy:0:0@node43-038] PMI response: cmd=finalize_ack
[proxy:0:0@node43-038] got pmi command (from 57): finalize
[proxy:0:0@node43-038] PMI response: cmd=finalize_ack
[proxy:0:0@node43-038] got pmi command (from 21): finalize
[proxy:0:0@node43-038] PMI response: cmd=finalize_ack
[proxy:0:0@node43-038] got pmi command (from 24): finalize
[proxy:0:0@node43-038] PMI response: cmd=finalize_ack
[proxy:0:0@node43-038] got pmi command (from 30): finalize
[proxy:0:0@node43-038] PMI response: cmd=finalize_ack
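
A note on reading the point-to-point tables above: the Mbytes/sec figures are consistent with bandwidth computed from the message size and t_max, counting each repetition as two transfers for Sendrecv (one send plus one receive per rank) and four for Exchange (two sends plus two receives), with one MByte taken as 2^20 bytes. The collective tables (Allreduce, Reduce, Bcast, and so on) report only timings. The following small C sketch reproduces that arithmetic; the helper name and exact rounding are assumptions made for illustration, not part of this log.

#include <stdio.h>

/* Assumed reconstruction of the Mbytes/sec column for the
   point-to-point tables above: factor 2 for Sendrecv, 4 for
   Exchange, MByte = 2^20 bytes, time = t_max in microseconds. */
static double imb_mbytes_per_sec(double bytes, double t_max_usec, int factor)
{
    return factor * bytes / (t_max_usec * 1e-6) / (1024.0 * 1024.0);
}

int main(void)
{
    /* Exchange, 2 processes, 4194304 bytes, t_max = 1851.89 usec:
       4 * 4194304 / 1851.89e-6 / 2^20 ~= 8639.8 MB/s, matching the
       8639.81 reported in that row.                                */
    printf("%.1f\n", imb_mbytes_per_sec(4194304.0, 1851.89, 4));
    return 0;
}
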