Dear Paul,

I'm not angry. Linux is a volunteer-based system,
and if someone (quite many, in fact) does something,
that's great.

Even though I have read the bug report, and visited Alan's pages as
well as the BLCR home page, I could not figure out that there
is an experimental version available!

I have word blindness, but that only means that
I do not see small errors.

In this case I got incorrect information.

But what you wrote is 200% true — thanks, Paul.
I will download and test the experimental version!

The experimental version did not work on my machine;
also, removing it was painful!

joni

===============

# Ubuntu 12.04 LTS — build and install BLCR 0.8.5_b4 from source.
sudo apt-get update
sudo apt-get upgrade
#sudo apt-get install linux-source linux-headers-$(uname -r)
wget https://upc-bugs.lbl.gov/blcr-dist/blcr-0.8.5_b4.tar.gz
# The tarball must be unpacked and the build run inside the source tree.
# (The original never extracted it, and ran `make clean` before configure
# had generated any Makefile — which is why `make clean`/`distclean` later
# fail with "no rule to make target".)
tar xzf blcr-0.8.5_b4.tar.gz
cd blcr-0.8.5_b4
# We assume the kernel and its System.map are at the default location.
# configure/make need no root; only the install step does.
./configure --with-system-map=/boot/System.map-3.2.0-35-generic
make
sudo make install
sudo chmod -R 775 /mpi3/C2/blcr-0.8.5_b4


=================

host: mpi1

==================================================================================================
mpiexec options:
----------------
  Base path: /mpi3/C2/mpich2-1.5/bin/
  Launcher: (null)
  Debug level: 1
  Enable X: -1

  Global environment:
  -------------------
    MUMPS=/mpi3/S2/MUMPS_4.10.0
    LC_PAPER=fi_FI.UTF-8
    LC_ADDRESS=fi_FI.UTF-8
    SSH_AGENT_PID=3750
    LC_MONETARY=fi_FI.UTF-8
    MUMPS_I=/mpi3/C2/MUMPS_4.10.0
    GPG_AGENT_INFO=/tmp/keyring-qtrpvR/gpg:0:1
    TERM=xterm
    SHELL=/bin/bash
    
XDG_SESSION_COOKIE=6d6390cb56a32b6678c10da600000412-1358089517.620726-1634397259
    JPK_NETGEN=/mpi3/C2/netgen_623
    JPK_VER_S=S2
    HYDRA_CKPOINTLIB=blcr
    HYDRA_CKPOINT_INTERVAL=10
    WINDOWID=50331655
    LC_NUMERIC=fi_FI.UTF-8
    HYDRA_CKPOINT_PREFIX=/mpi3/chekpoint/default.chk
    GNOME_KEYRING_CONTROL=/tmp/keyring-qtrpvR
    JPK_ELMER=/mpi3/C2/elmer_6003
    PARDISO_LIC_PATH=/mpi3/C2/pardiso
    METIS_INCLUDE_DIR=/mpi3/C2/ParMetis-3.2.0
    JPK_NETGEN_S=/mpi3/S2/netgen_623
    USER=joni
    
LS_COLORS=rs=0:di=01;34:ln=01;36:mh=00:pi=40;33:so=01;35:do=01;35:bd=40;33;01:cd=40;33;01:or=40;31;01:su=37;41:sg=30;43:ca=30;41:tw=30;42:ow=34;42:st=37;44:ex=01;32:*.tar=01;31:*.tgz=01;31:*.arj=01;31:*.taz=01;31:*.lzh=01;31:*.lzma=01;31:*.tlz=01;31:*.txz=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.dz=01;31:*.gz=01;31:*.lz=01;31:*.xz=01;31:*.bz2=01;31:*.bz=01;31:*.tbz=01;31:*.tbz2=01;31:*.tz=01;31:*.deb=01;31:*.rpm=01;31:*.jar=01;31:*.war=01;31:*.ear=01;31:*.sar=01;31:*.rar=01;31:*.ace=01;31:*.zoo=01;31:*.cpio=01;31:*.7z=01;31:*.rz=01;31:*.jpg=01;35:*.jpeg=01;35:*.gif=01;35:*.bmp=01;35:*.pbm=01;35:*.pgm=01;35:*.ppm=01;35:*.tga=01;35:*.xbm=01;35:*.xpm=01;35:*.tif=01;35:*.tiff=01;35:*.png=01;35:*.svg=01;35:*.svgz=01;35:*.mng=01;35:*.pcx=01;35:*.mov=01;35:*.mpg=01;35:*.mpeg=01;35:*.m2v=01;35:*.mkv=01;35:*.webm=01;35:*.ogm=01;35:*.mp4=01;35:*.m4v=01;35:*.mp4v=01;35:*.vob=01;35:*.qt=01;35:*.nuv=01;35:*.wmv=01;35:*.asf=01;35:*.rm=01;35:*.rmvb=01;35:*.flc=01;35:*.avi=01;35:*.fli=01;35:*.flv=01;35:*.gl=01;35:*.dl=01;35:*.xcf=01;35:*.xwd=01;35:*.yuv=01;35:*.cgm=01;35:*.emf=01;35:*.axv=01;35:*.anx=01;35:*.ogv=01;35:*.ogx=01;35:*.aac=00;36:*.au=00;36:*.flac=00;36:*.mid=00;36:*.midi=00;36:*.mka=00;36:*.mp3=00;36:*.mpc=00;36:*.ogg=00;36:*.ra=00;36:*.wav=00;36:*.axa=00;36:*.oga=00;36:*.spx=00;36:*.xspf=00;36:
    
LD_LIBRARY_PATH=/mpi3/C2/mpich2-1.5/lib:/mpi3/C2/mpich2-1.5/bin:/mpi3/C2/ParMetis-3.2.0:/mpi3/C2/hypre-2.8.0b/lib:/mpi3/C2/scotch_6.0.0/lib:/mpi3/S2/MUMPS_4.10.0/lib:/mpi3/C2/gfortran64_mp/lib:/mpi3/C2/scalapack-2.0.2/lib:/mpi3/C2/hypre-2.8.0b/lib:/mpi3/C2/pardiso:/mpi3/C2/ParMetis-3.2.0:/mpi3/C2/hdf5-1.8.10/lib:/mpi3/C2/vtk-5.8.0/lib/vtk-5.8:/mpi3/C2/elmer_6003/lib::/mpi3/C2/netgen_623/lib:/usr/lib/
    LC_TELEPHONE=fi_FI.UTF-8
    XDG_SESSION_PATH=/org/freedesktop/DisplayManager/Session0
    JPK_OCC=/usr/include/oce
    XDG_SEAT_PATH=/org/freedesktop/DisplayManager/Seat0
    HYDRA_HOST_FILE=/mpi4/hosts
    SSH_AUTH_SOCK=/tmp/ssh-PBZWvXss3749/agent.3749
    SCOTCHDIR=/mpi3/C2/scotch_6.0.0
    HYDRA_LAUNCHER=rsh
    
SESSION_MANAGER=local/mpi1:@/tmp/.ICE-unix/3416,unix/mpi1:/tmp/.ICE-unix/3416
    DEFAULTS_PATH=/usr/share/gconf/ubuntu.default.path
    ELMER_HOME=/mpi3/C2/elmer_6003
    BLACS=/mpi3/C2/scalapack-2.0.2
    BLAS32=/mpi3/C2/gfortran64_mp
    METIS_DIR=
    MPI_LIBS=-L/mpi3/C2/mpich2-1.5/lib -lmpich -lmpichf90 -lmpe -lopa -lmpe
    XDG_CONFIG_DIRS=/etc/xdg/xdg-ubuntu:/etc/xdg
    JPK_MPI_DIR=/mpi3
    JPK_HDF5_S=/mpi3/S2/hdf5-1.8.10
    MPIEXEC_PORT_RANGE=7000:7100
    
PATH=/usr/lib/lightdm/lightdm:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/mpi3/C2/mpich2-1.5/bin:/mpi3/C2/blcr-0.8.5_b4://mpi3/C2/cmake-2.8.10.2/bin:/mpi3/C2/elmer_6003/bin:/mpi3/C2/elmer_6003/lib:/mpi3/C2/ParaView-3.14.1-Linux-64bit
    DESKTOP_SESSION=ubuntu
    BLAS=/mpi3/C2/gfortran64_mp
    METIS_LIBDIR=/mpi3/C2/ParMetis-3.2.0
    CMAKE_COMMAND=//mpi3/C2/cmake-2.8.10.2/bin
    QT_QMAKE_EXECUTABLE=/usr/bin/qmake-qt4
    LC_IDENTIFICATION=fi_FI.UTF-8
    JPK_SCOTCHDIR_S=/mpi3/S2/scotch_6.0.0_esmumps
    JPK_LMETISDIR_S=/mpi3/S2/ParMetis-3.2.0
    PWD=/mpi3/palkki6/ajo1
    NETGENDIR=/mpi3/C2/netgen_623/bin
    EDITOR=nano
    JPK_LMETISDIR=/mpi3/C2/ParMetis-3.2.0
    GNOME_KEYRING_PID=3405
    LANG=fi_FI.UTF-8
    MANDATORY_PATH=/usr/share/gconf/ubuntu.mandatory.path
    LC_MEASUREMENT=fi_FI.UTF-8
    JPK_HDF5=/mpi3/C2/hdf5-1.8.10
    UBUNTU_MENUPROXY=libappmenu.so
    COMPIZ_CONFIG_PROFILE=ubuntu
    ELMER_POST_HOME=/mpi3/C2/elmer_6003/bin
    JPK_INS=/mpi3/C2
    ELMER_LIB=/mpi3/C2/elmer_6003/share/elmersolver/lib
    HYDRA_PROXY_RETRY_COUNT=100
    GDMSESSION=ubuntu
    JPK_ELMER_S=/mpi3/S2/elmerfem_6003
    JPK_LMETISDIR32=/mpi3/C2/ParMetis-3.2.0
    JPK_BUI=/mpi3/S2
    VTK_INCLUDEPATH=/mpi3/C2/vtk-5.8.0/include
    SHLVL=1
    HOME=/home/joni
    LANGUAGE=fi:en
    ELMERGUI_HOME=/mpi3/C2/elmer_6003/bin
    GNOME_DESKTOP_SESSION_ID=this-is-deprecated
    MKL_SERIAL=YES
    LOGNAME=joni
    HYPRE=/mpi3/C2/hypre-2.8.0b
    JPK_JOBS=4
    JPK_VTK_DIR=/mpi3/C2/vtk-5.8.0
    SCALAPACK=/mpi3/C2/scalapack-2.0.2
    
XDG_DATA_DIRS=/usr/share/ubuntu:/usr/share/gnome:/usr/local/share/:/usr/share/
    
DBUS_SESSION_BUS_ADDRESS=unix:abstract=/tmp/dbus-fZ7oZRAmPU,guid=c7e5e52cb8a7e60c35234fd20000003c
    MPI_HOME=/mpi3/C2/mpich2-1.5
    LESSOPEN=| /usr/bin/lesspipe %s
    LACPACK=/mpi3/C2/gfortran64_mp
    OMP_NUM_THREADS=2
    HYDRA_LAUNCHER_EXEC=/usr/bin/netkit-rsh
    JPK_MPICH2=/mpi3/C2/mpich2-1.5
    PARDISO=/mpi3/C2/pardiso
    PARDISOLICMESSAGE=1
    JPK_VER=C2
    XDG_CURRENT_DESKTOP=Unity
    LESSCLOSE=/usr/bin/lesspipe %s %s
    LC_TIME=fi_FI.UTF-8
    JPK_HYPRE_S=/mpi3/S2/hypre-2.8.0b
    JPK_MPICH2_S=/mpi3/S2/mpich2-1.5
    COLORTERM=gnome-terminal
    XAUTHORITY=/home/joni/.Xauthority
    LC_NAME=fi_FI.UTF-8
    OLDPWD=/home/joni
    _=/mpi3/C2/mpich2-1.5/bin/mpiexec

  Hydra internal environment:
  ---------------------------
    MPICH_ENABLE_CKPOINT=1
    GFORTRAN_UNBUFFERED_PRECONNECTED=y


    Proxy information:
    *********************
      [1] proxy: mpi1 (1 cores)
      Exec list: /mpi3/C2/elmer_6003/bin/ElmerSolver_mpi (3 processes);


==================================================================================================

[mpiexec@mpi1] Timeout set to -1 (-1 means infinite)
[mpiexec@mpi1] Got a control port string of mpi1:7000

Proxy launch args: /mpi3/C2/mpich2-1.5/bin/hydra_pmi_proxy
--control-port mpi1:7000 --debug --rmk user --launcher rsh
--launcher-exec /usr/bin/netkit-rsh --demux poll --pgid 0 --retries
100 --usize -2 --proxy-id

Arguments being passed to proxy 0:
--version 1.5 --iface-ip-env-name MPICH_INTERFACE_HOSTNAME --hostname
mpi1 --global-core-map 0,1,1 --pmi-id-map 0,0 --global-process-count 3
--auto-cleanup 1 --pmi-kvsname kvs_30779_0 --pmi-process-mapping
(vector,(0,1,1)) --ckpointlib blcr --ckpoint-prefix
/mpi3/chekpoint/default.chk --ckpoint-num -1 --global-inherited-env
105 'MUMPS=/mpi3/S2/MUMPS_4.10.0' 'LC_PAPER=fi_FI.UTF-8'
'LC_ADDRESS=fi_FI.UTF-8' 'SSH_AGENT_PID=3750'
'LC_MONETARY=fi_FI.UTF-8' 'MUMPS_I=/mpi3/C2/MUMPS_4.10.0'
'GPG_AGENT_INFO=/tmp/keyring-qtrpvR/gpg:0:1' 'TERM=xterm'
'SHELL=/bin/bash'
'XDG_SESSION_COOKIE=6d6390cb56a32b6678c10da600000412-1358089517.620726-1634397259'
'JPK_NETGEN=/mpi3/C2/netgen_623' 'JPK_VER_S=S2'
'HYDRA_CKPOINTLIB=blcr' 'HYDRA_CKPOINT_INTERVAL=10'
'WINDOWID=50331655' 'LC_NUMERIC=fi_FI.UTF-8'
'HYDRA_CKPOINT_PREFIX=/mpi3/chekpoint/default.chk'
'GNOME_KEYRING_CONTROL=/tmp/keyring-qtrpvR'
'JPK_ELMER=/mpi3/C2/elmer_6003' 'PARDISO_LIC_PATH=/mpi3/C2/pardiso'
'METIS_INCLUDE_DIR=/mpi3/C2/ParMetis-3.2.0'
'JPK_NETGEN_S=/mpi3/S2/netgen_623' 'USER=joni'
'LS_COLORS=rs=0:di=01;34:ln=01;36:mh=00:pi=40;33:so=01;35:do=01;35:bd=40;33;01:cd=40;33;01:or=40;31;01:su=37;41:sg=30;43:ca=30;41:tw=30;42:ow=34;42:st=37;44:ex=01;32:*.tar=01;31:*.tgz=01;31:*.arj=01;31:*.taz=01;31:*.lzh=01;31:*.lzma=01;31:*.tlz=01;31:*.txz=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.dz=01;31:*.gz=01;31:*.lz=01;31:*.xz=01;31:*.bz2=01;31:*.bz=01;31:*.tbz=01;31:*.tbz2=01;31:*.tz=01;31:*.deb=01;31:*.rpm=01;31:*.jar=01;31:*.war=01;31:*.ear=01;31:*.sar=01;31:*.rar=01;31:*.ace=01;31:*.zoo=01;31:*.cpio=01;31:*.7z=01;31:*.rz=01;31:*.jpg=01;35:*.jpeg=01;35:*.gif=01;35:*.bmp=01;35:*.pbm=01;35:*.pgm=01;35:*.ppm=01;35:*.tga=01;35:*.xbm=01;35:*.xpm=01;35:*.tif=01;35:*.tiff=01;35:*.png=01;35:*.svg=01;35:*.svgz=01;35:*.mng=01;35:*.pcx=01;35:*.mov=01;35:*.mpg=01;35:*.mpeg=01;35:*.m2v=01;35:*.mkv=01;35:*.webm=01;35:*.ogm=01;35:*.mp4=01;35:*.m4v=01;35:*.mp4v=01;35:*.vob=01;35:*.qt=01;35:*.nuv=01;35:*.wmv=01;35:*.asf=01;35:*.rm=01;35:*.rmvb=01;35:*.flc=01;35:*.avi=01;35:*.fli=01;35:*.flv=01;35:*.gl=01;35:*.dl=01;35:*.xcf=01;35:*.xwd=01;35:*.yuv=01;35:*.cgm=01;35:*.emf=01;35:*.axv=01;35:*.anx=01;35:*.ogv=01;35:*.ogx=01;35:*.aac=00;36:*.au=00;36:*.flac=00;36:*.mid=00;36:*.midi=00;36:*.mka=00;36:*.mp3=00;36:*.mpc=00;36:*.ogg=00;36:*.ra=00;36:*.wav=00;36:*.axa=00;36:*.oga=00;36:*.spx=00;36:*.xspf=00;36:'
'LD_LIBRARY_PATH=/mpi3/C2/mpich2-1.5/lib:/mpi3/C2/mpich2-1.5/bin:/mpi3/C2/ParMetis-3.2.0:/mpi3/C2/hypre-2.8.0b/lib:/mpi3/C2/scotch_6.0.0/lib:/mpi3/S2/MUMPS_4.10.0/lib:/mpi3/C2/gfortran64_mp/lib:/mpi3/C2/scalapack-2.0.2/lib:/mpi3/C2/hypre-2.8.0b/lib:/mpi3/C2/pardiso:/mpi3/C2/ParMetis-3.2.0:/mpi3/C2/hdf5-1.8.10/lib:/mpi3/C2/vtk-5.8.0/lib/vtk-5.8:/mpi3/C2/elmer_6003/lib::/mpi3/C2/netgen_623/lib:/usr/lib/'
'LC_TELEPHONE=fi_FI.UTF-8'
'XDG_SESSION_PATH=/org/freedesktop/DisplayManager/Session0'
'JPK_OCC=/usr/include/oce'
'XDG_SEAT_PATH=/org/freedesktop/DisplayManager/Seat0'
'HYDRA_HOST_FILE=/mpi4/hosts'
'SSH_AUTH_SOCK=/tmp/ssh-PBZWvXss3749/agent.3749'
'SCOTCHDIR=/mpi3/C2/scotch_6.0.0' 'HYDRA_LAUNCHER=rsh'
'SESSION_MANAGER=local/mpi1:@/tmp/.ICE-unix/3416,unix/mpi1:/tmp/.ICE-unix/3416'
'DEFAULTS_PATH=/usr/share/gconf/ubuntu.default.path'
'ELMER_HOME=/mpi3/C2/elmer_6003' 'BLACS=/mpi3/C2/scalapack-2.0.2'
'BLAS32=/mpi3/C2/gfortran64_mp' 'METIS_DIR='
'MPI_LIBS=-L/mpi3/C2/mpich2-1.5/lib -lmpich -lmpichf90 -lmpe -lopa
-lmpe' 'XDG_CONFIG_DIRS=/etc/xdg/xdg-ubuntu:/etc/xdg'
'JPK_MPI_DIR=/mpi3' 'JPK_HDF5_S=/mpi3/S2/hdf5-1.8.10'
'MPIEXEC_PORT_RANGE=7000:7100'
'PATH=/usr/lib/lightdm/lightdm:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/mpi3/C2/mpich2-1.5/bin:/mpi3/C2/blcr-0.8.5_b4://mpi3/C2/cmake-2.8.10.2/bin:/mpi3/C2/elmer_6003/bin:/mpi3/C2/elmer_6003/lib:/mpi3/C2/ParaView-3.14.1-Linux-64bit'
'DESKTOP_SESSION=ubuntu' 'BLAS=/mpi3/C2/gfortran64_mp'
'METIS_LIBDIR=/mpi3/C2/ParMetis-3.2.0'
'CMAKE_COMMAND=//mpi3/C2/cmake-2.8.10.2/bin'
'QT_QMAKE_EXECUTABLE=/usr/bin/qmake-qt4'
'LC_IDENTIFICATION=fi_FI.UTF-8'
'JPK_SCOTCHDIR_S=/mpi3/S2/scotch_6.0.0_esmumps'
'JPK_LMETISDIR_S=/mpi3/S2/ParMetis-3.2.0' 'PWD=/mpi3/palkki6/ajo1'
'NETGENDIR=/mpi3/C2/netgen_623/bin' 'EDITOR=nano'
'JPK_LMETISDIR=/mpi3/C2/ParMetis-3.2.0' 'GNOME_KEYRING_PID=3405'
'LANG=fi_FI.UTF-8'
'MANDATORY_PATH=/usr/share/gconf/ubuntu.mandatory.path'
'LC_MEASUREMENT=fi_FI.UTF-8' 'JPK_HDF5=/mpi3/C2/hdf5-1.8.10'
'UBUNTU_MENUPROXY=libappmenu.so' 'COMPIZ_CONFIG_PROFILE=ubuntu'
'ELMER_POST_HOME=/mpi3/C2/elmer_6003/bin' 'JPK_INS=/mpi3/C2'
'ELMER_LIB=/mpi3/C2/elmer_6003/share/elmersolver/lib'
'HYDRA_PROXY_RETRY_COUNT=100' 'GDMSESSION=ubuntu'
'JPK_ELMER_S=/mpi3/S2/elmerfem_6003'
'JPK_LMETISDIR32=/mpi3/C2/ParMetis-3.2.0' 'JPK_BUI=/mpi3/S2'
'VTK_INCLUDEPATH=/mpi3/C2/vtk-5.8.0/include' 'SHLVL=1'
'HOME=/home/joni' 'LANGUAGE=fi:en'
'ELMERGUI_HOME=/mpi3/C2/elmer_6003/bin'
'GNOME_DESKTOP_SESSION_ID=this-is-deprecated' 'MKL_SERIAL=YES'
'LOGNAME=joni' 'HYPRE=/mpi3/C2/hypre-2.8.0b' 'JPK_JOBS=4'
'JPK_VTK_DIR=/mpi3/C2/vtk-5.8.0' 'SCALAPACK=/mpi3/C2/scalapack-2.0.2'
'XDG_DATA_DIRS=/usr/share/ubuntu:/usr/share/gnome:/usr/local/share/:/usr/share/'
'DBUS_SESSION_BUS_ADDRESS=unix:abstract=/tmp/dbus-fZ7oZRAmPU,guid=c7e5e52cb8a7e60c35234fd20000003c'
'MPI_HOME=/mpi3/C2/mpich2-1.5' 'LESSOPEN=| /usr/bin/lesspipe %s'
'LACPACK=/mpi3/C2/gfortran64_mp' 'OMP_NUM_THREADS=2'
'HYDRA_LAUNCHER_EXEC=/usr/bin/netkit-rsh'
'JPK_MPICH2=/mpi3/C2/mpich2-1.5' 'PARDISO=/mpi3/C2/pardiso'
'PARDISOLICMESSAGE=1' 'JPK_VER=C2' 'XDG_CURRENT_DESKTOP=Unity'
'LESSCLOSE=/usr/bin/lesspipe %s %s' 'LC_TIME=fi_FI.UTF-8'
'JPK_HYPRE_S=/mpi3/S2/hypre-2.8.0b' 'JPK_MPICH2_S=/mpi3/S2/mpich2-1.5'
'COLORTERM=gnome-terminal' 'XAUTHORITY=/home/joni/.Xauthority'
'LC_NAME=fi_FI.UTF-8' 'OLDPWD=/home/joni'
'_=/mpi3/C2/mpich2-1.5/bin/mpiexec' --global-user-env 0
--global-system-env 2 'MPICH_ENABLE_CKPOINT=1'
'GFORTRAN_UNBUFFERED_PRECONNECTED=y' --proxy-core-count 1 --exec
--exec-appnum 0 --exec-proc-count 3 --exec-local-env 0 --exec-wdir
/mpi3/palkki6/ajo1 --exec-args 1
/mpi3/C2/elmer_6003/bin/ElmerSolver_mpi

[mpiexec@mpi1] Launch arguments:
/mpi3/C2/mpich2-1.5/bin/hydra_pmi_proxy --control-port mpi1:7000
--debug --rmk user --launcher rsh --launcher-exec /usr/bin/netkit-rsh
--demux poll --pgid 0 --retries 100 --usize -2 --proxy-id 0
ELMER SOLVER (v 7.0) STARTED AT: 2013/01/13 17:18:55
[proxy:0:0@mpi1] got pmi command (from 15): initack
pmiid=0
[proxy:0:0@mpi1] PMI response: cmd=initack
cmd=set size=3
cmd=set rank=0
cmd=set debug=1
[proxy:0:0@mpi1] got pmi command (from 15): init
pmi_version=1 pmi_subversion=1
[proxy:0:0@mpi1] PMI response: cmd=response_to_init pmi_version=1
pmi_subversion=1 rc=0
[proxy:0:0@mpi1] got pmi command (from 15): get_maxes

[proxy:0:0@mpi1] PMI response: cmd=maxes kvsname_max=256 keylen_max=64
vallen_max=1024
[proxy:0:0@mpi1] got pmi command (from 15): get_appnum

[proxy:0:0@mpi1] PMI response: cmd=appnum appnum=0
[proxy:0:0@mpi1] got pmi command (from 15): get_my_kvsname

[proxy:0:0@mpi1] PMI response: cmd=my_kvsname kvsname=kvs_30779_0
[proxy:0:0@mpi1] got pmi command (from 15): get_my_kvsname

[proxy:0:0@mpi1] PMI response: cmd=my_kvsname kvsname=kvs_30779_0
[proxy:0:0@mpi1] got pmi command (from 15): get
kvsname=kvs_30779_0 key=PMI_process_mapping
[proxy:0:0@mpi1] PMI response: cmd=get_result rc=0 msg=success
value=(vector,(0,1,1))
[proxy:0:0@mpi1] got pmi command (from 15): put
kvsname=kvs_30779_0 key=sharedFilename[0] value=/dev/shm/mpich_shar_tmpS76lYv
[proxy:0:0@mpi1] we don't understand this command put; forwarding upstream
[mpiexec@mpi1] [pgid: 0] got PMI command: cmd=put kvsname=kvs_30779_0
key=sharedFilename[0] value=/dev/shm/mpich_shar_tmpS76lYv
[mpiexec@mpi1] PMI response to fd 6 pid 15: cmd=put_result rc=0 msg=success
[proxy:0:0@mpi1] we don't understand the response put_result;
forwarding downstream
[proxy:0:0@mpi1] got pmi command (from 15): barrier_in

ELMER SOLVER (v 7.0) STARTED AT: 2013/01/13 17:18:55
ELMER SOLVER (v 7.0) STARTED AT: 2013/01/13 17:18:55
[proxy:0:0@mpi1] got pmi command (from 17): initack
pmiid=1
[proxy:0:0@mpi1] PMI response: cmd=initack
cmd=set size=3
cmd=set rank=1
cmd=set debug=1
[proxy:0:0@mpi1] got pmi command (from 17): init
pmi_version=1 pmi_subversion=1
[proxy:0:0@mpi1] PMI response: cmd=response_to_init pmi_version=1
pmi_subversion=1 rc=0
[proxy:0:0@mpi1] got pmi command (from 17): get_maxes

[proxy:0:0@mpi1] PMI response: cmd=maxes kvsname_max=256 keylen_max=64
vallen_max=1024
[proxy:0:0@mpi1] got pmi command (from 17): get_appnum

[proxy:0:0@mpi1] PMI response: cmd=appnum appnum=0
[proxy:0:0@mpi1] got pmi command (from 17): get_my_kvsname

[proxy:0:0@mpi1] PMI response: cmd=my_kvsname kvsname=kvs_30779_0
[proxy:0:0@mpi1] got pmi command (from 17): get_my_kvsname

[proxy:0:0@mpi1] PMI response: cmd=my_kvsname kvsname=kvs_30779_0
[proxy:0:0@mpi1] got pmi command (from 17): get
kvsname=kvs_30779_0 key=PMI_process_mapping
[proxy:0:0@mpi1] PMI response: cmd=get_result rc=0 msg=success
value=(vector,(0,1,1))
[proxy:0:0@mpi1] got pmi command (from 18): initack
pmiid=2
[proxy:0:0@mpi1] PMI response: cmd=initack
cmd=set size=3
cmd=set rank=2
cmd=set debug=1
[proxy:0:0@mpi1] got pmi command (from 17): barrier_in

[proxy:0:0@mpi1] got pmi command (from 18): init
pmi_version=1 pmi_subversion=1
[proxy:0:0@mpi1] PMI response: cmd=response_to_init pmi_version=1
pmi_subversion=1 rc=0
[proxy:0:0@mpi1] got pmi command (from 18): get_maxes

[proxy:0:0@mpi1] PMI response: cmd=maxes kvsname_max=256 keylen_max=64
vallen_max=1024
[proxy:0:0@mpi1] got pmi command (from 18): get_appnum

[proxy:0:0@mpi1] PMI response: cmd=appnum appnum=0
[proxy:0:0@mpi1] got pmi command (from 18): get_my_kvsname

[proxy:0:0@mpi1] PMI response: cmd=my_kvsname kvsname=kvs_30779_0
[proxy:0:0@mpi1] got pmi command (from 18): get_my_kvsname

[proxy:0:0@mpi1] PMI response: cmd=my_kvsname kvsname=kvs_30779_0
[proxy:0:0@mpi1] got pmi command (from 18): get
kvsname=kvs_30779_0 key=PMI_process_mapping
[proxy:0:0@mpi1] PMI response: cmd=get_result rc=0 msg=success
value=(vector,(0,1,1))
[proxy:0:0@mpi1] got pmi command (from 18): barrier_in

[proxy:0:0@mpi1] forwarding command (cmd=barrier_in) upstream
[mpiexec@mpi1] [pgid: 0] got PMI command: cmd=barrier_in
[mpiexec@mpi1] PMI response to fd 6 pid 18: cmd=barrier_out
[proxy:0:0@mpi1] PMI response: cmd=barrier_out
[proxy:0:0@mpi1] PMI response: cmd=barrier_out
[proxy:0:0@mpi1] PMI response: cmd=barrier_out
[proxy:0:0@mpi1] got pmi command (from 17): get
kvsname=kvs_30779_0 key=sharedFilename[0]
[proxy:0:0@mpi1] forwarding command (cmd=get kvsname=kvs_30779_0
key=sharedFilename[0]) upstream
[mpiexec@mpi1] [pgid: 0] got PMI command: cmd=get kvsname=kvs_30779_0
key=sharedFilename[0]
[mpiexec@mpi1] PMI response to fd 6 pid 17: cmd=get_result rc=0
msg=success value=/dev/shm/mpich_shar_tmpS76lYv
[proxy:0:0@mpi1] got pmi command (from 18): get
kvsname=kvs_30779_0 key=sharedFilename[0]
[proxy:0:0@mpi1] forwarding command (cmd=get kvsname=kvs_30779_0
key=sharedFilename[0]) upstream
[mpiexec@mpi1] [pgid: 0] got PMI command: cmd=get kvsname=kvs_30779_0
key=sharedFilename[0]
[mpiexec@mpi1] PMI response to fd 6 pid 18: cmd=get_result rc=0
msg=success value=/dev/shm/mpich_shar_tmpS76lYv
[proxy:0:0@mpi1] we don't understand the response get_result;
forwarding downstream
[proxy:0:0@mpi1] we don't understand the response get_result;
forwarding downstream
[proxy:0:0@mpi1] got pmi command (from 15): put
kvsname=kvs_30779_0 key=P0-businesscard
value=description#mpi1$port#47496$ifname#192.168.0.41$
[proxy:0:0@mpi1] we don't understand this command put; forwarding upstream
[mpiexec@mpi1] [pgid: 0] got PMI command: cmd=put kvsname=kvs_30779_0
key=P0-businesscard
value=description#mpi1$port#47496$ifname#192.168.0.41$
[mpiexec@mpi1] PMI response to fd 6 pid 15: cmd=put_result rc=0 msg=success
[proxy:0:0@mpi1] got pmi command (from 17): put
kvsname=kvs_30779_0 key=P1-businesscard
value=description#mpi1$port#44638$ifname#192.168.0.41$
[proxy:0:0@mpi1] we don't understand this command put; forwarding upstream
[proxy:0:0@mpi1] got pmi command (from 18): put
kvsname=kvs_30779_0 key=P2-businesscard
value=description#mpi1$port#58398$ifname#192.168.0.41$
[proxy:0:0@mpi1] we don't understand this command put; forwarding upstream
[mpiexec@mpi1] [pgid: 0] got PMI command: cmd=put kvsname=kvs_30779_0
key=P1-businesscard
value=description#mpi1$port#44638$ifname#192.168.0.41$
[mpiexec@mpi1] PMI response to fd 6 pid 17: cmd=put_result rc=0 msg=success
[proxy:0:0@mpi1] we don't understand the response put_result;
forwarding downstream
[mpiexec@mpi1] [pgid: 0] got PMI command: cmd=put kvsname=kvs_30779_0
key=P2-businesscard
value=description#mpi1$port#58398$ifname#192.168.0.41$
[mpiexec@mpi1] PMI response to fd 6 pid 18: cmd=put_result rc=0 msg=success
[proxy:0:0@mpi1] we don't understand the response put_result;
forwarding downstream
[proxy:0:0@mpi1] got pmi command (from 15): barrier_in

[proxy:0:0@mpi1] got pmi command (from 17): barrier_in

[proxy:0:0@mpi1] we don't understand the response put_result;
forwarding downstream
[proxy:0:0@mpi1] got pmi command (from 18): barrier_in

[proxy:0:0@mpi1] forwarding command (cmd=barrier_in) upstream
[mpiexec@mpi1] [pgid: 0] got PMI command: cmd=barrier_in
[mpiexec@mpi1] PMI response to fd 6 pid 18: cmd=barrier_out
[proxy:0:0@mpi1] PMI response: cmd=barrier_out
[proxy:0:0@mpi1] PMI response: cmd=barrier_out
[proxy:0:0@mpi1] PMI response: cmd=barrier_out

===================================================================================
=   BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
=   EXIT CODE: 1
=   CLEANING UP REMAINING PROCESSES
=   YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================

======================
joni@mpi1:/mpi3/S2/blcr-0.8.4$ sudo make distclean
[sudo] password for joni:
make: *** Kohteen "distclean" tuottamiseen ei ole sääntöä. Seis.
joni@mpi1:/mpi3/S2/blcr-0.8.4$ sudo make clean
make: *** Kohteen "clean" tuottamiseen ei ole sääntöä. Seis.
joni@mpi1:/mpi3/S2/blcr-0.8.4$

-- 
You received this bug notification because you are a member of Ubuntu
Bugs, which is subscribed to Ubuntu.
https://bugs.launchpad.net/bugs/804943

Title:
  blcr kernel module failed to build with kernel 3.0 : configure: error:
  --with-linux argument '3.0-x' is neither a kernel version string nor a
  full path

To manage notifications about this bug go to:
https://bugs.launchpad.net/ubuntu/+source/blcr/+bug/804943/+subscriptions

-- 
ubuntu-bugs mailing list
ubuntu-bugs@lists.ubuntu.com
https://lists.ubuntu.com/mailman/listinfo/ubuntu-bugs

Reply via email to