Skip to content

Cannot install Rmpi/pbdMPI_0.1-7.tar.gz in R-3.0.0 on Cray XE6 HERMIT (R-sig-hpc Digest, Vol 56, Issue 7) R-sig-hpc Digest, Vol 56, Issue 11

5 messages · Pragnesh Patel, Martin Ivanov

#
Hello, Pragnesh,

I did what You suggested, pbdMPI compiles ok, but at loading into R 
there is an error message. Here is detailed output:

ipmiva at eslogin001:/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/build/source> R 
CMD INSTALL 
--configure-args="--with-mpi=/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47" 
--no-test-load ../../pbdMPI_0.1-7.tar.gz
* installing to library 
‘/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/lib64/R/library’
* installing *source* package ‘pbdMPI’ ...
checking for gcc... gcc -std=gnu99
checking whether the C compiler works... yes
checking for C compiler default output file name... a.out
checking for suffix of executables...
checking whether we are cross compiling... no
checking for suffix of object files... o
checking whether we are using the GNU C compiler... yes
checking whether gcc -std=gnu99 accepts -g... yes
checking for gcc -std=gnu99 option to accept ISO C89... none needed
checking for mpirun... F
checking for mpiexec... F
checking for orterun... F
checking for sed... /usr/bin/sed
checking for mpicc... F
checking for ompi_info... F
checking for mpich2version... F
Trying to find mpi.h ...
Found in /opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include
Trying to find libmpi.so or libmpich.a ...
Found libmpich in /opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/lib
checking whether MPICH2 is declared... yes
checking whether MPICH2 is declared... (cached) yes
checking for openpty in -lutil... yes
checking for main in -lpthread... yes
 >>>> MPI executable is not in PATH ...
 >>>> Please export or setenv PATH ...

******************* Results of pbdMPI package configure *****************

 >> TMP_INC =
 >> TMP_LIB =
 >> MPI_ROOT = /opt/cray/mpt/5.6.4/gni/mpich2-gnu/47
 >> MPITYPE = MPICH2
 >> MPI_INCLUDE_PATH = /opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include
 >> MPI_LIBPATH = /opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/lib
 >> MPI_LIBS =  -lutil -lpthread
 >> MPI_DEFS = -DMPI2
 >> MPI_INCL2 =
 >> PKG_CPPFLAGS = -I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  
-DMPI2 -DMPICH2
 >> PKG_LIBS = -L/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/lib -lmpich -lmpl 
-lopa  -lutil -lpthread

*************************************************************************

configure: creating ./config.status
config.status: creating src/Makevars
configure: creating ./config.status
config.status: creating src/Makevars
config.status: creating R/zzz.r
** libs
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c comm_errors.c -o comm_errors.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c comm_sort_double.c -o 
comm_sort_double.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c comm_sort_integer.c -o 
comm_sort_integer.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c pkg_dl.c -o pkg_dl.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c pkg_tools.c -o pkg_tools.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c spmd.c -o spmd.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c spmd_allgather.c -o 
spmd_allgather.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c spmd_allgatherv.c -o 
spmd_allgatherv.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c spmd_allreduce.c -o 
spmd_allreduce.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c spmd_bcast.c -o spmd_bcast.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c spmd_communicator.c -o 
spmd_communicator.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c spmd_gather.c -o spmd_gather.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c spmd_gatherv.c -o spmd_gatherv.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c spmd_info.c -o spmd_info.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c spmd_recv.c -o spmd_recv.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c spmd_reduce.c -o spmd_reduce.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c spmd_scatter.c -o spmd_scatter.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c spmd_scatterv.c -o spmd_scatterv.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c spmd_send.c -o spmd_send.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c spmd_sendrecv.c -o spmd_sendrecv.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c spmd_sendrecv_replace.c -o 
spmd_sendrecv_replace.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c spmd_tool.c -o spmd_tool.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c spmd_utility.c -o spmd_utility.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c spmd_wait.c -o spmd_wait.o
gcc -std=gnu99 -shared -L/usr/local/lib64 -o pbdMPI.so comm_errors.o 
comm_sort_double.o comm_sort_integer.o pkg_dl.o pkg_tools.o spmd.o 
spmd_allgather.o spmd_allgatherv.o spmd_allreduce.o spmd_bcast.o 
spmd_communicator.o spmd_gather.o spmd_gatherv.o spmd_info.o spmd_recv.o 
spmd_reduce.o spmd_scatter.o spmd_scatterv.o spmd_send.o spmd_sendrecv.o 
spmd_sendrecv_replace.o spmd_tool.o spmd_utility.o spmd_wait.o 
-L/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/lib -lmpich -lmpl -lopa -lutil 
-lpthread 
-L/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/lib64/R/lib -lR
installing via 'install.libs.R' to 
/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/lib64/R/library/pbdMPI
** R
** data
*** moving datasets to lazyload DB
** demo
** inst
** preparing package for lazy loading
** help
*** installing help indices
   converting help for package ‘pbdMPI’
     finding HTML links ... done
     00_pbdMPI-package                       html
     11_d.control                            html
     aa_allgather-method                     html
     aa_allreduce-method                     html
     aa_bcast-method                         html
     aa_gather-method                        html
     aa_reduce-method                        html
     aa_scatter-method                       html
     ab_recv-method                          html
     ab_send-method                          html
     ac_irecv-method                         html
     ac_isend-method                         html
     ad_sendrecv-method                      html
     ad_sendrecv.replace-method              html
     cc_arrange.mpi.apts                     html
     cc_comm                                 html
     dd_info                                 html
     dd_prob                                 html
     dd_wait                                 html
     ww_get.jid                              html
     ww_print_cat                            html
     ww_seed                                 html
     xx_comm.sort                            html
     xx_comm_any_all                         html
     xx_comm_stop_warning                    html
     yy_api.apply                            html
     zz_internal                             html
     zz_spmd.internal                        html
** building package indices
** installing vignettes
    ‘pbdMPI-guide.Rnw’
* DONE (pbdMPI)
Making 'packages.html' ... done
ipmiva at eslogin001:/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/build/source> R

R version 3.0.0 (2013-04-03) -- "Masked Marvel"
Copyright (C) 2013 The R Foundation for Statistical Computing
Platform: x86_64-unknown-linux-gnu (64-bit)

R is free software and comes with ABSOLUTELY NO WARRANTY.
You are welcome to redistribute it under certain conditions.
Type 'license()' or 'licence()' for distribution details.

   Natural language support but running in an English locale

R is a collaborative project with many contributors.
Type 'contributors()' for more information and
'citation()' on how to cite R or R packages in publications.

Type 'demo()' for some demos, 'help()' for on-line help, or
'help.start()' for an HTML browser interface to help.
Type 'q()' to quit R.

 > library(pbdMPI)
Loading required package: rlecuyer
[Thu May 16 08:58:03 2013] [unknown] Fatal error in MPI_Init: Other MPI 
error, error stack:
MPIR_Init_thread(428):
MPID_Init(184).......: channel initialization failed
MPID_Init(538).......:  PMI2 init failed: 1



With regard to Your access to HERMIT, it is only possible from my IP and 
I think it is not a good idea.


Thank you very much for Your responsiveness.

Best regards,
Martin
On 05/15/2013 08:41 PM, Pragnesh Patel wrote:

  
    
#
Hi Martin,

Below error make sense, since you are running R on login node. You
need to submit job(using qsub command) from login node and then run R
on compute node. Let me know, if you don't know how to submit job.

==============
==============

Thanks
Pragnesh
NICS
On 5/16/13, Martin Ivanov <martin.ivanov at ifg.uni-tuebingen.de> wrote:
#
Dear Pragnesh,

I submitted job in the interactive mode and the same error still 
appears. This is the exact output:


ipmiva at eslogin001:/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/batchJobs> qsub 
-I -l mppwidth=1,mppnppn=1,mppmem=20M,walltime=00:05:00
qsub: waiting for job 607644.sdb to start
qsub: job 607644.sdb ready

Directory: /zhome/academic/HLRS/ipm/ipmiva
Thu May 16 14:45:48 CEST 2013
ipmiva at nid03524:~> R

R version 3.0.0 (2013-04-03) -- "Masked Marvel"
Copyright (C) 2013 The R Foundation for Statistical Computing
Platform: x86_64-unknown-linux-gnu (64-bit)

R is free software and comes with ABSOLUTELY NO WARRANTY.
You are welcome to redistribute it under certain conditions.
Type 'license()' or 'licence()' for distribution details.

   Natural language support but running in an English locale

R is a collaborative project with many contributors.
Type 'contributors()' for more information and
'citation()' on how to cite R or R packages in publications.

Type 'demo()' for some demos, 'help()' for on-line help, or
'help.start()' for an HTML browser interface to help.
Type 'q()' to quit R.

 > library(pbdMPI)
Loading required package: rlecuyer
[Thu May 16 14:46:09 2013] [c1-0c1s2n0] Fatal error in MPI_Init: Other 
MPI error, error stack:
MPIR_Init_thread(428):
MPID_Init(184).......: channel initialization failed
MPID_Init(538).......:  PMI2 init failed: 1
ipmiva at nid03524:~>


Best regards,
Martin
On 05/16/2013 02:32 PM, Pragnesh Patel wrote:

  
    
#
Hi Martin,

Once you submit job, you need to run R using "aprun" command.

e.g aprun -n numberofprocessors Rscript example.R ("aprun -n 2 Rscript
example.R")


Thanks
Pragnesh
NICS
On 5/16/13, Martin Ivanov <martin.ivanov at ifg.uni-tuebingen.de> wrote:
#
Hello, Pragnesh, thank You very much for Your responsiveness.

I followed Your advice and used pbdMPI with aprun, everything was ok. 
Now I have a problem with
pbdNCDF4 with the --enable-parallel flag. I suppose it comes from the 
fact that netcdf-4.1.1 is no longer supported, and I am forced to use 4.2.
Anyway, I am posting here the output:

This is how I submitted the job for compiling pbdNCDF4:

#!/bin/bash
#PBS -l nodes=1:ppn=1
#PBS -l walltime=00:05:00
#PBS -l pmem=100kb

. /$HOME/.bashrc
module swap PrgEnv-cray PrgEnv-gnu

aprun -n 1 /univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/bin/R CMD 
INSTALL 
--configure-args="--with-nc-config=/opt/cray/netcdf/4.2.0/bin/nc-config 
--enable-parallel" 
/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/pbdNCDF4_0.1-1.tar.gz

and this is the output:

ipmiva at eslogin002:/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/test> cat 
testNodes.pbs.o608337
user specified nc-config is 
/opt/cray/netcdf-hdf5parallel/4.2.0/gnu/47/bin/nc-config
checking for nc-config... yes
Netcdf library version: netCDF 4.2
Netcdf library has version 4 interface present: yes
Netcdf library was compiled with C compiler: cc
configure: creating ./config.status
config.status: creating src/Makevars
configure: creating ./config.status
config.status: creating src/Makevars
config.status: creating R/zzz.R

****************** Results of ncdf4 package configure *******************

SPMD_CPPFLAGS   = -I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include -DMPI2 
-DMPICH2
SPMD_LDFLAGS    = -L/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/lib -lmpich 
-lmpl -lopa  -lutil -lpthread
NETCDF_HEADER   = /opt/cray/netcdf/4.2.0/gnu/46/include/netcdf_par.h
NETCDF_INCLUDE  = 0
NETCDF_PAR      = TRUE
NETCDF_PAR_OBJS = pncdf4.o
netCDF v4 CPP flags           = -I/opt/cray/netcdf/4.2.0/gnu/46/include 
-DgFortran -I/opt/cray/netcdf/4.2.0/gnu/46/include
netCDF v4 LD flags            = -L/opt/cray/netcdf/4.2.0/gnu/46/lib -lnetcdf
netCDF v4 runtime path        =

*************************************************************************

gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/netcdf/4.2.0/gnu/46/include -DgFortran 
-I/opt/cray/netcdf/4.2.0/gnu/46/include -DNETCDF_INCLUDE=0 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c ncdf2.c -o ncdf2.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/netcdf/4.2.0/gnu/46/include -DgFortran 
-I/opt/cray/netcdf/4.2.0/gnu/46/include -DNETCDF_INCLUDE=0 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c ncdf3.c -o ncdf3.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/netcdf/4.2.0/gnu/46/include -DgFortran 
-I/opt/cray/netcdf/4.2.0/gnu/46/include -DNETCDF_INCLUDE=0 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c ncdf.c -o ncdf.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/netcdf/4.2.0/gnu/46/include -DgFortran 
-I/opt/cray/netcdf/4.2.0/gnu/46/include -DNETCDF_INCLUDE=0 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c src_ncdf4.c -o src_ncdf4.o
gcc -std=gnu99 
-I/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/include -DNDEBUG 
-I/opt/cray/netcdf/4.2.0/gnu/46/include -DgFortran 
-I/opt/cray/netcdf/4.2.0/gnu/46/include -DNETCDF_INCLUDE=0 
-I/opt/cray/mpt/5.6.4/gni/mpich2-gnu/47/include  -DMPI2 -DMPICH2 
-I/usr/local/include    -fpic  -g -O2  -c pncdf4.c -o pncdf4.o
Application 2281804 exit codes: 1
Application 2281804 resources: utime ~3s, stime ~1s


ipmiva at eslogin002:/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/test> cat 
testNodes.pbs.e608228
* installing to library 
‘/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/lib64/R/library’
* installing *source* package ‘pbdNCDF4’ ...
** libs
pncdf4.c: In function 'R_nc4_open_par':
pncdf4.c:25:2: error: unknown type name 'MPI_Comm'
pncdf4.c:26:2: error: unknown type name 'MPI_Info'
pncdf4.c:40:2: warning: implicit declaration of function 'MPI_Comm_f2c' 
[-Wimplicit-function-declaration]
pncdf4.c:43:2: warning: implicit declaration of function 'MPI_Info_f2c' 
[-Wimplicit-function-declaration]
pncdf4.c:46:2: warning: implicit declaration of function 'nc_open_par' 
[-Wimplicit-function-declaration]
pncdf4.c: In function 'R_nc4_create_par':
pncdf4.c:57:2: error: unknown type name 'MPI_Comm'
pncdf4.c:58:2: error: unknown type name 'MPI_Info'
pncdf4.c:96:2: warning: implicit declaration of function 'nc_create_par' 
[-Wimplicit-function-declaration]
make: *** [pncdf4.o] Error 1
ERROR: compilation failed for package ‘pbdNCDF4’
* removing 
‘/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/lib64/R/library/pbdNCDF4’
* restoring previous 
‘/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/lib64/R/library/pbdNCDF4’



I also tried installing pbdNCDF4 without the option --enable-parallel. 
The compilation finished successfully, but
see what happens when I try to test pbdMPI with the demos:
My PBS script:
#PBS -l nodes=1:ppn=1
#PBS -l walltime=00:05:00
#PBS -l pmem=500kb

. /$HOME/.bashrc
module swap PrgEnv-cray PrgEnv-gnu

cd $PBS_O_WORKDIR

#echo "My machine will have the following nodes:"
#echo "-----------------------------------------"
#cat ${PBS_NODEFILE}
#echo "-----------------------------------------"

aprun -n 1 /univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/system/usr/bin/Rscript 
testNodes.R

The file testNodes.R contains the following:

library(pbdMPI)
source("/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/pbdNCDF4/demo/ncwrite_ser.r")
source("/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/pbdNCDF4/demo/ncread_ser.r")

ipmiva at eslogin002:/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/test> cat 
testNodes.pbs.e608281
Loading required package: methods
Loading required package: rlecuyer

   High Performance Computing Resource is available via
   ``http://www.nics.tennessee.edu/getting-an-allocation''.


  *** caught segfault ***
address (nil), cause 'memory not mapped'

Traceback:
  1: .Call("spmd_comm_rank", as.integer(comm), PACKAGE = "pbdMPI")
  2: comm.rank()
  3: eval(expr, envir, enclos)
  4: eval(ei, envir)
  5: withVisible(eval(ei, envir))
  6: 
source("/univ_2/ws3/ws/ipmiva-WRF_331_CORDEX-0/pbdNCDF4/demo/ncread_ser.r")
aborting ...


Best regards,
Martin
On 05/16/2013 07:10 PM, Pragnesh Patel wrote: