[mvapich-discuss] MPI IO, MPI_Read does not read in correct buffer from previous MPI_Write
Ryan Crocker
rcrocker at uvm.edu
Thu Mar 28 14:54:36 EDT 2013
Hi all,
I've written some binary files whose output is compatible with EnSight Gold formatted vector field files. I can read them in with ParaView and they look fine, but what I'd like to do is recover the raw data. When I read the files back, the header entries all look fine, but the data vectors come back filled with the same repeated number. I'm pretty sure it doesn't matter, but I'm writing the files on a few hundred cores and reading them back on one. I posted this on the MPICH forum too, and I see the same issue with MVAPICH.
I've posted the write and then the subsequent read below.
write snippet:
call MPI_FILE_OPEN(comm,file,IOR(MPI_MODE_WRONLY,MPI_MODE_CREATE),mpi_info,iunit,ierr)
! Write header (only root)
if (irank.eq.iroot) then
   buffer = trim(adjustl(name))
   size = 80
   call MPI_FILE_WRITE(iunit,buffer,size,MPI_CHARACTER,status,ierr)
   buffer = 'part'
   size = 80
   call MPI_FILE_WRITE(iunit,buffer,size,MPI_CHARACTER,status,ierr)
   ibuffer = 1
   size = 1
   call MPI_FILE_WRITE(iunit,ibuffer,size,MPI_INTEGER,status,ierr)
   buffer = 'hexa8'
   size = 80
   call MPI_FILE_WRITE(iunit,buffer,size,MPI_CHARACTER,status,ierr)
end if
! Write the data
disp = 3*80+4+0*ncells_hexa*4
call MPI_FILE_SET_VIEW(iunit,disp,MPI_REAL_SP,fileview_hexa,"native",mpi_info,ierr)
call MPI_FILE_WRITE_ALL(iunit,buffer3_hexa(:,1),ncells_hexa_,MPI_REAL_SP,status,ierr)
disp = 3*80+4+1*ncells_hexa*4
call MPI_FILE_SET_VIEW(iunit,disp,MPI_REAL_SP,fileview_hexa,"native",mpi_info,ierr)
call MPI_FILE_WRITE_ALL(iunit,buffer3_hexa(:,2),ncells_hexa_,MPI_REAL_SP,status,ierr)
disp = 3*80+4+2*ncells_hexa*4
call MPI_FILE_SET_VIEW(iunit,disp,MPI_REAL_SP,fileview_hexa,"native",mpi_info,ierr)
call MPI_FILE_WRITE_ALL(iunit,buffer3_hexa(:,3),ncells_hexa_,MPI_REAL_SP,status,ierr)
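The snippet above doesn't show how fileview_hexa is built, so for context here is a minimal sketch of one common way to set up such a view; cell_index_ is a hypothetical array holding each rank's 1-based global cell indices, not a name from the actual code.

! Hypothetical sketch only: build a per-rank view over one component block,
! assuming each rank owns ncells_hexa_ cells with 1-based global indices in cell_index_(:)
call MPI_TYPE_CREATE_INDEXED_BLOCK(ncells_hexa_,1,cell_index_-1,MPI_REAL_SP,fileview_hexa,ierr)
call MPI_TYPE_COMMIT(fileview_hexa,ierr)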
read snippet:
call parallel_sum(ncells_hexa_,ncells)
allocate(buffer3(ncells,3))
openfile=trim(workdir)//'/'//'V/V.000002'
call MPI_FILE_OPEN(comm,openfile,MPI_MODE_RDONLY,mpi_info,iunit,ierr)
! Read header
bsize = 80
call MPI_FILE_READ(iunit,cbuffer,bsize,MPI_CHARACTER,status,ierr)
print*,trim(cbuffer)
bsize = 80
call MPI_FILE_READ(iunit,cbuffer,bsize,MPI_CHARACTER,status,ierr)
print*,trim(cbuffer)
bsize = 1
call MPI_FILE_READ(iunit,ibuffer,bsize,MPI_INTEGER,status,ierr)
print*,ibuffer
bsize = 80
call MPI_FILE_READ(iunit,cbuffer,bsize,MPI_CHARACTER,status,ierr)
print*,trim(cbuffer),ncells
! Read the data
disp = 3*80+4+0*ncells*4
call MPI_FILE_SET_VIEW(iunit,disp,MPI_REAL_SP,fileview_hexa,"native",mpi_info,ierr)
call MPI_FILE_READ_ALL(iunit,buffer3(:,1),ncells,MPI_REAL_SP,status,ierr)
disp = 3*80+4+1*ncells*4
call MPI_FILE_SET_VIEW(iunit,disp,MPI_REAL_SP,fileview_hexa,"native",mpi_info,ierr)
call MPI_FILE_READ_ALL(iunit,buffer3(:,2),ncells,MPI_REAL_SP,status,ierr)
disp = 3*80+4+2*ncells*4
call MPI_FILE_SET_VIEW(iunit,disp,MPI_REAL_SP,fileview_hexa,"native",mpi_info,ierr)
call MPI_FILE_READ_ALL(iunit,buffer3(:,3),ncells,MPI_REAL_SP,status,ierr)
! Close the file
call MPI_FILE_CLOSE(iunit,ierr)
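For what it's worth, on a single rank the same data should also come back using explicit byte offsets and no file view; below is a minimal cross-check sketch along those lines (it would go before the MPI_FILE_CLOSE above, and the MPI_OFFSET_KIND offset plus the icomp loop are illustrative assumptions, not lifted from the actual code).

! Cross-check sketch: read each component at its byte offset through the default view,
! assuming 4-byte MPI_REAL_SP data behind the 3*80-character + 4-byte-integer header
integer(kind=MPI_OFFSET_KIND) :: offset
integer :: icomp
do icomp = 1,3
   offset = 3*80 + 4 + int(icomp-1,MPI_OFFSET_KIND)*int(ncells,MPI_OFFSET_KIND)*4
   call MPI_FILE_READ_AT(iunit,offset,buffer3(:,icomp),ncells,MPI_REAL_SP,status,ierr)
end do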
Thanks for the help,
Ryan Crocker
University of Vermont, School of Engineering
Mechanical Engineering Department