July 16, 2021, 13:30
Write .plt file without tecio
#1
New Member
Chunheng Zhao
Join Date: Jul 2021
Posts: 1
Rep Power: 0
Hi,
I am trying to write a binary .plt file in Fortran without tecio. Even with the help of the Data Format Guide, I am still confused about the header section and the connectivity section; the instructions at the end of the guide are hard to follow, so I am hoping to find some help here. My code is pasted below; it uses MPI-IO. I compared the output format against a .plt file produced by preplot: preplot declares the variables as REAL4 while mine are REAL8. Both files contain the right numbers, but my header is very different from the preplot version. Code:
subroutine MPIoutput
   ! assumes "use mpi" plus the grid/module variables (nx, ny, dim, ghost, rank,
   ! xx, yy, local_start, local_end, local_length, global_length, ua, va,
   ! phi1, phi2, phi3, con, p, rho, pp) from the host module
   integer :: i, j, filesize, Status(MPI_STATUS_SIZE), filetype, count
   integer :: nproc, cart_comm, ierr, fh, datatype
   integer(kind=MPI_OFFSET_KIND) :: offset
   integer, dimension(dim) :: start
   character :: zonep*100, version*9, para*100

   ! build the header strings
   version = '#!TDV112'//char(0)
   para = 'VARIABLES="x","y","u","v","phi1","phi2","phi3","con","p","rho","pp"'//char(0)
   write(zonep,'(A,I3,A,I3)') 'ZONE I=', nx+1, ' J=', ny+1

   do j = local_start(2), local_end(2)
      do i = local_start(1), local_end(1)
         xx(i,j) = (i+0.d0)/nx
         yy(i,j) = (j+0.d0)/ny
      enddo
   enddo

   start  = (/ghost, ghost/)
   offset = 0

   if (rank == 0) then
      call mpi_file_delete('out.dat', MPI_INFO_NULL, ierr)
   end if
   call mpi_barrier(mpi_comm_world, ierr)

   ! subarray type that strips the ghost layers from the local arrays
   call MPI_TYPE_CREATE_SUBARRAY(2, local_length+2*ghost, local_length, start, &
        MPI_ORDER_FORTRAN, MPI_DOUBLE_PRECISION, datatype, ierr)
   call MPI_TYPE_COMMIT(datatype, ierr)
   ! subarray type that places the local block inside the global array in the file
   call MPI_TYPE_CREATE_SUBARRAY(2, global_length, local_length, local_start, &
        MPI_ORDER_FORTRAN, MPI_DOUBLE_PRECISION, filetype, ierr)
   call MPI_TYPE_COMMIT(filetype, ierr)

   call MPI_File_open(MPI_COMM_WORLD, 'out.dat', &
        MPI_MODE_WRONLY + MPI_MODE_CREATE, MPI_INFO_NULL, fh, ierr)

   ! write the header part (rank 0 only)
   if (rank == 0) then
      call MPI_File_seek(fh, offset, MPI_SEEK_SET, ierr)
      call MPI_File_write(fh, trim(version), len(trim(version)), MPI_CHARACTER, status, ierr)
      call MPI_File_write(fh, trim(para),    len(trim(para)),    MPI_CHARACTER, status, ierr)
      call MPI_File_write(fh, trim(zonep),   len(trim(zonep)),   MPI_CHARACTER, status, ierr)
   endif
   offset = len(trim(version)) + len(trim(para)) + len(trim(zonep))
   call MPI_File_set_view(fh, offset, MPI_DOUBLE_PRECISION, filetype, &
        "native", MPI_INFO_NULL, ierr)

   ! write the data part, one variable at a time
   call MPI_FILE_WRITE_ALL(fh, xx, local_length(1)*local_length(2), MPI_DOUBLE_PRECISION, MPI_STATUS_IGNORE, ierr)
   call MPI_FILE_WRITE_ALL(fh, yy, local_length(1)*local_length(2), MPI_DOUBLE_PRECISION, MPI_STATUS_IGNORE, ierr)
   call MPI_FILE_WRITE_ALL(fh, ua,   1, datatype, MPI_STATUS_IGNORE, ierr)
   call MPI_FILE_WRITE_ALL(fh, va,   1, datatype, MPI_STATUS_IGNORE, ierr)
   call MPI_FILE_WRITE_ALL(fh, phi1, 1, datatype, MPI_STATUS_IGNORE, ierr)
   call MPI_FILE_WRITE_ALL(fh, phi2, 1, datatype, MPI_STATUS_IGNORE, ierr)
   call MPI_FILE_WRITE_ALL(fh, phi3, 1, datatype, MPI_STATUS_IGNORE, ierr)
   call MPI_FILE_WRITE_ALL(fh, con,  1, datatype, MPI_STATUS_IGNORE, ierr)
   call MPI_FILE_WRITE_ALL(fh, p,    1, datatype, MPI_STATUS_IGNORE, ierr)
   call MPI_FILE_WRITE_ALL(fh, rho,  1, datatype, MPI_STATUS_IGNORE, ierr)
   call MPI_FILE_WRITE_ALL(fh, pp,   1, datatype, MPI_STATUS_IGNORE, ierr)

   call MPI_File_close(fh, ierr)
   call MPI_BARRIER(cart_comm, ierr)
end subroutine MPIoutput
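For reference, here is what I think is going on after staring at the guide some more. The binary header is not plain ASCII like the strings I am writing above: apart from the 8-byte "#!TDV112" magic number, every string (title, variable names, zone name) is encoded one character per 4-byte integer with an int32 zero as terminator, and the section markers are 4-byte floats (299.0 before each zone, 357.0 at end of header). Below is a minimal serial sketch of the header for one ordered 2-D zone with two variables, assuming the v112 field order from the guide (unverified, and write_plt_header / write_int32_string are just names I made up, not library routines). Code:
! Minimal TDV112 header sketch for one ordered 2-D zone with two variables.
! Field order follows my reading of the Tecplot Data Format Guide (v112);
! please verify against the guide before relying on it.
subroutine write_plt_header(iunit, imax, jmax)
   use iso_fortran_env, only: int32, real32, real64
   implicit none
   integer, intent(in) :: iunit, imax, jmax
   ! iunit must be opened with form='unformatted', access='stream'

   write(iunit) '#!TDV112'                    ! magic number, 8 bytes, no null
   write(iunit) 1_int32                       ! byte-order check
   write(iunit) 0_int32                       ! FileType: 0 = FULL
   call write_int32_string(iunit, 'flow')     ! title
   write(iunit) 2_int32                       ! number of variables
   call write_int32_string(iunit, 'x')
   call write_int32_string(iunit, 'y')
   write(iunit) 299.0_real32                  ! zone marker
   call write_int32_string(iunit, 'zone 1')   ! zone name
   write(iunit) -1_int32                      ! parent zone: none
   write(iunit) -1_int32                      ! StrandID: -1 = static zone
   write(iunit) 0.0_real64                    ! solution time
   write(iunit) -1_int32                      ! not used, always -1
   write(iunit) 0_int32                       ! zone type: 0 = ORDERED
   write(iunit) 0_int32                       ! var location: all data at nodes
   write(iunit) 0_int32                       ! no raw local face neighbors
   write(iunit) 0_int32                       ! no user-defined face connections
   write(iunit) int(imax, int32), int(jmax, int32), 1_int32  ! IMax, JMax, KMax
   write(iunit) 0_int32                       ! no auxiliary name/value pairs
   write(iunit) 357.0_real32                  ! end-of-header marker
contains
   ! strings in the binary header are int32 per character, null terminated
   subroutine write_int32_string(u, s)
      integer, intent(in) :: u
      character(*), intent(in) :: s
      integer :: k
      do k = 1, len_trim(s)
         write(u) int(ichar(s(k:k)), int32)
      end do
      write(u) 0_int32
   end subroutine write_int32_string
end subroutine write_plt_header
If that layout is right, the same byte sequence could be written from rank 0 with MPI_File_write in place of the three ASCII strings in my code.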
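Regarding REAL4 vs REAL8: if I read the guide correctly, that difference alone should not matter, because the data section declares a precision code per variable (1 = float, 2 = double), so double data is allowed as long as it is declared as such. What my code above seems to be missing is the data-section preamble that has to sit between the header and the raw values: the zone marker repeated, one format code per variable, the passive/sharing flags, and a float64 min/max pair for every variable. Also, an ordered (I,J) zone apparently has no connectivity section at all; connectivity lists only exist for the finite-element zone types. A sketch of the preamble for two double-precision variables, again assuming the v112 layout (the subroutine name and arguments are placeholders): Code:
! Data-section preamble for one zone, per my reading of the v112 guide.
! The raw values follow in block format, one whole variable after another,
! which is the order the MPI-IO writes above already produce.
subroutine write_plt_data_preamble(iunit, numvar, xx, yy)
   use iso_fortran_env, only: int32, real32, real64
   implicit none
   integer, intent(in) :: iunit, numvar
   real(real64), intent(in) :: xx(:,:), yy(:,:)
   integer :: v

   write(iunit) 299.0_real32               ! zone marker, repeated here
   write(iunit) (2_int32, v = 1, numvar)   ! format code per variable: 2 = double
   write(iunit) 0_int32                    ! no passive variables
   write(iunit) 0_int32                    ! no variable sharing
   write(iunit) -1_int32                   ! no connectivity sharing
   write(iunit) minval(xx), maxval(xx)     ! float64 min/max per variable,
   write(iunit) minval(yy), maxval(yy)     ! in variable order
   ! ...then the values: all of xx, then all of yy, as raw doubles
end subroutine write_plt_data_preamble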
Tags
binary file, fortran 90, mpi libraries, mpi parallel, tecplot 360