August 16, 2013, 17:52
cell loop in parallel Fluent
#1
Member
Join Date: Jul 2013
Posts: 80
Rep Power: 13
Some of you may recognise the following parallel UDF from the Fluent UDF Manual (I have deleted the comments):
Code:
#include "udf.h"
#define WALLID 3

DEFINE_ON_DEMAND(face_p_list)
{
#if !RP_HOST
    face_t f;
    Thread *tf;
    Domain *domain;
    real *p_array;
    real x[ND_ND], (*x_array)[ND_ND];
    int n_faces, i, j;

    domain = Get_Domain(1);
    tf = Lookup_Thread(domain, WALLID);
    n_faces = THREAD_N_ELEMENTS_INT(tf);

#if RP_NODE
    if (! I_AM_NODE_ZERO_P)
    {
        PRF_CSEND_INT(node_zero, &n_faces, 1, myid);
    }
#endif

    p_array = (real *)malloc(n_faces * sizeof(real));
    x_array = (real (*)[ND_ND])malloc(ND_ND * n_faces * sizeof(real));

    begin_f_loop(f, tf)
    {
        p_array[f] = F_P(f, tf);
        F_CENTROID(x_array[f], f, tf);
    }
    end_f_loop(f, tf)

    Message0("\nstart\n");

#if RP_NODE
    if (! I_AM_NODE_ZERO_P)
    {
        PRF_CSEND_REAL(node_zero, p_array, n_faces, myid);
        PRF_CSEND_REAL(node_zero, x_array[0], ND_ND*n_faces, myid);
    }
    else
#endif
    {
        Message0("\n\nList of Pressures...\n");
        for (j = 0; j < n_faces; j++)
        {
#if RP_3D
            Message0("%12.4e %12.4e %12.4e %12.4e\n",
                     x_array[j][0], x_array[j][1], x_array[j][2], p_array[j]);
#else /* 2D */
            Message0("%12.4e %12.4e %12.4e\n",
                     x_array[j][0], x_array[j][1], p_array[j]);
#endif
        }
    }

#if RP_NODE
    if (I_AM_NODE_ZERO_P)
    {
        compute_node_loop_not_zero(i)
        {
            PRF_CRECV_INT(i, &n_faces, 1, i);
            p_array = (real *)realloc(p_array, n_faces*sizeof(real));
            x_array = (real (*)[ND_ND])realloc(x_array, ND_ND*n_faces*sizeof(real));
            PRF_CRECV_REAL(i, p_array, n_faces, i);
            PRF_CRECV_REAL(i, x_array[0], ND_ND*n_faces, i);
            for (j = 0; j < n_faces; j++)
            {
#if RP_3D
                Message0("%12.4e %12.4e %12.4e %12.4e\n",
                         x_array[j][0], x_array[j][1], x_array[j][2], p_array[j]);
#else /* 2D */
                Message0("%12.4e %12.4e %12.4e\n",
                         x_array[j][0], x_array[j][1], p_array[j]);
#endif
            }
        }
    }
#endif /* RP_NODE */

    free(p_array);
    free(x_array);
#endif /* !RP_HOST */
}

What I want to do is exactly the same, but instead of looping over the faces, I want to loop over the cells of the cell thread adjacent to a given face thread, so I changed that UDF to the following:

Code:
#include "udf.h"
#define WALLID 10025

DEFINE_ON_DEMAND(face_p_list)
{
#if !RP_HOST
    face_t f;
    cell_t c;
    Thread *tf;
    Thread *t0;
    Domain *domain;
    real *p_array;
    real x[ND_ND], (*x_array)[ND_ND];
    int n_cell, i, j;

    domain = Get_Domain(1);
    tf = Lookup_Thread(domain, WALLID);
    t0 = THREAD_T0(tf);
    n_cell = THREAD_N_ELEMENTS_INT(t0);

#if RP_NODE
    if (! I_AM_NODE_ZERO_P)
    {
        PRF_CSEND_INT(node_zero, &n_cell, 1, myid);
    }
#endif

    p_array = (real *)malloc(n_cell * sizeof(real));
    x_array = (real (*)[ND_ND])malloc(ND_ND * n_cell * sizeof(real));

    begin_c_loop(c, t0)
    {
        p_array[c] = C_P(c, t0);
        F_CENTROID(x_array[f], f, tf);
    }
    end_c_loop(c, t0)

    Message0("\nstart\n");

#if RP_NODE
    if (! I_AM_NODE_ZERO_P)
    {
        PRF_CSEND_REAL(node_zero, p_array, n_cell, myid);
        PRF_CSEND_REAL(node_zero, x_array[0], ND_ND*n_cell, myid);
    }
    else
#endif
    {
        Message0("\n\nList of Pressures...\n");
        for (j = 0; j < n_cell; j++)
        {
#if RP_3D
            Message0("%12.4e %12.4e %12.4e %12.4e\n",
                     x_array[j][0], x_array[j][1], x_array[j][2], p_array[j]);
#else /* 2D */
            Message0("%12.4e %12.4e %12.4e\n",
                     x_array[j][0], x_array[j][1], p_array[j]);
#endif
        }
    }

#if RP_NODE
    if (I_AM_NODE_ZERO_P)
    {
        compute_node_loop_not_zero(i)
        {
            PRF_CRECV_INT(i, &n_cell, 1, i);
            p_array = (real *)realloc(p_array, n_cell*sizeof(real));
            x_array = (real (*)[ND_ND])realloc(x_array, ND_ND*n_cell*sizeof(real));
            PRF_CRECV_REAL(i, p_array, n_cell, i);
            PRF_CRECV_REAL(i, x_array[0], ND_ND*n_cell, i);
            for (j = 0; j < n_cell; j++)
            {
#if RP_3D
                Message0("%12.4e %12.4e %12.4e %12.4e\n",
                         x_array[j][0], x_array[j][1], x_array[j][2], p_array[j]);
#else /* 2D */
                Message0("%12.4e %12.4e %12.4e\n",
                         x_array[j][0], x_array[j][1], p_array[j]);
#endif
            }
        }
    }
#endif /* RP_NODE */

    free(p_array);
    free(x_array);
#endif /* !RP_HOST */
}

There isn't any problem when I build or load this new UDF, but when I execute it on demand, I get this message in the TUI:

Code:
999999 (..\src\mpsystem.c@1173): mpt_read: failed: errno = 10054
999999: mpt_read: error: read failed trying to read 4 bytes: No such file or directory
MPI Application rank 0 exited before MPI_Finalize() with status -1073741819
The Parallel FLUENT process could not be started.

Any ideas? By the way, I have around 2000 faces and 50000 cells in each partition; could that be a problem?

Thanks in advance, best regards.
August 18, 2013, 05:10

#2
New Member
Join Date: Jun 2013
Posts: 15
Rep Power: 13
Hi
I believe the problem may be in

Code:
begin_c_loop(c, t0)
{
    p_array[c] = C_P(c, t0);
    F_CENTROID(x_array[f], f, tf);  /* f is never assigned inside a cell loop */
}
end_c_loop(c, t0)

In the original example this is a face loop, so f is the loop variable; in your cell loop f is never assigned, so F_CENTROID indexes x_array with an uninitialized face index. A corrected version is sketched below. Hope it helps.

Btw, I have the feeling that sometimes, when there is an error in a UDF, serial mode can describe it better than parallel mode, i.e. give a better error message.

kornetka
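A minimal sketch of what the corrected loop could look like, assuming you want the centroid of each adjacent cell rather than of the wall face (C_CENTROID is the cell counterpart of F_CENTROID, and it takes the cell and its cell thread):

Code:
begin_c_loop(c, t0)
{
    p_array[c] = C_P(c, t0);        /* cell pressure */
    C_CENTROID(x_array[c], c, t0);  /* cell centroid, indexed by the cell */
}
end_c_loop(c, t0)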
August 18, 2013, 09:31

#3
Member
Join Date: Jul 2013
Posts: 80
Rep Power: 13
Quote:
Anyway, I corrected it and I still had the same error message. I then changed the macro THREAD_N_ELEMENTS_INT to THREAD_N_ELEMENTS, and it works now. However, I am still not sure why it works with the macros PRF_CSEND_INT and PRF_CSEND_LONG, when in my case "n_cell" is longer than 4 bytes. I will do some further research and then let everybody know, just in case it helps (see the sketch below). Greetings, and thanks again.
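For reference, a sketch of the change described above, shown in context; only the count line differs, the rest of the UDF is unchanged:

Code:
t0 = THREAD_T0(tf);             /* cell thread on side 0 of the wall face thread */
n_cell = THREAD_N_ELEMENTS(t0); /* was THREAD_N_ELEMENTS_INT(t0), which crashed here */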
Similar Threads
Thread | Thread Starter | Forum | Replies | Last Post |
udf problem | jane | Fluent UDF and Scheme Programming | 37 | February 20, 2018 05:17 |
fluent parallel processing problem | pedram.sotudeh | FLUENT | 0 | June 19, 2012 02:32 |
Parallel FLUENT received fatal signal | ramrocket | FLUENT | 3 | April 30, 2012 04:19 |
Parallel Processing Problems Fluent 13 | lstebe | FLUENT | 3 | February 15, 2012 10:54 |
Fluent Parallel Error | Will Humber | FLUENT | 2 | April 3, 2008 15:15 |