The TELEMAC-MASCARET system  trunk
partitioner_para.F
Go to the documentation of this file.
!                    ***************************
                     SUBROUTINE partitioner_para
!                    ***************************
!
     & (pmethod, nelem, npoin, ndp, nparts, ikles, epart, npart)
!
!***********************************************************************
! PARALLEL   V6P2                                   21/08/2010
!***********************************************************************
!
!brief    Calls the partitioning software: the mesh elements are
!+        split into NPARTS sub-domains with METIS (serial call),
!+        ParMETIS or PT-Scotch (both parallel, over MPI).
!
!history  R. KOPMANN (BAW)
!+
!+
!+        created
!
!
!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
!| PMETHOD        |-->| 1: FOR METIS 3: FOR PARMETIS 4: FOR PTSCOTCH
!| NELEM          |-->| THE NUMBER OF ELEMENTS
!| NPOIN          |-->| THE NUMBER OF MESH POINTS
!| NDP            |-->| THE NUMBER OF POINTS PER ELEMENT
!| NPARTS         |-->| NUMBER OF PARTITIONS
!| IKLES          |-->| CONNECTIVITY TABLE (SIZE NELEM*NDP)
!| EPART          |<--| PARTITION NUMBER OF AN ELEMENT (1..NPARTS)
!| NPART          |<--| PARTITION NUMBER OF A POINT; ONLY WRITTEN BY
!|                |   | THE METIS BRANCH AND LEFT IN C NUMBERING
!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
!
#if defined HAVE_PTSCOTCH
      USE iso_c_binding
#endif
      USE bief
      IMPLICIT NONE
!
! PTSCOTCH'S HEADER (FORTRAN)
#if defined HAVE_PTSCOTCH
      include "ptscotchf.h"
#endif
!
      INTEGER, INTENT(IN) :: pmethod
      INTEGER, INTENT(IN) :: nelem
      INTEGER, INTENT(IN) :: npoin
      INTEGER, INTENT(IN) :: ndp
      INTEGER, INTENT(IN) :: ikles(nelem*ndp)
      INTEGER, INTENT(IN) :: nparts
      INTEGER, INTENT(OUT) :: epart(nelem)
!TODO: Remove ?? (only the METIS branch fills it)
      INTEGER, INTENT(OUT) :: npart(nelem)
!
!-----------------------------------------------------------------------
!
      INTEGER :: err, i, j, k
!
! FOR PARMETIS
!
!     NUMBER OF NODES TWO ELEMENTS MUST SHARE TO BE GRAPH NEIGHBOURS
      INTEGER :: ncommonnodes
      INTEGER :: edgecut
!     MESH IN CSR-LIKE FORM (ELEMENT POINTERS / ELEMENT NODES)
      INTEGER, ALLOCATABLE :: eptr(:), eind(:)
!     NEVER ALLOCATED: STANDS FOR A NULL ARGUMENT IN THE METIS CALL
      INTEGER, ALLOCATABLE :: nulltable(:)
#if defined HAVE_PTSCOTCH || defined HAVE_PARMETIS
      INTEGER, ALLOCATABLE :: epart_loc(:)
      INTEGER :: numflag
      INTEGER :: ncon
      INTEGER :: nelem_loc
      INTEGER :: options(0:2)
!     PARMETIS EXPECTS SINGLE PRECISION (REAL_T) FOR THESE TWO ARRAYS
      REAL*4, ALLOCATABLE :: tpwgts(:), ubvec(:)
      INTEGER :: wgtflag
      INTEGER, ALLOCATABLE :: vwgt(:)
      INTEGER, ALLOCATABLE :: recvcount(:)
      INTEGER, ALLOCATABLE :: recvbuf(:)
      INTEGER, ALLOCATABLE :: elmdist(:)
      INTEGER, ALLOCATABLE :: displs(:)
#endif
!
#if defined HAVE_PTSCOTCH
!
!     OPAQUE PT-SCOTCH HANDLES (SIZES FROM ptscotchf.h)
      REAL*8, DIMENSION(SCOTCH_DGRAPHDIM) :: ptscotchgraph
      REAL*8, DIMENSION(SCOTCH_STRATDIM) :: ptscotchstrat
!     C POINTERS RETURNED BY PARMETIS_V3_MESH2DUAL AND THEIR
!     FORTRAN VIEWS
      TYPE(c_ptr), POINTER :: ptxadj(:), ptadjncy(:)
      INTEGER, POINTER :: xadj2(:), adjncy2(:)
      INTEGER :: nedge
!
#endif
      IF(pmethod.EQ.3) THEN
#if defined HAVE_PARMETIS
        WRITE(lu,*) 'BEGIN PARTITIONING WITH PARMETIS'
!       CALLING THE PARMETIS PARALLEL PARTITIONER
        ALLOCATE (elmdist(nparts+1),stat=err)
        CALL check_allocate (err, 'ELMDIST')
!
!       THE NUMBER OF COMMON POINTS NEEDED BETWEEN 2 ELEMENTS TO MAKE
!       AN EDGE
        IF (ndp==3.OR.ndp==6) THEN
          ncommonnodes = 2 ! FOR TRIANGLES OR PRISMS
        ELSE
          WRITE(lu,*) 'PARMETIS: IMPLEMENTED FOR ',
     &                'TRIANGLES OR PRISMS ONLY'
          CALL plante(1)
          stop
        ENDIF
!
!       ELMDIST: DESCRIBES HOW THE ELEMENTS OF THE MESH ARE DISTRIBUTED
!       AMONG THE PROCESSORS (ANALOGOUS TO PARMETIS' VTXDIST ARRAY).
!       IDENTICAL ON EVERY PROCESSOR, FORTRAN (1-BASED) NUMBERING.
        elmdist(1)=1
        DO i=1,nparts-1
          elmdist(i+1) = elmdist(i) + nelem/nparts
        ENDDO
!       THE LAST RANGE PICKS UP THE REMAINDER OF THE INTEGER DIVISION
        elmdist(nparts+1) = nelem + 1
!
!       NUMBER OF ELEMENTS HANDLED BY THIS PROCESS (IPID IS 0-BASED)
        nelem_loc = elmdist(ipid+2) - elmdist(ipid+1)
!
        ALLOCATE (eptr(nelem_loc+1),stat=err)
        CALL check_allocate(err, 'EPTR')
        ALLOCATE (eind(nelem_loc*ndp),stat=err)
        CALL check_allocate(err, 'EIND')
        ALLOCATE (epart_loc(nelem_loc),stat=err)
        CALL check_allocate(err, 'EPART_LOC')
!
!       EPTR, EIND: THE ELEMENTS STORED LOCALLY ON EACH PROCESSOR
!
        DO i=1,nelem_loc+1
          eptr(i) = (i-1)*ndp + 1
        ENDDO
!
        k=1
        DO i=elmdist(ipid+1),elmdist(ipid+2)-1
          DO j=1,ndp
            eind(k) = ikles((i-1)*ndp+j)
            k = k + 1
          ENDDO
        ENDDO
!
!       NUMBER OF BALANCING CONSTRAINTS
        ncon = 1
!
        ALLOCATE (tpwgts(ncon*nparts),stat=err)
        CALL check_allocate(err, 'TPWGTS')
        ALLOCATE (ubvec(ncon),stat=err)
        CALL check_allocate(err, 'UBVEC')
        ALLOCATE(vwgt(nelem_loc),stat=err)
        CALL check_allocate(err, 'VWGT')
!
!       UNIFORM TARGET WEIGHT PER PARTITION
        tpwgts(:) = 1.d0/float(nparts)
        options(:)=0
!       LOAD IMBALANCE TOLERANCE; THE PARMETIS MANUAL RECOMMENDS 1.05.
!       (WAS 1.D05 = 1.E5, WHICH DISABLES THE BALANCE CONSTRAINT)
        ubvec(:) = 1.05
        vwgt(:) = 1
!       WGTFLAG=0: NO WEIGHTS, VWGT IS IGNORED BY PARMETIS
        wgtflag = 0
!       FORTRAN-STYLE (1-BASED) NUMBERING
        numflag = 1
!
        CALL parmetis_v3_partmeshkway(elmdist, eptr, eind, vwgt,
     &       wgtflag, numflag, ncon, ncommonnodes, nparts, tpwgts,
     &       ubvec, options, edgecut, epart_loc, comm)
        DEALLOCATE(tpwgts)
        DEALLOCATE(ubvec)
        DEALLOCATE(vwgt)
        DEALLOCATE(eind)
        DEALLOCATE(eptr)
!
!       GATHER THE LOCAL PARTITION ARRAYS INTO EPART ON ALL RANKS
        ALLOCATE(recvcount(nparts),stat=err)
        CALL check_allocate(err,'RECVCOUNT')
        ALLOCATE(displs(nparts),stat=err)
        CALL check_allocate(err,'DISPLS')
        DO i=1,nparts
          recvcount(i) = elmdist(i+1) - elmdist(i)
        ENDDO
        displs(1) = 0
        DO i=2,nparts
          displs(i) = displs(i-1) + recvcount(i-1)
        ENDDO
!
        CALL p_allgatherv_i(epart_loc,nelem_loc,epart,
     &                      recvcount,displs, err)
!
        DEALLOCATE(elmdist)
        DEALLOCATE(displs)
        DEALLOCATE(epart_loc)
        DEALLOCATE(recvcount)
#else
        WRITE(lu,*) "TRYING TO USE PARMETIS TO PARTIONNE WHEN PARMETIS"
     &              ," IS NOT INSTALLED"
        CALL plante(1)
#endif
      ELSE IF(pmethod.EQ.4) THEN
#if defined HAVE_PTSCOTCH
        WRITE(lu,*) 'BEGIN PARTITIONING WITH PTSCOTCH'
!
        ALLOCATE (elmdist(nparts+1),stat=err)
        CALL check_allocate(err, 'ELMDIST')
!
!       THE NUMBER OF COMMON POINTS NEEDED BETWEEN 2 ELEMENTS TO MAKE
!       AN EDGE
        IF (ndp==3.OR.ndp==6) THEN
          ncommonnodes = 2 ! FOR TRIANGLES OR PRISMS
        ELSE
          WRITE(lu,*) 'PTSCOTCH: IMPLEMENTED FOR ',
     &                'TRIANGLES OR PRISMS ONLY'
          CALL plante(1)
          stop
        ENDIF
!
!       ELMDIST: SAME ELEMENT DISTRIBUTION AS IN THE PARMETIS BRANCH
        elmdist(1)=1
        DO i=1,nparts-1
          elmdist(i+1) = elmdist(i) + nelem/nparts
        ENDDO
        elmdist(nparts+1) = nelem + 1
!
        nelem_loc = elmdist(ipid+2) - elmdist(ipid+1)
!
        ALLOCATE (eptr(nelem_loc+1),stat=err)
        CALL check_allocate(err, 'EPTR')
        ALLOCATE (eind(nelem_loc*ndp),stat=err)
        CALL check_allocate(err, 'EIND')
        ALLOCATE (epart_loc(nelem_loc),stat=err)
        CALL check_allocate(err, 'EPART_LOC')
!
!       EPTR, EIND: THE ELEMENTS STORED LOCALLY ON EACH PROCESSOR
!
        DO i=1,nelem_loc+1
          eptr(i) = (i-1)*ndp + 1
        ENDDO
!
        k=1
        DO i=elmdist(ipid+1),elmdist(ipid+2)-1
          DO j=1,ndp
            eind(k) = ikles((i-1)*ndp+j)
            k = k + 1
          ENDDO
        ENDDO
!
!       NUMBER OF BALANCING CONSTRAINTS
        ncon = 1
!       FORTRAN-STYLE (1-BASED) NUMBERING FOR THE DUAL GRAPH
        numflag = 1
!
!       PARMETIS BUILDS THE DUAL GRAPH; IT RETURNS C POINTERS TO
!       XADJ/ADJNCY ARRAYS THAT IT ALLOCATES ITSELF
        ALLOCATE(ptxadj(1))
        ALLOCATE(ptadjncy(1))
        CALL parmetis_v3_mesh2dual(elmdist, eptr, eind,
     &                             numflag, ncommonnodes,
     &                             ptxadj,ptadjncy,
     &                             comm)
!
!       MAP THE C ARRAYS ONTO FORTRAN POINTERS (RECVBUF HOLDS THE
!       SHAPE ARGUMENT EXPECTED BY C_F_POINTER)
        ALLOCATE(recvbuf(1))
        recvbuf(1) = nelem_loc+1
        CALL c_f_pointer(ptxadj(1),xadj2,recvbuf)
!
!       XADJ IS 1-BASED HERE (NUMFLAG=1)
        nedge = xadj2(nelem_loc+1)-1
        recvbuf(1) = nedge
        CALL c_f_pointer(ptadjncy(1),adjncy2,recvbuf)
        DEALLOCATE(recvbuf)
!
!       BEGINNING PT-SCOTCH PARTITIONING
!
        CALL scotchfstratinit(ptscotchstrat,err)
        IF (err.NE.0) THEN
          WRITE(lu,*) 'PTSCOTCH ERROR: CANNOT INITIALIZE STRAT',err
          CALL plante(1)
        ENDIF
!
        CALL scotchfdgraphinit(ptscotchgraph,comm,err)
        IF (err.NE.0) THEN
          WRITE(lu,*) 'PTSCOTCH ERROR: CANNOT INITIALIZE GRAPH'
          CALL plante(1)
        ENDIF
!
!       WHERE PT-SCOTCH ACCEPTS NULL, AN UNUSED DUMMY ARRAY IS PASSED
        CALL scotchfdgraphbuild ( ptscotchgraph, ! GRAFDAT
     &                     numflag, ! BASEVAL
     &                     nelem_loc, ! VERTLOCNBR
     &                     nelem_loc, ! VERTLOCMAX (NO HOLES IN
!                          GLOBAL NUMBERING)
     &                     xadj2(1:nelem_loc), ! VERTLOCTAB
     &                     xadj2(2:nelem_loc+1), ! VENDLOCTAB = NULL
     &                     xadj2, ! VELOLOCTAB = NULL
     &                     xadj2, ! VLBLLOCTAB = NULL
     &                     xadj2(nelem_loc+1)-1, ! EDGELOCNBR
     &                     xadj2(nelem_loc+1)-1, ! EDGELOCSIZ
     &                     adjncy2, ! EDGELOCTAB
     &                     adjncy2, ! EDGEGSTTAB = NULL
     &                     adjncy2, ! EDLOLOCTAB = NULL
     &                     err)
        IF (err.NE.0) THEN
          WRITE(lu,*) 'PTSCOTCH ERROR: CANNOT BUILD GRAPH'
          CALL plante(1)
        ENDIF
!
        CALL scotchfdgraphcheck(ptscotchgraph,err)
        IF (err.NE.0) THEN
          WRITE(lu,*) 'PTSCOTCH ERROR: GRAPH NOT CONSISTANT'
          CALL plante(1)
        ENDIF
!
        CALL scotchfdgraphpart ( ptscotchgraph,
     &                           nparts,
     &                           ptscotchstrat,
     &                           epart_loc,
     &                           err )
!
        IF (err.NE.0) THEN
          WRITE(lu,*) 'PTSCOTCH ERROR: CANNOT PARTITION GRAPH'
          CALL plante(1)
        END IF
!       CHANGE EPART NUMBERING TO 1..NPARTS (SCOTCH RETURNS 0-BASED)
        DO i=1,nelem_loc
          epart_loc(i) = epart_loc(i) + 1
        ENDDO
!
!       FREE THE DISTRIBUTED GRAPH: THE DGRAPH VARIANT IS REQUIRED
!       (SCOTCHFGRAPHEXIT IS FOR CENTRALISED GRAPHS ONLY)
        CALL scotchfdgraphexit(ptscotchgraph)
!
        CALL scotchfstratexit(ptscotchstrat)
!
!       COMPUTING EPART: GATHER LOCAL PARTITIONS ON ALL RANKS
        ALLOCATE(recvcount(nparts),stat=err)
        CALL check_allocate(err,'RECVCOUNT')
        ALLOCATE(displs(nparts),stat=err)
        CALL check_allocate(err,'DISPLS')
        DO i=1,nparts
          recvcount(i) = elmdist(i+1) - elmdist(i)
        ENDDO
        displs(1) = 0
        DO i=2,nparts
          displs(i) = displs(i-1) + recvcount(i-1)
        ENDDO
!
        CALL p_allgatherv_i(epart_loc,nelem_loc,epart,
     &                      recvcount,displs,err)
!       NOTE(REVIEW): XADJ2/ADJNCY2 POINT TO MEMORY ALLOCATED BY
!       PARMETIS AND ARE NOT FREED HERE (WOULD NEED METIS' FREE
!       ROUTINE); ONLY THE POINTER HOLDERS ARE DEALLOCATED
        DEALLOCATE(ptxadj)
        DEALLOCATE(ptadjncy)
        DEALLOCATE(elmdist)
        DEALLOCATE(epart_loc)
        DEALLOCATE(eind)
        DEALLOCATE(eptr)
#else
        WRITE(lu,*) "TRYING TO PARTITIONNE WITH PTSCOTCH WHEN",
     &              "PTSCOTCH IS NOT INSTALLED"
        CALL plante(1)
#endif
      ELSEIF(pmethod.EQ.1) THEN
        WRITE(lu,*) 'BEGIN PARTITIONING WITH METIS'
        ALLOCATE (eptr(nelem+1),stat=err)
        CALL check_allocate(err, 'EPTR')
        ALLOCATE (eind(nelem*ndp),stat=err)
        CALL check_allocate(err, 'EIND')
!
        DO i=1,nelem+1
          eptr(i) = (i-1)*ndp + 1
        ENDDO
!
        k=1
        DO i=1,nelem
          DO j=1,ndp
            eind(k) = ikles((i-1)*ndp+j)
            k = k + 1
          ENDDO
        ENDDO
!
!       SWITCH TO C NUMBERING (THE METIS 5 API IS 0-BASED)
        eind = eind -1
        eptr = eptr -1
!
!       METIS REQUIRES THE NUMBER OF COMMON POINTS NEEDED BETWEEN
!       2 ELEMENTS TO MAKE AN EDGE:
!       NCOMMONNODES = 2 FOR TRIANGLES OR QUADRILATERALS
!       NCOMMONNODES = 3 FOR TETRAHEDRA
!       NCOMMONNODES = 4 FOR HEXAHEDRA
!
        IF (ndp==3.OR.ndp==6) THEN
          ncommonnodes = 2 ! FOR TRIANGLES OR PRISMS
        ELSE
          WRITE(lu,*) 'METIS: IMPLEMENTED FOR TRIANGLES OR PRISMS ONLY'
          CALL plante(1)
          stop
        ENDIF
!
!       WE ONLY USE METIS_PARTMESHDUAL AS ONLY THE FINITE ELEMENTS
!       PARTITION IS RELEVANT HERE. THE UNALLOCATED NULLTABLE STANDS
!       FOR NULL (VWGT, VSIZE, TPWGTS, OPTIONS => METIS DEFAULTS).
!
#if defined HAVE_MPI
        CALL metis_partmeshdual
     &      (nelem, npoin, eptr, eind, nulltable,
     &       nulltable, ncommonnodes, nparts, nulltable,
     &       nulltable, edgecut, epart, npart)
#else
        WRITE(lu,*) 'ERROR: TRY TO RUN PARTEL_PARA WITH A '//
     &              'SERIAL CONFIGURATION'
        CALL plante(1)
#endif
!
!       DEALLOCATING TEMPORARY ARRAYS FOR METIS
!       SWITCHING EPART TO FORTRAN NUMBERING (1...N)
!       NOTE(REVIEW): NPART IS LEFT IN C NUMBERING - CONFIRM UNUSED
        epart = epart+1
        DEALLOCATE(eptr)
        DEALLOCATE(eind)
!
      ELSE
!       GUARD AGAINST AN UNKNOWN METHOD: EPART (INTENT(OUT)) WOULD
!       OTHERWISE BE RETURNED UNDEFINED WITHOUT ANY DIAGNOSTIC
        WRITE(lu,*) 'PARTITIONER_PARA: UNKNOWN PARTITIONING METHOD ',
     &              pmethod
        CALL plante(1)
      ENDIF
      END SUBROUTINE partitioner_para
407 
subroutine p_allgatherv_i(SEND_BUFFER, SEND_COUNT, RECV_BUFFER, RECV_COUNT, DISPLS, IERR)
Definition: p_allgatherv_i.F:8
Definition: bief.f:3