xref: /petsc/src/sys/mpiuni/mpi.c (revision 6a98f8dc3f2c9149905a87dc2e9d0fedaf64e09a)
1 /*
2       This provides a few of the MPI-uni functions that cannot be implemented
3     with C macros
4 */
5 #include <petscsys.h>
6 #if !defined(MPIUNI_H)
7 #error "Wrong mpi.h included! require mpi.h from MPIUNI"
8 #endif
9 
#define MPI_SUCCESS 0
#define MPI_FAILURE 1

/* Scratch pointer used as a temporary by the MPIUNI macros in mpi.h */
void *MPIUNI_TMP = NULL;
14 
/*
       With MPI Uni there are exactly four distinct communicators:
    MPI_COMM_SELF, MPI_COMM_WORLD, and an MPI_Comm_dup() of each of these (duplicates of duplicates return the same communicator)

    MPI_COMM_SELF and MPI_COMM_WORLD are MPI_Comm_free()'d in MPI_Finalize(), but in general with PETSc
     the other communicators are freed once the last PETSc object is freed (before MPI_Finalize()).

*/
#define MAX_ATTR 256 /* maximum number of attribute keyvals */
#define MAX_COMM 128 /* maximum number of communicators */

/* Highest communicator id handed out so far; ids 1 and 2 are the built-in communicators */
static int MaxComm = 2;

/* One attribute slot per (communicator, keyval) pair */
typedef struct {
  void *attribute_val;
  int  active;
} MPI_Attr;

/* Per-keyval state: the user delete callback and its extra_state argument */
typedef struct {
  void                *extra_state;
  MPI_Delete_function *del;
} MPI_Attr_keyval;

static MPI_Attr_keyval attr_keyval[MAX_ATTR];
static MPI_Attr        attr[MAX_COMM][MAX_ATTR];
static int             comm_active[MAX_COMM];               /* nonzero while communicator id (index+1) is in use */
static int             num_attr = 1,mpi_tag_ub = 100000000; /* keyval 0 is reserved for the built-in MPI_TAG_UB */
static void*           MPIUNIF_mpi_in_place = 0;            /* Fortran MPI_IN_PLACE sentinel, set by mpiunisetfortranbasepointers_() */
43 
44 #if defined(__cplusplus)
45 extern "C" {
46 #endif
47 
48 /*
49    To avoid problems with prototypes to the system memcpy() it is duplicated here
50 */
51 int MPIUNI_Memcpy(void *a,const void *b,int n)
52 {
53   int  i;
54   char *aa= (char*)a;
55   char *bb= (char*)b;
56 
57   if (a == MPI_IN_PLACE || a == MPIUNIF_mpi_in_place) return MPI_SUCCESS;
58   if (b == MPI_IN_PLACE || b == MPIUNIF_mpi_in_place) return MPI_SUCCESS;
59   for (i=0; i<n; i++) aa[i] = bb[i];
60   return MPI_SUCCESS;
61 }
62 
/* Counters for user-defined error classes/codes handed out by MPI_Add_error_{class,code}() */
static int classcnt = 0;
static int codecnt = 0;
65 
66 int MPI_Add_error_class(int *cl)
67 {
68   *cl = classcnt++;
69   return MPI_SUCCESS;
70 }
71 
72 int MPI_Add_error_code(int cl,int *co)
73 {
74   if (cl >= classcnt) return MPI_FAILURE;
75   *co = codecnt++;
76   return MPI_SUCCESS;
77 }
78 
79 int MPI_Type_get_envelope(MPI_Datatype datatype,int *num_integers,int *num_addresses,int *num_datatypes,int *combiner)
80 {
81   int comb = datatype >> 28;
82   switch (comb) {
83   case MPI_COMBINER_NAMED:
84     *num_integers = 0;
85     *num_addresses = 0;
86     *num_datatypes = 0;
87     *combiner = comb;
88     break;
89   case MPI_COMBINER_DUP:
90     *num_integers = 0;
91     *num_addresses = 0;
92     *num_datatypes = 1;
93     *combiner = comb;
94     break;
95   case MPI_COMBINER_CONTIGUOUS:
96     *num_integers = 1;
97     *num_addresses = 0;
98     *num_datatypes = 1;
99     *combiner = comb;
100     break;
101   default:
102     return MPIUni_Abort(MPI_COMM_SELF,1);
103   }
104   return MPI_SUCCESS;
105 }
106 
/*
   Decodes the pieces packed into an MPIUNI datatype handle (see
   MPI_Type_get_envelope() for the combiner encoding in the top four bits).
   Named (built-in) types have no decodable contents and abort.
*/
int MPI_Type_get_contents(MPI_Datatype datatype,int max_integers,int max_addresses,int max_datatypes,int *array_of_integers,MPI_Aint *array_of_addresses,MPI_Datatype *array_of_datatypes)
{
  int comb = datatype >> 28; /* combiner lives in the high 4 bits of the handle */
  switch (comb) {
  case MPI_COMBINER_NAMED:
    return MPIUni_Abort(MPI_COMM_SELF,1);
    break;
  case MPI_COMBINER_DUP:
    if (max_datatypes < 1) return MPIUni_Abort(MPI_COMM_SELF,1);
    array_of_datatypes[0] = datatype & 0x0fffffff; /* strip combiner bits to recover the duplicated type */
    break;
  case MPI_COMBINER_CONTIGUOUS:
    if (max_integers < 1 || max_datatypes < 1) return MPIUni_Abort(MPI_COMM_SELF,1);
    array_of_integers[0] = (datatype >> 8) & 0xfff; /* count */
    array_of_datatypes[0] = (datatype & 0x0ff000ff) | 0x100;  /* basic named type (count=1) from which the contiguous type is derived */
    break;
  default:
    return MPIUni_Abort(MPI_COMM_SELF,1);
  }
  return MPI_SUCCESS;
}
128 
129 /*
130    Used to set the built-in MPI_TAG_UB attribute
131 */
132 static int Keyval_setup(void)
133 {
134   attr[MPI_COMM_WORLD-1][0].active        = 1;
135   attr[MPI_COMM_WORLD-1][0].attribute_val = &mpi_tag_ub;
136   attr[MPI_COMM_SELF-1][0].active         = 1;
137   attr[MPI_COMM_SELF-1][0].attribute_val  = &mpi_tag_ub;
138   return MPI_SUCCESS;
139 }
140 
141 int MPI_Comm_create_keyval(MPI_Copy_function *copy_fn,MPI_Delete_function *delete_fn,int *keyval,void *extra_state)
142 {
143   if (num_attr >= MAX_ATTR) return MPIUni_Abort(MPI_COMM_WORLD,1);
144 
145   attr_keyval[num_attr].extra_state = extra_state;
146   attr_keyval[num_attr].del         = delete_fn;
147   *keyval                           = num_attr++;
148   return MPI_SUCCESS;
149 }
150 
151 int MPI_Comm_free_keyval(int *keyval)
152 {
153   attr_keyval[*keyval].extra_state = 0;
154   attr_keyval[*keyval].del         = 0;
155 
156   *keyval = 0;
157   return MPI_SUCCESS;
158 }
159 
160 int MPI_Comm_set_attr(MPI_Comm comm,int keyval,void *attribute_val)
161 {
162   if (comm-1 < 0 || comm-1 > MaxComm) return MPI_FAILURE;
163   attr[comm-1][keyval].active        = 1;
164   attr[comm-1][keyval].attribute_val = attribute_val;
165   return MPI_SUCCESS;
166 }
167 
/*
   Removes the attribute stored under keyval on comm and, if a delete callback
   was registered, invokes it.  Note the ordering: the slot is cleared before
   the user callback runs, with the value saved so the callback still receives it.
*/
int MPI_Comm_delete_attr(MPI_Comm comm,int keyval)
{
  if (comm-1 < 0 || comm-1 > MaxComm) return MPI_FAILURE;
  if (attr[comm-1][keyval].active && attr_keyval[keyval].del) {
    void *save_attribute_val = attr[comm-1][keyval].attribute_val;
    attr[comm-1][keyval].active        = 0;
    attr[comm-1][keyval].attribute_val = 0;
    (*(attr_keyval[keyval].del))(comm,keyval,save_attribute_val,attr_keyval[keyval].extra_state);
  }
  return MPI_SUCCESS;
}
179 
180 int MPI_Comm_get_attr(MPI_Comm comm,int keyval,void *attribute_val,int *flag)
181 {
182   if (comm-1 < 0 || comm-1 > MaxComm) return MPI_FAILURE;
183   if (!keyval) Keyval_setup();
184   *flag                  = attr[comm-1][keyval].active;
185   *(void**)attribute_val = attr[comm-1][keyval].attribute_val;
186   return MPI_SUCCESS;
187 }
188 
189 int MPI_Comm_create(MPI_Comm comm,MPI_Group group,MPI_Comm *newcomm)
190 {
191   int j;
192   if (comm-1 < 0 || comm-1 > MaxComm) return MPI_FAILURE;
193   for (j=3; j<MaxComm; j++) {
194     if (!comm_active[j-1]) {
195       comm_active[j-1] = 1;
196       *newcomm = j;
197       return MPI_SUCCESS;
198     }
199   }
200   if (MaxComm > MAX_COMM) return MPI_FAILURE;
201   *newcomm =  MaxComm++;
202   comm_active[*newcomm-1] = 1;
203   return MPI_SUCCESS;
204 }
205 
206 int MPI_Comm_dup(MPI_Comm comm,MPI_Comm *out)
207 {
208   int j;
209   if (comm-1 < 0 || comm-1 > MaxComm) return MPI_FAILURE;
210   for (j=3; j<MaxComm; j++) {
211     if (!comm_active[j-1]) {
212       comm_active[j-1] = 1;
213       *out = j;
214       return MPI_SUCCESS;
215     }
216   }
217   if (MaxComm > MAX_COMM) return MPI_FAILURE;
218   *out = MaxComm++;
219   comm_active[*out-1] = 1;
220   return MPI_SUCCESS;
221 }
222 
/*
   Frees a communicator: runs the delete callback for every active attribute,
   clears the attribute slots, releases the id for reuse (only ids >= 3; the
   slots of the built-in communicators are never recycled) and zeros *comm.
*/
int MPI_Comm_free(MPI_Comm *comm)
{
  int i;

  if (*comm-1 < 0 || *comm-1 > MaxComm) return MPI_FAILURE;
  for (i=0; i<num_attr; i++) {
    if (attr[*comm-1][i].active && attr_keyval[i].del) (*attr_keyval[i].del)(*comm,i,attr[*comm-1][i].attribute_val,attr_keyval[i].extra_state);
    attr[*comm-1][i].active        = 0;
    attr[*comm-1][i].attribute_val = 0;
  }
  if (*comm >= 3) comm_active[*comm-1] = 0; /* ids 1 and 2 are the built-in communicators */
  *comm = 0;
  return MPI_SUCCESS;
}
237 
238 int MPI_Comm_size(MPI_Comm comm, int *size)
239 {
240   if (comm-1 < 0 || comm-1 > MaxComm) return MPI_FAILURE;
241   *size=1;
242   return MPI_SUCCESS;
243 }
244 
245 int MPI_Comm_rank(MPI_Comm comm, int *rank)
246 {
247   if (comm-1 < 0 || comm-1 > MaxComm) return MPI_FAILURE;
248   *rank=0;
249   return MPI_SUCCESS;
250 }
251 
252 int MPIUni_Abort(MPI_Comm comm,int errorcode)
253 {
254   printf("MPI operation not supported by PETSc's sequential MPI wrappers\n");
255   return MPI_FAILURE;
256 }
257 
258 int MPI_Abort(MPI_Comm comm,int errorcode)
259 {
260   abort();
261   return MPI_SUCCESS;
262 }
263 
264 /* --------------------------------------------------------------------------*/
265 
/* Lifecycle flags: MPI may be initialized at most once and never re-initialized after finalize */
static int MPI_was_initialized = 0;
static int MPI_was_finalized   = 0;
268 
269 int MPI_Init(int *argc, char ***argv)
270 {
271   if (MPI_was_initialized) return MPI_FAILURE;
272   if (MPI_was_finalized) return MPI_FAILURE;
273   MPI_was_initialized = 1;
274   return MPI_SUCCESS;
275 }
276 
277 int MPI_Finalize(void)
278 {
279   MPI_Comm comm;
280   if (MPI_was_finalized) return MPI_FAILURE;
281   if (!MPI_was_initialized) return MPI_FAILURE;
282   comm = MPI_COMM_WORLD;
283   MPI_Comm_free(&comm);
284   comm = MPI_COMM_SELF;
285   MPI_Comm_free(&comm);
286   MPI_was_finalized = 1;
287   return MPI_SUCCESS;
288 }
289 
290 int MPI_Initialized(int *flag)
291 {
292   *flag = MPI_was_initialized;
293   return MPI_SUCCESS;
294 }
295 
296 int MPI_Finalized(int *flag)
297 {
298   *flag = MPI_was_finalized;
299   return MPI_SUCCESS;
300 }
301 
/* -------------------     Fortran versions of several routines ------------------ */

/*
   Map the petsc_mpi_*_ stub names defined below onto the symbol-naming
   convention used by the Fortran compiler: all-uppercase, lowercase without a
   trailing underscore, or (further below) a double trailing underscore.
*/
#if defined(PETSC_HAVE_FORTRAN_CAPS)
#define mpiunisetmoduleblock_          MPIUNISETMODULEBLOCK
#define mpiunisetfortranbasepointers_  MPIUNISETFORTRANBASEPOINTERS
#define petsc_mpi_init_                PETSC_MPI_INIT
#define petsc_mpi_finalize_            PETSC_MPI_FINALIZE
#define petsc_mpi_comm_size_           PETSC_MPI_COMM_SIZE
#define petsc_mpi_comm_rank_           PETSC_MPI_COMM_RANK
#define petsc_mpi_abort_               PETSC_MPI_ABORT
#define petsc_mpi_reduce_              PETSC_MPI_REDUCE
#define petsc_mpi_allreduce_           PETSC_MPI_ALLREDUCE
#define petsc_mpi_barrier_             PETSC_MPI_BARRIER
#define petsc_mpi_bcast_               PETSC_MPI_BCAST
#define petsc_mpi_gather_              PETSC_MPI_GATHER
#define petsc_mpi_allgather_           PETSC_MPI_ALLGATHER
#define petsc_mpi_comm_split_          PETSC_MPI_COMM_SPLIT
#define petsc_mpi_scan_                PETSC_MPI_SCAN
#define petsc_mpi_send_                PETSC_MPI_SEND
#define petsc_mpi_recv_                PETSC_MPI_RECV
#define petsc_mpi_reduce_scatter_      PETSC_MPI_REDUCE_SCATTER
#define petsc_mpi_irecv_               PETSC_MPI_IRECV
#define petsc_mpi_isend_               PETSC_MPI_ISEND
#define petsc_mpi_sendrecv_            PETSC_MPI_SENDRECV
#define petsc_mpi_test_                PETSC_MPI_TEST
#define petsc_mpi_waitall_             PETSC_MPI_WAITALL
#define petsc_mpi_waitany_             PETSC_MPI_WAITANY
#define petsc_mpi_allgatherv_          PETSC_MPI_ALLGATHERV
#define petsc_mpi_alltoallv_           PETSC_MPI_ALLTOALLV
#define petsc_mpi_comm_create_         PETSC_MPI_COMM_CREATE
#define petsc_mpi_address_             PETSC_MPI_ADDRESS
#define petsc_mpi_pack_                PETSC_MPI_PACK
#define petsc_mpi_unpack_              PETSC_MPI_UNPACK
#define petsc_mpi_pack_size_           PETSC_MPI_PACK_SIZE
#define petsc_mpi_type_struct_         PETSC_MPI_TYPE_STRUCT
#define petsc_mpi_type_commit_         PETSC_MPI_TYPE_COMMIT
#define petsc_mpi_wtime_               PETSC_MPI_WTIME
#define petsc_mpi_cancel_              PETSC_MPI_CANCEL
#define petsc_mpi_comm_dup_            PETSC_MPI_COMM_DUP
#define petsc_mpi_comm_free_           PETSC_MPI_COMM_FREE
#define petsc_mpi_get_count_           PETSC_MPI_GET_COUNT
#define petsc_mpi_get_processor_name_  PETSC_MPI_GET_PROCESSOR_NAME
#define petsc_mpi_initialized_         PETSC_MPI_INITIALIZED
#define petsc_mpi_iprobe_              PETSC_MPI_IPROBE
#define petsc_mpi_probe_               PETSC_MPI_PROBE
#define petsc_mpi_request_free_        PETSC_MPI_REQUEST_FREE
#define petsc_mpi_ssend_               PETSC_MPI_SSEND
#define petsc_mpi_wait_                PETSC_MPI_WAIT
#define petsc_mpi_comm_group_          PETSC_MPI_COMM_GROUP
#define petsc_mpi_exscan_              PETSC_MPI_EXSCAN
#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
#define mpiunisetmoduleblock_          mpiunisetmoduleblock
#define mpiunisetfortranbasepointers_  mpiunisetfortranbasepointers
#define petsc_mpi_init_                petsc_mpi_init
#define petsc_mpi_finalize_            petsc_mpi_finalize
#define petsc_mpi_comm_size_           petsc_mpi_comm_size
#define petsc_mpi_comm_rank_           petsc_mpi_comm_rank
#define petsc_mpi_abort_               petsc_mpi_abort
#define petsc_mpi_reduce_              petsc_mpi_reduce
#define petsc_mpi_allreduce_           petsc_mpi_allreduce
#define petsc_mpi_barrier_             petsc_mpi_barrier
#define petsc_mpi_bcast_               petsc_mpi_bcast
#define petsc_mpi_gather_              petsc_mpi_gather
#define petsc_mpi_allgather_           petsc_mpi_allgather
#define petsc_mpi_comm_split_          petsc_mpi_comm_split
#define petsc_mpi_scan_                petsc_mpi_scan
#define petsc_mpi_send_                petsc_mpi_send
#define petsc_mpi_recv_                petsc_mpi_recv
#define petsc_mpi_reduce_scatter_      petsc_mpi_reduce_scatter
#define petsc_mpi_irecv_               petsc_mpi_irecv
#define petsc_mpi_isend_               petsc_mpi_isend
#define petsc_mpi_sendrecv_            petsc_mpi_sendrecv
#define petsc_mpi_test_                petsc_mpi_test
#define petsc_mpi_waitall_             petsc_mpi_waitall
#define petsc_mpi_waitany_             petsc_mpi_waitany
#define petsc_mpi_allgatherv_          petsc_mpi_allgatherv
#define petsc_mpi_alltoallv_           petsc_mpi_alltoallv
#define petsc_mpi_comm_create_         petsc_mpi_comm_create
#define petsc_mpi_address_             petsc_mpi_address
#define petsc_mpi_pack_                petsc_mpi_pack
#define petsc_mpi_unpack_              petsc_mpi_unpack
#define petsc_mpi_pack_size_           petsc_mpi_pack_size
#define petsc_mpi_type_struct_         petsc_mpi_type_struct
#define petsc_mpi_type_commit_         petsc_mpi_type_commit
#define petsc_mpi_wtime_               petsc_mpi_wtime
#define petsc_mpi_cancel_              petsc_mpi_cancel
#define petsc_mpi_comm_dup_            petsc_mpi_comm_dup
#define petsc_mpi_comm_free_           petsc_mpi_comm_free
#define petsc_mpi_get_count_           petsc_mpi_get_count
#define petsc_mpi_get_processor_name_  petsc_mpi_get_processor_name
#define petsc_mpi_initialized_         petsc_mpi_initialized
#define petsc_mpi_iprobe_              petsc_mpi_iprobe
#define petsc_mpi_probe_               petsc_mpi_probe
#define petsc_mpi_request_free_        petsc_mpi_request_free
#define petsc_mpi_ssend_               petsc_mpi_ssend
#define petsc_mpi_wait_                petsc_mpi_wait
#define petsc_mpi_comm_group_          petsc_mpi_comm_group
#define petsc_mpi_exscan_              petsc_mpi_exscan
#endif

/* Compilers that append a second underscore to names already containing one */
#if defined(PETSC_HAVE_FORTRAN_UNDERSCORE_UNDERSCORE)
#define petsc_mpi_init_                petsc_mpi_init__
#define petsc_mpi_finalize_            petsc_mpi_finalize__
#define petsc_mpi_comm_size_           petsc_mpi_comm_size__
#define petsc_mpi_comm_rank_           petsc_mpi_comm_rank__
#define petsc_mpi_abort_               petsc_mpi_abort__
#define petsc_mpi_reduce_              petsc_mpi_reduce__
#define petsc_mpi_allreduce_           petsc_mpi_allreduce__
#define petsc_mpi_barrier_             petsc_mpi_barrier__
#define petsc_mpi_bcast_               petsc_mpi_bcast__
#define petsc_mpi_gather_              petsc_mpi_gather__
#define petsc_mpi_allgather_           petsc_mpi_allgather__
#define petsc_mpi_comm_split_          petsc_mpi_comm_split__
#define petsc_mpi_scan_                petsc_mpi_scan__
#define petsc_mpi_send_                petsc_mpi_send__
#define petsc_mpi_recv_                petsc_mpi_recv__
#define petsc_mpi_reduce_scatter_      petsc_mpi_reduce_scatter__
#define petsc_mpi_irecv_               petsc_mpi_irecv__
#define petsc_mpi_isend_               petsc_mpi_isend__
#define petsc_mpi_sendrecv_            petsc_mpi_sendrecv__
#define petsc_mpi_test_                petsc_mpi_test__
#define petsc_mpi_waitall_             petsc_mpi_waitall__
#define petsc_mpi_waitany_             petsc_mpi_waitany__
#define petsc_mpi_allgatherv_          petsc_mpi_allgatherv__
#define petsc_mpi_alltoallv_           petsc_mpi_alltoallv__
#define petsc_mpi_comm_create_         petsc_mpi_comm_create__
#define petsc_mpi_address_             petsc_mpi_address__
#define petsc_mpi_pack_                petsc_mpi_pack__
#define petsc_mpi_unpack_              petsc_mpi_unpack__
#define petsc_mpi_pack_size_           petsc_mpi_pack_size__
#define petsc_mpi_type_struct_         petsc_mpi_type_struct__
#define petsc_mpi_type_commit_         petsc_mpi_type_commit__
#define petsc_mpi_wtime_               petsc_mpi_wtime__
#define petsc_mpi_cancel_              petsc_mpi_cancel__
#define petsc_mpi_comm_dup_            petsc_mpi_comm_dup__
#define petsc_mpi_comm_free_           petsc_mpi_comm_free__
#define petsc_mpi_get_count_           petsc_mpi_get_count__
#define petsc_mpi_get_processor_name_  petsc_mpi_get_processor_name__
#define petsc_mpi_initialized_         petsc_mpi_initialized__
#define petsc_mpi_iprobe_              petsc_mpi_iprobe__
#define petsc_mpi_probe_               petsc_mpi_probe__
#define petsc_mpi_request_free_        petsc_mpi_request_free__
#define petsc_mpi_ssend_               petsc_mpi_ssend__
#define petsc_mpi_wait_                petsc_mpi_wait__
#define petsc_mpi_comm_group_          petsc_mpi_comm_group__
#define petsc_mpi_exscan_              petsc_mpi_exscan__
#endif
449 
/* Do not build fortran interface if MPI namespace collision is to be avoided */
#if defined(PETSC_HAVE_FORTRAN)

/* presumably implemented on the Fortran side; called from petsc_mpi_init_() below — TODO confirm */
PETSC_EXTERN void mpiunisetmoduleblock_(void);

/* records the Fortran MPI_IN_PLACE sentinel so MPIUNI_Memcpy() can recognize it */
PETSC_EXTERN void mpiunisetfortranbasepointers_(void *f_mpi_in_place)
{
  MPIUNIF_mpi_in_place   = f_mpi_in_place;
}

/* Fortran binding of MPI_Init(); command-line arguments are not available here */
PETSC_EXTERN void petsc_mpi_init_(int *ierr)
{
  mpiunisetmoduleblock_();
  *ierr = MPI_Init((int*)0, (char***)0);
}

PETSC_EXTERN void petsc_mpi_finalize_(int *ierr)
{
  *ierr = MPI_Finalize();
}

/* single process: size is always 1 */
PETSC_EXTERN void petsc_mpi_comm_size_(MPI_Comm *comm,int *size,int *ierr)
{
  *size = 1;
  *ierr = 0;
}

/* single process: rank is always 0 */
PETSC_EXTERN void petsc_mpi_comm_rank_(MPI_Comm *comm,int *rank,int *ierr)
{
  *rank = 0;
  *ierr = MPI_SUCCESS;
}

/* with one process every split yields the original communicator */
PETSC_EXTERN void petsc_mpi_comm_split_(MPI_Comm *comm,int *color,int *key, MPI_Comm *newcomm, int *ierr)
{
  *newcomm = *comm;
  *ierr    = MPI_SUCCESS;
}

PETSC_EXTERN void petsc_mpi_abort_(MPI_Comm *comm,int *errorcode,int *ierr)
{
  abort();
  *ierr = MPI_SUCCESS; /* not reached */
}

/* reductions/gathers delegate to the MPIUNI C macros, which reduce to buffer copies */
PETSC_EXTERN void petsc_mpi_reduce_(void *sendbuf,void *recvbuf,int *count,int *datatype,int *op,int *root,int *comm,int *ierr)
{
  *ierr = MPI_Reduce(sendbuf,recvbuf,*count,*datatype,*op,*root,*comm);
}

PETSC_EXTERN void petsc_mpi_allreduce_(void *sendbuf,void *recvbuf,int *count,int *datatype,int *op,int *comm,int *ierr)
{
  *ierr = MPI_Allreduce(sendbuf,recvbuf,*count,*datatype,*op,*comm);
}

/* no other ranks to synchronize with */
PETSC_EXTERN void petsc_mpi_barrier_(MPI_Comm *comm,int *ierr)
{
  *ierr = MPI_SUCCESS;
}

/* broadcast to self is a no-op */
PETSC_EXTERN void petsc_mpi_bcast_(void *buf,int *count,int *datatype,int *root,int *comm,int *ierr)
{
  *ierr = MPI_SUCCESS;
}

PETSC_EXTERN void petsc_mpi_gather_(void *sendbuf,int *scount,int *sdatatype, void *recvbuf, int *rcount, int *rdatatype, int *root,int *comm,int *ierr)
{
  *ierr = MPI_Gather(sendbuf,*scount,*sdatatype,recvbuf,rcount,rdatatype,*root,*comm);
}

PETSC_EXTERN void petsc_mpi_allgather_(void *sendbuf,int *scount,int *sdatatype, void *recvbuf, int *rcount, int *rdatatype,int *comm,int *ierr)
{
  *ierr = MPI_Allgather(sendbuf,*scount,*sdatatype,recvbuf,rcount,rdatatype,*comm);
}

/* inclusive scan on a single process is a copy of the input */
PETSC_EXTERN void petsc_mpi_scan_(void *sendbuf,void *recvbuf,int *count,int *datatype,int *op,int *comm,int *ierr)
{
  *ierr = MPIUNI_Memcpy(recvbuf,sendbuf,(*count)*MPI_sizeof(*datatype));
}
529 
/* point-to-point messaging can never complete with a single process: abort */
PETSC_EXTERN void petsc_mpi_send_(void *buf,int *count,int *datatype,int *dest,int *tag,int *comm,int *ierr)
{
  *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
}

/* NOTE(review): status is taken by value here, unlike the int* used by
   petsc_mpi_sendrecv_ — harmless since this stub only aborts, but confirm */
PETSC_EXTERN void petsc_mpi_recv_(void *buf,int *count,int *datatype,int *source,int *tag,int *comm,int status,int *ierr)
{
  *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
}

PETSC_EXTERN void petsc_mpi_reduce_scatter_(void *sendbuf,void *recvbuf,int *recvcounts,int *datatype,int *op,int *comm,int *ierr)
{
  *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
}

PETSC_EXTERN void petsc_mpi_irecv_(void *buf,int *count, int *datatype, int *source, int *tag, int *comm, int *request, int *ierr)
{
  *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
}

PETSC_EXTERN void petsc_mpi_isend_(void *buf,int *count,int *datatype,int *dest,int *tag,int *comm,int *request, int *ierr)
{
  *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
}

/* send-to-self/receive-from-self: just copy the send buffer into the receive buffer */
PETSC_EXTERN void petsc_mpi_sendrecv_(void *sendbuf,int *sendcount,int *sendtype,int *dest,int *sendtag,void *recvbuf,int *recvcount,int *recvtype,int *source,int *recvtag,int *comm,int *status,int *ierr)
{
  *ierr = MPIUNI_Memcpy(recvbuf,sendbuf,(*sendcount)*MPI_sizeof(*sendtype));
}

PETSC_EXTERN void petsc_mpi_test_(int *request,int *flag,int *status,int *ierr)
{
  *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
}

/* no outstanding requests are possible, so waits succeed immediately */
PETSC_EXTERN void petsc_mpi_waitall_(int *count,int *array_of_requests,int *array_of_statuses,int *ierr)
{
  *ierr = MPI_SUCCESS;
}

PETSC_EXTERN void petsc_mpi_waitany_(int *count,int *array_of_requests,int * index, int *status,int *ierr)
{
  *ierr = MPI_SUCCESS;
}

PETSC_EXTERN void petsc_mpi_allgatherv_(void *sendbuf,int *sendcount,int *sendtype,void *recvbuf,int *recvcounts,int *displs,int *recvtype,int *comm,int *ierr)
{
  *ierr = MPI_Allgatherv(sendbuf,*sendcount,*sendtype,recvbuf,recvcounts,displs,*recvtype,*comm);
}

PETSC_EXTERN void petsc_mpi_alltoallv_(void *sendbuf,int *sendcounts,int *sdispls,int *sendtype,void *recvbuf,int *recvcounts,int *rdispls,int *recvtype,int *comm,int *ierr)
{
  *ierr = MPI_Alltoallv(sendbuf,sendcounts,sdispls,*sendtype,recvbuf,recvcounts,rdispls,*recvtype,*comm);
}
584 
/* with one process the created communicator is the original communicator */
PETSC_EXTERN void petsc_mpi_comm_create_(int *comm,int *group,int *newcomm,int *ierr)
{
  *newcomm =  *comm;
  *ierr    = MPI_SUCCESS;
}

/* the "address" of a location is just its pointer value */
PETSC_EXTERN void petsc_mpi_address_(void *location,MPI_Aint *address,int *ierr)
{
  *address =  (MPI_Aint) ((char *)location);
  *ierr    = MPI_SUCCESS;
}

/* pack/unpack and derived-type construction are not supported by MPIUNI: abort */
PETSC_EXTERN void petsc_mpi_pack_(void *inbuf,int *incount,int *datatype,void *outbuf,int *outsize,int *position,int *comm,int *ierr)
{
  *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
}

PETSC_EXTERN void petsc_mpi_unpack_(void *inbuf,int *insize,int *position,void *outbuf,int *outcount,int *datatype,int *comm,int *ierr)
{
  *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
}

PETSC_EXTERN void petsc_mpi_pack_size_(int *incount,int *datatype,int *comm,int *size,int *ierr)
{
  *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
}

PETSC_EXTERN void petsc_mpi_type_struct_(int *count,int *array_of_blocklengths,int * array_of_displaments,int *array_of_types,int *newtype,int *ierr)
{
  *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
}

PETSC_EXTERN void petsc_mpi_type_commit_(int *datatype,int *ierr)
{
  *ierr = MPI_SUCCESS;
}

/* timing is not implemented in this Fortran stub: always 0.0 */
double petsc_mpi_wtime_(void)
{
  return 0.0;
}

PETSC_EXTERN void petsc_mpi_cancel_(int *request,int *ierr)
{
  *ierr = MPI_SUCCESS;
}

/* NOTE(review): unlike the C MPI_Comm_dup() above, the Fortran stub returns the
   same communicator rather than allocating a new id — confirm this asymmetry is intended */
PETSC_EXTERN void petsc_mpi_comm_dup_(int *comm,int *out,int *ierr)
{
  *out  = *comm;
  *ierr = MPI_SUCCESS;
}

PETSC_EXTERN void petsc_mpi_comm_free_(int *comm,int *ierr)
{
  *ierr = MPI_SUCCESS;
}

PETSC_EXTERN void petsc_mpi_get_count_(int *status,int *datatype,int *count,int *ierr)
{
  *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
}

/* returns the fixed name "localhost"; Fortran strings carry no NUL terminator,
   so exactly *result_len characters are written */
PETSC_EXTERN void petsc_mpi_get_processor_name_(char *name,int *result_len,int *ierr,PETSC_FORTRAN_CHARLEN_T len)
{
  MPIUNI_Memcpy(name,"localhost",9*sizeof(char));
  *result_len = 9;
  *ierr       = MPI_SUCCESS;
}

PETSC_EXTERN void petsc_mpi_initialized_(int *flag,int *ierr)
{
  *flag = MPI_was_initialized;
  *ierr = MPI_SUCCESS;
}
660 
/* NOTE: 'glag' appears to be a typo for 'flag'; harmless since the argument is unused */
PETSC_EXTERN void petsc_mpi_iprobe_(int *source,int *tag,int *comm,int *glag,int *status,int *ierr)
{
  *ierr = MPI_SUCCESS;
}

PETSC_EXTERN void petsc_mpi_probe_(int *source,int *tag,int *comm,int *flag,int *status,int *ierr)
{
  *ierr = MPI_SUCCESS;
}

PETSC_EXTERN void petsc_mpi_request_free_(int *request,int *ierr)
{
  *ierr = MPI_SUCCESS;
}

/* synchronous send cannot complete with a single process: abort */
PETSC_EXTERN void petsc_mpi_ssend_(void *buf,int *count,int *datatype,int *dest,int *tag,int *comm,int *ierr)
{
  *ierr = MPIUni_Abort(MPI_COMM_WORLD,0);
}

PETSC_EXTERN void petsc_mpi_wait_(int *request,int *status,int *ierr)
{
  *ierr = MPI_SUCCESS;
}

PETSC_EXTERN void petsc_mpi_comm_group_(int *comm,int *group,int *ierr)
{
  *ierr = MPI_SUCCESS;
}

PETSC_EXTERN void petsc_mpi_exscan_(void *sendbuf,void *recvbuf,int *count,int *datatype,int *op,int *comm,int *ierr)
{
  *ierr = MPI_SUCCESS;
}
695 
696 #endif /* PETSC_HAVE_FORTRAN */
697 
698 #if defined(__cplusplus)
699 }
700 #endif
701