/*
   This file contains routines for parallel vector operations that use shared memory
*/
#include <../src/vec/vec/impls/mpi/pvecimpl.h> /*I "petscvec.h" I*/

#if defined(PETSC_USE_SHARED_MEMORY)

extern PetscErrorCode PetscSharedMalloc(MPI_Comm, PetscInt, PetscInt, void **);

PetscErrorCode VecDuplicate_Shared(Vec win, Vec *v)
{
  Vec_MPI     *w = (Vec_MPI *)win->data;
  PetscScalar *array;

  PetscFunctionBegin;
  /* first processor allocates entire array and sends its address to the others */
  PetscCall(PetscSharedMalloc(PetscObjectComm((PetscObject)win), win->map->n * sizeof(PetscScalar), win->map->N * sizeof(PetscScalar), (void **)&array));

  PetscCall(VecCreate(PetscObjectComm((PetscObject)win), v));
  PetscCall(VecSetSizes(*v, win->map->n, win->map->N));
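  /* PETSC_FALSE: do not allocate new storage, use the shared memory region just obtained */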
  PetscCall(VecCreate_MPI_Private(*v, PETSC_FALSE, w->nghost, array));
  PetscCall(PetscLayoutReference(win->map, &(*v)->map));

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscCall(PetscObjectListDuplicate(((PetscObject)win)->olist, &((PetscObject)*v)->olist));
  PetscCall(PetscFunctionListDuplicate(((PetscObject)win)->qlist, &((PetscObject)*v)->qlist));

  (*v)->ops->duplicate = VecDuplicate_Shared;
  (*v)->bstash.bs      = win->bstash.bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_EXTERN PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscScalar *array;

  PetscFunctionBegin;
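  /* determine the local and/or global size if either was given as PETSC_DECIDE */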
  PetscCall(PetscSplitOwnership(PetscObjectComm((PetscObject)vv), &vv->map->n, &vv->map->N));
  PetscCall(PetscSharedMalloc(PetscObjectComm((PetscObject)vv), vv->map->n * sizeof(PetscScalar), vv->map->N * sizeof(PetscScalar), (void **)&array));

  PetscCall(VecCreate_MPI_Private(vv, PETSC_FALSE, 0, array));
  vv->ops->duplicate = VecDuplicate_Shared;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Code to manage shared memory allocation using standard Unix shared memory
*/
#include <petscsys.h>
#if defined(PETSC_HAVE_PWD_H)
#include <pwd.h>
#endif
#include <ctype.h>
#include <sys/stat.h>
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_SYS_UTSNAME_H)
#include <sys/utsname.h>
#endif
#include <fcntl.h>
#include <time.h>
#if defined(PETSC_HAVE_SYS_SYSTEMINFO_H)
#include <sys/systeminfo.h>
#endif
#include <sys/shm.h>
#include <sys/mman.h>

static PetscMPIInt Petsc_ShmComm_keyval = MPI_KEYVAL_INVALID;

/*
   Private routine to delete internal storage when a communicator is freed.
   This is called by MPI, not by users.

   The binding for the first argument changed from MPI 1.0 to 1.1; in 1.0
   it was MPI_Comm *comm.
*/
static PetscErrorCode Petsc_DeleteShared(MPI_Comm comm, PetscMPIInt keyval, void *attr_val, void *extra_state)
{
  PetscFunctionBegin;
  PetscCall(PetscFree(attr_val));
  PetscFunctionReturn(MPI_SUCCESS);
}

/*

   This routine is still incomplete and needs work.

   For this to work on Apple Mac OS X you will likely need to add something like the following to the file /etc/sysctl.conf
       cat /etc/sysctl.conf
   kern.sysv.shmmax=67108864
   kern.sysv.shmmin=1
   kern.sysv.shmmni=32
   kern.sysv.shmseg=512
   kern.sysv.shmall=1024

   This does not currently free the shared memory after the program runs. Use the Unix command ipcs to see the shared memory
   segments in use and ipcrm to remove them.

*/
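/*
   PetscSharedMalloc - allocates a single shared memory segment of len bytes and returns on each
   rank a pointer into it, offset by the sum of the llen values (local lengths, in bytes) of the
   lower-ranked processes. Collective on comm.
*/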
PetscErrorCode PetscSharedMalloc(MPI_Comm comm, PetscInt llen, PetscInt len, void **result)
{
  PetscInt    shift;
  PetscMPIInt rank;
  int         id, key = 0;
  char       *value;

  PetscFunctionBegin;
  *result = NULL;

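  /* exclusive prefix sum of the local lengths gives this rank's byte offset into the segment */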
  PetscCallMPI(MPI_Scan(&llen, &shift, 1, MPIU_INT, MPI_SUM, comm));
  shift -= llen;

  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (rank == 0) {
    id = shmget(key, len, 0666 | IPC_CREAT);
    if (id == -1) {
      perror("Unable to malloc shared memory");
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Unable to malloc shared memory");
    }
  }
  /* key 0 is IPC_PRIVATE, so the segment is identified only by its id and cannot be looked up
     by key on the other ranks; rank 0 must broadcast the id it obtained */
  PetscCallMPI(MPI_Bcast(&id, 1, MPI_INT, 0, comm));
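  /* each rank attaches the same segment at an address chosen by the kernel */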
  value = (char *)shmat(id, NULL, 0);
  if (value == (char *)-1) {
    perror("Unable to access shared memory allocated");
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Unable to access shared memory allocated");
  }
  *result = (void *)(value + shift);
  PetscFunctionReturn(PETSC_SUCCESS);
}
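
/*
   Sketch of the cleanup path this file currently lacks: a hypothetical PetscSharedFree() (not
   part of the PETSc API) in which every rank detaches its mapping and rank 0 marks the segment
   for removal. The segment id and base address would have to be stashed at allocation time,
   for example in an attribute keyed by Petsc_ShmComm_keyval above; compiled out until then.
*/
#if 0
static PetscErrorCode PetscSharedFree(MPI_Comm comm, int id, void *base)
{
  PetscMPIInt rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (shmdt(base) == -1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Unable to detach shared memory");
  /* IPC_RMID destroys the segment once the last attachment is gone */
  if (rank == 0 && shmctl(id, IPC_RMID, NULL) == -1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Unable to remove shared memory segment");
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif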

#else

PETSC_EXTERN PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)vv), &size));
  PetscCheck(size <= 1, PETSC_COMM_SELF, PETSC_ERR_SUP_SYS, "Not supported for shared memory vector objects on this machine");
  PetscCall(VecCreate_Seq(vv));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#endif

/*@
  VecCreateShared - Creates a parallel vector that uses shared memory.

  Collective

  Input Parameters:
+ comm - the MPI communicator to use
. n - local vector length (or `PETSC_DECIDE` to have it calculated if `N` is given)
- N - global vector length (or `PETSC_DECIDE` to have it calculated if `n` is given)

  Output Parameter:
. v - the vector

  Level: advanced

  Notes:
  Currently `VecCreateShared()` is available only on the SGI; otherwise,
  this routine is the same as `VecCreateMPI()`.

  Use `VecDuplicate()` or `VecDuplicateVecs()` to form additional vectors of the
  same type as an existing vector.

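  Example Usage, a minimal sketch:
.vb
  Vec x;

  PetscCall(VecCreateShared(PETSC_COMM_WORLD, PETSC_DECIDE, 100, &x));
  PetscCall(VecSet(x, 1.0));
  PetscCall(VecDestroy(&x));
.ve
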
.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecCreateMPI()`, `VecDuplicate()`, `VecDuplicateVecs()`,
          `VecCreateGhost()`, `VecCreateMPIWithArray()`, `VecCreateGhostWithArray()`
@*/
PetscErrorCode VecCreateShared(MPI_Comm comm, PetscInt n, PetscInt N, Vec *v)
{
  PetscFunctionBegin;
  PetscCall(VecCreate(comm, v));
  PetscCall(VecSetSizes(*v, n, N));
  PetscCall(VecSetType(*v, VECSHARED));
  PetscFunctionReturn(PETSC_SUCCESS);
}