/*
   vecnode.c - VECNODE shared-memory vector implementation (petsc-3.14.5, 2021-03-03)
*/
2: #include <../src/vec/vec/impls/node/vecnodeimpl.h>
3: #include <../src/vec/vec/impls/mpi/pvecimpl.h>
5: #if defined(PETSC_HAVE_MPI_PROCESS_SHARED_MEMORY)
7: PetscErrorCode VecSetValues_Node(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
8: {
10: SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_SUP,"Not implemented yet");
11: }
13: /* check all blocks are filled */
14: static PetscErrorCode VecAssemblyBegin_Node(Vec v)
15: {
17: return(0);
18: }
20: static PetscErrorCode VecAssemblyEnd_Node(Vec v)
21: {
22: Vec_Node *s = (Vec_Node*)v->data;
25: s->array[-1] += 1.0; /* update local object state counter if this routine changes values of v */
26: /* printf("VecAssemblyEnd_Node s->array[-1] %g\n",s->array[-1]); */
27: return(0);
28: }
30: static PetscErrorCode VecScale_Node(Vec v, PetscScalar alpha)
31: {
33: Vec_Node *s = (Vec_Node*)v->data;
36: VecScale_Seq(v,alpha);
37: s->array[-1] += 1.0; /* update local object state counter if this routine changes values of v */
38: /* printf("VecScale_Node s->array[-1] %g\n",s->array[-1]); */
39: return(0);
40: }
42: static PetscErrorCode VecCopy_Node(Vec v,Vec y)
43: {
45: Vec_Node *s = (Vec_Node*)y->data;
48: VecCopy_Seq(v,y);
49: s->array[-1] += 1.0; /* update local object state counter if this routine changes values of y */
50: return(0);
51: }
53: static PetscErrorCode VecSet_Node(Vec v,PetscScalar alpha)
54: {
56: Vec_Node *s = (Vec_Node*)v->data;
59: VecSet_Seq(v,alpha);
60: s->array[-1] += 1.0; /* update local object state counter if this routine changes values of v */
61: /* printf("VecSet_Node s->array[-1] %g\n",s->array[-1]); */
62: return(0);
63: }
65: static PetscErrorCode VecDestroy_Node(Vec v)
66: {
67: Vec_Node *vs = (Vec_Node*)v->data;
71: MPI_Win_free(&vs->win);
72: MPI_Comm_free(&vs->shmcomm);
73: PetscFree(vs->winarray);
74: PetscFree(vs);
75: return(0);
76: }
78: static PetscErrorCode VecDuplicate_Node(Vec x,Vec *y)
79: {
83: VecCreate(PetscObjectComm((PetscObject)x),y);
84: VecSetSizes(*y,x->map->n,x->map->N);
85: VecSetType(*y,((PetscObject)x)->type_name);
86: PetscLayoutReference(x->map,&(*y)->map);
87: PetscObjectListDuplicate(((PetscObject)x)->olist,&((PetscObject)(*y))->olist);
88: PetscFunctionListDuplicate(((PetscObject)x)->qlist,&((PetscObject)(*y))->qlist);
90: PetscMemcpy((*y)->ops,x->ops,sizeof(struct _VecOps));
92: /* New vector should inherit stashing property of parent */
93: (*y)->stash.donotstash = x->stash.donotstash;
94: (*y)->stash.ignorenegidx = x->stash.ignorenegidx;
96: (*y)->map->bs = PetscAbs(x->map->bs);
97: (*y)->bstash.bs = x->bstash.bs;
98: return(0);
99: }
101: static PetscErrorCode VecAYPX_Node(Vec y,PetscScalar alpha,Vec x)
102: {
104: Vec_Node *s = (Vec_Node*)y->data;
107: VecAYPX_Seq(y,alpha,x);
108: s->array[-1] += 1.0;
109: return(0);
110: }
112: static PetscErrorCode VecAXPBY_Node(Vec y,PetscScalar alpha,PetscScalar beta,Vec x)
113: {
115: SETERRQ(PetscObjectComm((PetscObject)x),PETSC_ERR_SUP,"Not implemented yet");
116: }
118: static PetscErrorCode VecAXPBYPCZ_Node(Vec z,PetscScalar alpha,PetscScalar beta,PetscScalar gamma,Vec x,Vec y)
119: {
121: Vec_Node *s = (Vec_Node*)z->data;
124: VecAXPBYPCZ_Seq(z,alpha,beta,gamma,x,y);
125: s->array[-1] += 1.0;
126: return(0);
127: }
130: static PetscErrorCode VecConjugate_Node(Vec x)
131: {
133: SETERRQ(PetscObjectComm((PetscObject)x),PETSC_ERR_SUP,"Not implemented yet");
134: }
136: static PetscErrorCode VecWAXPY_Node(Vec w,PetscScalar alpha,Vec x,Vec y)
137: {
139: Vec_Node *s = (Vec_Node*)w->data;
142: VecWAXPY_Seq(w,alpha,x,y);
143: s->array[-1] += 1.0;
144: return(0);
145: }
147: static PetscErrorCode VecMax_Node(Vec x,PetscInt *p,PetscReal *max)
148: {
150: SETERRQ(PetscObjectComm((PetscObject)x),PETSC_ERR_SUP,"Not implemented yet");
151: }
153: static PetscErrorCode VecMin_Node(Vec x,PetscInt *p,PetscReal *min)
154: {
156: SETERRQ(PetscObjectComm((PetscObject)x),PETSC_ERR_SUP,"Not implemented yet");
157: }
159: /* supports nested blocks */
160: static PetscErrorCode VecView_Node(Vec x,PetscViewer viewer)
161: {
165: VecView_MPI(x,viewer);
166: return(0);
167: }
169: static PetscErrorCode VecGetArray_Node(Vec x,PetscScalar **a)
170: {
171: Vec_Node *s = (Vec_Node*)x->data;
173: *a = s->array;
174: return(0);
175: }
177: static PetscErrorCode VecRestoreArray_Node(Vec x,PetscScalar **a)
178: {
179: Vec_Node *s = (Vec_Node*)x->data;
182: s->array[-1] += 1.0; /* update local object state counter if this routine changes values of v */
183: /* printf("VecRestoreArray_Node s->array[-1] %g\n",s->array[-1]); */
184: return(0);
185: }
187: static PetscErrorCode VecGetArrayRead_Node(Vec x,const PetscScalar **a)
188: {
189: Vec_Node *s = (Vec_Node*)x->data;
192: *a = s->array;
193: return(0);
194: }
/* This routine prevents VecRestoreArrayRead() from calling VecRestoreArray_Node(), which increments s->array[-1] */
197: static PetscErrorCode VecRestoreArrayRead_Node(Vec x,const PetscScalar **a)
198: {
200: return(0);
201: }
203: static struct _VecOps DvOps = { VecDuplicate_Node, /* 1 */
204: VecDuplicateVecs_Default,
205: VecDestroyVecs_Default,
206: VecDot_MPI,
207: VecMDot_MPI,
208: VecNorm_MPI,
209: VecTDot_MPI,
210: VecMTDot_MPI,
211: VecScale_Node,
212: VecCopy_Node, /* 10 */
213: VecSet_Node,
214: VecSwap_Seq,
215: VecAXPY_Seq,
216: VecAXPBY_Node,
217: VecMAXPY_Seq,
218: VecAYPX_Node,
219: VecWAXPY_Node,
220: VecAXPBYPCZ_Node,
221: NULL,
222: NULL,
223: VecSetValues_Node, /* 20 */
224: VecAssemblyBegin_Node,
225: VecAssemblyEnd_Node,
226: VecGetArray_Node,
227: VecGetSize_MPI,
228: VecGetSize_Seq,
229: VecRestoreArray_Node,
230: VecMax_Node,
231: VecMin_Node,
232: VecSetRandom_Seq,
233: NULL,
234: VecSetValuesBlocked_Seq,
235: VecDestroy_Node,
236: VecView_Node,
237: VecPlaceArray_Seq,
238: VecReplaceArray_Seq,
239: VecDot_Seq,
240: VecTDot_Seq,
241: VecNorm_Seq,
242: VecMDot_Seq,
243: VecMTDot_Seq,
244: VecLoad_Default,
245: VecReciprocal_Default,
246: VecConjugate_Node,
247: NULL,
248: NULL,
249: VecResetArray_Seq,
250: NULL,/*set from options */
251: NULL,
252: NULL,
253: NULL,
254: NULL,
255: NULL,
256: NULL,
257: NULL,
258: NULL,
259: NULL,
260: NULL,
261: NULL,
262: NULL,
263: NULL,
264: NULL,
265: NULL,
266: NULL,
267: VecGetArrayRead_Node,
268: VecRestoreArrayRead_Node,
269: VecStrideSubSetGather_Default,
270: VecStrideSubSetScatter_Default,
271: NULL,
272: NULL,
273: NULL,
274: NULL,
275: NULL,
276: NULL
277: };
279: /*@C
280: VecCreateNode - Creates a new parallel vector whose arrays are stored in shared memory
282: Collective on Vec
284: Input Parameter:
285: + comm - Communicator for the new Vec
286: . n - local vector length (or PETSC_DECIDE to have calculated if N is given)
287: - N - global vector length (or PETSC_DETERMINE to have calculated if n is given)
289: Output Parameter:
290: . v - new vector
292: Level: advanced
294: .seealso: VecCreate(), VecType(), VecCreateMPIWithArray(), VECNODE
295: @*/
296: PetscErrorCode VecCreateNode(MPI_Comm comm,PetscInt n,PetscInt N,Vec *v)
297: {
301: VecCreate(comm,v);
302: VecSetSizes(*v,n,N);
303: VecSetType(*v,VECNODE);
304: return(0);
305: }
307: /*MC
308: VECNODE - VECNODE = "node" - Vector type uses on-node shared memory.
310: Level: intermediate
312: Notes:
313: This vector type uses on-node shared memory.
315: .seealso: VecCreate(), VecType.
316: M*/
318: PETSC_EXTERN PetscErrorCode VecCreate_Node(Vec v)
319: {
321: Vec_Node *s;
322: PetscBool alloc=PETSC_TRUE;
323: PetscScalar *array=NULL;
324: MPI_Comm shmcomm;
325: MPI_Win win;
328: PetscNewLog(v,&s);
329: v->data = (void*)s;
330: PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));
331: v->petscnative = PETSC_FALSE;
333: PetscLayoutSetUp(v->map);
335: s->array = (PetscScalar*)array;
336: s->array_allocated = NULL;
338: if (alloc && !array) {
339: PetscInt n = v->map->n;
340: PetscMPIInt msize,mrank,disp_unit;
341: PetscInt i;
342: MPI_Aint sz;
344: MPI_Comm_split_type(PetscObjectComm((PetscObject)v),MPI_COMM_TYPE_SHARED,0,MPI_INFO_NULL,&shmcomm);
345: MPIU_Win_allocate_shared((n+1)*sizeof(PetscScalar),sizeof(PetscScalar),MPI_INFO_NULL,shmcomm,&s->array,&win);
346: PetscLogObjectMemory((PetscObject)v,(n+1)*sizeof(PetscScalar));
347: PetscArrayzero(s->array,n+1);
348: s->array++; /* create initial space for object state counter */
350: MPI_Comm_size(shmcomm,&msize);
351: MPI_Comm_rank(shmcomm,&mrank);
352: PetscMalloc1(msize,&s->winarray);
353: for (i=0; i<msize; i++) {
354: if (i != mrank) {
355: MPIU_Win_shared_query(win,i,&sz,&disp_unit,&s->winarray[i]);
356: s->winarray[i]++;
357: }
358: }
359: s->win = win;
360: s->shmcomm = shmcomm;
361: }
363: PetscObjectChangeTypeName((PetscObject)v,VECNODE);
364: return(0);
365: }
367: #endif