Skip to content

Commit 1455144 — Merge pull request #4374 from lindsayad/petsc-vec-mods: "Some PETSc vector modifications" (2 parents: 6f21dc9 + 879862a)

2 files changed

Lines changed: 106 additions & 115 deletions

File tree

include/numerics/petsc_vector.h

Lines changed: 17 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -121,7 +121,11 @@ class PetscVector final : public NumericVector<T>
121121

122122
/**
123123
* Copy assignment operator.
124-
* Calls VecCopy after performing various checks.
124+
* Supported assignments based on ParallelType combination (note that we lump ghosted into
125+
* parallel for this method documentation):
126+
* - Assign from parallel to parallel
127+
* - Assign from serial to serial
128+
* - Assign from parallel to serial
125129
* \returns A reference to *this as the derived type.
126130
*/
127131
PetscVector<T> & operator= (const PetscVector<T> & v);
@@ -703,7 +707,11 @@ void PetscVector<T>::init (const numeric_index_type n,
703707
if (this->initialized())
704708
this->clear();
705709

706-
if (ptype == AUTOMATIC)
710+
if (this->comm().size() == 1)
711+
// This can help with some branching decisions and is also consistent with what PETSc does... a
712+
// single rank Vec is going to be a sequential vector
713+
this->_type = SERIAL;
714+
else if (ptype == AUTOMATIC)
707715
{
708716
if (n == n_local)
709717
this->_type = SERIAL;
@@ -773,6 +781,13 @@ void PetscVector<T>::init (const numeric_index_type n,
773781
{
774782
parallel_object_only();
775783

784+
if (this->comm().size() == 1)
785+
{
786+
libmesh_assert(ghost.empty());
787+
this->init(n, n_local, fast, SERIAL);
788+
return;
789+
}
790+
776791
PetscInt petsc_n=static_cast<PetscInt>(n);
777792
PetscInt petsc_n_local=static_cast<PetscInt>(n_local);
778793
PetscInt petsc_n_ghost=static_cast<PetscInt>(ghost.size());

src/numerics/petsc_vector.C

Lines changed: 89 additions & 113 deletions
Original file line numberDiff line numberDiff line change
@@ -335,7 +335,8 @@ void PetscVector<T>::add (const T a_in, const NumericVector<T> & v_in)
335335

336336
LibmeshPetscCall(VecAXPY(_vec, a, v->vec()));
337337

338-
libmesh_assert(this->comm().verify(int(this->type())));
338+
libmesh_assert(this->comm().verify(
339+
static_cast<typename std::underlying_type<ParallelType>::type>(this->type())));
339340

340341
if (this->type() == GHOSTED)
341342
VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
@@ -375,7 +376,8 @@ void PetscVector<T>::scale (const T factor_in)
375376

376377
LibmeshPetscCall(VecScale(_vec, factor));
377378

378-
libmesh_assert(this->comm().verify(int(this->type())));
379+
libmesh_assert(this->comm().verify(
380+
static_cast<typename std::underlying_type<ParallelType>::type>(this->type())));
379381

380382
if (this->type() == GHOSTED)
381383
VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
@@ -416,7 +418,8 @@ void PetscVector<T>::abs()
416418

417419
LibmeshPetscCall(VecAbs(_vec));
418420

419-
libmesh_assert(this->comm().verify(int(this->type())));
421+
libmesh_assert(this->comm().verify(
422+
static_cast<typename std::underlying_type<ParallelType>::type>(this->type())));
420423

421424
if (this->type() == GHOSTED)
422425
VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
@@ -480,7 +483,8 @@ PetscVector<T>::operator = (const T s_in)
480483
{
481484
LibmeshPetscCall(VecSet(_vec, s));
482485

483-
libmesh_assert(this->comm().verify(int(this->type())));
486+
libmesh_assert(this->comm().verify(
487+
static_cast<typename std::underlying_type<ParallelType>::type>(this->type())));
484488

485489
if (this->type() == GHOSTED)
486490
VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
@@ -511,19 +515,59 @@ template <typename T>
511515
PetscVector<T> &
512516
PetscVector<T>::operator = (const PetscVector<T> & v)
513517
{
518+
enum AssignmentType { ParallelToSerial, SerialToParallel, SameToSame, Error };
519+
514520
parallel_object_only();
521+
libmesh_assert(this->comm().verify(
522+
static_cast<typename std::underlying_type<ParallelType>::type>(this->type())));
523+
libmesh_assert(this->comm().verify(
524+
static_cast<typename std::underlying_type<ParallelType>::type>(v.type())));
525+
526+
if (this == &v)
527+
return *this;
515528

516529
this->_restore_array();
517530
v._restore_array();
518531

519532
libmesh_assert_equal_to (this->size(), v.size());
520-
libmesh_assert_equal_to (this->local_size(), v.local_size());
521533
libmesh_assert (v.closed());
522534

535+
AssignmentType assign_type = Error;
536+
if (this->type() == SERIAL && v.type() != SERIAL)
537+
assign_type = ParallelToSerial;
538+
else if (this->type() != SERIAL && v.type() == SERIAL)
539+
assign_type = SerialToParallel;
540+
else if (this->local_size() == v.local_size())
541+
assign_type = SameToSame;
542+
543+
libmesh_assert(this->comm().verify(
544+
static_cast<typename std::underlying_type<AssignmentType>::type>(assign_type)));
523545

524-
LibmeshPetscCall(VecCopy (v._vec, this->_vec));
546+
switch (assign_type)
547+
{
548+
case ParallelToSerial:
549+
{
550+
// scatter from parallel to serial
551+
libmesh_assert(v.comm().size() > 1);
552+
WrappedPetsc<VecScatter> scatter;
553+
LibmeshPetscCall(VecScatterCreateToAll(v._vec, scatter.get(), nullptr));
554+
VecScatterBeginEnd(v.comm(), scatter, v._vec, _vec, INSERT_VALUES, SCATTER_FORWARD);
555+
break;
556+
}
557+
case SameToSame:
558+
// serial to serial or parallel to parallel
559+
LibmeshPetscCall(VecCopy(v._vec, _vec));
560+
break;
561+
case SerialToParallel:
562+
libmesh_not_implemented_msg(
563+
"Scattering from a serial vector on every rank to a parallel vector is not behavior we "
564+
"define because we do not verify the serial vector is the same on each rank");
565+
default:
566+
libmesh_error_msg("Unhandled vector combination");
567+
}
525568

526-
libmesh_assert(this->comm().verify(int(this->type())));
569+
libmesh_assert(this->comm().verify(
570+
static_cast<typename std::underlying_type<ParallelType>::type>(this->type())));
527571

528572
if (this->type() == GHOSTED)
529573
VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
@@ -579,99 +623,21 @@ void PetscVector<T>::localize (NumericVector<T> & v_local_in) const
579623
{
580624
parallel_object_only();
581625

582-
this->_restore_array();
583-
584-
// Make sure the NumericVector passed in is really a PetscVector
585-
PetscVector<T> * v_local = cast_ptr<PetscVector<T> *>(&v_local_in);
586-
587-
libmesh_assert(v_local);
588-
// v_local_in should be closed
589-
libmesh_assert(v_local->closed());
590-
libmesh_assert_equal_to (v_local->size(), this->size());
591-
// 1) v_local_in is a large vector to hold the whole world
592-
// 2) v_local_in is a ghosted vector
593-
// 3) v_local_in is a parallel vector
594-
// Cases 2) and 3) should be scalable
595-
libmesh_assert(this->size()==v_local->local_size() || this->local_size()==v_local->local_size());
596-
626+
libmesh_assert(this->comm().verify(
627+
static_cast<typename std::underlying_type<ParallelType>::type>(this->type())));
628+
libmesh_assert(this->comm().verify(
629+
static_cast<typename std::underlying_type<ParallelType>::type>(v_local_in.type())));
597630

598-
if (v_local->type() == SERIAL && this->size() == v_local->local_size())
599-
{
600-
WrappedPetsc<VecScatter> scatter;
601-
LibmeshPetscCall(VecScatterCreateToAll(_vec, scatter.get(), nullptr));
602-
VecScatterBeginEnd(this->comm(), scatter, _vec, v_local->_vec, INSERT_VALUES, SCATTER_FORWARD);
603-
}
604-
// Two vectors have the same size, and we should just do a simple copy.
605-
// v_local could be either a parallel or ghosted vector
606-
else if (this->local_size() == v_local->local_size())
607-
LibmeshPetscCall(VecCopy(_vec,v_local->_vec));
608-
else
609-
libmesh_error_msg("Vectors are inconsistent");
610-
611-
// Make sure ghost dofs are up to date
612-
// We do not call "close" here to save a global reduction
613-
if (v_local->type() == GHOSTED)
614-
VecGhostUpdateBeginEnd(this->comm(), v_local->_vec, INSERT_VALUES, SCATTER_FORWARD);
631+
v_local_in = *this;
615632
}
616633

617634

618635

619636
template <typename T>
620637
void PetscVector<T>::localize (NumericVector<T> & v_local_in,
621-
const std::vector<numeric_index_type> & send_list) const
638+
const std::vector<numeric_index_type> & /*send_list*/) const
622639
{
623-
parallel_object_only();
624-
625-
libmesh_assert(this->comm().verify(int(this->type())));
626-
libmesh_assert(this->comm().verify(int(v_local_in.type())));
627-
628-
// FIXME: Workaround for a strange bug at large-scale.
629-
// If we have ghosting, PETSc lets us just copy the solution, and
630-
// doing so avoids a segfault?
631-
if (v_local_in.type() == GHOSTED &&
632-
this->type() == PARALLEL)
633-
{
634-
v_local_in = *this;
635-
return;
636-
}
637-
638-
// Normal code path begins here
639-
640-
this->_restore_array();
641-
642-
// Make sure the NumericVector passed in is really a PetscVector
643-
PetscVector<T> * v_local = cast_ptr<PetscVector<T> *>(&v_local_in);
644-
645-
libmesh_assert(v_local);
646-
libmesh_assert_equal_to (v_local->size(), this->size());
647-
libmesh_assert_less_equal (send_list.size(), v_local->size());
648-
649-
const numeric_index_type n_sl =
650-
cast_int<numeric_index_type>(send_list.size());
651-
652-
std::vector<PetscInt> idx(n_sl + this->local_size());
653-
for (numeric_index_type i=0; i<n_sl; i++)
654-
idx[i] = static_cast<PetscInt>(send_list[i]);
655-
for (auto i : make_range(this->local_size()))
656-
idx[n_sl+i] = i + this->first_local_index();
657-
658-
// Create the index set & scatter objects
659-
WrappedPetsc<IS> is;
660-
PetscInt * idxptr = idx.empty() ? nullptr : idx.data();
661-
LibmeshPetscCall(ISCreateGeneral(this->comm().get(), n_sl+this->local_size(),
662-
idxptr, PETSC_USE_POINTER, is.get()));
663-
664-
WrappedPetsc<VecScatter> scatter;
665-
LibmeshPetscCall(VecScatterCreate(_vec, is,
666-
v_local->_vec, is,
667-
scatter.get()));
668-
669-
// Perform the scatter
670-
VecScatterBeginEnd(this->comm(), scatter, _vec, v_local->_vec, INSERT_VALUES, SCATTER_FORWARD);
671-
672-
// Make sure ghost dofs are up to date
673-
if (v_local->type() == GHOSTED)
674-
v_local->close();
640+
this->localize(v_local_in);
675641
}
676642

677643

@@ -1000,7 +966,8 @@ void PetscVector<T>::pointwise_mult (const NumericVector<T> & vec1,
1000966
// Call PETSc function.
1001967
LibmeshPetscCall(VecPointwiseMult(_vec, vec1_petsc->vec(), vec2_petsc->vec()));
1002968

1003-
libmesh_assert(this->comm().verify(int(this->type())));
969+
libmesh_assert(this->comm().verify(
970+
static_cast<typename std::underlying_type<ParallelType>::type>(this->type())));
1004971

1005972
if (this->type() == GHOSTED)
1006973
VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
@@ -1023,7 +990,8 @@ void PetscVector<T>::pointwise_divide (const NumericVector<T> & vec1,
1023990
// Call PETSc function.
1024991
LibmeshPetscCall(VecPointwiseDivide(_vec, vec1_petsc->vec(), vec2_petsc->vec()));
1025992

1026-
libmesh_assert(this->comm().verify(int(this->type())));
993+
libmesh_assert(this->comm().verify(
994+
static_cast<typename std::underlying_type<ParallelType>::type>(this->type())));
1027995

1028996
if (this->type() == GHOSTED)
1029997
VecGhostUpdateBeginEnd(this->comm(), _vec, INSERT_VALUES, SCATTER_FORWARD);
@@ -1103,29 +1071,37 @@ void PetscVector<T>::create_subvector(NumericVector<T> & subvector,
11031071
// If not, we use the appropriate PETSc routines to initialize it.
11041072
if (!petsc_subvector->initialized())
11051073
{
1106-
libmesh_assert(petsc_subvector->_type == AUTOMATIC || petsc_subvector->_type == PARALLEL);
1107-
1108-
if (supplying_global_rows)
1109-
// Initialize the petsc_subvector to have enough space to hold
1110-
// the entries which will be scattered into it. Note: such an
1111-
// init() function (where we let PETSc decide the number of local
1112-
// entries) is not currently offered by the PetscVector
1113-
// class. Should we differentiate here between sequential and
1114-
// parallel vector creation based on this->n_processors() ?
1115-
LibmeshPetscCall(VecCreateMPI(this->comm().get(),
1116-
PETSC_DECIDE, // n_local
1117-
cast_int<PetscInt>(rows.size()), // n_global
1118-
&(petsc_subvector->_vec)));
1074+
if (this->type() == SERIAL)
1075+
{
1076+
libmesh_assert(this->comm().verify(rows.size()));
1077+
LibmeshPetscCall(VecCreateSeq(this->comm().get(), rows.size(), &(petsc_subvector->_vec)));
1078+
petsc_subvector->_type = SERIAL;
1079+
}
11191080
else
1120-
LibmeshPetscCall(VecCreateMPI(this->comm().get(),
1121-
cast_int<PetscInt>(rows.size()),
1122-
PETSC_DETERMINE,
1123-
&(petsc_subvector->_vec)));
1081+
{
1082+
if (supplying_global_rows)
1083+
// Initialize the petsc_subvector to have enough space to hold
1084+
// the entries which will be scattered into it. Note: such an
1085+
// init() function (where we let PETSc decide the number of local
1086+
// entries) is not currently offered by the PetscVector
1087+
// class. Should we differentiate here between sequential and
1088+
// parallel vector creation based on this->n_processors() ?
1089+
LibmeshPetscCall(VecCreateMPI(this->comm().get(),
1090+
PETSC_DECIDE, // n_local
1091+
cast_int<PetscInt>(rows.size()), // n_global
1092+
&(petsc_subvector->_vec)));
1093+
else
1094+
LibmeshPetscCall(VecCreateMPI(this->comm().get(),
1095+
cast_int<PetscInt>(rows.size()),
1096+
PETSC_DETERMINE,
1097+
&(petsc_subvector->_vec)));
1098+
1099+
// We created a parallel vector
1100+
petsc_subvector->_type = PARALLEL;
1101+
}
11241102

11251103
LibmeshPetscCall(VecSetFromOptions (petsc_subvector->_vec));
11261104

1127-
// We created a parallel vector
1128-
petsc_subvector->_type = PARALLEL;
11291105

11301106
// Mark the subvector as initialized
11311107
petsc_subvector->_is_initialized = true;

0 commit comments

Comments (0)