8 #include "AtomCollector.h" 11 #include <ddMd/storage/AtomStorage.h> 28 recvArrayCapacity_(256),
49 storagePtr_ = &storage;
// Set capacity of the cache used to receive atoms on the master processor.
void AtomCollector::setCapacity(int recvArrayCapacity)
{
   if (recvArrayCapacity <= 0) {
      UTIL_THROW("Attempt to set nonpositive recvArrayCapacity");
   }
   if (recvArray_.capacity() > 0) {
      UTIL_THROW("Attempt to set recvArrayCapacity after allocation");
   }
   recvArrayCapacity_ = recvArrayCapacity;
}
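// Usage note (sketch, not part of the original file): setCapacity() only takes effect
// before the receive cache is allocated, i.e. before the first call to setup() on the
// master. The variable names below are illustrative.
//
//    AtomCollector collector;
//    collector.associate(domain, storage, buffer); // Domain, AtomStorage, Buffer owned elsewhere
//    collector.setCapacity(1024);                  // optional; default capacity is 256 atoms
//    collector.setup();                            // master only: allocates the cache on first call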
// AtomCollector::setup() (excerpts): prepare the master processor for receiving.

   // Allocate the receive cache if it has not been allocated yet.
   if (recvArrayCapacity_ == 0) {
      // ... error: cache capacity was never set
   }
   recvArray_.allocate(recvArrayCapacity_);

   // Begin iteration over the atoms owned by the master processor itself.
   storagePtr_->begin(iterator_);
// Return a pointer to the next available atom, or null when all atoms on all
// processors have been returned. Called only on the master processor.
Atom* AtomCollector::nextPtr()
{
   // Precondition
   if (domainPtr_ == 0) {
      UTIL_THROW("AtomCollector has not been initialized");
   }

   // While source_ is still the master (rank 0), return its own atoms first.
   Atom* ptr;
   if (source_ == 0) {
      if (iterator_.notEnd()) {
         ptr = iterator_.get();
         ++iterator_;
         return ptr;
      }
      // ... local atoms exhausted: reset counters and mark this source complete
   }

   // Refill the receive cache whenever it has been fully consumed.
   while (recvArrayId_ == recvArraySize_) {

      // If the receive buffer is empty, request a buffer from the next source.
      if (recvBufferSize_ == 0) {
         // ... when processor source_ is complete, advance to the next one
         if (source_ == domainPtr_->grid().size()) {
            return 0;   // every processor has been drained: signal completion
         }
         // Request and receive a buffer from processor source_.
         assert(recvBufferSize_ == 0);
         assert(!isComplete_);
         int message = source_;
         // ... send `message` to processor source_, then receive its buffer
         recvBufferSize_ = bufferPtr_->recvSize();
      }

      // Unpack atoms from the receive buffer into the recvArray_ cache.
      if (recvBufferSize_ > 0) {
         // ... begin the receive block
         if (recvBufferSize_ != bufferPtr_->recvSize()) {
            UTIL_THROW("Inconsistent buffer receive counters");
         }
         while (bufferPtr_->recvSize() > 0
                && recvArraySize_ < recvArray_.capacity())
         {
            recvArray_[recvArraySize_].unpackAtom(*bufferPtr_);
            ++recvArraySize_;
            --recvBufferSize_;
            if (recvBufferSize_ != bufferPtr_->recvSize()) {
               UTIL_THROW("Inconsistent buffer receive counters");
            }
         }
      }
   }

   // Return the next atom from the cache.
   ++recvArrayId_;
   return &recvArray_[recvArrayId_ - 1];
}
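// Usage note (sketch, not part of the original file): on the master, every atom in the
// simulation is consumed by calling nextPtr() repeatedly until it returns null.
//
//    collector.setup();
//    Atom* atomPtr = collector.nextPtr();
//    while (atomPtr) {
//       // ... process *atomPtr (placeholder for output or analysis)
//       atomPtr = collector.nextPtr();
//    }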
// Send all atoms to the master; called on every processor except the master.
void AtomCollector::send()
{
   if (domainPtr_->isMaster()) {
      UTIL_THROW("AtomCollector::send() called from master node.");
   }

   // Initialize the iterator over local atoms.
   storagePtr_->begin(iterator_);

   isComplete_ = false;
   while (!isComplete_) {

      // Wait for a request from the master (blocking receive).
      int message;
      int tag = domainPtr_->communicator().Get_rank();
      domainPtr_->communicator().Recv(&message, 1, MPI::INT, 0, tag);

      // Pack as many local atoms as fit into one buffer.
      int recvArraySize_ = 0;   // local counter of atoms packed into this buffer
      isComplete_ = iterator_.isEnd();
      // ... clear the send buffer and begin an atom send block
      while (recvArraySize_ < bufferPtr_->atomCapacity() && !isComplete_) {
         iterator_->packAtom(*bufferPtr_);
         ++recvArraySize_;
         ++iterator_;
         isComplete_ = iterator_.isEnd();
      }
      // ... end the send block and send the buffer to the master (rank 0)
   }
}
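// Usage note (sketch, not part of the original file): nextPtr() and send() form a paired
// protocol, so the master loops over nextPtr() while every other processor calls send()
// exactly once. An outline, with illustrative variable names:
//
//    if (domain.isMaster()) {
//       collector.setup();
//       for (Atom* ptr = collector.nextPtr(); ptr; ptr = collector.nextPtr()) {
//          // ... consume *ptr
//       }
//    } else {
//       collector.send();   // blocks until the master has drained this processor's atoms
//    }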
Referenced classes, namespaces, and member functions (brief documentation):

bool isEnd() const - Is the current pointer at the end of the PArray?
bool beginRecvBlock() - Begin to receive a block from the recv buffer.
void associate(Domain &domain, AtomStorage &storage, Buffer &buffer) - Initialize pointers to associated objects.
void allocate(int capacity) - Allocate memory on the heap.
~AtomCollector() - Destructor.
bool isInitialized() const - Has this Domain been initialized by calling readParam?
void beginSendBlock(int sendType) - Initialize a data block.
void send() - Send all atoms to the master.
void setCapacity(int recvArrayCapacity) - Set cache capacity on master processor.
Atom - A point particle in an MD simulation.
DdMd (namespace) - Parallel domain decomposition (DD) MD simulation.
MPI::Intracomm & communicator() const - Return Cartesian communicator by reference.
int size() const - Get total number of grid points.
#define UTIL_THROW(msg) - Macro for throwing an Exception, reporting function, file and line number.
bool notEnd() const - Is the current pointer not at the end of the PArray?
Data * get() const - Return a pointer to the current data.
Util (namespace) - Utility classes for scientific computation.
Atom * nextPtr() - Return a pointer to the next available atom, or null.
int recvSize() const - Number of unread items left in current recv block.
AtomStorage - A container for all the atoms and ghost atoms on this processor.
void send(MPI::Intracomm &comm, int dest) - Send a complete buffer.
const Grid & grid() const - Return processor Grid by const reference.
bool isMaster() const - Is this the master processor (gridRank == 0)?
Buffer - Buffer for interprocessor communication.
Domain - Decomposition of the system into domains associated with processors.
void recv(MPI::Intracomm &comm, int source) - Receive a buffer.
AtomCollector() - Constructor.
void endSendBlock(bool isComplete=true) - Finalize a block in the send buffer.
void setClassName(const char *className) - Set class name string.
bool isInitialized() const - Has this Buffer been initialized?
void setup() - Setup master processor for receiving.
int capacity() const - Return allocated size.
void clearSendBuffer() - Clear the send buffer.
void endRecvBlock() - Finish processing a block in the recv buffer.
void begin(AtomIterator &iterator) - Set iterator to beginning of the set of atoms.