Move back MPI Init/Finalize to within HPC Virtualization #551

Open · wants to merge 1 commit into master
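For context, the guarded MPI lifecycle this PR moves from xacc.cpp back into the decorator can be sketched as a standalone program (a minimal sketch, assuming only a working MPI installation; this file is not part of the diff):

#include <mpi.h>
#include <cstdio>

int main(int argc, char **argv) {
  // Only initialize MPI if no other component (e.g., the host application)
  // already did, and remember whether we own the MPI lifetime.
  int initialized = 0;
  MPI_Initialized(&initialized);
  bool weInitializedMpi = false;
  if (!initialized) {
    int provided = 0;
    MPI_Init_thread(&argc, &argv, MPI_THREAD_MULTIPLE, &provided);
    weInitializedMpi = true;
    if (provided != MPI_THREAD_MULTIPLE)
      std::printf("warning: MPI_THREAD_MULTIPLE not provided\n");
  }

  // ... use MPI-backed resources here, releasing them before finalization ...

  // Only finalize if we performed the init and nobody finalized before us.
  int finalized = 0;
  MPI_Finalized(&finalized);
  if (weInitializedMpi && !finalized)
    MPI_Finalize();
  return 0;
}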
@@ -14,13 +14,29 @@
#include "hpc_virt_decorator.hpp"
#include "InstructionIterator.hpp"
#include "Utils.hpp"
#include "xacc.hpp"
#include "xacc_service.hpp"
#include <numeric>
#include "TearDown.hpp"

namespace {
static bool hpcVirtDecoratorInitializedMpi = false;
}
namespace xacc {
namespace quantum {

void HPCVirtDecorator::initialize(const HeterogeneousMap &params) {
  if (!qpuComm) {
    // Initializing MPI here
    int provided, isMPIInitialized;
    MPI_Initialized(&isMPIInitialized);
    if (!isMPIInitialized) {
      MPI_Init_thread(0, NULL, MPI_THREAD_MULTIPLE, &provided);
      hpcVirtDecoratorInitializedMpi = true;
      if (provided != MPI_THREAD_MULTIPLE) {
        xacc::warning("MPI_THREAD_MULTIPLE not provided.");
      }
    }
  }
  decoratedAccelerator->initialize(params);

  if (params.keyExists<int>("n-virtual-qpus")) {
@@ -261,6 +277,35 @@ void HPCVirtDecorator::execute(
  return;
}

void HPCVirtDecorator::finalize() {
  if (qpuComm) {
    // Make sure we explicitly release this so that MPICommProxy is destroyed
    // before framework shutdown (MPI_Finalize if needed)
    qpuComm.reset();
  }
}

class HPCVirtTearDown : public xacc::TearDown {
public:
  virtual void tearDown() override {
    auto c = xacc::getService<xacc::AcceleratorDecorator>("hpc-virtualization",
                                                          false);
    if (c) {
      auto casted =
          std::dynamic_pointer_cast<xacc::quantum::HPCVirtDecorator>(c);
      assert(casted);
      casted->finalize();
    }

    int finalized, initialized;
    MPI_Initialized(&initialized);
    if (initialized) {
      MPI_Finalized(&finalized);
      if (!finalized && hpcVirtDecoratorInitializedMpi) {
        MPI_Finalize();
      }
    }
  }
  virtual std::string name() const override { return "xacc-hpc-virt"; }
};
} // namespace quantum
} // namespace xacc

@@ -279,9 +324,9 @@ class US_ABI_LOCAL HPCVirtActivator : public BundleActivator {

  void Start(BundleContext context) {
    auto c = std::make_shared<xacc::quantum::HPCVirtDecorator>();

    context.RegisterService<xacc::AcceleratorDecorator>(c);
    context.RegisterService<xacc::Accelerator>(c);
    context.RegisterService<xacc::TearDown>(
        std::make_shared<xacc::quantum::HPCVirtTearDown>());
  }

  /**
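A hypothetical client-side view of when these hooks fire (a sketch only; xacc::getAcceleratorDecorator and the qpp backend are assumed from the broader XACC API and do not appear in this diff):

#include "xacc.hpp"

int main(int argc, char **argv) {
  xacc::Initialize(argc, argv);

  // Requesting the decorator runs HPCVirtDecorator::initialize, which now
  // performs the guarded MPI_Init_thread shown in the hunk above.
  auto qpu = xacc::getAccelerator("qpp");
  auto hpcVirt = xacc::getAcceleratorDecorator(
      "hpc-virtualization", qpu, {std::make_pair("n-virtual-qpus", 2)});

  // ... compile and execute circuits against hpcVirt ...

  // Finalize runs the registered TearDown services; xacc-hpc-virt releases
  // qpuComm first and calls MPI_Finalize only if the decorator owned the init.
  xacc::Finalize();
}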
@@ -45,8 +45,8 @@ class HPCVirtDecorator : public AcceleratorDecorator {

  const std::string name() const override { return "hpc-virtualization"; }
  const std::string description() const override { return ""; }

  ~HPCVirtDecorator() override { }
  void finalize();
  ~HPCVirtDecorator() override {};

private:
  template <typename T>
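Exposing finalize() in the header lets the TearDown service release qpuComm, and with it the underlying MPICommProxy, before any MPI_Finalize call; MPI requires communicators to be freed while MPI is still initialized, which is why the reset runs ahead of the finalize check.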
27 changes: 0 additions & 27 deletions xacc/xacc.cpp
@@ -29,10 +29,6 @@
#include <sys/stat.h>
#include "TearDown.hpp"

#ifdef MPI_ENABLED
#include "mpi.h"
#endif

using namespace cxxopts;

namespace xacc {
@@ -49,10 +45,6 @@ std::map<std::string, std::shared_ptr<AcceleratorBuffer>> allocated_buffers{};

std::string rootPathString = "";

#ifdef MPI_ENABLED
int isMPIInitialized;
#endif

void set_verbose(bool v) { verbose = v; }

int getArgc() { return argc; }
@@ -112,19 +104,6 @@ void Initialize(int arc, char **arv) {
  if (!optionExists("queue-preamble")) {
    XACCLogger::instance()->dumpQueue();
  }

  // Initializing MPI here
#ifdef MPI_ENABLED
  int provided;
  MPI_Initialized(&isMPIInitialized);
  if (!isMPIInitialized) {
    MPI_Init_thread(0, NULL, MPI_THREAD_MULTIPLE, &provided);
    if (provided != MPI_THREAD_MULTIPLE) {
      xacc::warning("MPI_THREAD_MULTIPLE not provided.");
    }
    isMPIInitialized = 1;
  }
#endif
}

void setIsPyApi() { isPyApi = true; }
@@ -856,12 +835,6 @@ void Finalize() {
  compilation_database.clear();
  allocated_buffers.clear();
  xacc::ServiceAPI_Finalize();
  // This replaces the HPC virtualization TearDown
#ifdef MPI_ENABLED
  if (isMPIInitialized) {
    MPI_Finalize();
  }
#endif
}
}
