Merge pull request #138 from lcheng9/patch-5
Fix a build issue when MFEM_USE_CUDA or MFEM_USE_HIP is enabled
sebastiangrimberg authored Nov 29, 2023
2 parents d8349dd + 7b41551 commit 00e23e4
Showing 5 changed files with 70 additions and 27 deletions.
15 changes: 11 additions & 4 deletions .github/workflows/build-and-test-linux-aarch64.yml
@@ -45,11 +45,14 @@ jobs:
# Install dependencies
apt-get update -q
apt-get install -y build-essential clang cmake curl gfortran git lld \
libmpich-dev pkg-config python3 wget
pkg-config python3 wget
# Install Julia
curl -fsSL https://install.julialang.org | sh -s -- -y
export PATH=~/.juliaup/bin:$PATH
# Install MPI
if [[ "${{ matrix.mpi }}" == 'openmpi' ]]; then
apt-get install -y openmpi-bin libopenmpi-dev
elif [[ "${{ matrix.mpi }}" == 'mpich' ]]; then
apt-get install -y mpich libmpich-dev
fi
# Install math libraries (OpenBLAS)
if [[ "${{ matrix.math-libs }}" == 'openblas' ]]; then
@@ -110,3 +113,7 @@ jobs:
make -j$NUM_PROC_BUILD
# XX TODO: Disable tests for now since Julia precompilation fails
# # Install Julia
# curl -fsSL https://install.julialang.org | sh -s -- -y
# export PATH=~/.juliaup/bin:$PATH
22 changes: 17 additions & 5 deletions .github/workflows/build-and-test-linux.yml
@@ -94,7 +94,19 @@ jobs:
with:
submodules: 'recursive'

- uses: mpi4py/setup-mpi@v1
- name: Configure Open MPI
if: matrix.mpi == 'openmpi'
run: |
sudo apt-get install -y openmpi-bin libopenmpi-dev
- name: Configure MPICH
if: matrix.mpi == 'mpich'
run: |
sudo apt-get install -y mpich libmpich-dev
- name: Configure Intel MPI
if: matrix.mpi == 'intelmpi'
uses: mpi4py/setup-mpi@v1
with:
mpi: ${{ matrix.mpi }}
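The if: conditions above key off a strategy matrix defined earlier in the workflow, which this diff does not show. A hedged sketch of the shape such a matrix takes, with values inferred only from the conditions that appear in this commit:

    # Hypothetical sketch; the real matrix is defined outside this diff.
    strategy:
      matrix:
        compiler: [gcc, clang]          # selects the CC/CXX/FC exports further down
        mpi: [openmpi, mpich, intelmpi] # selects which "Configure ... MPI" step runs
        math-libs: [openblas, aocl]     # selects the math library install step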

@@ -148,12 +160,12 @@ jobs:
elif [[ "${{ matrix.compiler }}" == 'clang' ]]; then
export CC=clang
export CXX=clang++
export FC=gfortran-11
export FC=gfortran-12
export LDFLAGS='-fuse-ld=lld'
elif [[ "${{ matrix.compiler }}" == 'gcc' ]]; then
export CC=gcc-11
export CXX=g++-11
export FC=gfortran-11
export CC=gcc-12
export CXX=g++-12
export FC=gfortran-12
fi
if [[ "${{ matrix.math-libs }}" == 'aocl' ]]; then
export AOCLROOT=/opt/AMD/aocl/aocl-linux-gcc-4.1.0/gcc
24 changes: 15 additions & 9 deletions .github/workflows/build-and-test-macos.yml
@@ -49,14 +49,20 @@ jobs:
with:
submodules: 'recursive'

- uses: mpi4py/setup-mpi@v1
with:
mpi: ${{ matrix.mpi }}

- name: Install pkg-config
- name: Install dependencies
run: |
brew install pkg-config
- name: Configure Open MPI
if: matrix.mpi == 'openmpi'
run: |
brew install openmpi
- name: Configure MPICH
if: matrix.mpi == 'mpich'
run: |
brew install mpich
- name: Install math libraries (OpenBLAS)
if: matrix.math-libs == 'openblas'
run: |
@@ -71,11 +77,11 @@
if [[ "${{ matrix.compiler }}" == 'clang' ]]; then
export CC=$(brew --prefix llvm@15)/bin/clang
export CXX=$(brew --prefix llvm@15)/bin/clang++
export FC=gfortran-11
export FC=gfortran-12
elif [[ "${{ matrix.compiler }}" == 'gcc' ]]; then
export CC=gcc-11
export CXX=g++-11
export FC=gfortran-11
export CC=gcc-12
export CXX=g++-12
export FC=gfortran-12
fi
if [[ "${{ matrix.math-libs }}" == 'openblas' ]]; then
export OPENBLAS_DIR=/usr/local/opt/openblas
20 changes: 19 additions & 1 deletion .github/workflows/spack.yml
@@ -23,7 +23,19 @@ jobs:
steps:
- uses: actions/checkout@v3

- uses: mpi4py/setup-mpi@v1
- name: Configure Open MPI
if: matrix.mpi == 'openmpi'
run: |
sudo apt-get install -y openmpi-bin libopenmpi-dev
- name: Configure MPICH
if: matrix.mpi == 'mpich'
run: |
sudo apt-get install -y mpich libmpich-dev
- name: Configure Intel MPI
if: matrix.mpi == 'intelmpi'
uses: mpi4py/setup-mpi@v1
with:
mpi: ${{ matrix.mpi }}

@@ -32,6 +44,12 @@ jobs:
run: |
sudo apt-get install -y clang lld
- name: Configure Intel oneAPI compiler
if: matrix.compiler == 'intel'
run: |
sudo apt-get install -y intel-oneapi-compiler-dpcpp-cpp \
intel-oneapi-compiler-fortran
- uses: vsoch/spack-package-action/install@main

- name: Build Palace
16 changes: 8 additions & 8 deletions palace/main.cpp
@@ -74,10 +74,10 @@ static int GetDeviceId(MPI_Comm comm)
#if defined(MFEM_USE_CUDA) || defined(MFEM_USE_HIP)
MPI_Comm node_comm;
MPI_Comm_split_type(comm, MPI_COMM_TYPE_SHARED, Mpi::Rank(comm), MPI_INFO_NULL,
node_comm);
&node_comm);
int node_size = Mpi::Rank(node_comm);
MPI_Comm_free(&node_comm);
return node_size % mfem::Device::GetNgpu();
return node_size % mfem::Device::GetNumGPU();
#else
return 0;
#endif
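For context, a minimal standalone sketch of the device-assignment pattern the fixed hunk implements, written against plain MPI only (the MFEM/Palace wrappers and the helper name are assumptions, not Palace code): ranks that share a node are mapped round-robin onto that node's GPUs, and MPI_Comm_split_type must receive the address of the output communicator, which is the '&' added above.

    #include <mpi.h>

    // Hypothetical sketch, not Palace code: map each rank on a node to a GPU id.
    static int GetDeviceIdSketch(MPI_Comm comm, int num_gpus)
    {
      // Group the ranks that share the same node (shared-memory domain).
      MPI_Comm node_comm;
      MPI_Comm_split_type(comm, MPI_COMM_TYPE_SHARED, 0, MPI_INFO_NULL, &node_comm);

      // The rank within the node decides which GPU this process uses.
      int node_rank;
      MPI_Comm_rank(node_comm, &node_rank);
      MPI_Comm_free(&node_comm);

      // Round-robin assignment over the GPUs visible on the node.
      return (num_gpus > 0) ? node_rank % num_gpus : 0;
    }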
@@ -153,15 +153,15 @@ static void PrintPalaceInfo(MPI_Comm comm, int np, int nt, mfem::Device &device)
Mpi::Print(comm, ", {:d} OpenMP thread{}", nt, (nt > 1) ? "s" : "");
}
#if defined(MFEM_USE_CUDA) || defined(MFEM_USE_HIP)
int ngpu = mfem::Device::GetNgpu();
int ngpu = mfem::Device::GetNumGPU();
#if defined(MFEM_USE_CUDA)
const char *device_name = "CUDA"
const char *device_name = "CUDA";
#else
const char *device_name = "HIP"
const char *device_name = "HIP";
#endif
Mpi::Print(comm, "\n{:d} detected {} device{}{}", ngpu, device_name,
(ngpu > 1) ? "s" : "",
mfem::Device::GetGPUAwareMPI() ? " (MPI is GPU aware)" : "");
Mpi::Print(comm, "\n{:d} detected {} device{}{}", ngpu, device_name,
(ngpu > 1) ? "s" : "",
mfem::Device::GetGPUAwareMPI() ? " (MPI is GPU aware)" : "");
#endif
std::ostringstream resource(std::stringstream::out);
resource << "\n";
