From 9bf02d01741a64aa7848138af2a629c54b972c3f Mon Sep 17 00:00:00 2001 From: David Minton Date: Tue, 6 Jun 2023 12:51:42 -0400 Subject: [PATCH 01/25] Fixed compiler flag typos that prevented the -standard-semantics flag from being set. Also moved the standards compliance warnings to the DEBUG build --- Dockerfile | 68 ++++++++++++----------------- cmake/Modules/SetFortranFlags.cmake | 14 +++--- 2 files changed, 35 insertions(+), 47 deletions(-) diff --git a/Dockerfile b/Dockerfile index 5b0e942a1..cb81fb7f6 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,24 +2,18 @@ FROM ubuntu:20.04 as build # kick everything off RUN apt-get update && apt-get upgrade -y && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ - ca-certificates curl git wget gpg-agent software-properties-common build-essential gnupg pkg-config && \ - rm -rf /var/lib/apt/lists/* - -# Get CMAKE and install it -RUN mkdir -p cmake/build && \ - cd cmake/build && \ - curl -LO https://github.com/Kitware/CMake/releases/download/v3.26.2/cmake-3.26.2-linux-x86_64.sh && \ - /bin/bash cmake-3.26.2-linux-x86_64.sh --prefix=/usr/local --skip-license - -# Get the Intel compilers -# download the key to system keyring -RUN wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB \ -| gpg --dearmor | tee /usr/share/keyrings/oneapi-archive-keyring.gpg > /dev/null -# add signed entry to apt sources and configure the APT client to use Intel repository: -RUN echo "deb [signed-by=/usr/share/keyrings/oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main" | tee /etc/apt/sources.list.d/oneAPI.list -RUN apt-get -y update && apt-get upgrade -y -RUN apt-get install -y intel-hpckit + DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ + ca-certificates curl git wget gpg-agent software-properties-common build-essential gnupg pkg-config && \ + rm -rf /var/lib/apt/lists/* && \ + mkdir -p cmake/build && \ + cd cmake/build && \ + curl -LO https://github.com/Kitware/CMake/releases/download/v3.26.2/cmake-3.26.2-linux-x86_64.sh && \ + /bin/bash cmake-3.26.2-linux-x86_64.sh --prefix=/usr/local --skip-license && \ + wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB \ + | gpg --dearmor | tee /usr/share/keyrings/oneapi-archive-keyring.gpg > /dev/null && \ + echo "deb [signed-by=/usr/share/keyrings/oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main" | tee /etc/apt/sources.list.d/oneAPI.list && \ + apt-get -y update && apt-get upgrade -y && \ + apt-get install -y intel-hpckit && \ # Set Intel compiler environment variables ENV INTEL_DIR="/opt/intel/oneapi" @@ -94,24 +88,19 @@ ENV HDF5_INCLUDE_DIR="${HDF5_ROOT}/include" ENV HDF5_PLUGIN_PATH="${HDF5_LIBDIR}/plugin" # Get the HDF5, NetCDF-C and NetCDF-Fortran libraries -RUN wget -qO- https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.14/hdf5-1.14.1/bin/unix/hdf5-1.14.1-2-Std-ubuntu2004_64-Intel.tar.gz | tar xvz -RUN wget -qO- https://github.com/Unidata/netcdf-c/archive/refs/tags/v4.9.2.tar.gz | tar xvz -RUN wget -qO- https://github.com/Unidata/netcdf-fortran/archive/refs/tags/v4.6.1.tar.gz | tar xvz -RUN wget -qO- https://www.zlib.net/zlib-1.2.13.tar.gz | tar xvz - -# Install dependencies -RUN apt-get update && apt-get upgrade -y && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ - libxml2-dev libcurl4-gnutls-dev libzstd-dev libbz2-dev libaec-dev m4 && \ - rm -rf /var/lib/apt/lists/* - -# Install HDF5 -RUN cd hdf && \ - 
./HDF5-1.14.1-Linux.sh --skip-license && \ - cp -R HDF_Group/HDF5/1.14.1/lib/*.a ${HDF5_ROOT}/lib/ && \ - cp -R HDF_Group/HDF5/1.14.1/include/* ${HDF5_ROOT}/include/ - -RUN cp zlib-1.2.13/zlib.h ${HDF5_INCLUDE_DIR}/ +RUN wget -qO- https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.14/hdf5-1.14.1/bin/unix/hdf5-1.14.1-2-Std-ubuntu2004_64-Intel.tar.gz | tar xvz && \ + wget -qO- https://github.com/Unidata/netcdf-c/archive/refs/tags/v4.9.2.tar.gz | tar xvz && \ + wget -qO- https://github.com/Unidata/netcdf-fortran/archive/refs/tags/v4.6.1.tar.gz | tar xvz && \ + wget -qO- https://www.zlib.net/zlib-1.2.13.tar.gz | tar xvz && \ + apt-get update && apt-get upgrade -y && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ + libxml2-dev libcurl4-gnutls-dev libzstd-dev libbz2-dev libaec-dev m4 && \ + rm -rf /var/lib/apt/lists/* && \ + cd hdf && \ + ./HDF5-1.14.1-Linux.sh --skip-license && \ + cp -R HDF_Group/HDF5/1.14.1/lib/*.a ${HDF5_ROOT}/lib/ && \ + cp -R HDF_Group/HDF5/1.14.1/include/* ${HDF5_ROOT}/include/ && \ + cp zlib-1.2.13/zlib.h ${HDF5_INCLUDE_DIR}/ ENV LD_LIBRARY_PATH="/lib/x86_64-linux-gnu:${LD_LIBRARY_PATH}" ENV LDFLAGS="-static-intel -lhdf5_hl -lhdf5 -lsz -lm -lz -lzstd -lbz2 -lcurl -lxml2" @@ -126,7 +115,7 @@ RUN cd netcdf-c-4.9.2 && \ # NetCDF-Fortran library ENV F77=${FC} ENV CFLAGS="-fPIC" -ENV FCFLAGS=${CFLAGS} +ENV FCFLAGS="${CFLAGS} -standard-semantics" ENV FFLAGS=${CFLAGS} ENV CPPFLAGS="-I${INSTALL_DIR}/include -I/usr/include -I/usr/include/x86_64-linux-gnu/curl" ENV LDFLAGS="-static-intel" @@ -161,9 +150,8 @@ RUN echo 'find_path(NETCDF_INCLUDE_DIR NAMES netcdf.mod HINTS ENV NETCDF_FORTRAN 'set(NETCDF_FOUND TRUE)\n' \ 'set(NETCDF_INCLUDE_DIRS ${NETCDF_INCLUDE_DIR})\n' \ 'set(NETCDF_LIBRARIES ${NETCDF_FORTRAN_LIBRARY} ${NETCDF_LIBRARY} ${HDF5_HL_LIBRARY} ${HDF5_LIBRARY} ${SZ_LIBRARY} ${Z_LIBRARY} ${ZSTD_LIBRARY} ${BZ2_LIBRARY} ${CURL_LIBRARY} ${XML2_LIBRARY} )\n' \ - 'mark_as_advanced(NETCDF_LIBRARY NETCDF_FORTRAN_LIBRARY NETCDF_INCLUDE_DIR)\n' > /swiftest/cmake/Modules/FindNETCDF.cmake - -RUN cd swiftest && \ + 'mark_as_advanced(NETCDF_LIBRARY NETCDF_FORTRAN_LIBRARY NETCDF_INCLUDE_DIR)\n' > /swiftest/cmake/Modules/FindNETCDF.cmake && \ + cd swiftest && \ cmake -S . -B build -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" -DCONTAINERIZE=ON -DUSE_COARRAY=OFF -DCMAKE_BUILD_TYPE=DEBUG -DBUILD_SHARED_LIBS=OFF &&\ cmake --build build --verbose && \ cmake --install build diff --git a/cmake/Modules/SetFortranFlags.cmake b/cmake/Modules/SetFortranFlags.cmake index cb5a40768..e78782ef3 100644 --- a/cmake/Modules/SetFortranFlags.cmake +++ b/cmake/Modules/SetFortranFlags.cmake @@ -83,17 +83,11 @@ SET_COMPILE_FLAG(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS}" # Determines whether the current Fortran Standard behavior of the compiler is fully implemented. -SET_COMPILE_FLAG(CMAKE_Fortran_Flags "${CMAKE_Fortran_FLAGS}" +SET_COMPILE_FLAG(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS}" Fortran "-standard-semantics" # Intel "/standard-semantics" # Intel Windows ) -# Tells the compiler to issue compile-time messages for nonstandard language elements (Fortran 2018). 
-SET_COMPILE_FLAG(CMAKE_Fortran_Flags "${CMAKE_Fortran_FLAGS}" - Fortran "-stand f18" # Intel - "/stand:f18" # Intel Windows - "-fstd=f2018" # GNU - ) # Allows for lines longer than 80 characters without truncation SET_COMPILE_FLAG(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS}" @@ -215,6 +209,12 @@ SET_COMPILE_FLAG(CMAKE_Fortran_FLAGS_DEBUG "${CMAKE_Fortran_FLAGS_DEBUG}" Fortran "-Wno-unused-dummy-argument" # GNU ) +# Tells the compiler to issue compile-time messages for nonstandard language elements (Fortran 2018). +SET_COMPILE_FLAG(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS}" + Fortran "-stand f18" # Intel + "/stand:f18" # Intel Windows + "-fstd=f2018" # GNU + ) # Traceback SET_COMPILE_FLAG(CMAKE_Fortran_FLAGS_DEBUG "${CMAKE_Fortran_FLAGS_DEBUG}" From 7f9cacbe8549d1305c057f0babce705df0d25b84 Mon Sep 17 00:00:00 2001 From: David Minton Date: Tue, 6 Jun 2023 12:52:01 -0400 Subject: [PATCH 02/25] Set debug flags correctly for the standards check --- cmake/Modules/SetFortranFlags.cmake | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmake/Modules/SetFortranFlags.cmake b/cmake/Modules/SetFortranFlags.cmake index e78782ef3..550738f13 100644 --- a/cmake/Modules/SetFortranFlags.cmake +++ b/cmake/Modules/SetFortranFlags.cmake @@ -210,7 +210,7 @@ SET_COMPILE_FLAG(CMAKE_Fortran_FLAGS_DEBUG "${CMAKE_Fortran_FLAGS_DEBUG}" ) # Tells the compiler to issue compile-time messages for nonstandard language elements (Fortran 2018). -SET_COMPILE_FLAG(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS}" +SET_COMPILE_FLAG(CMAKE_Fortran_FLAGS_DEBUG "${CMAKE_Fortran_FLAGS_DEBUG}" Fortran "-stand f18" # Intel "/stand:f18" # Intel Windows "-fstd=f2018" # GNU From ed4b2f0610b95fd893de3e81f6769cb532e441eb Mon Sep 17 00:00:00 2001 From: David Minton Date: Tue, 6 Jun 2023 12:54:11 -0400 Subject: [PATCH 03/25] Fixed issues that were causing do concurrent to give bad results when OMP_NUM_THREADS>1 when the F2018 locality-spec is used. 
--- src/helio/helio_gr.f90 | 16 ++++++++-------- src/swiftest/swiftest_gr.f90 | 26 +++++++++++++++----------- src/swiftest/swiftest_module.f90 | 10 +++++----- src/swiftest/swiftest_orbel.f90 | 32 ++++++++++++++++++-------------- src/swiftest/swiftest_util.f90 | 6 ++++-- src/whm/whm_gr.f90 | 16 ++++++++-------- 6 files changed, 58 insertions(+), 48 deletions(-) diff --git a/src/helio/helio_gr.f90 b/src/helio/helio_gr.f90 index 6de300cae..6b43714b7 100644 --- a/src/helio/helio_gr.f90 +++ b/src/helio/helio_gr.f90 @@ -75,14 +75,14 @@ pure module subroutine helio_gr_p4_pl(self, nbody_system, param, dt) if (self%nbody == 0) return - associate(pl => self) + associate(lmask => self%lmask, rh => self%rh, vb => self%vb, inv_c2 => param%inv_c2) npl = self%nbody #ifdef DOCONLOC - do concurrent(i = 1:npl, pl%lmask(i)) shared(param,pl,dt) + do concurrent(i = 1:npl, lmask(i)) shared(inv_c2, lmask, rh, vb, dt) #else - do concurrent(i = 1:npl, pl%lmask(i)) + do concurrent(i = 1:npl, lmask(i)) #endif - call swiftest_gr_p4_pos_kick(param, pl%rh(:, i), pl%vb(:, i), dt) + call swiftest_gr_p4_pos_kick(inv_c2, rh(1,i), rh(2,i), rh(3,i), vb(1,i), vb(2,i), vb(3,i), dt) end do end associate @@ -108,14 +108,14 @@ pure module subroutine helio_gr_p4_tp(self, nbody_system, param, dt) if (self%nbody == 0) return - associate(tp => self) + associate(rh => self%rh, vb => self%vb, lmask => self%lmask, inv_c2 => param%inv_c2) ntp = self%nbody #ifdef DOCONLOC - do concurrent(i = 1:ntp, tp%lmask(i)) shared(param,tp,dt) + do concurrent(i = 1:ntp, lmask(i)) shared(inv_c2, lmask, rh, vb, dt) #else - do concurrent(i = 1:ntp, tp%lmask(i)) + do concurrent(i = 1:ntp, lmask(i)) #endif - call swiftest_gr_p4_pos_kick(param, tp%rh(:, i), tp%vb(:, i), dt) + call swiftest_gr_p4_pos_kick(inv_c2, rh(1,i), rh(2,i), rh(3,i), vb(1,i), vb(2,i), vb(3,i), dt) end do end associate diff --git a/src/swiftest/swiftest_gr.f90 b/src/swiftest/swiftest_gr.f90 index 1985f6dd1..083e5de1b 100644 --- a/src/swiftest/swiftest_gr.f90 +++ b/src/swiftest/swiftest_gr.f90 @@ -87,7 +87,7 @@ pure module subroutine swiftest_gr_kick_getacch(mu, x, lmask, n, inv_c2, agr) end subroutine swiftest_gr_kick_getacch - pure module subroutine swiftest_gr_p4_pos_kick(param, x, v, dt) + pure elemental module subroutine swiftest_gr_p4_pos_kick(inv_c2, rx, ry, rz, vx, vy, vz, dt) !! author: David A. Minton !! !! Position kick due to p**4 term in the post-Newtonian correction @@ -100,17 +100,21 @@ pure module subroutine swiftest_gr_p4_pos_kick(param, x, v, dt) !! Adapted from David A. Minton's Swifter routine gr_whm_p4.f90 implicit none ! Arguments - class(swiftest_parameters), intent(in) :: param !! Current run configuration parameters - real(DP), dimension(:), intent(inout) :: x !! Position vector - real(DP), dimension(:), intent(in) :: v !! Velocity vector - real(DP), intent(in) :: dt !! Step size + real(DP), intent(in) :: inv_c2 !! One over speed of light squared (1/c**2) + real(DP), intent(inout) :: rx, ry, rz !! Position vector + real(DP), intent(in) :: vx, vy, vz !! Velocity vector + real(DP), intent(in) :: dt !! Step size ! 
Internals - real(DP), dimension(NDIM) :: dr - real(DP) :: vmag2 - - vmag2 = dot_product(v(:), v(:)) - dr(:) = -2 * param%inv_c2 * vmag2 * v(:) - x(:) = x(:) + dr(:) * dt + real(DP) :: drx, dry, drz + real(DP) :: vmag2 + + vmag2 = vx*vx + vy*vy + vz*vz + drx = -2 * inv_c2 * vmag2 * vx + dry = -2 * inv_c2 * vmag2 * vy + drz = -2 * inv_c2 * vmag2 * vz + rx = rx + drx * dt + ry = ry + dry * dt + rz = rz + drz * dt return end subroutine swiftest_gr_p4_pos_kick diff --git a/src/swiftest/swiftest_module.f90 b/src/swiftest/swiftest_module.f90 index 8349c12c1..a54e2351b 100644 --- a/src/swiftest/swiftest_module.f90 +++ b/src/swiftest/swiftest_module.f90 @@ -550,12 +550,12 @@ pure module subroutine swiftest_gr_kick_getacch(mu, x, lmask, n, inv_c2, agr) real(DP), dimension(:,:), intent(out) :: agr !! Accelerations end subroutine swiftest_gr_kick_getacch - pure module subroutine swiftest_gr_p4_pos_kick(param, x, v, dt) + pure elemental module subroutine swiftest_gr_p4_pos_kick(inv_c2, rx, ry, rz, vx, vy, vz, dt) implicit none - class(swiftest_parameters), intent(in) :: param !! Current run configuration parameters - real(DP), dimension(:), intent(inout) :: x !! Position vector - real(DP), dimension(:), intent(in) :: v !! Velocity vector - real(DP), intent(in) :: dt !! Step size + real(DP), intent(in) :: inv_c2 !! One over speed of light squared (1/c**2) + real(DP), intent(inout) :: rx, ry, rz !! Position vector + real(DP), intent(in) :: vx, vy, vz !! Velocity vector + real(DP), intent(in) :: dt !! Step size end subroutine swiftest_gr_p4_pos_kick pure module subroutine swiftest_gr_pseudovel2vel(param, mu, rh, pv, vh) diff --git a/src/swiftest/swiftest_orbel.f90 b/src/swiftest/swiftest_orbel.f90 index 3827c1b59..431d182ab 100644 --- a/src/swiftest/swiftest_orbel.f90 +++ b/src/swiftest/swiftest_orbel.f90 @@ -21,24 +21,28 @@ module subroutine swiftest_orbel_el2xv_vec(self, cb) class(swiftest_body), intent(inout) :: self !! Swiftest body object class(swiftest_cb), intent(inout) :: cb !! Swiftest central body objec ! Internals - integer(I4B) :: i + integer(I4B) :: i, n if (self%nbody == 0) return + n = self%nbody call self%set_mu(cb) + associate(mu => self%mu, a => self%a, e => self%e, inc => self%inc, capom => self%capom, omega => self%omega, & + capm => self%capm, rh => self%rh, vh => self%vh) #ifdef DOCONLOC - do concurrent (i = 1:self%nbody) shared(self) + do concurrent (i = 1:n) shared(mu, a, e, inc, capom, omega, capm, rh, vh) #else - do concurrent (i = 1:self%nbody) + do concurrent (i = 1:n) #endif - call swiftest_orbel_el2xv(self%mu(i), self%a(i), self%e(i), self%inc(i), self%capom(i), & - self%omega(i), self%capm(i), self%rh(:, i), self%vh(:, i)) - end do + call swiftest_orbel_el2xv(mu(i), a(i), e(i), inc(i), capom(i), omega(i), capm(i), & + rh(1,i), rh(2,i), rh(3,i), vh(1,i), vh(2,i), vh(3,i)) + end do + end associate return end subroutine swiftest_orbel_el2xv_vec - pure subroutine swiftest_orbel_el2xv(mu, a, ie, inc, capom, omega, capm, x, v) + pure elemental subroutine swiftest_orbel_el2xv(mu, a, ie, inc, capom, omega, capm, rx, ry, rz, vx, vy, vz) !! author: David A. Minton !! !! 
Compute osculating orbital elements from relative C)rtesian position and velocity @@ -56,7 +60,7 @@ pure subroutine swiftest_orbel_el2xv(mu, a, ie, inc, capom, omega, capm, x, v) implicit none real(DP), intent(in) :: mu real(DP), intent(in) :: a, ie, inc, capom, omega, capm - real(DP), dimension(:), intent(out) :: x, v + real(DP), intent(out) :: rx, ry, rz, vx, vy, vz integer(I4B) :: iorbit_type real(DP) :: e, cape, capf, zpara, em1 @@ -129,12 +133,12 @@ pure subroutine swiftest_orbel_el2xv(mu, a, ie, inc, capom, omega, capm, x, v) vfac2 = ri * sqgma endif !-- - x(1) = d11 * xfac1 + d21 * xfac2 - x(2) = d12 * xfac1 + d22 * xfac2 - x(3) = d13 * xfac1 + d23 * xfac2 - v(1) = d11 * vfac1 + d21 * vfac2 - v(2) = d12 * vfac1 + d22 * vfac2 - v(3) = d13 * vfac1 + d23 * vfac2 + rx = d11 * xfac1 + d21 * xfac2 + ry = d12 * xfac1 + d22 * xfac2 + rz = d13 * xfac1 + d23 * xfac2 + vx = d11 * vfac1 + d21 * vfac2 + vy = d12 * vfac1 + d22 * vfac2 + vz = d13 * vfac1 + d23 * vfac2 return end subroutine swiftest_orbel_el2xv diff --git a/src/swiftest/swiftest_util.f90 b/src/swiftest/swiftest_util.f90 index c164a365a..f3ce3082f 100644 --- a/src/swiftest/swiftest_util.f90 +++ b/src/swiftest/swiftest_util.f90 @@ -1213,7 +1213,9 @@ module subroutine swiftest_util_get_energy_and_momentum_system(self, param) #else do concurrent (i = 1:npl, pl%lmask(i)) #endif - h(:) = pl%rb(:,i) .cross. pl%vb(:,i) + h(1) = pl%rb(2,i) * pl%vb(3,i) - pl%rb(3,i) * pl%vb(2,i) + h(2) = pl%rb(3,i) * pl%vb(1,i) - pl%rb(1,i) * pl%vb(3,i) + h(3) = pl%rb(1,i) * pl%vb(2,i) - pl%rb(2,i) * pl%vb(1,i) ! Angular momentum from orbit Lplorbit(:,i) = pl%mass(i) * h(:) @@ -1269,7 +1271,7 @@ module subroutine swiftest_util_get_energy_and_momentum_system(self, param) nbody_system%ke_orbit = 0.5_DP * (kecb + sum(kepl(1:npl), pl%lmask(1:npl))) #ifdef DOCONLOC - do concurrent (j = 1:NDIM) shared(nbody_system,pl,Lcborbit,Lplorbit) + do concurrent (j = 1:NDIM) shared(nbody_system,pl,Lcborbit,Lplorbit,npl) #else do concurrent (j = 1:NDIM) #endif diff --git a/src/whm/whm_gr.f90 b/src/whm/whm_gr.f90 index c46a5ce2d..6e23ca21b 100644 --- a/src/whm/whm_gr.f90 +++ b/src/whm/whm_gr.f90 @@ -84,14 +84,14 @@ pure module subroutine whm_gr_p4_pl(self, nbody_system, param, dt) if (self%nbody == 0) return - associate(pl => self) + associate(xj => self%xj, vj => self%vj, lmask => self%lmask, inv_c2 => param%inv_c2) npl = self%nbody #ifdef DOCONLOC - do concurrent(i = 1:npl, pl%lmask(i)) shared(pl,dt) + do concurrent(i = 1:npl, lmask(i)) shared(lmask, inv_c2, xj, vj,dt) #else - do concurrent(i = 1:npl, pl%lmask(i)) + do concurrent(i = 1:npl, lmask(i)) #endif - call swiftest_gr_p4_pos_kick(param, pl%xj(:, i), pl%vj(:, i), dt) + call swiftest_gr_p4_pos_kick(inv_c2, xj(1,i), xj(2,i), xj(3,i), vj(1,i), vj(2,i), vj(3,i), dt) end do end associate @@ -115,15 +115,15 @@ pure module subroutine whm_gr_p4_tp(self, nbody_system, param, dt) ! 
Internals integer(I4B) :: i, ntp - associate(tp => self) + associate(rh => self%rh, vh => self%vh, lmask => self%lmask, inv_c2 => param%inv_c2) ntp = self%nbody if (ntp == 0) return #ifdef DOCONLOC - do concurrent(i = 1:ntp, tp%lmask(i)) shared(tp,dt) + do concurrent(i = 1:ntp, lmask(i)) shared(lmask, rh, vh, inv_c2, dt) #else - do concurrent(i = 1:ntp, tp%lmask(i)) + do concurrent(i = 1:ntp, lmask(i)) #endif - call swiftest_gr_p4_pos_kick(param, tp%rh(:, i), tp%vh(:, i), dt) + call swiftest_gr_p4_pos_kick(inv_c2, rh(1,i), rh(2,i), rh(3,i), vh(1,i), vh(2,i), vh(3,i), dt) end do end associate From 2aacbe3f63a23730645552e70412efc1b68a7c16 Mon Sep 17 00:00:00 2001 From: David Minton Date: Tue, 6 Jun 2023 12:54:37 -0400 Subject: [PATCH 04/25] Fixed line lengths based on standards compliance warning flags in debug mode --- src/base/base_module.f90 | 311 +++++++++++++++++++-------------- src/globals/globals_module.f90 | 18 +- 2 files changed, 189 insertions(+), 140 deletions(-) diff --git a/src/base/base_module.f90 b/src/base/base_module.f90 index 0f91e3473..502a42515 100644 --- a/src/base/base_module.f90 +++ b/src/base/base_module.f90 @@ -22,90 +22,100 @@ module base !> User defined parameters that are read in from the parameters input file. !> Each paramter is initialized to a default values. type, abstract :: base_parameters - character(STRMAX) :: integrator !! Name of the nbody integrator used - character(STRMAX) :: param_file_name !! The name of the parameter file - real(DP) :: t0 = 0.0_DP !! Integration reference time - real(DP) :: tstart = -1.0_DP !! Integration start time - real(DP) :: tstop = -1.0_DP !! Integration stop time - real(DP) :: dt = -1.0_DP !! Time step - integer(I8B) :: iloop = 0_I8B !! Main loop counter - integer(I8B) :: nloops = 0_I8B !! Total number of loops to execute - integer(I8B) :: istart = 0_I8B !! Starting index for loop counter - integer(I4B) :: iout = 0 !! Output cadence counter - integer(I4B) :: idump = 0 !! Dump cadence counter - integer(I4B) :: nout = 0 !! Current output step - integer(I4B) :: istep = 0 !! Current value of istep (used for time stretching) - character(STRMAX) :: incbfile = CB_INFILE !! Name of input file for the central body - character(STRMAX) :: inplfile = PL_INFILE !! Name of input file for massive bodies - character(STRMAX) :: intpfile = TP_INFILE !! Name of input file for test particles - character(STRMAX) :: nc_in = NC_INFILE !! Name of system input file for NetCDF input - character(STRMAX) :: in_type = "NETCDF_DOUBLE" !! Data representation type of input data files - character(STRMAX) :: in_form = "XV" !! Format of input data files ("EL" or ["XV"]) - integer(I4B) :: istep_out = -1 !! Number of time steps between saved outputs - integer(I4B) :: nstep_out = -1 !! Total number of saved outputs - real(DP) :: fstep_out = 1.0_DP !! The output step time stretching factor - logical :: ltstretch = .false. !! Whether to employ time stretching or not - character(STRMAX) :: outfile = BIN_OUTFILE !! Name of output binary file - character(STRMAX) :: out_type = "NETCDF_DOUBLE" !! Binary format of output file - character(STRMAX) :: out_form = "XVEL" !! Data to write to output file - character(STRMAX) :: out_stat = 'NEW' !! Open status for output binary file - integer(I4B) :: dump_cadence = 10 !! Number of output steps between dumping simulation data to file - real(DP) :: rmin = -1.0_DP !! Minimum heliocentric radius for test particle - real(DP) :: rmax = -1.0_DP !! Maximum heliocentric radius for test particle - real(DP) :: rmaxu = -1.0_DP !! 
Maximum unbound heliocentric radius for test particle - real(DP) :: qmin = -1.0_DP !! Minimum pericenter distance for test particle - character(STRMAX) :: qmin_coord = "HELIO" !! Coordinate frame to use for qmin (["HELIO"] or "BARY") - real(DP) :: qmin_alo = -1.0_DP !! Minimum semimajor axis for qmin - real(DP) :: qmin_ahi = -1.0_DP !! Maximum semimajor axis for qmin - real(QP) :: MU2KG = -1.0_QP !! Converts mass units to grams - real(QP) :: TU2S = -1.0_QP !! Converts time units to seconds - real(QP) :: DU2M = -1.0_QP !! Converts distance unit to centimeters - real(DP) :: GU = -1.0_DP !! Universal gravitational constant in the system units - real(DP) :: inv_c2 = -1.0_DP !! Inverse speed of light squared in the system units - real(DP) :: GMTINY = -1.0_DP !! Smallest G*mass that is fully gravitating - real(DP) :: min_GMfrag = -1.0_DP !! Smallest G*mass that can be produced in a fragmentation event - real(DP) :: nfrag_reduction = 30.0_DP !! Reduction factor for limiting the number of fragments in a collision - integer(I4B), dimension(:), allocatable :: seed !! Random seeds for fragmentation modeling - logical :: lmtiny_pl = .false. !! Include semi-interacting massive bodies - character(STRMAX) :: collision_model = "MERGE" !! The Coll - character(STRMAX) :: encounter_save = "NONE" !! Indicate if and how encounter data should be saved - logical :: lenc_save_trajectory = .false. !! Indicates that when encounters are saved, the full trajectory through recursion steps are saved - logical :: lenc_save_closest = .false. !! Indicates that when encounters are saved, the closest approach distance between pairs of bodies is saved - character(NAMELEN) :: interaction_loops = "ADAPTIVE" !! Method used to compute interaction loops. Options are "TRIANGULAR", "FLAT", or "ADAPTIVE" - character(NAMELEN) :: encounter_check_plpl = "ADAPTIVE" !! Method used to compute pl-pl encounter checks. Options are "TRIANGULAR", "SORTSWEEP", or "ADAPTIVE" - character(NAMELEN) :: encounter_check_pltp = "ADAPTIVE" !! Method used to compute pl-tp encounter checks. Options are "TRIANGULAR", "SORTSWEEP", or "ADAPTIVE" - logical :: lcoarray = .false. !! Use Coarrays for test particle parallelization. - - ! The following are used internally, and are not set by the user, but instead are determined by the input value of INTERACTION_LOOPS + character(STRMAX) :: integrator !! Name of the nbody integrator used + character(STRMAX) :: param_file_name !! The name of the parameter file + real(DP) :: t0 = 0.0_DP !! Integration reference time + real(DP) :: tstart = -1.0_DP !! Integration start time + real(DP) :: tstop = -1.0_DP !! Integration stop time + real(DP) :: dt = -1.0_DP !! Time step + integer(I8B) :: iloop = 0_I8B !! Main loop counter + integer(I8B) :: nloops = 0_I8B !! Total number of loops to execute + integer(I8B) :: istart = 0_I8B !! Starting index for loop counter + integer(I4B) :: iout = 0 !! Output cadence counter + integer(I4B) :: idump = 0 !! Dump cadence counter + integer(I4B) :: nout = 0 !! Current output step + integer(I4B) :: istep = 0 !! Current value of istep (used for time stretching) + character(STRMAX) :: incbfile = CB_INFILE !! Name of input file for the central body + character(STRMAX) :: inplfile = PL_INFILE !! Name of input file for massive bodies + character(STRMAX) :: intpfile = TP_INFILE !! Name of input file for test particles + character(STRMAX) :: nc_in = NC_INFILE !! Name of system input file for NetCDF input + character(STRMAX) :: in_type = "NETCDF_DOUBLE" !! 
Data representation type of input data files + character(STRMAX) :: in_form = "XV" !! Format of input data files ("EL" or ["XV"]) + integer(I4B) :: istep_out = -1 !! Number of time steps between saved outputs + integer(I4B) :: nstep_out = -1 !! Total number of saved outputs + real(DP) :: fstep_out = 1.0_DP !! The output step time stretching factor + logical :: ltstretch = .false. !! Whether to employ time stretching or not + character(STRMAX) :: outfile = BIN_OUTFILE !! Name of output binary file + character(STRMAX) :: out_type = "NETCDF_DOUBLE" !! Binary format of output file + character(STRMAX) :: out_form = "XVEL" !! Data to write to output file + character(STRMAX) :: out_stat = 'NEW' !! Open status for output binary file + integer(I4B) :: dump_cadence = 10 !! Number of output steps between dumping simulation data to file + real(DP) :: rmin = -1.0_DP !! Minimum heliocentric radius for test particle + real(DP) :: rmax = -1.0_DP !! Maximum heliocentric radius for test particle + real(DP) :: rmaxu = -1.0_DP !! Maximum unbound heliocentric radius for test particle + real(DP) :: qmin = -1.0_DP !! Minimum pericenter distance for test particle + character(STRMAX) :: qmin_coord = "HELIO" !! Coordinate frame to use for qmin (["HELIO"] or "BARY") + real(DP) :: qmin_alo = -1.0_DP !! Minimum semimajor axis for qmin + real(DP) :: qmin_ahi = -1.0_DP !! Maximum semimajor axis for qmin + real(QP) :: MU2KG = -1.0_QP !! Converts mass units to grams + real(QP) :: TU2S = -1.0_QP !! Converts time units to seconds + real(QP) :: DU2M = -1.0_QP !! Converts distance unit to centimeters + real(DP) :: GU = -1.0_DP !! Universal gravitational constant in the system units + real(DP) :: inv_c2 = -1.0_DP !! Inverse speed of light squared in the system units + real(DP) :: GMTINY = -1.0_DP !! Smallest G*mass that is fully gravitating + real(DP) :: min_GMfrag = -1.0_DP !! Smallest G*mass that can be produced in a fragmentation event + real(DP) :: nfrag_reduction = 30.0_DP !! Reduction factor for limiting the number of collision fragments + integer(I4B), dimension(:), allocatable :: seed !! Random seeds for fragmentation modeling + logical :: lmtiny_pl = .false. !! Include semi-interacting massive bodies + character(STRMAX) :: collision_model = "MERGE" !! The Coll + character(STRMAX) :: encounter_save = "NONE" !! Indicate if and how encounter data should be saved + logical :: lenc_save_trajectory = .false. !! Indicates that when encounters are saved, the full trajectory + !! through recursion steps are saved + logical :: lenc_save_closest = .false. !! Indicates that when encounters are saved, the closest approach + !! distance between pairs of bodies is saved + character(NAMELEN):: interaction_loops = "ADAPTIVE" !! Method used to compute interaction loops. + !! Options are "TRIANGULAR", "FLAT", or "ADAPTIVE" + character(NAMELEN):: encounter_check_plpl = "ADAPTIVE" !! Method used to compute pl-pl encounter checks. + !! Options are "TRIANGULAR", "SORTSWEEP", or "ADAPTIVE" + character(NAMELEN):: encounter_check_pltp = "ADAPTIVE" !! Method used to compute pl-tp encounter checks. + !! Options are "TRIANGULAR", "SORTSWEEP", or "ADAPTIVE" + logical :: lcoarray = .false. !! Use Coarrays for test particle parallelization. + + ! The following are not set by the user, but instead are determined by the input value of INTERACTION_LOOPS logical :: lflatten_interactions = .false. !! Use the flattened upper triangular matrix for pl-pl interaction loops - logical :: lencounter_sas_plpl = .false. !! 
Use the Sort and Sweep algorithm to prune the encounter list before checking for close encounters - logical :: lencounter_sas_pltp = .false. !! Use the Sort and Sweep algorithm to prune the encounter list before checking for close encounters + logical :: lencounter_sas_plpl = .false. !! Use the Sort and Sweep algorithm to prune the encounter list before checking + !! for close encounters + logical :: lencounter_sas_pltp = .false. !! Use the Sort and Sweep algorithm to prune the encounter list before checking + !! for close encounters ! Logical flags to turn on or off various features of the code - logical :: lrhill_present = .false. !! Hill radii are given as an input rather than calculated by the code (can be used to inflate close encounter regions manually) + logical :: lrhill_present = .false. !! Hill radii are given as an input rather than calculated by the code (can be used to + !! inflate close encounter regions manually) logical :: lextra_force = .false. !! User defined force function turned on logical :: lbig_discard = .false. !! Save big bodies on every discard logical :: lclose = .false. !! Turn on close encounters logical :: lenergy = .false. !! Track the total energy of the system - logical :: loblatecb = .false. !! Calculate acceleration from oblate central body (automatically turns true if nonzero J2 is input) + logical :: loblatecb = .false. !! Calculate acceleration from oblate central body (automatically turns true if nonzero J2 + !! is input) logical :: lrotation = .false. !! Include rotation states of big bodies logical :: ltides = .false. !! Include tidal dissipation - ! Initial values to pass to the energy report subroutine (usually only used in the case of a restart, otherwise these will be updated with initial conditions values) - real(DP) :: E_orbit_orig = 0.0_DP !! Initial orbital energy + ! Initial values to pass to the energy report subroutine (usually only used in the case of a restart, otherwise these will be + ! updated with initial conditions values) + real(DP) :: E_orbit_orig = 0.0_DP !! Initial orbital energy real(DP) :: GMtot_orig = 0.0_DP !! Initial system mass - real(DP), dimension(NDIM) :: L_total_orig = 0.0_DP !! Initial total angular momentum vector - real(DP), dimension(NDIM) :: L_orbit_orig = 0.0_DP !! Initial orbital angular momentum - real(DP), dimension(NDIM) :: L_spin_orig = 0.0_DP !! Initial spin angular momentum vector - real(DP), dimension(NDIM) :: L_escape = 0.0_DP !! Angular momentum of bodies that escaped the system (used for bookeeping) + real(DP), dimension(NDIM) :: L_total_orig = 0.0_DP !! Initial total angular momentum vector + real(DP), dimension(NDIM) :: L_orbit_orig = 0.0_DP !! Initial orbital angular momentum + real(DP), dimension(NDIM) :: L_spin_orig = 0.0_DP !! Initial spin angular momentum vector + real(DP), dimension(NDIM) :: L_escape = 0.0_DP !! Angular momentum of escaped bodies (used for bookeeping) real(DP) :: GMescape = 0.0_DP !! Mass of bodies that escaped the system (used for bookeeping) - real(DP) :: E_collisions = 0.0_DP !! Energy lost from system due to collisions - real(DP) :: E_untracked = 0.0_DP !! Energy gained from system due to escaped bodies + real(DP) :: E_collisions = 0.0_DP !! Energy lost from system due to collisions + real(DP) :: E_untracked = 0.0_DP !! Energy gained from system due to escaped bodies logical :: lfirstenergy = .true. !! This is the first time computing energe logical :: lfirstkick = .true. !! Initiate the first kick in a symplectic step logical :: lrestart = .false. !! 
Indicates whether or not this is a restarted run - character(NAMELEN) :: display_style !! Style of the output display {"STANDARD", "COMPACT"}). Default is "STANDARD" + character(NAMELEN) :: display_style !! Style of the output display {["STANDARD"], "COMPACT"}). integer(I4B) :: display_unit !! File unit number for display (either to stdout or to a log file) logical :: log_output = .false. !! Logs the output to file instead of displaying it on the terminal @@ -137,8 +147,10 @@ subroutine abstract_io_param_reader(self, unit, iotype, v_list, iostat, iomsg) implicit none class(base_parameters), intent(inout) :: self !! Collection of parameters integer(I4B), intent(in) :: unit !! File unit number - character(len=*), intent(in) :: iotype !! Dummy argument passed to the input/output procedure contains the text from the char-literal-constant, prefixed with DT. - !! If you do not include a char-literal-constant, the iotype argument contains only DT. + character(len=*), intent(in) :: iotype !! Dummy argument passed to the input/output procedure contains the + !! text from the char-literal-constant, prefixed with DT. If you do + !! not include a char-literal-constant, the iotype argument contains + !! only DT. character(len=*), intent(in) :: v_list(:) !! The first element passes the integrator code to the reader integer(I4B), intent(out) :: iostat !! IO status code character(len=*), intent(inout) :: iomsg !! Message to pass if iostat /= 0 @@ -149,8 +161,10 @@ subroutine abstract_io_param_writer(self, unit, iotype, v_list, iostat, iomsg) implicit none class(base_parameters), intent(in) :: self !! Collection of parameters integer(I4B), intent(in) :: unit !! File unit number - character(len=*), intent(in) :: iotype !! Dummy argument passed to the input/output procedure contains the text from the char-literal-constant, prefixed with DT. - !! If you do not include a char-literal-constant, the iotype argument contains only DT. + character(len=*), intent(in) :: iotype !! Dummy argument passed to the input/output procedure contains the + !! text from the char-literal-constant, prefixed with DT. If you do + !! not include a char-literal-constant, the iotype argument contains + !! only DT. integer(I4B), intent(in) :: v_list(:) !! Not used in this procedure integer(I4B), intent(out) :: iostat !! IO status code character(len=*), intent(inout) :: iomsg !! Message to pass if iostat /= 0 @@ -168,7 +182,8 @@ end subroutine abstract_io_read_in_param type :: base_storage_frame class(*), allocatable :: item contains - procedure :: store => base_util_copy_store !! Stores a snapshot of the nbody system so that later it can be retrieved for saving to file. + procedure :: store => base_util_copy_store !! Stores a snapshot of the nbody system so that later it can be + !! retrieved for saving to file. generic :: assignment(=) => store final :: base_final_storage_frame end type @@ -179,25 +194,26 @@ end subroutine abstract_io_read_in_param integer(I4B) :: nframes !! Total number of frames that can be stored !! An class that establishes the pattern for various storage objects - type(base_storage_frame), dimension(:), allocatable :: frame !! Array of stored frames - integer(I4B) :: iframe = 0 !! Index of the last frame stored in the system - integer(I4B) :: nid !! Number of unique id values in all saved snapshots - integer(I4B), dimension(:), allocatable :: idvals !! The set of unique id values contained in the snapshots - integer(I4B), dimension(:), allocatable :: idmap !! The id value -> index map - integer(I4B) :: nt !! 
Number of unique time values in all saved snapshots - real(DP), dimension(:), allocatable :: tvals !! The set of unique time values contained in the snapshots - integer(I4B), dimension(:), allocatable :: tmap !! The t value -> index map + type(base_storage_frame), dimension(:), allocatable :: frame !! Array of stored frames + integer(I4B) :: iframe = 0 !! Index of the last frame stored in the system + integer(I4B) :: nid !! Number of unique id values in all saved snapshots + integer(I4B), dimension(:), allocatable :: idvals !! The set of unique id values contained in the snapshots + integer(I4B), dimension(:), allocatable :: idmap !! The id value -> index map + integer(I4B) :: nt !! Number of unique time values in all saved snapshots + real(DP), dimension(:), allocatable :: tvals !! The set of unique time values contained in the snapshots + integer(I4B), dimension(:), allocatable :: tmap !! The t value -> index map contains procedure :: dealloc => base_util_dealloc_storage !! Deallocates all allocatables - procedure :: reset => base_util_reset_storage !! Resets the storage object back to its original state by removing all of the saved items from the storage frames + procedure :: reset => base_util_reset_storage !! Resets the storage object back to its original state by removing all of + !! the saved items from the storage frames procedure :: resize => base_util_resize_storage !! Resizes storage if it is too small procedure :: setup => base_util_setup_storage !! Sets up a storage system with a set number of frames procedure :: save => base_util_snapshot_save !! Takes a snapshot of the current system end type base_storage - !> Class definition for the particle origin information object. This object is used to track time, location, and collisional regime - !> of fragments produced in collisional events. + !> Class definition for the particle origin information object. This object is used to track time, location, and collisional + !> regime of fragments produced in collisional events. type, abstract :: base_particle_info end type base_particle_info @@ -291,13 +307,15 @@ end subroutine abstract_util_dealloc_object subroutine base_util_append_arr_char_string(arr, source, nold, lsource_mask) !! author: David A. Minton !! - !! Append a single array of character string type onto another. If the destination array is not allocated, or is not big enough, this will allocate space for it. + !! Append a single array of character string type onto another. If the destination array is not allocated, or is not big + !! enough, this will allocate space for it. implicit none ! Arguments - character(len=STRMAX), dimension(:), allocatable, intent(inout) :: arr !! Destination array - character(len=STRMAX), dimension(:), allocatable, intent(in) :: source !! Array to append - integer(I4B), intent(in), optional :: nold !! Extent of original array. If passed, the source array will begin at arr(nold+1). Otherwise, the size of arr will be used. - logical, dimension(:), intent(in), optional :: lsource_mask !! Logical mask indicating which elements to append to + character(len=STRMAX), dimension(:), allocatable, intent(inout) :: arr !! Destination array + character(len=STRMAX), dimension(:), allocatable, intent(in) :: source !! Array to append + integer(I4B), intent(in), optional :: nold !! Extent of original array. If passed, the source array will begin at + !! arr(nold+1). Otherwise, the size of arr will be used. + logical, dimension(:), intent(in), optional :: lsource_mask !! 
Logical mask indicating which elements to append to ! Internals integer(I4B) :: nnew, nsrc, nend_orig @@ -336,13 +354,15 @@ end subroutine base_util_append_arr_char_string subroutine base_util_append_arr_DP(arr, source, nold, lsource_mask) !! author: David A. Minton !! - !! Append a single array of double precision type onto another. If the destination array is not allocated, or is not big enough, this will allocate space for it. + !! Append a single array of double precision type onto another. If the destination array is not allocated, or is not big + !! enough, this will allocate space for it. implicit none ! Arguments - real(DP), dimension(:), allocatable, intent(inout) :: arr !! Destination array - real(DP), dimension(:), allocatable, intent(in) :: source !! Array to append - integer(I4B), intent(in), optional :: nold !! Extent of original array. If passed, the source array will begin at arr(nold+1). Otherwise, the size of arr will be used. - logical, dimension(:), intent(in), optional :: lsource_mask !! Logical mask indicating which elements to append to + real(DP), dimension(:), allocatable, intent(inout) :: arr !! Destination array + real(DP), dimension(:), allocatable, intent(in) :: source !! Array to append + integer(I4B), intent(in), optional :: nold !! Extent of original array. If passed, the source array will begin at + !! arr(nold+1). Otherwise, the size of arr will be used. + logical, dimension(:), intent(in), optional :: lsource_mask !! Logical mask indicating which elements to append to ! Internals integer(I4B) :: nnew, nsrc, nend_orig @@ -381,13 +401,15 @@ end subroutine base_util_append_arr_DP subroutine base_util_append_arr_DPvec(arr, source, nold, lsource_mask) !! author: David A. Minton !! - !! Append a single array of double precision vector type of size (NDIM, n) onto another. If the destination array is not allocated, or is not big enough, this will allocate space for it. + !! Append a single array of double precision vector type of size (NDIM, n) onto another. If the destination array is not + !! allocated, or is not big enough, this will allocate space for it. implicit none ! Arguments - real(DP), dimension(:,:), allocatable, intent(inout) :: arr !! Destination array - real(DP), dimension(:,:), allocatable, intent(in) :: source !! Array to append - integer(I4B), intent(in), optional :: nold !! Extent of original array. If passed, the source array will begin at arr(nold+1). Otherwise, the size of arr will be used. - logical, dimension(:), intent(in), optional :: lsource_mask !! Logical mask indicating which elements to append to + real(DP), dimension(:,:), allocatable, intent(inout) :: arr !! Destination array + real(DP), dimension(:,:), allocatable, intent(in) :: source !! Array to append + integer(I4B), intent(in), optional :: nold !! Extent of original array. If passed, the source array will begin at + !! arr(nold+1). Otherwise, the size of arr will be used. + logical, dimension(:), intent(in), optional :: lsource_mask !! Logical mask indicating which elements to append to ! Internals integer(I4B) :: nnew, nsrc, nend_orig @@ -428,13 +450,15 @@ end subroutine base_util_append_arr_DPvec subroutine base_util_append_arr_I4B(arr, source, nold, lsource_mask) !! author: David A. Minton !! - !! Append a single array of integer(I4B) onto another. If the destination array is not allocated, or is not big enough, this will allocate space for it. + !! Append a single array of integer(I4B) onto another. If the destination array is not allocated, or is not big enough, + !! 
this will allocate space for it. implicit none ! Arguments - integer(I4B), dimension(:), allocatable, intent(inout) :: arr !! Destination array - integer(I4B), dimension(:), allocatable, intent(in) :: source !! Array to append - integer(I4B), intent(in), optional :: nold !! Extent of original array. If passed, the source array will begin at arr(nold+1). Otherwise, the size of arr will be used. - logical, dimension(:), intent(in), optional :: lsource_mask !! Logical mask indicating which elements to append to + integer(I4B), dimension(:), allocatable, intent(inout) :: arr !! Destination array + integer(I4B), dimension(:), allocatable, intent(in) :: source !! Array to append + integer(I4B), intent(in), optional :: nold !! Extent of original array. If passed, the source array will begin at + !! arr(nold+1). Otherwise, the size of arr will be used. + logical, dimension(:), intent(in), optional :: lsource_mask !! Logical mask indicating which elements to append to ! Internals integer(I4B) :: nnew, nsrc, nend_orig @@ -473,13 +497,15 @@ end subroutine base_util_append_arr_I4B subroutine base_util_append_arr_logical(arr, source, nold, lsource_mask) !! author: David A. Minton !! - !! Append a single array of logical type onto another. If the destination array is not allocated, or is not big enough, this will allocate space for it. + !! Append a single array of logical type onto another. If the destination array is not allocated, or is not big enough, + !! this will allocate space for it. implicit none ! Arguments - logical, dimension(:), allocatable, intent(inout) :: arr !! Destination array - logical, dimension(:), allocatable, intent(in) :: source !! Array to append - integer(I4B), intent(in), optional :: nold !! Extent of original array. If passed, the source array will begin at arr(nold+1). Otherwise, the size of arr will be used. - logical, dimension(:), intent(in), optional :: lsource_mask !! Logical mask indicating which elements to append to + logical, dimension(:), allocatable, intent(inout) :: arr !! Destination array + logical, dimension(:), allocatable, intent(in) :: source !! Array to append + integer(I4B), intent(in), optional :: nold !! Extent of original array. If passed, the source array will begin at + !! arr(nold+1). Otherwise, the size of arr will be used. + logical, dimension(:), intent(in), optional :: lsource_mask !! Logical mask indicating which elements to append to ! Internals integer(I4B) :: nnew, nsrc, nend_orig @@ -574,7 +600,8 @@ subroutine base_util_exit(code) character(*), parameter :: BAR = '("------------------------------------------------")' character(*), parameter :: SUCCESS_MSG = '(/, "Normal termination of Swiftest (version ", f3.1, ")")' character(*), parameter :: FAIL_MSG = '(/, "Terminating Swiftest (version ", f3.1, ") due to error!!")' - character(*), parameter :: USAGE_MSG = '("Usage: swiftest [bs|helio|ra15|rmvs|symba|tu4|whm] [standard|compact|progress|NONE]")' + character(*), parameter :: USAGE_MSG = '("Usage: swiftest [bs|helio|ra15|rmvs|symba|tu4|whm] ' // & + '[standard|compact|progress|NONE]")' character(*), parameter :: HELP_MSG = USAGE_MSG select case(code) @@ -605,7 +632,8 @@ subroutine base_util_fill_arr_char_string(keeps, inserts, lfill_list) ! Arguments character(len=STRMAX), dimension(:), allocatable, intent(inout) :: keeps !! Array of values to keep character(len=STRMAX), dimension(:), allocatable, intent(in) :: inserts !! Array of values to insert into keep - logical, dimension(:), intent(in) :: lfill_list !! 
Logical array of bodies to merge into the keeps + logical, dimension(:), intent(in) :: lfill_list !! Logical array of bodies to merge into the + !! keeps if (.not.allocated(keeps) .or. .not.allocated(inserts)) return @@ -702,7 +730,8 @@ end subroutine base_util_fill_arr_logical subroutine base_util_reset_storage(self) !! author: David A. Minton !! - !! Resets the storage object back to its original state by removing all of the saved items from the storage frames, but does not deallocate the frames + !! Resets the storage object back to its original state by removing all of the saved items from the storage frames, but + !! does not deallocate the frames implicit none ! Arguments class(base_storage), intent(inout) :: self @@ -736,7 +765,7 @@ subroutine base_util_resize_arr_char_string(arr, nnew) character(len=STRMAX), dimension(:), allocatable, intent(inout) :: arr !! Array to resize integer(I4B), intent(in) :: nnew !! New size ! Internals - character(len=STRMAX), dimension(:), allocatable :: tmp !! Temporary storage array in case the input array is already allocated + character(len=STRMAX), dimension(:), allocatable :: tmp !! Temp. storage array in case the input array is already allocated integer(I4B) :: nold !! Old size if (nnew < 0) return @@ -1008,10 +1037,12 @@ end subroutine base_util_setup_storage subroutine base_util_snapshot_save(self, snapshot) !! author: David A. Minton !! - !! Checks the current size of the storage object against the required size and extends it by a factor of 2 more than requested if it is too small. - !! Note: The reason to extend it by a factor of 2 is for performance. When there are many enounters per step, resizing every time you want to add an - !! encounter takes significant computational effort. Resizing by a factor of 2 is a tradeoff between performance (fewer resize calls) and memory managment - !! Memory usage grows by a factor of 2 each time it fills up, but no more. + !! Checks the current size of the storage object against the required size and extends it by a factor of 2 more than + !! requested if it is too small. + !! Note: The reason to extend it by a factor of 2 is for performance. When there are many enounters per step, resizing + !! every time you want to add an encounter takes significant computational effort. Resizing by a factor of 2 is a tradeoff + !! between performance (fewer resize calls) and memory managment. Memory usage grows by a factor of 2 each time it fills + !! up, but no more. implicit none ! Arguments class(base_storage), intent(inout) :: self !! Storage encounter storage object @@ -1043,8 +1074,11 @@ subroutine base_util_spill_arr_char_string(keeps, discards, lspill_list, ldestru ! Arguments character(len=STRMAX), dimension(:), allocatable, intent(inout) :: keeps !! Array of values to keep character(len=STRMAX), dimension(:), allocatable, intent(inout) :: discards !! Array of discards - logical, dimension(:), intent(in) :: lspill_list !! Logical array of bodies to spill into the discards - logical, intent(in) :: ldestructive !! Logical flag indicating whether or not this operation should alter the keeps array or not + logical, dimension(:), intent(in) :: lspill_list !! Logical array of bodies to spill into + !! the discards + logical, intent(in) :: ldestructive !! Logical flag indicating whether or not + !! this operation should alter the keeps + !! array or not ! Internals integer(I4B) :: nspill, nkeep, nlist character(len=STRMAX), dimension(:), allocatable :: tmp !! 
Array of values to keep @@ -1087,7 +1121,8 @@ subroutine base_util_spill_arr_DP(keeps, discards, lspill_list, ldestructive) real(DP), dimension(:), allocatable, intent(inout) :: keeps !! Array of values to keep real(DP), dimension(:), allocatable, intent(inout) :: discards !! Array of discards logical, dimension(:), intent(in) :: lspill_list !! Logical array of bodies to spill into the discardss - logical, intent(in) :: ldestructive !! Logical flag indicating whether or not this operation should alter the keeps array or not + logical, intent(in) :: ldestructive !! Logical flag indicating whether or not this operation + !! should alter the keeps array or not ! Internals integer(I4B) :: nspill, nkeep, nlist real(DP), dimension(:), allocatable :: tmp !! Array of values to keep @@ -1130,7 +1165,8 @@ subroutine base_util_spill_arr_DPvec(keeps, discards, lspill_list, ldestructive) real(DP), dimension(:,:), allocatable, intent(inout) :: keeps !! Array of values to keep real(DP), dimension(:,:), allocatable, intent(inout) :: discards !! Array discards logical, dimension(:), intent(in) :: lspill_list !! Logical array of bodies to spill into the discards - logical, intent(in) :: ldestructive !! Logical flag indicating whether or not this operation should alter the keeps array or not + logical, intent(in) :: ldestructive !! Logical flag indicating whether or not this + !! operation should alter the keeps array or not ! Internals integer(I4B) :: i, nspill, nkeep, nlist real(DP), dimension(:,:), allocatable :: tmp !! Array of values to keep @@ -1174,13 +1210,14 @@ subroutine base_util_spill_arr_I4B(keeps, discards, lspill_list, ldestructive) !! This is the inverse of a spill operation implicit none ! Arguments - integer(I4B), dimension(:), allocatable, intent(inout) :: keeps !! Array of values to keep - integer(I4B), dimension(:), allocatable, intent(inout) :: discards !! Array of discards - logical, dimension(:), intent(in) :: lspill_list !! Logical array of bodies to spill into the discards - logical, intent(in) :: ldestructive !! Logical flag indicating whether or not this operation should alter the keeps array or not + integer(I4B), dimension(:), allocatable, intent(inout) :: keeps !! Array of values to keep + integer(I4B), dimension(:), allocatable, intent(inout) :: discards !! Array of discards + logical, dimension(:), intent(in) :: lspill_list !! Logical array of bodies to spill into the discards + logical, intent(in) :: ldestructive!! Logical flag indicating whether or not this + !! operation should alter the keeps array or not ! Internals integer(I4B) :: nspill, nkeep, nlist - integer(I4B), dimension(:), allocatable :: tmp !! Array of values to keep + integer(I4B), dimension(:), allocatable :: tmp !! Array of values to keep nkeep = count(.not.lspill_list(:)) nspill = count(lspill_list(:)) @@ -1217,10 +1254,11 @@ subroutine base_util_spill_arr_I8B(keeps, discards, lspill_list, ldestructive) !! This is the inverse of a spill operation implicit none ! Arguments - integer(I8B), dimension(:), allocatable, intent(inout) :: keeps !! Array of values to keep - integer(I8B), dimension(:), allocatable, intent(inout) :: discards !! Array of discards - logical, dimension(:), intent(in) :: lspill_list !! Logical array of bodies to spill into the discards - logical, intent(in) :: ldestructive !! Logical flag indicating whether or not this operation should alter the keeps array or not + integer(I8B), dimension(:), allocatable, intent(inout) :: keeps !! 
Array of values to keep + integer(I8B), dimension(:), allocatable, intent(inout) :: discards !! Array of discards + logical, dimension(:), intent(in) :: lspill_list !! Logical array of bodies to spill into the discards + logical, intent(in) :: ldestructive!! Logical flag indicating whether or not this + !! operation should alter the keeps array or not ! Internals integer(I4B) :: nspill, nkeep, nlist integer(I8B), dimension(:), allocatable :: tmp !! Array of values to keep @@ -1263,7 +1301,8 @@ subroutine base_util_spill_arr_logical(keeps, discards, lspill_list, ldestructiv logical, dimension(:), allocatable, intent(inout) :: keeps !! Array of values to keep logical, dimension(:), allocatable, intent(inout) :: discards !! Array of discards logical, dimension(:), intent(in) :: lspill_list !! Logical array of bodies to spill into the discards - logical, intent(in) :: ldestructive !! Logical flag indicating whether or not this operation should alter the keeps array or no + logical, intent(in) :: ldestructive !! Logical flag indicating whether or not this operation + !! should alter the keeps array or no ! Internals integer(I4B) :: nspill, nkeep, nlist logical, dimension(:), allocatable :: tmp !! Array of values to keep @@ -1918,7 +1957,7 @@ pure subroutine base_util_sort_rearrange_arr_char_string(arr, ind, n) integer(I4B), dimension(:), intent(in) :: ind !! Index to rearrange against integer(I4B), intent(in) :: n !! Number of elements in arr and ind to rearrange ! Internals - character(len=STRMAX), dimension(:), allocatable :: tmp !! Temporary copy of arry used during rearrange operation + character(len=STRMAX), dimension(:), allocatable :: tmp !! Temporary copy of arry used during rearrange operation if (.not. allocated(arr) .or. n <= 0) return allocate(tmp, mold=arr) @@ -2062,7 +2101,9 @@ subroutine base_util_unique_DP(input_array, output_array, index_map) ! Arguments real(DP), dimension(:), intent(in) :: input_array !! Unsorted input array real(DP), dimension(:), allocatable, intent(out) :: output_array !! Sorted array of unique values - integer(I4B), dimension(:), allocatable, intent(out) :: index_map !! An array of the same size as input_array that such that any for any index i, output_array(index_map(i)) = input_array(i) + integer(I4B), dimension(:), allocatable, intent(out) :: index_map !! An array of the same size as input_array that such + !! that any for any index i, + !! output_array(index_map(i)) = input_array(i) ! Internals real(DP), dimension(:), allocatable :: unique_array integer(I4B) :: n @@ -2095,7 +2136,9 @@ subroutine base_util_unique_I4B(input_array, output_array, index_map) ! Arguments integer(I4B), dimension(:), intent(in) :: input_array !! Unsorted input array integer(I4B), dimension(:), allocatable, intent(out) :: output_array !! Sorted array of unique values - integer(I4B), dimension(:), allocatable, intent(out) :: index_map !! An array of the same size as input_array that such that any for any index i, output_array(index_map(i)) = input_array(i) + integer(I4B), dimension(:), allocatable, intent(out) :: index_map !! An array of the same size as input_array that such + !! that any for any index i, + !! output_array(index_map(i)) = input_array(i) ! 
Internals integer(I4B), dimension(:), allocatable :: unique_array integer(I4B) :: n, lo, hi diff --git a/src/globals/globals_module.f90 b/src/globals/globals_module.f90 index fd26b3404..dd58d6dae 100644 --- a/src/globals/globals_module.f90 +++ b/src/globals/globals_module.f90 @@ -37,9 +37,12 @@ module globals real(DP), parameter :: GC = 6.6743E-11_DP !! Universal gravitational constant in SI units real(DP), parameter :: einsteinC = 299792458.0_DP !! Speed of light in SI units - integer(I4B), parameter :: LOWERCASE_BEGIN = iachar('a') !! ASCII character set parameter for lower to upper conversion - start of lowercase - integer(I4B), parameter :: LOWERCASE_END = iachar('z') !! ASCII character set parameter for lower to upper conversion - end of lowercase - integer(I4B), parameter :: UPPERCASE_OFFSET = iachar('A') - iachar('a') !! ASCII character set parameter for lower to upper conversion - offset between upper and lower + integer(I4B), parameter :: LOWERCASE_BEGIN = iachar('a') !! ASCII character set parameter for lower to upper conversion - start + !! of lowercase + integer(I4B), parameter :: LOWERCASE_END = iachar('z') !! ASCII character set parameter for lower to upper conversion - end of + !! lowercase + integer(I4B), parameter :: UPPERCASE_OFFSET = iachar('A') - iachar('a') !! ASCII character set parameter for lower to upper + !! conversion - offset between upper and lower real(SP), parameter :: VERSION_NUMBER = 1.0_SP !! Swiftest version @@ -103,9 +106,11 @@ module globals integer(I4B), parameter :: NDUMPFILES = 2 character(*), parameter :: PARAM_RESTART_FILE = "param.restart.in" #ifdef COARRAY - character(STRMAX) :: SWIFTEST_LOG_FILE !! Name of file to use to log output when using "COMPACT" or "PROGRESS" display style (each co-image gets its own log file) + character(STRMAX) :: SWIFTEST_LOG_FILE !! Name of file to use to log output when using "COMPACT" or + !! "PROGRESS" display style (each co-image gets its own log file) #else - character(*), parameter :: SWIFTEST_LOG_FILE = "swiftest.log" !! Name of file to use to log output when using "COMPACT" or "PROGRESS" display style + character(*), parameter :: SWIFTEST_LOG_FILE = "swiftest.log" !! Name of file to use to log output when using "COMPACT" or + !! "PROGRESS" display style #endif integer(I4B), parameter :: SWIFTEST_LOG_OUT = 33 !! File unit for log file when using "COMPACT" display style @@ -117,7 +122,8 @@ module globals character(*), parameter :: BIN_OUTFILE = 'data.nc' integer(I4B), parameter :: BINUNIT = 20 !! File unit number for the binary output file integer(I4B), parameter :: PARTICLEUNIT = 44 !! File unit number for the binary particle info output file - integer(I4B), parameter :: LUN = 42 !! File unit number for files that are opened and closed within a single subroutine call, and therefore should not collide + integer(I4B), parameter :: LUN = 42 !! File unit number for files that are opened and closed within a single + !! subroutine call, and therefore should not collide !> Miscellaneous constants: integer(I4B), parameter :: NDIM = 3 !! 
Number of dimensions in our reality From 6b474370c95e44ef50e98a5cba2eaaa14ef32bac Mon Sep 17 00:00:00 2001 From: David Minton Date: Tue, 6 Jun 2023 14:38:57 -0400 Subject: [PATCH 05/25] Fixed typo in Dockerfile and set the Swiftest build type to RELEASE --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index cb81fb7f6..ca322e71d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,7 +13,7 @@ RUN apt-get update && apt-get upgrade -y && \ | gpg --dearmor | tee /usr/share/keyrings/oneapi-archive-keyring.gpg > /dev/null && \ echo "deb [signed-by=/usr/share/keyrings/oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main" | tee /etc/apt/sources.list.d/oneAPI.list && \ apt-get -y update && apt-get upgrade -y && \ - apt-get install -y intel-hpckit && \ + apt-get install -y intel-hpckit # Set Intel compiler environment variables ENV INTEL_DIR="/opt/intel/oneapi" @@ -152,7 +152,7 @@ RUN echo 'find_path(NETCDF_INCLUDE_DIR NAMES netcdf.mod HINTS ENV NETCDF_FORTRAN 'set(NETCDF_LIBRARIES ${NETCDF_FORTRAN_LIBRARY} ${NETCDF_LIBRARY} ${HDF5_HL_LIBRARY} ${HDF5_LIBRARY} ${SZ_LIBRARY} ${Z_LIBRARY} ${ZSTD_LIBRARY} ${BZ2_LIBRARY} ${CURL_LIBRARY} ${XML2_LIBRARY} )\n' \ 'mark_as_advanced(NETCDF_LIBRARY NETCDF_FORTRAN_LIBRARY NETCDF_INCLUDE_DIR)\n' > /swiftest/cmake/Modules/FindNETCDF.cmake && \ cd swiftest && \ - cmake -S . -B build -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" -DCONTAINERIZE=ON -DUSE_COARRAY=OFF -DCMAKE_BUILD_TYPE=DEBUG -DBUILD_SHARED_LIBS=OFF &&\ + cmake -S . -B build -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" -DCONTAINERIZE=ON -DUSE_COARRAY=OFF -DCMAKE_BUILD_TYPE=RELEASE -DBUILD_SHARED_LIBS=OFF &&\ cmake --build build --verbose && \ cmake --install build From a621339a35db81095f68abbfd0d7b41177868de4 Mon Sep 17 00:00:00 2001 From: David Minton Date: Tue, 6 Jun 2023 15:34:20 -0400 Subject: [PATCH 06/25] Fixed another Dockerfile typo --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index ca322e71d..0ac00b25d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -100,7 +100,7 @@ RUN wget -qO- https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.14/hdf5-1.14 ./HDF5-1.14.1-Linux.sh --skip-license && \ cp -R HDF_Group/HDF5/1.14.1/lib/*.a ${HDF5_ROOT}/lib/ && \ cp -R HDF_Group/HDF5/1.14.1/include/* ${HDF5_ROOT}/include/ && \ - cp zlib-1.2.13/zlib.h ${HDF5_INCLUDE_DIR}/ + cp /zlib-1.2.13/zlib.h ${HDF5_INCLUDE_DIR}/ ENV LD_LIBRARY_PATH="/lib/x86_64-linux-gnu:${LD_LIBRARY_PATH}" ENV LDFLAGS="-static-intel -lhdf5_hl -lhdf5 -lsz -lm -lz -lzstd -lbz2 -lcurl -lxml2" From 20865a6dc92a45353a6a05ca0a74be5281338999 Mon Sep 17 00:00:00 2001 From: David Minton Date: Tue, 6 Jun 2023 15:35:08 -0400 Subject: [PATCH 07/25] Removed do concurrents from operators and switched to manual cross product when in a do concurrent --- src/collision/collision_util.f90 | 62 +++++++++----- src/encounter/encounter_check.f90 | 10 +-- src/fraggle/fraggle_generate.f90 | 130 ++++++++++++++++++++---------- src/operator/operator_cross.f90 | 42 ++-------- src/operator/operator_mag.f90 | 18 +---- src/operator/operator_unit.f90 | 18 +---- 6 files changed, 147 insertions(+), 133 deletions(-) diff --git a/src/collision/collision_util.f90 b/src/collision/collision_util.f90 index 179fb289c..75c836b4c 100644 --- a/src/collision/collision_util.f90 +++ b/src/collision/collision_util.f90 @@ -146,13 +146,15 @@ end subroutine collision_util_get_idvalues_snapshot module subroutine collision_util_get_energy_and_momentum(self, nbody_system, param, 
phase) !! Author: David A. Minton !! - !! Calculates total system energy in either the pre-collision outcome state (phase = "before") or the post-collision outcome state (lbefore = .false.) + !! Calculates total system energy in either the pre-collision outcome state (phase = "before") or the post-collision outcome + !! state (lbefore = .false.) implicit none ! Arguments class(collision_basic), intent(inout) :: self !! Encounter collision system object class(base_nbody_system), intent(inout) :: nbody_system !! Swiftest nbody system object class(base_parameters), intent(inout) :: param !! Current Swiftest run configuration parameters - character(len=*), intent(in) :: phase !! One of "before" or "after", indicating which phase of the calculation this needs to be done + character(len=*), intent(in) :: phase !! One of "before" or "after", indicating which phase of the + !! calculation this needs to be done ! Internals integer(I4B) :: i, phase_val, nfrag @@ -179,9 +181,15 @@ module subroutine collision_util_get_energy_and_momentum(self, nbody_system, par do concurrent(i = 1:2) #endif impactors%ke_orbit(i) = 0.5_DP * impactors%mass(i) * dot_product(impactors%vc(:,i), impactors%vc(:,i)) - impactors%ke_spin(i) = 0.5_DP * impactors%mass(i) * impactors%radius(i)**2 * impactors%Ip(3,i) * dot_product(impactors%rot(:,i), impactors%rot(:,i)) + impactors%ke_spin(i) = 0.5_DP * impactors%mass(i) * impactors%radius(i)**2 * impactors%Ip(3,i) & + * dot_product(impactors%rot(:,i), impactors%rot(:,i)) impactors%be(i) = -3 * impactors%Gmass(i) * impactors%mass(i) / (5 * impactors%radius(i)) - impactors%L_orbit(:,i) = impactors%mass(i) * impactors%rc(:,i) .cross. impactors%vc(:,i) + impactors%L_orbit(1,i) = impactors%mass(i) * (impactors%rc(2,i) * impactors%vc(3,i) & + - impactors%rc(3,i) * impactors%vc(2,i)) + impactors%L_orbit(2,i) = impactors%mass(i) * (impactors%rc(3,i) * impactors%vc(1,i) & + - impactors%rc(1,i) * impactors%vc(3,i)) + impactors%L_orbit(3,i) = impactors%mass(i) * (impactors%rc(1,i) * impactors%vc(2,i) & + - impactors%rc(2,i) * impactors%vc(1,i)) impactors%L_spin(:,i) = impactors%mass(i) * impactors%radius(i)**2 * impactors%Ip(3,i) * impactors%rot(:,i) end do self%L_orbit(:,phase_val) = sum(impactors%L_orbit(:,1:2),dim=2) @@ -190,7 +198,8 @@ module subroutine collision_util_get_energy_and_momentum(self, nbody_system, par self%ke_orbit(phase_val) = sum(impactors%ke_orbit(1:2)) self%ke_spin(phase_val) = sum(impactors%ke_spin(1:2)) self%be(phase_val) = sum(impactors%be(1:2)) - call swiftest_util_get_potential_energy(2, [(.true., i = 1, 2)], 0.0_DP, impactors%Gmass, impactors%mass, impactors%rb, self%pe(phase_val)) + call swiftest_util_get_potential_energy(2, [(.true., i = 1, 2)], 0.0_DP, impactors%Gmass, impactors%mass, & + impactors%rb, self%pe(phase_val)) self%te(phase_val) = self%ke_orbit(phase_val) + self%ke_spin(phase_val) + self%be(phase_val) + self%pe(phase_val) else if (phase_val == 2) then #ifdef DOCONLOC @@ -199,11 +208,18 @@ module subroutine collision_util_get_energy_and_momentum(self, nbody_system, par do concurrent(i = 1:nfrag) #endif fragments%ke_orbit(i) = 0.5_DP * fragments%mass(i) * dot_product(fragments%vc(:,i), fragments%vc(:,i)) - fragments%ke_spin(i) = 0.5_DP * fragments%mass(i) * fragments%radius(i)**2 * fragments%Ip(3,i) * dot_product(fragments%rot(:,i), fragments%rot(:,i)) - fragments%L_orbit(:,i) = fragments%mass(i) * fragments%rc(:,i) .cross. 
fragments%vc(:,i) + fragments%ke_spin(i) = 0.5_DP * fragments%mass(i) * fragments%radius(i)**2 * fragments%Ip(3,i) & + * dot_product(fragments%rot(:,i), fragments%rot(:,i)) + fragments%L_orbit(1,i) = fragments%mass(i) * (fragments%rc(2,i) * fragments%vc(3,i) - & + fragments%rc(3,i) * fragments%vc(2,i)) + fragments%L_orbit(2,i) = fragments%mass(i) * (fragments%rc(3,i) * fragments%vc(1,i) - & + fragments%rc(1,i) * fragments%vc(3,i)) + fragments%L_orbit(3,i) = fragments%mass(i) * (fragments%rc(1,i) * fragments%vc(2,i) - & + fragments%rc(2,i) * fragments%vc(1,i)) fragments%L_spin(:,i) = fragments%mass(i) * fragments%radius(i)**2 * fragments%Ip(3,i) * fragments%rot(:,i) end do - call swiftest_util_get_potential_energy(nfrag, [(.true., i = 1, nfrag)], 0.0_DP, fragments%Gmass, fragments%mass, fragments%rb, fragments%pe) + call swiftest_util_get_potential_energy(nfrag, [(.true., i = 1, nfrag)], 0.0_DP, fragments%Gmass, fragments%mass, & + fragments%rb, fragments%pe) fragments%be = sum(-3*fragments%Gmass(1:nfrag)*fragments%mass(1:nfrag)/(5*fragments%radius(1:nfrag))) fragments%L_orbit_tot(:) = sum(fragments%L_orbit(:,1:nfrag),dim=2) fragments%L_spin_tot(:) = sum(fragments%L_spin(:,1:nfrag),dim=2) @@ -541,9 +557,11 @@ end subroutine collision_util_setup_fragments module subroutine collision_util_set_coordinate_collider(self) - + !! author: David A. Minton + !! !! - !! Defines the collisional coordinate nbody_system, including the unit vectors of both the nbody_system and individual fragments. + !! Defines the collisional coordinate nbody_system, including the unit vectors of both the nbody_system and individual + !! fragments. implicit none ! Arguments class(collision_basic), intent(inout) :: self !! Collisional nbody_system @@ -564,7 +582,8 @@ end subroutine collision_util_set_coordinate_collider module subroutine collision_util_set_coordinate_fragments(self) !! author: David A. Minton !! - !! Defines the collisional coordinate nbody_system, including the unit vectors of both the nbody_system and individual fragments. + !! Defines the collisional coordinate nbody_system, including the unit vectors of both the nbody_system and individual + !! fragments. implicit none ! Arguments class(collision_fragments), intent(inout) :: self !! Collisional nbody_system @@ -590,7 +609,8 @@ end subroutine collision_util_set_coordinate_fragments module subroutine collision_util_set_coordinate_impactors(self) !! author: David A. Minton !! - !! Defines the collisional coordinate nbody_system, including the unit vectors of both the nbody_system and individual fragments. + !! Defines the collisional coordinate nbody_system, including the unit vectors of both the nbody_system and individual + !! fragments. implicit none ! Arguments class(collision_impactors), intent(inout) :: self !! Collisional nbody_system @@ -602,8 +622,8 @@ module subroutine collision_util_set_coordinate_impactors(self) delta_v(:) = impactors%vb(:, 2) - impactors%vb(:, 1) delta_r(:) = impactors%rb(:, 2) - impactors%rb(:, 1) - ! We will initialize fragments on a plane defined by the pre-impact nbody_system, with the z-axis aligned with the angular momentum vector - ! and the y-axis aligned with the pre-impact distance vector. + ! We will initialize fragments on a plane defined by the pre-impact nbody_system, with the z-axis aligned with the angular + ! momentum vector and the y-axis aligned with the pre-impact distance vector. ! 
y-axis is the separation distance impactors%y_unit(:) = .unit.delta_r(:) @@ -632,14 +652,16 @@ module subroutine collision_util_set_coordinate_impactors(self) impactors%vc(:,1) = impactors%vb(:,1) - impactors%vbcom(:) impactors%vc(:,2) = impactors%vb(:,2) - impactors%vbcom(:) - ! Find the point of impact between the two bodies, defined as the location (in the collisional coordinate system) at the surface of body 1 along the line connecting the two bodies. + ! Find the point of impact between the two bodies, defined as the location (in the collisional coordinate system) at the + ! surface of body 1 along the line connecting the two bodies. impactors%rcimp(:) = impactors%rb(:,1) + impactors%radius(1) * impactors%y_unit(:) - impactors%rbcom(:) ! Set the velocity direction as the "bounce" direction" for disruptions, and body 2's direction for hit and runs if (impactors%regime == COLLRESOLVE_REGIME_HIT_AND_RUN) then impactors%bounce_unit(:) = .unit. impactors%vc(:,2) else - impactors%bounce_unit(:) = .unit. (impactors%vc(:,2) - 2 * dot_product(impactors%vc(:,2),impactors%y_unit(:)) * impactors%y_unit(:)) + impactors%bounce_unit(:) = .unit. (impactors%vc(:,2) - 2 * dot_product(impactors%vc(:,2),impactors%y_unit(:)) & + * impactors%y_unit(:)) end if end associate @@ -742,7 +764,8 @@ module subroutine collision_util_snapshot(self, param, nbody_system, t, arg) class(base_parameters), intent(inout) :: param !! Current run configuration parameters class(base_nbody_system), intent(inout) :: nbody_system !! Swiftest nbody system object to store real(DP), intent(in), optional :: t !! Time of snapshot if different from nbody_system time - character(*), intent(in), optional :: arg !! "before": takes a snapshot just before the collision. "after" takes the snapshot just after the collision. + character(*), intent(in), optional :: arg !! "before": takes a snapshot just before the collision. "after" + !! takes the snapshot just after the collision. ! 
Arguments class(collision_snapshot), allocatable, save :: snapshot character(len=:), allocatable :: stage @@ -816,8 +839,9 @@ module subroutine collision_util_snapshot(self, param, nbody_system, t, arg) write(message,*) trim(adjustl(plnew%info(i)%name)), " (", trim(adjustl(plnew%info(i)%particle_type)),")" call swiftest_io_log_one_message(COLLISION_LOG_OUT, message) end do - call swiftest_io_log_one_message(COLLISION_LOG_OUT, "***********************************************************" // & - "***********************************************************") + call swiftest_io_log_one_message(COLLISION_LOG_OUT, & + "***********************************************************" // & + "***********************************************************") allocate(after_snap%pl, source=plnew) end select deallocate(after_orig%pl) diff --git a/src/encounter/encounter_check.f90 b/src/encounter/encounter_check.f90 index 2d934e7d8..deef9bdda 100644 --- a/src/encounter/encounter_check.f90 +++ b/src/encounter/encounter_check.f90 @@ -179,7 +179,7 @@ subroutine encounter_check_all_sort_and_sweep_plpl(npl, r, v, renc, dt, nenc, in #else do concurrent (i = 1:npl) #endif - rmag = .mag.r(:,i) + rmag = norm2(r(:,i)) rmax(i) = rmag + RSWEEP_FACTOR * renc(i) rmin(i) = rmag - RSWEEP_FACTOR * renc(i) end do @@ -236,7 +236,7 @@ subroutine encounter_check_all_sort_and_sweep_plplm(nplm, nplt, rplm, vplm, rplt #else do concurrent (i = 1:nplm) #endif - rmag = .mag.rplm(:,i) + rmag = norm2(rplm(:,i)) rmax(i) = rmag + RSWEEP_FACTOR * rencm(i) rmin(i) = rmag - RSWEEP_FACTOR * rencm(i) end do @@ -245,7 +245,7 @@ subroutine encounter_check_all_sort_and_sweep_plplm(nplm, nplt, rplm, vplm, rplt #else do concurrent (i = 1:nplt) #endif - rmag = .mag.rplt(:,i) + rmag = norm2(rplt(:,i)) rmax(nplm+i) = rmag + RSWEEP_FACTOR * renct(i) rmin(nplm+i) = rmag - RSWEEP_FACTOR * renct(i) end do @@ -304,7 +304,7 @@ subroutine encounter_check_all_sort_and_sweep_pltp(npl, ntp, rpl, vpl, rtp, vtp, #else do concurrent (i = 1:npl) #endif - rmag = .mag.rpl(:,i) + rmag = norm2(rpl(:,i)) rmax(i) = rmag + RSWEEP_FACTOR * rencpl(i) rmin(i) = rmag - RSWEEP_FACTOR * rencpl(i) end do @@ -313,7 +313,7 @@ subroutine encounter_check_all_sort_and_sweep_pltp(npl, ntp, rpl, vpl, rtp, vtp, #else do concurrent (i = 1:ntp) #endif - rmag = .mag.rtp(:,i) + rmag = norm2(rtp(:,i)) rmax(npl+i) = rmag + RSWEEP_FACTOR * renctp(i) rmin(npl+i) = rmag - RSWEEP_FACTOR * renctp(i) end do diff --git a/src/fraggle/fraggle_generate.f90 b/src/fraggle/fraggle_generate.f90 index 5ec993fba..cd1df7033 100644 --- a/src/fraggle/fraggle_generate.f90 +++ b/src/fraggle/fraggle_generate.f90 @@ -59,7 +59,8 @@ module subroutine fraggle_generate(self, nbody_system, param, t) call self%set_mass_dist(param) call self%disrupt(nbody_system, param, t, lfailure) if (lfailure) then - call swiftest_io_log_one_message(COLLISION_LOG_OUT, "Fraggle failed to find a solution to match energy contraint. Treating this as a merge.") + call swiftest_io_log_one_message(COLLISION_LOG_OUT, & + "Fraggle failed to find a solution to match energy contraint. Treating this as a merge.") call self%merge(nbody_system, param, t) ! Use the default collision model, which is merge return end if @@ -131,8 +132,9 @@ module subroutine fraggle_generate_disrupt(self, nbody_system, param, t, lfailur real(DP), parameter :: fail_scale_initial = 1.0003_DP integer(I4B) :: nfrag_start - ! The minimization and linear solvers can sometimes lead to floating point exceptions. Rather than halting the code entirely if this occurs, we - ! 
can simply fail the attempt and try again. So we need to turn off any floating point exception halting modes temporarily + ! The minimization and linear solvers can sometimes lead to floating point exceptions. Rather than halting the code entirely + ! if this occurs, we can simply fail the attempt and try again. So we need to turn off any floating point exception halting + ! modes temporarily call ieee_get_halting_mode(IEEE_ALL,fpe_halting_modes) ! Save the current halting modes so we can turn them off temporarily fpe_quiet_modes(:) = .false. call ieee_set_halting_mode(IEEE_ALL,fpe_quiet_modes) @@ -168,7 +170,8 @@ module subroutine fraggle_generate_disrupt(self, nbody_system, param, t, lfailur if (.not.lfailure) then if (self%fragments%nbody /= nfrag_start) then write(message,*) self%fragments%nbody - call swiftest_io_log_one_message(COLLISION_LOG_OUT, "Fraggle found a solution with " // trim(adjustl(message)) // " fragments" ) + call swiftest_io_log_one_message(COLLISION_LOG_OUT, "Fraggle found a solution with " // trim(adjustl(message)) & + // " fragments" ) end if call self%get_energy_and_momentum(nbody_system, param, phase="after") @@ -176,7 +179,8 @@ module subroutine fraggle_generate_disrupt(self, nbody_system, param, t, lfailur dE = self%te(2) - self%te(1) call swiftest_io_log_one_message(COLLISION_LOG_OUT, "All quantities in collision system natural units") - call swiftest_io_log_one_message(COLLISION_LOG_OUT, "* Conversion factors (collision system units / nbody system units):") + call swiftest_io_log_one_message(COLLISION_LOG_OUT, & + "* Conversion factors (collision system units / nbody system units):") write(message,*) "* Mass: ", self%mscale call swiftest_io_log_one_message(COLLISION_LOG_OUT, message) write(message,*) "* Distance: ", self%dscale @@ -200,7 +204,8 @@ module subroutine fraggle_generate_disrupt(self, nbody_system, param, t, lfailur end if call self%set_original_scale() - self%max_rot = MAX_ROT_SI * param%TU2S ! Re-compute the spin limit from scratch so it doesn't drift due to floating point errors every time we convert + self%max_rot = MAX_ROT_SI * param%TU2S ! Re-compute the spin limit from scratch so it doesn't drift due to floating point + ! errors every time we convert ! Restore the big array if (lk_plpl) call pl%flatten(param) @@ -248,7 +253,8 @@ module subroutine fraggle_generate_hitandrun(self, nbody_system, param, t) end if ! The Fraggle disruption model (and its extended types allow for non-pure hit and run. - if (impactors%mass_dist(2) > 0.9_DP * impactors%mass(jproj)) then ! Pure hit and run, so we'll just keep the two bodies untouched + if (impactors%mass_dist(2) > 0.9_DP * impactors%mass(jproj)) then ! Pure hit and run, so we'll just keep the two bodies + ! untouched call swiftest_io_log_one_message(COLLISION_LOG_OUT, "Pure hit and run. No new fragments generated.") call self%collision_basic%hitandrun(nbody_system, param, t) return @@ -327,7 +333,8 @@ module subroutine fraggle_generate_merge(self, nbody_system, param, t) if (rotmag < self%max_rot) then call self%collision_basic%merge(nbody_system, param, t) else - call swiftest_io_log_one_message(COLLISION_LOG_OUT, "Merger would break the spin barrier. Converting to pure hit and run" ) + call swiftest_io_log_one_message(COLLISION_LOG_OUT, & + "Merger would break the spin barrier. 
Converting to pure hit and run" ) impactors%mass_dist(1:2) = impactors%mass(1:2) call self%hitandrun(nbody_system, param, t) end if @@ -342,9 +349,9 @@ module subroutine fraggle_generate_pos_vec(collider, nbody_system, param, lfailu !! Author: Jennifer L.L. Pouplin, Carlisle A. Wishard, and David A. Minton !! !! Initializes the position vectors of the fragments around the center of mass based on the collision style. - !! For hit and run with disruption, the fragments are generated in a random cloud around the smallest of the two colliders (body 2) - !! For disruptive collisions, the fragments are generated in a random cloud around the impact point. Bodies are checked for overlap and - !! regenerated if they overlap. + !! For hit and run with disruption, the fragments are generated in a random cloud around the smallest of the two colliders + !! (body 2). For disruptive collisions, the fragments are generated in a random cloud around the impact point. Bodies are + !! checked for overlap and regenerated if they overlap. implicit none ! Arguments class(collision_fraggle), intent(inout) :: collider !! Fraggle collision system object @@ -372,7 +379,8 @@ module subroutine fraggle_generate_pos_vec(collider, nbody_system, param, lfailu lhitandrun = (impactors%regime == COLLRESOLVE_REGIME_HIT_AND_RUN) ! We will treat the first two fragments of the list as special cases. - ! Place the first two bodies at the centers of the two fragment clouds, but be sure they are sufficiently far apart to avoid overlap + ! Place the first two bodies at the centers of the two fragment clouds, but be sure they are sufficiently far apart to + ! avoid overlap if (lhitandrun) then rdistance = impactors%radius(2) istart = 2 @@ -385,11 +393,14 @@ module subroutine fraggle_generate_pos_vec(collider, nbody_system, param, lfailu end if mass_rscale(1:istart-1) = 1.0_DP - ! Give the fragment positions a random value that is scaled with fragment mass so that the more massive bodies tend to be closer to the impact point - ! Later, velocities will be scaled such that the farther away a fragment is placed from the impact point, the higher will its velocity be. + ! Give the fragment positions a random value that is scaled with fragment mass so that the more massive bodies tend to be + ! closer to the impact point. Later, velocities will be scaled such that the farther away a fragment is placed from the + ! impact point, the higher will its velocity be. call random_number(mass_rscale(istart:nfrag)) mass_rscale(istart:nfrag) = (mass_rscale(istart:nfrag) + 1.0_DP) / 2 - mass_rscale(istart:nfrag) = mass_rscale(istart:nfrag) * (sum(fragments%mass(istart:nfrag)) / fragments%mass(istart:nfrag))**(0.125_DP) ! The power is arbitrary. It just gives the velocity a small mass dependence + ! The power of 0.125 in the scaling below is arbitrary. It just gives the velocity a small mass dependence + mass_rscale(istart:nfrag) = mass_rscale(istart:nfrag) * (sum(fragments%mass(istart:nfrag)) & + / fragments%mass(istart:nfrag))**(0.125_DP) mass_rscale(istart:nfrag) = mass_rscale(istart:nfrag) / minval(mass_rscale(istart:nfrag)) loverlap(:) = .true. @@ -402,8 +413,10 @@ module subroutine fraggle_generate_pos_vec(collider, nbody_system, param, lfailu fragment_cloud_center(:,2) = impactors%rc(:,2) fragments%rc(:,1) = fragment_cloud_center(:,1) else ! 
Keep the first and second bodies at approximately their original location, but so as not to be overlapping - fragment_cloud_center(:,1) = impactors%rcimp(:) - rbuffer * max(fragments%radius(1),impactors%radius(1)) * impactors%y_unit(:) - fragment_cloud_center(:,2) = impactors%rcimp(:) + rbuffer * max(fragments%radius(2),impactors%radius(2)) * impactors%y_unit(:) + fragment_cloud_center(:,1) = impactors%rcimp(:) - rbuffer * max(fragments%radius(1),& + impactors%radius(1)) * impactors%y_unit(:) + fragment_cloud_center(:,2) = impactors%rcimp(:) + rbuffer * max(fragments%radius(2), & + impactors%radius(2)) * impactors%y_unit(:) fragment_cloud_radius(:) = rdistance / pack_density fragments%rc(:,1:2) = fragment_cloud_center(:,1:2) end if @@ -420,7 +433,8 @@ module subroutine fraggle_generate_pos_vec(collider, nbody_system, param, lfailu ! Randomly place the n>2 fragments inside their cloud until none are overlapping #ifdef DOCONLOC - do concurrent(i = istart:nfrag, loverlap(i)) shared(fragments, impactors, fragment_cloud_radius, fragment_cloud_center, loverlap, mass_rscale, u, phi, theta, lhitandrun) local(j, direction) + do concurrent(i = istart:nfrag, loverlap(i)) shared(fragments, impactors, fragment_cloud_radius, fragment_cloud_center,& + loverlap, mass_rscale, u, phi, theta, lhitandrun) local(j, direction) #else do concurrent(i = istart:nfrag, loverlap(i)) #endif @@ -442,13 +456,15 @@ module subroutine fraggle_generate_pos_vec(collider, nbody_system, param, lfailu ! Stretch out the hit and run cloud along the flight trajectory if (lhitandrun) then - fragments%rc(:,i) = fragments%rc(:,i) * (1.0_DP + 2 * fragment_cloud_radius(j) * mass_rscale(i) * impactors%bounce_unit(:)) + fragments%rc(:,i) = fragments%rc(:,i) * (1.0_DP + 2 * fragment_cloud_radius(j) * mass_rscale(i) & + * impactors%bounce_unit(:)) end if fragments%rc(:,i) = fragments%rc(:,i) + fragment_cloud_center(:,j) if (lhitandrun) then - fragments%rc(:,i) = fragments%rc(:,i) + 2 * fragment_cloud_radius(j) * mass_rscale(i) * impactors%bounce_unit(:) ! Shift the stretched cloud downrange + ! Shift the stretched cloud downrange + fragments%rc(:,i) = fragments%rc(:,i) + 2 * fragment_cloud_radius(j) * mass_rscale(i) * impactors%bounce_unit(:) else ! Make sure that the fragments are positioned away from the impact point direction = dot_product(fragments%rc(:,i) - impactors%rcimp(:), fragment_cloud_center(:,j) - impactors%rcimp(:)) @@ -460,7 +476,8 @@ module subroutine fraggle_generate_pos_vec(collider, nbody_system, param, lfailu end do ! Because body 1 and 2 are initialized near the original impactor positions, then if these bodies are still overlapping - ! when the rest are not, we will randomly walk their position in space so as not to move them too far from their starting position + ! when the rest are not, we will randomly walk their position in space so as not to move them too far from their + ! starting position if (all(.not.loverlap(istart:nfrag)) .and. any(loverlap(1:istart-1))) then #ifdef DOCONLOC do concurrent(i = 1:istart-1,loverlap(i)) shared(fragments,loverlap, u, theta, i) local(rwalk, dis) @@ -525,8 +542,10 @@ module subroutine fraggle_generate_rot_vec(collider, nbody_system, param) class(swiftest_parameters), intent(inout) :: param !! Current run configuration parameters ! Internals integer(I4B) :: i, nfrag - real(DP), parameter :: FRAG_ROT_FAC = 0.1_DP ! Fraction of projectile rotation magnitude to add as random noise to fragment rotation - real(DP), parameter :: hitandrun_momentum_transfer = 0.01_DP ! 
Fraction of projectile momentum transfered to target in a hit and run + real(DP), parameter :: FRAG_ROT_FAC = 0.1_DP ! Fraction of projectile rotation magnitude to add as random noise to fragment + ! rotation + real(DP), parameter :: hitandrun_momentum_transfer = 0.01_DP ! Fraction of projectile momentum transfered to target in a hit + ! and run real(DP) :: mass_fac real(DP), dimension(NDIM) :: drot, dL integer(I4B), parameter :: MAXLOOP = 10 @@ -536,14 +555,15 @@ module subroutine fraggle_generate_rot_vec(collider, nbody_system, param) nfrag = collider%fragments%nbody lhitandrun = (impactors%regime == COLLRESOLVE_REGIME_HIT_AND_RUN) - ! Initialize fragment rotations and velocities to be pre-impact rotation for body 1, and randomized for bodies >1 and scaled to the original rotation. - ! This will get updated later when conserving angular momentum + ! Initialize fragment rotations and velocities to be pre-impact rotation for body 1, and randomized for bodies >1 and + ! scaled to the original rotation. This will get updated later when conserving angular momentum mass_fac = fragments%mass(1) / impactors%mass(1) fragments%rot(:,1) = mass_fac**(5.0_DP/3.0_DP) * impactors%rot(:,1) ! If mass was added, also add spin angular momentum if (mass_fac > 1.0_DP) then - dL(:) = (fragments%mass(1) - impactors%mass(1)) * (impactors%rc(:,2) - impactors%rc(:,1)) .cross. (impactors%vc(:,2) - impactors%vc(:,1)) + dL(:) = (fragments%mass(1) - impactors%mass(1)) * (impactors%rc(:,2) - impactors%rc(:,1)) & + .cross. (impactors%vc(:,2) - impactors%vc(:,1)) drot(:) = dL(:) / (fragments%mass(1) * fragments%radius(1)**2 * fragments%Ip(3,1)) ! Check to make sure we haven't broken the spin barrier. Reduce the rotation change if so do i = 1, MAXLOOP @@ -559,7 +579,8 @@ module subroutine fraggle_generate_rot_vec(collider, nbody_system, param) end if if (lhitandrun) then - dL(:) = hitandrun_momentum_transfer * impactors%mass(2) * (impactors%rc(:,2) - impactors%rc(:,1)) .cross. (impactors%vc(:,2) - impactors%vc(:,1)) + dL(:) = hitandrun_momentum_transfer * impactors%mass(2) * (impactors%rc(:,2) - impactors%rc(:,1)) & + .cross. (impactors%vc(:,2) - impactors%vc(:,1)) drot(:) = dL(:) / (fragments%mass(1) * fragments%radius(1)**2 * fragments%Ip(3,1)) do i = 1, MAXLOOP if (.mag.(fragments%rot(:,1) + drot(:)) < collider%max_rot) exit @@ -580,7 +601,8 @@ module subroutine fraggle_generate_rot_vec(collider, nbody_system, param) do concurrent (i = 2:nfrag) #endif mass_fac = fragments%mass(i) / impactors%mass(2) - fragments%rot(:,i) = mass_fac**(5.0_DP/3.0_DP) * impactors%rot(:,2) + 2 * (fragments%rot(:,i) - 1.0_DP) * FRAG_ROT_FAC * .mag.impactors%rot(:,2) + fragments%rot(:,i) = mass_fac**(5.0_DP/3.0_DP) * impactors%rot(:,2) + 2 * (fragments%rot(:,i) - 1.0_DP) * & + FRAG_ROT_FAC * norm2(impactors%rot(:,2)) end do fragments%rotmag(:) = .mag.fragments%rot(:,:) @@ -603,17 +625,22 @@ module subroutine fraggle_generate_vel_vec(collider, nbody_system, param, lfailu class(swiftest_parameters), intent(inout) :: param !! Current run configuration parameters logical, intent(out) :: lfailure !! Did the velocity computation fail? ! Internals - real(DP), parameter :: ENERGY_SUCCESS_METRIC = 0.1_DP !! Relative energy error to accept as a success (success also must be energy-losing in addition to being within the metric amount) + real(DP), parameter :: ENERGY_SUCCESS_METRIC = 0.1_DP !! Relative energy error to accept as a success (success also must be + !! 
energy-losing in addition to being within the metric amount) real(DP), parameter :: ENERGY_CONVERGENCE_TOL = 1e-3_DP !! Relative change in error before giving up on energy convergence - real(DP) :: MOMENTUM_SUCCESS_METRIC = 10*epsilon(1.0_DP) !! Relative angular momentum error to accept as a success (should be *much* stricter than energy) + real(DP) :: MOMENTUM_SUCCESS_METRIC = 10*epsilon(1.0_DP) !! Relative angular momentum error to accept as a success + !! (should be *much* stricter than energy) integer(I4B) :: i, j, loop, try, istart, nfrag, nsteps, nsteps_best, posloop logical :: lhitandrun, lsupercat - real(DP), dimension(NDIM) :: vimp_unit, rimp, vrot, vdisp, L_residual, L_residual_unit, L_residual_best, dL, drot, rot_new, dL_metric - real(DP) :: vimp, vmag, vesc, dE, E_residual, E_residual_best, E_residual_last, ke_avail, ke_remove, dE_best, fscale, dE_metric, mfrag, rn, dL1_mag, dE_conv + real(DP), dimension(NDIM) :: vimp_unit, rimp, vrot, vdisp, L_residual, L_residual_unit, L_residual_best, dL, drot, rot_new + real(DP), dimension(NDIM) :: dL_metric + real(DP) :: vimp, vmag, vesc, dE, E_residual, E_residual_best, E_residual_last, ke_avail, ke_remove, dE_best, fscale + real(DP) :: dE_metric, mfrag, rn, dL1_mag, dE_conv, vumag integer(I4B), dimension(:), allocatable :: vsign real(DP), dimension(:), allocatable :: vscale real(DP), dimension(:), allocatable :: dLi_mag - ! For the initial "guess" of fragment velocities, this is the minimum and maximum velocity relative to escape velocity that the fragments will have + ! For the initial "guess" of fragment velocities, this is the minimum and maximum velocity relative to escape velocity that + ! the fragments will have real(DP), parameter :: hitandrun_vscale = 0.25_DP real(DP) :: vmin_guess real(DP) :: vmax_guess @@ -688,25 +715,34 @@ module subroutine fraggle_generate_vel_vec(collider, nbody_system, param, lfailu ! Set the velocities of all fragments using all of the scale factors determined above if (istart > 1) fragments%vc(:,1) = impactors%vc(:,1) * impactors%mass(1) / fragments%mass(1) #ifdef DOCONLOC - do concurrent(i = istart:fragments%nbody) shared(fragments,impactors,lhitandrun, vscale, vesc, vsign) local(j,vrot,vmag,vdisp,rimp,vimp_unit) + do concurrent(i = istart:fragments%nbody) shared(fragments,impactors,lhitandrun, vscale, vesc, vsign) & + local(j,vrot,vmag,vdisp,rimp,vimp_unit, vumag) #else do concurrent(i = istart:fragments%nbody) #endif j = fragments%origin_body(i) - vrot(:) = impactors%rot(:,j) .cross. (fragments%rc(:,i) - impactors%rc(:,j)) + vrot(1) = impactors%rot(2,j) * (fragments%rc(3,i) - impactors%rc(3,j)) & + - impactors%rot(3,j) * (fragments%rc(2,i) - impactors%rc(2,j)) + vrot(2) = impactors%rot(3,j) * (fragments%rc(1,i) - impactors%rc(1,j)) & + - impactors%rot(1,j) * (fragments%rc(3,i) - impactors%rc(3,j)) + vrot(3) = impactors%rot(1,j) * (fragments%rc(2,i) - impactors%rc(2,j)) & + - impactors%rot(2,j) * (fragments%rc(1,i) - impactors%rc(1,j)) if (lhitandrun) then - vdisp(:) = .unit.(fragments%rc(:,i) - impactors%rc(:,2)) * vesc + vumag = norm2(fragments%rc(:,i) - impactors%rc(:,2)) + vdisp(:) = (fragments%rc(:,i) - impactors%rc(:,2)) / vumag * vesc fragments%vc(:,i) = vsign(i) * impactors%bounce_unit(:) * vscale(i) + vrot(:) + vdisp(:) else vmag = vscale(i) rimp(:) = fragments%rc(:,i) - impactors%rcimp(:) - vimp_unit(:) = .unit. 
(rimp(:) + vsign(i) * impactors%bounce_unit(:)) + vumag = norm2(rimp(:) + vsign(i) * impactors%bounce_unit(:)) + vimp_unit(:) = (rimp(:) + vsign(i) * impactors%bounce_unit(:)) / vumag fragments%vc(:,i) = vmag * vimp_unit(:) + vrot(:) end if end do fragments%vmag(:) = .mag. fragments%vc(:,:) - ! Every time the collision-frame velocities are altered, we need to be sure to shift everything back to the center-of-mass frame + ! Every time the collision-frame velocities are altered, we need to be sure to shift everything back to the + ! center-of-mass frame call collision_util_shift_vector_to_origin(fragments%mass, fragments%vc) call fragments%set_coordinate_system() @@ -715,7 +751,8 @@ module subroutine fraggle_generate_vel_vec(collider, nbody_system, param, lfailu nsteps = nsteps + 1 mfrag = sum(fragments%mass(istart:fragments%nbody)) - ! Try to put residual angular momentum into the spin, but if this would go past the spin barrier, then put it into velocity shear instead + ! Try to put residual angular momentum into the spin, but if this would go past the spin barrier, then put it into + ! velocity shear instead call collider_local%get_energy_and_momentum(nbody_system, param, phase="after") L_residual(:) = (collider_local%L_total(:,2) - collider_local%L_total(:,1)) L_residual_unit(:) = .unit. L_residual(:) @@ -740,7 +777,8 @@ module subroutine fraggle_generate_vel_vec(collider, nbody_system, param, lfailu if (.mag.rot_new(:) < collider_local%max_rot) then fragments%rot(:,i) = rot_new(:) fragments%rotmag(i) = .mag.fragments%rot(:,i) - else ! We would break the spin barrier here. Add a random component of rotation that is less than what would break the limit. The rest will go in velocity shear + else ! We would break the spin barrier here. Add a random component of rotation that is less than what would + ! break the limit. The rest will go in velocity shear call random_number(drot) call random_number(rn) drot(:) = (rn * collider_local%max_rot - fragments%rotmag(i)) * 2 * (drot(:) - 0.5_DP) @@ -785,7 +823,8 @@ module subroutine fraggle_generate_vel_vec(collider, nbody_system, param, lfailu ! Check if we've converged on our constraints if (all(dL_metric(:) <= 1.0_DP)) then - if ((abs(E_residual) < abs(E_residual_best)) .or. ((dE < 0.0_DP) .and. (dE_best >= 0.0_DP))) then ! This is our best case so far. Save it for posterity + if ((abs(E_residual) < abs(E_residual_best)) .or. ((dE < 0.0_DP) .and. (dE_best >= 0.0_DP))) then + ! This is our best case so far. Save it for posterity E_residual_best = E_residual L_residual_best(:) = L_residual(:) dE_best = dE @@ -799,7 +838,8 @@ module subroutine fraggle_generate_vel_vec(collider, nbody_system, param, lfailu if (dE_conv < ENERGY_CONVERGENCE_TOL) exit inner end if - ! Remove a constant amount of velocity from the bodies so we don't shift the center of mass and screw up the momentum + ! Remove a constant amount of velocity from the bodies so we don't shift the center of mass and screw up the + ! momentum ke_avail = 0.0_DP do i = fragments%nbody, 1, -1 ke_avail = ke_avail + 0.5_DP * fragments%mass(i) * max(fragments%vmag(i) - vesc / try,0.0_DP)**2 @@ -830,9 +870,11 @@ module subroutine fraggle_generate_vel_vec(collider, nbody_system, param, lfailu write(message, *) nsteps if (lfailure) then - call swiftest_io_log_one_message(COLLISION_LOG_OUT, "Fraggle velocity calculation failed to converge after " // trim(adjustl(message)) // " steps. 
The best solution found had:") + call swiftest_io_log_one_message(COLLISION_LOG_OUT, "Fraggle velocity calculation failed to converge after " & + // trim(adjustl(message)) // " steps. The best solution found had:") else - call swiftest_io_log_one_message(COLLISION_LOG_OUT,"Fraggle velocity calculation converged after " // trim(adjustl(message)) // " steps.") + call swiftest_io_log_one_message(COLLISION_LOG_OUT,"Fraggle velocity calculation converged after " & + // trim(adjustl(message)) // " steps.") call collider%get_energy_and_momentum(nbody_system, param, phase="after") L_residual(:) = (collider%L_total(:,2) - collider%L_total(:,1)) diff --git a/src/operator/operator_cross.f90 b/src/operator/operator_cross.f90 index ef80e1fb8..72c24a176 100644 --- a/src/operator/operator_cross.f90 +++ b/src/operator/operator_cross.f90 @@ -104,11 +104,7 @@ pure module function operator_cross_el_sp(A, B) result(C) n = size(A, 2) if (allocated(C)) deallocate(C) allocate(C, mold = A) -#ifdef DOCONLOC - do concurrent (i = 1:n) shared(A,B,C) -#else - do concurrent (i = 1:n) -#endif + do i = 1,n C(:,i) = operator_cross_sp(A(:,i), B(:,i)) end do return @@ -122,11 +118,7 @@ pure module function operator_cross_el_dp(A, B) result(C) n = size(A, 2) if (allocated(C)) deallocate(C) allocate(C, mold = A) -#ifdef DOCONLOC - do concurrent (i = 1:n) shared(A,B,C) -#else - do concurrent (i = 1:n) -#endif + do i = 1,n C(:,i) = operator_cross_dp(A(:,i), B(:,i)) end do return @@ -140,11 +132,7 @@ pure module function operator_cross_el_qp(A, B) result(C) n = size(A, 2) if (allocated(C)) deallocate(C) allocate(C, mold = A) -#ifdef DOCONLOC - do concurrent (i = 1:n) shared(A,B,C) -#else - do concurrent (i = 1:n) -#endif + do i = 1,n C(:,i) = operator_cross_qp(A(:,i), B(:,i)) end do return @@ -158,11 +146,7 @@ pure module function operator_cross_el_i1b(A, B) result(C) n = size(A, 2) if (allocated(C)) deallocate(C) allocate(C, mold = A) -#ifdef DOCONLOC - do concurrent (i = 1:n) shared(A,B,C) -#else - do concurrent (i = 1:n) -#endif + do i = 1,n C(:,i) = operator_cross_i1b(A(:,i), B(:,i)) end do return @@ -176,11 +160,7 @@ pure module function operator_cross_el_i2b(A, B) result(C) n = size(A, 2) if (allocated(C)) deallocate(C) allocate(C, mold = A) -#ifdef DOCONLOC - do concurrent (i = 1:n) shared(A,B,C) -#else - do concurrent (i = 1:n) -#endif + do i = 1,n C(:,i) = operator_cross_i2b(A(:,i), B(:,i)) end do return @@ -194,11 +174,7 @@ pure module function operator_cross_el_i4b(A, B) result(C) n = size(A, 2) if (allocated(C)) deallocate(C) allocate(C, mold = A) -#ifdef DOCONLOC - do concurrent (i = 1:n) shared(A,B,C) -#else - do concurrent (i = 1:n) -#endif + do i = 1,n C(:,i) = operator_cross_i4b(A(:,i), B(:,i)) end do return @@ -212,11 +188,7 @@ pure module function operator_cross_el_i8b(A, B) result(C) n = size(A, 2) if (allocated(C)) deallocate(C) allocate(C, mold = A) -#ifdef DOCONLOC - do concurrent (i = 1:n) shared(A,B,C) -#else - do concurrent (i = 1:n) -#endif + do i = 1,n C(:,i) = operator_cross_i8b(A(:,i), B(:,i)) end do return diff --git a/src/operator/operator_mag.f90 b/src/operator/operator_mag.f90 index 55f653fb9..721e4a930 100644 --- a/src/operator/operator_mag.f90 +++ b/src/operator/operator_mag.f90 @@ -44,11 +44,7 @@ pure module function operator_mag_el_sp(A) result(B) if (allocated(B)) deallocate(B) allocate(B(n)) call ieee_set_halting_mode(ieee_underflow, .false.) 
-#ifdef DOCONLOC - do concurrent (i = 1:n) shared(A,B) -#else - do concurrent (i = 1:n) -#endif + do i = 1,n B(i) = norm2(A(:, i)) end do return @@ -63,11 +59,7 @@ pure module function operator_mag_el_dp(A) result(B) if (allocated(B)) deallocate(B) allocate(B(n)) call ieee_set_halting_mode(ieee_underflow, .false.) -#ifdef DOCONLOC - do concurrent (i = 1:n) shared(A,B) -#else - do concurrent (i = 1:n) -#endif + do i = 1,n B(i) = norm2(A(:, i)) end do return @@ -82,11 +74,7 @@ pure module function operator_mag_el_qp(A) result(B) if (allocated(B)) deallocate(B) allocate(B(n)) call ieee_set_halting_mode(ieee_underflow, .false.) -#ifdef DOCONLOC - do concurrent (i = 1:n) shared(A,B) -#else - do concurrent (i = 1:n) -#endif + do i = 1,n B(i) = norm2(A(:, i)) end do return diff --git a/src/operator/operator_unit.f90 b/src/operator/operator_unit.f90 index a25ee1bb1..2a14f6645 100644 --- a/src/operator/operator_unit.f90 +++ b/src/operator/operator_unit.f90 @@ -89,11 +89,7 @@ pure module function operator_unit_el_sp(A) result(B) if (allocated(B)) deallocate(B) allocate(B(NDIM,n)) -#ifdef DOCONLOC - do concurrent (i=1:n) shared(A,B) -#else - do concurrent (i=1:n) -#endif + do i=1,n B(:,i) = operator_unit_sp(A(:,i)) end do @@ -113,11 +109,7 @@ pure module function operator_unit_el_dp(A) result(B) if (allocated(B)) deallocate(B) allocate(B(NDIM,n)) -#ifdef DOCONLOC - do concurrent (i=1:n) shared(A,B) -#else - do concurrent (i=1:n) -#endif + do i=1,n B(:,i) = operator_unit_dp(A(:,i)) end do @@ -136,11 +128,7 @@ pure module function operator_unit_el_qp(A) result(B) if (allocated(B)) deallocate(B) allocate(B(NDIM,n)) -#ifdef DOCONLOC - do concurrent (i=1:n) shared(A,B) -#else - do concurrent (i=1:n) -#endif + do i=1,n B(:,i) = operator_unit_qp(A(:,i)) end do From b8be3684a07da6424ad674a140bab8d411594890 Mon Sep 17 00:00:00 2001 From: David Minton Date: Tue, 6 Jun 2023 16:01:35 -0400 Subject: [PATCH 08/25] Removed -standard-semantics from the default flag list, because it is not compatible with the pre-built NetCDF Fortran library. Kept it in the containerized version. --- Dockerfile | 1 + cmake/Modules/SetFortranFlags.cmake | 8 -------- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/Dockerfile b/Dockerfile index 0ac00b25d..d0f2dcb88 100644 --- a/Dockerfile +++ b/Dockerfile @@ -132,6 +132,7 @@ ENV NETCDF_LIBRARY=${NETCDF_HOME} ENV FOR_COARRAY_NUM_IMAGES=1 ENV OMP_NUM_THREADS=1 ENV FC="${INTEL_DIR}/mpi/latest/bin/mpiifort" +ENV FFLAGS="-fPIC -standard-semantics" ENV LDFLAGS="-L/usr/local/lib -L/usr/lib/x86_64-linux-gnu -lnetcdff -lnetcdf -lhdf5_hl -lhdf5 -lsz -lz -lzstd -lbz2 -lcurl -lxml2" COPY ./cmake/ /swiftest/cmake/ COPY ./src/ /swiftest/src/ diff --git a/cmake/Modules/SetFortranFlags.cmake b/cmake/Modules/SetFortranFlags.cmake index 550738f13..d1edd9ae9 100644 --- a/cmake/Modules/SetFortranFlags.cmake +++ b/cmake/Modules/SetFortranFlags.cmake @@ -81,14 +81,6 @@ SET_COMPILE_FLAG(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS}" Fortran "-fsignaling-nans " # GNU ) - -# Determines whether the current Fortran Standard behavior of the compiler is fully implemented. 
-SET_COMPILE_FLAG(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS}" - Fortran "-standard-semantics" # Intel - "/standard-semantics" # Intel Windows - ) - - # Allows for lines longer than 80 characters without truncation SET_COMPILE_FLAG(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS}" Fortran "-ffree-line-length-none" # GNU (gfortran) From f7b8f46205f4c431627f4452506c54d195b3fbf9 Mon Sep 17 00:00:00 2001 From: David Minton Date: Tue, 6 Jun 2023 18:21:47 -0400 Subject: [PATCH 09/25] Updated coarray code so it can compile in the container. --- Dockerfile | 14 +++++------ cmake/Modules/SetFortranFlags.cmake | 4 ++++ docker/bin/swiftest | 2 +- src/coarray/coarray_clone.f90 | 8 +++---- src/rmvs/rmvs_coarray.f90 | 10 +------- src/swiftest/swiftest_coarray.f90 | 36 ++++++++++++++++++----------- src/swiftest/swiftest_module.f90 | 8 +++++++ 7 files changed, 47 insertions(+), 35 deletions(-) diff --git a/Dockerfile b/Dockerfile index d0f2dcb88..35e450d7e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -94,7 +94,7 @@ RUN wget -qO- https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.14/hdf5-1.14 wget -qO- https://www.zlib.net/zlib-1.2.13.tar.gz | tar xvz && \ apt-get update && apt-get upgrade -y && \ DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ - libxml2-dev libcurl4-gnutls-dev libzstd-dev libbz2-dev libaec-dev m4 && \ + libxml2-dev libcurl4-gnutls-dev libzstd-dev libbz2-dev libaec-dev m4 && \ rm -rf /var/lib/apt/lists/* && \ cd hdf && \ ./HDF5-1.14.1-Linux.sh --skip-license && \ @@ -153,14 +153,13 @@ RUN echo 'find_path(NETCDF_INCLUDE_DIR NAMES netcdf.mod HINTS ENV NETCDF_FORTRAN 'set(NETCDF_LIBRARIES ${NETCDF_FORTRAN_LIBRARY} ${NETCDF_LIBRARY} ${HDF5_HL_LIBRARY} ${HDF5_LIBRARY} ${SZ_LIBRARY} ${Z_LIBRARY} ${ZSTD_LIBRARY} ${BZ2_LIBRARY} ${CURL_LIBRARY} ${XML2_LIBRARY} )\n' \ 'mark_as_advanced(NETCDF_LIBRARY NETCDF_FORTRAN_LIBRARY NETCDF_INCLUDE_DIR)\n' > /swiftest/cmake/Modules/FindNETCDF.cmake && \ cd swiftest && \ - cmake -S . -B build -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" -DCONTAINERIZE=ON -DUSE_COARRAY=OFF -DCMAKE_BUILD_TYPE=RELEASE -DBUILD_SHARED_LIBS=OFF &&\ + cmake -S . -B build -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" -DCONTAINERIZE=ON -DUSE_COARRAY=ON -DCMAKE_BUILD_TYPE=RELEASE -DBUILD_SHARED_LIBS=OFF &&\ cmake --build build --verbose && \ cmake --install build # Driver container FROM ubuntu:20.04 as Driver -COPY --from=build /opt/intel/oneapi/mpi/latest/lib/libmpifort.so.12 /usr/local/lib/ -COPY --from=build /opt/intel/oneapi/mpi/latest/lib/release/libmpi.so.12 /usr/local/lib/ +COPY --from=build /opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/libicaf.so /usr/local/lib/ COPY --from=build /usr/local/bin/swiftest_driver /usr/local/bin RUN apt-get update && apt-get upgrade -y && \ DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ @@ -172,9 +171,8 @@ FROM continuumio/miniconda3 ENV LD_LIBRARY_PATH="/usr/local/lib" ENV SHELL="/bin/bash" -COPY --from=build /opt/intel/oneapi/mpi/latest/lib/libmpifort.so.12 /usr/local/lib/ -COPY --from=build /opt/intel/oneapi/mpi/latest/lib/release/libmpi.so.12 /usr/local/lib/ COPY ./python/ . +COPY --from=build /opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/libicaf.so /usr/local/lib/ COPY --from=build /usr/local/bin/swiftest_driver /bin/ RUN apt-get update && apt-get upgrade -y && \ @@ -188,6 +186,8 @@ RUN apt-get update && apt-get upgrade -y && \ conda update --all -y && \ cd swiftest && conda develop . 
&& \ mkdir -p /.astropy && \ - chmod -R 777 /.astropy + chmod -R 777 /.astropy && \ + mkdir -p /.config/matplotlib && \ + chmod -R 777 /.config/matplotlib ENTRYPOINT ["/opt/conda/bin/python"] \ No newline at end of file diff --git a/cmake/Modules/SetFortranFlags.cmake b/cmake/Modules/SetFortranFlags.cmake index d1edd9ae9..47361de08 100644 --- a/cmake/Modules/SetFortranFlags.cmake +++ b/cmake/Modules/SetFortranFlags.cmake @@ -117,6 +117,10 @@ IF (CONTAINERIZE) SET_COMPILE_FLAG(CMAKE_Fortran_LINK_FLAGS "${CMAKE_Fortran_LINK_FLAGS}" Fortran "-static-intel" # Intel ) + # Use static Intel MPI libraries + SET_COMPILE_FLAG(CMAKE_Fortran_LINK_FLAGS "${CMAKE_Fortran_LINK_FLAGS}" + Fortran "-static_mpi" # Intel + ) IF (USE_OPENMP) SET_COMPILE_FLAG(CMAKE_Fortran_LINK_FLAGS "${CMAKE_Fortran_LINK_FLAGS}" diff --git a/docker/bin/swiftest b/docker/bin/swiftest index bc94b2cb0..fead7f338 100755 --- a/docker/bin/swiftest +++ b/docker/bin/swiftest @@ -1,2 +1,2 @@ #!/bin/sh -- -docker run -v $(pwd):$(pwd) -w $(pwd) --user "$(id -u):$(id -g)" -t -e OMP_NUM_THREADS -e FOR_COARRAY_NUM_IMAGES swiftest:1.0.0 "$@" \ No newline at end of file +docker run -v $(pwd):$(pwd) -w $(pwd) --user "$(id -u):$(id -g)" -ti -e OMP_NUM_THREADS -e FOR_COARRAY_NUM_IMAGES swiftest:1.0.0 "$@" \ No newline at end of file diff --git a/src/coarray/coarray_clone.f90 b/src/coarray/coarray_clone.f90 index 9f7e1ea1a..893cff147 100644 --- a/src/coarray/coarray_clone.f90 +++ b/src/coarray/coarray_clone.f90 @@ -71,7 +71,7 @@ module subroutine coarray_component_clone_DP(var,src_img) sync all if (this_image() == si) then do img = 1, num_images() - tmp[img] = var + tmp[img] = var end do sync images(*) else @@ -117,7 +117,7 @@ module subroutine coarray_component_clone_DP_arr1D(var,src_img) allocate(tmp(n[si])[*]) if (this_image() == si) then do img = 1, num_images() - tmp(:)[img] = var + tmp(:)[img] = var end do sync images(*) else @@ -167,7 +167,7 @@ module subroutine coarray_component_clone_DP_arr2D(var,src_img) allocate(tmp(n1[si],n2[si])[*]) if (this_image() == si) then do img = 1, num_images() - tmp(:,:)[img] = var(:,:) + tmp(:,:)[img] = var(:,:) end do sync images(*) else @@ -252,7 +252,7 @@ module subroutine coarray_component_clone_DP_vec2D(var,src_img) allocate(tmp(NDIM,n[si])[*]) if (this_image() == si) then do img = 1, num_images() - tmp(:,:)[img] = var(:,:) + tmp(:,:)[img] = var(:,:) end do sync images(*) else diff --git a/src/rmvs/rmvs_coarray.f90 b/src/rmvs/rmvs_coarray.f90 index 6f7467df8..3a7508159 100644 --- a/src/rmvs/rmvs_coarray.f90 +++ b/src/rmvs/rmvs_coarray.f90 @@ -68,7 +68,6 @@ module subroutine rmvs_coarray_coclone_pl(self) end subroutine rmvs_coarray_coclone_pl - module subroutine rmvs_coarray_coclone_system(self) !! author: David A. Minton !! @@ -141,14 +140,7 @@ module subroutine rmvs_coarray_component_clone_interp_arr1D(var,src_img) do i = 1, n[si] call tmp(i)%coclone() end do - if (this_image() == si) then - do img = 1, num_images() - tmp(:)[img] = var(:) - end do - - sync images(*) - else - sync images(si) + if (this_image() /= si) then if (allocated(var)) deallocate(var) allocate(var, source=tmp) end if diff --git a/src/swiftest/swiftest_coarray.f90 b/src/swiftest/swiftest_coarray.f90 index 7dbd1a816..8329856b9 100644 --- a/src/swiftest/swiftest_coarray.f90 +++ b/src/swiftest/swiftest_coarray.f90 @@ -54,6 +54,7 @@ module subroutine swiftest_coarray_balance_system(nbody_system, param) return end subroutine swiftest_coarray_balance_system + module subroutine swiftest_coarray_coclone_body(self) !! author: David A. 
Minton !! @@ -94,6 +95,21 @@ module subroutine swiftest_coarray_coclone_body(self) return end subroutine swiftest_coarray_coclone_body + module subroutine swiftest_coarray_coclone_kin(self) + !! author: David A. Minton + !! + !! Broadcasts the image 1 object to all other images in a coarray + implicit none + ! Arguments + class(swiftest_kinship),intent(inout),codimension[*] :: self !! Swiftest kinship object + + call coclone(self%parent) + call coclone(self%nchild) + call coclone(self%child) + + return + end subroutine swiftest_coarray_coclone_kin + module subroutine swiftest_coarray_coclone_nc(self) !! author: David A. Minton !! @@ -327,10 +343,6 @@ module subroutine swiftest_coarray_coclone_system(self) ! Internals integer(I4B) :: i - call self%cb%coclone() - call self%pl%coclone() - call self%tp%coclone() - call coclone(self%maxid) call coclone(self%t) call coclone(self%GMtot) @@ -474,7 +486,7 @@ module subroutine swiftest_coarray_component_clone_kin_arr1D(var,src_img) integer(I4B), intent(in),optional :: src_img ! Internals type(swiftest_kinship), dimension(:), codimension[:], allocatable :: tmp - integer(I4B) :: img, si + integer(I4B) :: i, img, si integer(I4B), allocatable :: n[:] logical, allocatable :: isalloc[:] @@ -484,21 +496,17 @@ module subroutine swiftest_coarray_component_clone_kin_arr1D(var,src_img) si = 1 end if - allocate(isalloc[*]) - allocate(n[*]) + sync all isalloc = allocated(var) if (isalloc) n = size(var) sync all if (.not. isalloc[si]) return allocate(tmp(n[si])[*]) - if (this_image() == si) then - do img = 1, num_images() - tmp(:)[img] = var - end do - sync images(*) - else - sync images(si) + do i = 1, n[si] + call tmp(i)%coclone() + end do + if (this_image() /= si) then if (allocated(var)) deallocate(var) allocate(var, source=tmp) end if diff --git a/src/swiftest/swiftest_module.f90 b/src/swiftest/swiftest_module.f90 index a54e2351b..b7bbd109c 100644 --- a/src/swiftest/swiftest_module.f90 +++ b/src/swiftest/swiftest_module.f90 @@ -93,6 +93,9 @@ module swiftest integer(I4B), dimension(:), allocatable :: child !! Index of children particles contains procedure :: dealloc => swiftest_util_dealloc_kin !! Deallocates all allocatable arrays +#ifdef COARRAY + procedure :: coclone => swiftest_coarray_coclone_kin !! Clones the image 1 body object to all other images in the coarray structure. +#endif final :: swiftest_final_kin !! Finalizes the Swiftest kinship object - deallocates all allocatables end type swiftest_kinship @@ -1776,6 +1779,11 @@ module subroutine swiftest_coarray_coclone_cb(self) class(swiftest_cb),intent(inout),codimension[*] :: self !! Swiftest cb object end subroutine swiftest_coarray_coclone_cb + module subroutine swiftest_coarray_coclone_kin(self) + implicit none + class(swiftest_kinship),intent(inout),codimension[*] :: self !! Swiftest kinship object + end subroutine swiftest_coarray_coclone_kin + module subroutine swiftest_coarray_coclone_nc(self) implicit none class(swiftest_netcdf_parameters),intent(inout),codimension[*] :: self !! 
Swiftest body object From c0546f649ee9ec7dd4e9b0cb87b23170f54bed4a Mon Sep 17 00:00:00 2001 From: David Minton Date: Tue, 6 Jun 2023 22:25:20 -0400 Subject: [PATCH 10/25] Improved Docker container --- Dockerfile | 51 ++++++++++++++++-------- docker/.gitignore | 3 +- docker/bin/swiftest_driver | 2 + docker/bin/{swiftest => swiftest_python} | 0 4 files changed, 38 insertions(+), 18 deletions(-) create mode 100755 docker/bin/swiftest_driver rename docker/bin/{swiftest => swiftest_python} (100%) diff --git a/Dockerfile b/Dockerfile index 35e450d7e..9fe400197 100644 --- a/Dockerfile +++ b/Dockerfile @@ -154,26 +154,19 @@ RUN echo 'find_path(NETCDF_INCLUDE_DIR NAMES netcdf.mod HINTS ENV NETCDF_FORTRAN 'mark_as_advanced(NETCDF_LIBRARY NETCDF_FORTRAN_LIBRARY NETCDF_INCLUDE_DIR)\n' > /swiftest/cmake/Modules/FindNETCDF.cmake && \ cd swiftest && \ cmake -S . -B build -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" -DCONTAINERIZE=ON -DUSE_COARRAY=ON -DCMAKE_BUILD_TYPE=RELEASE -DBUILD_SHARED_LIBS=OFF &&\ - cmake --build build --verbose && \ + cmake --build build && \ + cp bin/swiftest_driver /usr/local/bin/swiftest_driver_caf && \ + rm -rf build && \ + cmake -S . -B build -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" -DCONTAINERIZE=ON -DUSE_COARRAY=OFF -DCMAKE_BUILD_TYPE=RELEASE -DBUILD_SHARED_LIBS=OFF &&\ + cmake --build build && \ cmake --install build # Driver container -FROM ubuntu:20.04 as Driver +FROM continuumio/miniconda3 as setup_conda COPY --from=build /opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/libicaf.so /usr/local/lib/ COPY --from=build /usr/local/bin/swiftest_driver /usr/local/bin -RUN apt-get update && apt-get upgrade -y && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ - libsz2 libcurl3-gnutls libxml2 && \ - rm -rf /var/lib/apt/lists/* - -# Production container -FROM continuumio/miniconda3 - -ENV LD_LIBRARY_PATH="/usr/local/lib" -ENV SHELL="/bin/bash" -COPY ./python/ . -COPY --from=build /opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/libicaf.so /usr/local/lib/ COPY --from=build /usr/local/bin/swiftest_driver /bin/ +COPY ./python/. /opt/conda/pkgs/swiftest/python/ RUN apt-get update && apt-get upgrade -y && \ DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ @@ -184,10 +177,34 @@ RUN apt-get update && apt-get upgrade -y && \ conda config --set solver libmamba && \ conda install -c conda-forge conda-build numpy scipy matplotlib pandas xarray astropy astroquery tqdm x264 bottleneck ffmpeg h5netcdf netcdf4 dask -y && \ conda update --all -y && \ - cd swiftest && conda develop . && \ - mkdir -p /.astropy && \ + cd /opt/conda/pkgs/swiftest/python/swiftest && conda develop . && \ + conda clean --all -y + +# Production container +FROM ubuntu:20.04 + +RUN apt-get update && apt-get upgrade -y && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ + libsz2 libcurl3-gnutls libxml2 && \ + rm -rf /var/lib/apt/lists/* + +ENV LD_LIBRARY_PATH="/usr/local/lib" +ENV SHELL="/bin/bash" +COPY --from=build /opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/libicaf.so /usr/local/lib/ +COPY --from=build /opt/intel/oneapi/mpi/2021.9.0//lib/release/libmpi.so.12 /usr/local/lib/ +COPY --from=build /opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/libintlc.so.5 /usr/local/lib/ +COPY --from=setup_conda /opt/conda/. 
/opt/conda/ +COPY --from=build /opt/intel/oneapi/mpi/latest/bin/mpiexec.hydra /usr/local/bin/ +COPY --from=build /usr/local/bin/swiftest_driver /usr/local/bin +COPY --from=build /usr/local/bin/swiftest_driver_caf /usr/local/bin/ + +RUN mkdir -p /.astropy && \ chmod -R 777 /.astropy && \ + mkdir -p /.cache/matplotlib && \ mkdir -p /.config/matplotlib && \ - chmod -R 777 /.config/matplotlib + chmod -R 777 /.cache/matplotlib && \ + chmod -R 777 /.config/matplotlib && \ + mkdir -p /opt/conda/pkgs/swiftest/bin && \ + ln -s /usr/local/bin/swiftest_driver /opt/conda/pkgs/swiftest/bin/swiftest_driver ENTRYPOINT ["/opt/conda/bin/python"] \ No newline at end of file diff --git a/docker/.gitignore b/docker/.gitignore index 09c5585d1..772f5a9d9 100644 --- a/docker/.gitignore +++ b/docker/.gitignore @@ -2,4 +2,5 @@ !.gitignore !install.sh !bin -!bin/swiftest +!bin/swiftest_python +!bin/swiftest_driver diff --git a/docker/bin/swiftest_driver b/docker/bin/swiftest_driver new file mode 100755 index 000000000..0f7d8b5dd --- /dev/null +++ b/docker/bin/swiftest_driver @@ -0,0 +1,2 @@ +#!/bin/sh -- +docker run -v $(pwd):$(pwd) -w $(pwd) --user "$(id -u):$(id -g)" -ti --entrypoint /usr/local/bin/swiftest_driver -e OMP_NUM_THREADS -e FOR_COARRAY_NUM_IMAGES swiftest:1.0.0 "$@" \ No newline at end of file diff --git a/docker/bin/swiftest b/docker/bin/swiftest_python similarity index 100% rename from docker/bin/swiftest rename to docker/bin/swiftest_python From 7152017d63c6e13f2c1be48d014329152499a9e7 Mon Sep 17 00:00:00 2001 From: David Minton Date: Tue, 6 Jun 2023 22:33:28 -0400 Subject: [PATCH 11/25] More improvements to containers --- Dockerfile | 16 ++-------------- singularity/bin/swiftest_driver | 2 +- singularity/install.sh | 2 +- singularity/setenv.sh | 2 +- 4 files changed, 5 insertions(+), 17 deletions(-) diff --git a/Dockerfile b/Dockerfile index 9fe400197..2d7821992 100644 --- a/Dockerfile +++ b/Dockerfile @@ -161,11 +161,8 @@ RUN echo 'find_path(NETCDF_INCLUDE_DIR NAMES netcdf.mod HINTS ENV NETCDF_FORTRAN cmake --build build && \ cmake --install build -# Driver container -FROM continuumio/miniconda3 as setup_conda -COPY --from=build /opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/libicaf.so /usr/local/lib/ -COPY --from=build /usr/local/bin/swiftest_driver /usr/local/bin -COPY --from=build /usr/local/bin/swiftest_driver /bin/ +# Production container +FROM continuumio/miniconda3 COPY ./python/. /opt/conda/pkgs/swiftest/python/ RUN apt-get update && apt-get upgrade -y && \ @@ -180,20 +177,11 @@ RUN apt-get update && apt-get upgrade -y && \ cd /opt/conda/pkgs/swiftest/python/swiftest && conda develop . && \ conda clean --all -y -# Production container -FROM ubuntu:20.04 - -RUN apt-get update && apt-get upgrade -y && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ - libsz2 libcurl3-gnutls libxml2 && \ - rm -rf /var/lib/apt/lists/* - ENV LD_LIBRARY_PATH="/usr/local/lib" ENV SHELL="/bin/bash" COPY --from=build /opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/libicaf.so /usr/local/lib/ COPY --from=build /opt/intel/oneapi/mpi/2021.9.0//lib/release/libmpi.so.12 /usr/local/lib/ COPY --from=build /opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/libintlc.so.5 /usr/local/lib/ -COPY --from=setup_conda /opt/conda/. 
/opt/conda/ COPY --from=build /opt/intel/oneapi/mpi/latest/bin/mpiexec.hydra /usr/local/bin/ COPY --from=build /usr/local/bin/swiftest_driver /usr/local/bin COPY --from=build /usr/local/bin/swiftest_driver_caf /usr/local/bin/ diff --git a/singularity/bin/swiftest_driver b/singularity/bin/swiftest_driver index dba0863a9..30fe6352b 100755 --- a/singularity/bin/swiftest_driver +++ b/singularity/bin/swiftest_driver @@ -1,2 +1,2 @@ #!/bin/sh -- -singularity run --bind $(pwd):$(pwd) --env OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} "$@" \ No newline at end of file +singularity run --bind $(pwd):$(pwd) exec --env OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} swiftest_driver "$@" \ No newline at end of file diff --git a/singularity/install.sh b/singularity/install.sh index ff2be8596..99cae29c1 100755 --- a/singularity/install.sh +++ b/singularity/install.sh @@ -5,6 +5,6 @@ # tag=${1:-latest} echo "Installing swiftest_driver.sif Singularity container and executable script from swiftest_driver:${tag} Docker container" -singularity pull --force swiftest_driver.sif docker://daminton/swiftest_driver:${tag} +singularity pull --force swiftest.sif docker://daminton/swiftest:${tag} cp -rf bin/swiftest_driver ../bin/ source ./setenv.sh \ No newline at end of file diff --git a/singularity/setenv.sh b/singularity/setenv.sh index cc8905033..82ca7d408 100755 --- a/singularity/setenv.sh +++ b/singularity/setenv.sh @@ -1,4 +1,4 @@ #!/bin/sh -- # This will set the SWIFTEST_SIF environment variable as long as it is executed by source. # $ . ./setenv.sh -export SWIFTEST_SIF="${PWD}/swiftest_driver.sif" \ No newline at end of file +export SWIFTEST="${PWD}/swiftest.sif" \ No newline at end of file From 35e0099e9934c18b07c5add1ca4cbf93207823f0 Mon Sep 17 00:00:00 2001 From: David Minton Date: Wed, 7 Jun 2023 08:52:57 -0400 Subject: [PATCH 12/25] Updates to singularity scripts and also added apptainer scripts --- apptainer/.gitignore | 5 +++++ apptainer/bin/swiftest_driver | 2 ++ apptainer/bin/swiftest_python | 2 ++ apptainer/install.sh | 10 ++++++++++ apptainer/setenv.sh | 4 ++++ singularity/.gitignore | 1 + singularity/bin/swiftest_driver | 2 +- singularity/bin/swiftest_python | 2 ++ 8 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 apptainer/.gitignore create mode 100755 apptainer/bin/swiftest_driver create mode 100755 apptainer/bin/swiftest_python create mode 100755 apptainer/install.sh create mode 100755 apptainer/setenv.sh create mode 100755 singularity/bin/swiftest_python diff --git a/apptainer/.gitignore b/apptainer/.gitignore new file mode 100644 index 000000000..6ddbdba9f --- /dev/null +++ b/apptainer/.gitignore @@ -0,0 +1,5 @@ +!bin/ +!bin/swiftest_driver +!bin/swiftest_python +!install.sh +!setenv.sh diff --git a/apptainer/bin/swiftest_driver b/apptainer/bin/swiftest_driver new file mode 100755 index 000000000..fa008737a --- /dev/null +++ b/apptainer/bin/swiftest_driver @@ -0,0 +1,2 @@ +#!/bin/sh -- +apptainer exec --bind $(pwd):$(pwd) --env OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} swiftest_driver "$@" \ No newline at end of file diff --git a/apptainer/bin/swiftest_python b/apptainer/bin/swiftest_python new file mode 100755 index 000000000..b33012e4a --- /dev/null +++ b/apptainer/bin/swiftest_python @@ -0,0 +1,2 @@ +#!/bin/sh -- +apptainer run --bind $(pwd):$(pwd) --env 
OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} swiftest_driver "$@" \ No newline at end of file diff --git a/apptainer/install.sh b/apptainer/install.sh new file mode 100755 index 000000000..abf9f4f91 --- /dev/null +++ b/apptainer/install.sh @@ -0,0 +1,10 @@ +#!/bin/sh -- +# This will install the Singularity container version of the swiftest_driver in place of the native compiled version into ../bin. +# In order to use the executable script, the SWIFTEST_SIF environment variable must be set to point to the location of swiftest_driver.sif, which requires this script to be called via source: +# $ . ./install.sh +# +tag=${1:-latest} +echo "Installing swiftest_driver.sif Singularity container and executable script from swiftest_driver:${tag} Docker container" +apptainer pull --force swiftest.sif docker://daminton/swiftest:${tag} +cp -rf bin/swiftest_driver ../bin/ +source ./setenv.sh \ No newline at end of file diff --git a/apptainer/setenv.sh b/apptainer/setenv.sh new file mode 100755 index 000000000..82ca7d408 --- /dev/null +++ b/apptainer/setenv.sh @@ -0,0 +1,4 @@ +#!/bin/sh -- +# This will set the SWIFTEST_SIF environment variable as long as it is executed by source. +# $ . ./setenv.sh +export SWIFTEST="${PWD}/swiftest.sif" \ No newline at end of file diff --git a/singularity/.gitignore b/singularity/.gitignore index 2a259d270..6ddbdba9f 100644 --- a/singularity/.gitignore +++ b/singularity/.gitignore @@ -1,4 +1,5 @@ !bin/ !bin/swiftest_driver +!bin/swiftest_python !install.sh !setenv.sh diff --git a/singularity/bin/swiftest_driver b/singularity/bin/swiftest_driver index 30fe6352b..03876195b 100755 --- a/singularity/bin/swiftest_driver +++ b/singularity/bin/swiftest_driver @@ -1,2 +1,2 @@ #!/bin/sh -- -singularity run --bind $(pwd):$(pwd) exec --env OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} swiftest_driver "$@" \ No newline at end of file +singularity exec --bind $(pwd):$(pwd) --env OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} swiftest_driver "$@" \ No newline at end of file diff --git a/singularity/bin/swiftest_python b/singularity/bin/swiftest_python new file mode 100755 index 000000000..4061ff119 --- /dev/null +++ b/singularity/bin/swiftest_python @@ -0,0 +1,2 @@ +#!/bin/sh -- +singularity run --bind $(pwd):$(pwd) --env OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} swiftest_driver "$@" \ No newline at end of file From aa0acbfacc44834a80d7d1c79248c8f10c1669fc Mon Sep 17 00:00:00 2001 From: David Minton Date: Wed, 7 Jun 2023 08:56:09 -0400 Subject: [PATCH 13/25] Fixed typos in Singularity/Apptainer scripts --- apptainer/bin/swiftest_python | 2 +- singularity/bin/swiftest_python | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/apptainer/bin/swiftest_python b/apptainer/bin/swiftest_python index b33012e4a..6d562c962 100755 --- a/apptainer/bin/swiftest_python +++ b/apptainer/bin/swiftest_python @@ -1,2 +1,2 @@ #!/bin/sh -- -apptainer run --bind $(pwd):$(pwd) --env OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} swiftest_driver "$@" \ No newline at end of file +apptainer run --bind $(pwd):$(pwd) --env OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} "$@" \ No newline at end of file diff --git a/singularity/bin/swiftest_python 
b/singularity/bin/swiftest_python index 4061ff119..2a7b13735 100755 --- a/singularity/bin/swiftest_python +++ b/singularity/bin/swiftest_python @@ -1,2 +1,2 @@ #!/bin/sh -- -singularity run --bind $(pwd):$(pwd) --env OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} swiftest_driver "$@" \ No newline at end of file +singularity run --bind $(pwd):$(pwd) --env OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} "$@" \ No newline at end of file From 5f2e9bfc6ce1df4f3bbc90dffb1fa1c7b0c762ef Mon Sep 17 00:00:00 2001 From: David Minton Date: Wed, 7 Jun 2023 08:58:03 -0400 Subject: [PATCH 14/25] Fixed typo in Singularity/Apptainer install scripts --- apptainer/setenv.sh | 2 +- singularity/setenv.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/apptainer/setenv.sh b/apptainer/setenv.sh index 82ca7d408..d9f215d7c 100755 --- a/apptainer/setenv.sh +++ b/apptainer/setenv.sh @@ -1,4 +1,4 @@ #!/bin/sh -- # This will set the SWIFTEST_SIF environment variable as long as it is executed by source. # $ . ./setenv.sh -export SWIFTEST="${PWD}/swiftest.sif" \ No newline at end of file +export SWIFTEST_SIF="${PWD}/swiftest.sif" \ No newline at end of file diff --git a/singularity/setenv.sh b/singularity/setenv.sh index 82ca7d408..d9f215d7c 100755 --- a/singularity/setenv.sh +++ b/singularity/setenv.sh @@ -1,4 +1,4 @@ #!/bin/sh -- # This will set the SWIFTEST_SIF environment variable as long as it is executed by source. # $ . ./setenv.sh -export SWIFTEST="${PWD}/swiftest.sif" \ No newline at end of file +export SWIFTEST_SIF="${PWD}/swiftest.sif" \ No newline at end of file From da9b30019a921bed44197cee321a0350f2e46ce5 Mon Sep 17 00:00:00 2001 From: David Minton Date: Wed, 7 Jun 2023 09:00:36 -0400 Subject: [PATCH 15/25] More typo fixes and cleanup --- apptainer/install.sh | 5 +++-- singularity/install.sh | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/apptainer/install.sh b/apptainer/install.sh index abf9f4f91..a36eaf3a1 100755 --- a/apptainer/install.sh +++ b/apptainer/install.sh @@ -1,10 +1,11 @@ #!/bin/sh -- # This will install the Singularity container version of the swiftest_driver in place of the native compiled version into ../bin. -# In order to use the executable script, the SWIFTEST_SIF environment variable must be set to point to the location of swiftest_driver.sif, which requires this script to be called via source: +# In order to use the executable script, the SWIFTEST_SIF environment variable must be set to point to the location of swiftest.sif, which requires this script to be called via source: # $ . ./install.sh # tag=${1:-latest} -echo "Installing swiftest_driver.sif Singularity container and executable script from swiftest_driver:${tag} Docker container" +echo "Installing swiftest_driver.sif Singularity container and executable script from swiftest:${tag} Docker container" apptainer pull --force swiftest.sif docker://daminton/swiftest:${tag} cp -rf bin/swiftest_driver ../bin/ +cp -rf bin/swiftest_python ../bin/ source ./setenv.sh \ No newline at end of file diff --git a/singularity/install.sh b/singularity/install.sh index 99cae29c1..3c1f3445d 100755 --- a/singularity/install.sh +++ b/singularity/install.sh @@ -1,10 +1,11 @@ #!/bin/sh -- # This will install the Singularity container version of the swiftest_driver in place of the native compiled version into ../bin. 
-# In order to use the executable script, the SWIFTEST_SIF environment variable must be set to point to the location of swiftest_driver.sif, which requires this script to be called via source: +# In order to use the executable script, the SWIFTEST_SIF environment variable must be set to point to the location of swiftest.sif, which requires this script to be called via source: # $ . ./install.sh # tag=${1:-latest} -echo "Installing swiftest_driver.sif Singularity container and executable script from swiftest_driver:${tag} Docker container" +echo "Installing swiftest_driver.sif Singularity container and executable script from swiftest:${tag} Docker container" singularity pull --force swiftest.sif docker://daminton/swiftest:${tag} cp -rf bin/swiftest_driver ../bin/ +cp -rf bin/swiftest_python ../bin/ source ./setenv.sh \ No newline at end of file From c8ba0905b5dce9d56c4f518c1b3031defc8fe00f Mon Sep 17 00:00:00 2001 From: David Minton Date: Wed, 7 Jun 2023 09:03:42 -0400 Subject: [PATCH 16/25] Typos in install scripts --- apptainer/install.sh | 2 +- singularity/install.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/apptainer/install.sh b/apptainer/install.sh index a36eaf3a1..4250b5abb 100755 --- a/apptainer/install.sh +++ b/apptainer/install.sh @@ -4,7 +4,7 @@ # $ . ./install.sh # tag=${1:-latest} -echo "Installing swiftest_driver.sif Singularity container and executable script from swiftest:${tag} Docker container" +echo "Installing swiftest.sif Singularity container and executable script from swiftest:${tag} Docker container" apptainer pull --force swiftest.sif docker://daminton/swiftest:${tag} cp -rf bin/swiftest_driver ../bin/ cp -rf bin/swiftest_python ../bin/ diff --git a/singularity/install.sh b/singularity/install.sh index 3c1f3445d..e7c47e2b3 100755 --- a/singularity/install.sh +++ b/singularity/install.sh @@ -4,7 +4,7 @@ # $ . ./install.sh # tag=${1:-latest} -echo "Installing swiftest_driver.sif Singularity container and executable script from swiftest:${tag} Docker container" +echo "Installing swiftest.sif Singularity container and executable script from swiftest:${tag} Docker container" singularity pull --force swiftest.sif docker://daminton/swiftest:${tag} cp -rf bin/swiftest_driver ../bin/ cp -rf bin/swiftest_python ../bin/ From 10935fb97bc0dc1cc094de6d47ce8b3ba57174da Mon Sep 17 00:00:00 2001 From: David Minton Date: Wed, 7 Jun 2023 16:23:29 -0400 Subject: [PATCH 17/25] Made significant improvements to the Docker, Apptainer, and Singularity containers. 
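The wrapper scripts in docker/bin, singularity/bin, and apptainer/bin are now thin shims around the container runtimes: each one bind-mounts the current working directory, defaults OMP_NUM_THREADS to `nproc --all` and FOR_COARRAY_NUM_IMAGES to 1, and forwards its arguments into the container. A rough usage sketch follows (illustrative only: the integrator, parameter file, thread count, and script name are placeholders, and the Singularity/Apptainer wrappers additionally require SWIFTEST_SIF to be exported by sourcing their install.sh):

    # Run the compiled driver on a parameter file in the current directory
    OMP_NUM_THREADS=8 ./bin/swiftest_driver symba param.in progress

    # Run a Python analysis script inside the container's swiftest-env conda environment
    ./bin/swiftest python my_analysis.py
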
--- Dockerfile | 40 +++++++++++--------- apptainer/.gitignore | 3 +- apptainer/bin/{swiftest_python => swiftest} | 2 + apptainer/bin/swiftest_driver | 2 + apptainer/install.sh | 26 +++++++++---- apptainer/setenv.sh | 4 -- docker/.gitignore | 2 +- docker/bin/swiftest | 4 ++ docker/bin/swiftest_driver | 4 +- docker/bin/swiftest_python | 2 - docker/install.sh | 5 ++- python/swiftest/swiftest/simulation_class.py | 23 +++++------ singularity/.gitignore | 1 - singularity/bin/swiftest_driver | 2 + singularity/bin/swiftest_python | 2 - singularity/install.sh | 26 +++++++++---- singularity/setenv.sh | 4 -- 17 files changed, 86 insertions(+), 66 deletions(-) rename apptainer/bin/{swiftest_python => swiftest} (60%) delete mode 100755 apptainer/setenv.sh create mode 100755 docker/bin/swiftest delete mode 100755 docker/bin/swiftest_python delete mode 100755 singularity/bin/swiftest_python delete mode 100755 singularity/setenv.sh diff --git a/Dockerfile b/Dockerfile index 2d7821992..b53f8dc8c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -163,7 +163,12 @@ RUN echo 'find_path(NETCDF_INCLUDE_DIR NAMES netcdf.mod HINTS ENV NETCDF_FORTRAN # Production container FROM continuumio/miniconda3 -COPY ./python/. /opt/conda/pkgs/swiftest/python/ +SHELL ["/bin/bash", "--login", "-c"] +ENV SHELL="/bin/bash" +ENV PATH="/opt/conda/bin:${PATH}" +ENV LD_LIBRARY_PATH="/usr/local/lib" + +COPY environment.yml . RUN apt-get update && apt-get upgrade -y && \ DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ @@ -172,27 +177,28 @@ RUN apt-get update && apt-get upgrade -y && \ conda update --all -y && \ conda install conda-libmamba-solver -y && \ conda config --set solver libmamba && \ - conda install -c conda-forge conda-build numpy scipy matplotlib pandas xarray astropy astroquery tqdm x264 bottleneck ffmpeg h5netcdf netcdf4 dask -y && \ - conda update --all -y && \ - cd /opt/conda/pkgs/swiftest/python/swiftest && conda develop . && \ - conda clean --all -y + conda env create -f environment.yml && \ + conda init bash && \ + echo "conda activate swiftest-env" >> ~/.bashrc -ENV LD_LIBRARY_PATH="/usr/local/lib" -ENV SHELL="/bin/bash" -COPY --from=build /opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/libicaf.so /usr/local/lib/ -COPY --from=build /opt/intel/oneapi/mpi/2021.9.0//lib/release/libmpi.so.12 /usr/local/lib/ -COPY --from=build /opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/libintlc.so.5 /usr/local/lib/ -COPY --from=build /opt/intel/oneapi/mpi/latest/bin/mpiexec.hydra /usr/local/bin/ -COPY --from=build /usr/local/bin/swiftest_driver /usr/local/bin -COPY --from=build /usr/local/bin/swiftest_driver_caf /usr/local/bin/ +COPY ./python/. 
/opt/conda/pkgs/ +COPY --from=build /usr/local/bin/swiftest_driver /opt/conda/envs/swiftest-env/bin/ +COPY --from=build /usr/local/bin/swiftest_driver /opt/conda/bin/ +COPY --from=build /usr/local/bin/swiftest_driver_caf /opt/conda/envs/swiftest-env/bin/ +COPY --from=build /opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/libicaf.so /opt/conda/envs/swiftest-env/lib/ +COPY --from=build /opt/intel/oneapi/mpi/2021.9.0//lib/release/libmpi.so.12 /opt/conda/envs/swiftest-env/lib/ +COPY --from=build /opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/libintlc.so.5 /opt/conda/envs/swiftest-env/lib/ +COPY --from=build /opt/intel/oneapi/mpi/latest/bin/mpiexec.hydra /opt/conda/envs/swiftest-env/bin/ -RUN mkdir -p /.astropy && \ +# Start new shell to activate the environment and install Swiftest +RUN cd /opt/conda/pkgs/swiftest && conda develop . && \ + conda clean --all -y && \ + mkdir -p /.astropy && \ chmod -R 777 /.astropy && \ mkdir -p /.cache/matplotlib && \ mkdir -p /.config/matplotlib && \ chmod -R 777 /.cache/matplotlib && \ chmod -R 777 /.config/matplotlib && \ - mkdir -p /opt/conda/pkgs/swiftest/bin && \ - ln -s /usr/local/bin/swiftest_driver /opt/conda/pkgs/swiftest/bin/swiftest_driver + ln -s /opt/conda/bin/swiftest_driver /opt/conda/bin/driver -ENTRYPOINT ["/opt/conda/bin/python"] \ No newline at end of file +ENTRYPOINT ["conda", "run", "--no-capture-output", "-n", "swiftest-env"] \ No newline at end of file diff --git a/apptainer/.gitignore b/apptainer/.gitignore index 6ddbdba9f..baed94e88 100644 --- a/apptainer/.gitignore +++ b/apptainer/.gitignore @@ -1,5 +1,4 @@ !bin/ !bin/swiftest_driver -!bin/swiftest_python +!bin/swiftest !install.sh -!setenv.sh diff --git a/apptainer/bin/swiftest_python b/apptainer/bin/swiftest similarity index 60% rename from apptainer/bin/swiftest_python rename to apptainer/bin/swiftest index 6d562c962..ca0fcfadd 100755 --- a/apptainer/bin/swiftest_python +++ b/apptainer/bin/swiftest @@ -1,2 +1,4 @@ #!/bin/sh -- +OMP_NUM_THREADS=${OMP_NUM_THREADS:-`nproc --all`} +FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES:-1} apptainer run --bind $(pwd):$(pwd) --env OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} "$@" \ No newline at end of file diff --git a/apptainer/bin/swiftest_driver b/apptainer/bin/swiftest_driver index fa008737a..e88be2805 100755 --- a/apptainer/bin/swiftest_driver +++ b/apptainer/bin/swiftest_driver @@ -1,2 +1,4 @@ #!/bin/sh -- +OMP_NUM_THREADS=${OMP_NUM_THREADS:-`nproc --all`} +FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES:-1} apptainer exec --bind $(pwd):$(pwd) --env OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} swiftest_driver "$@" \ No newline at end of file diff --git a/apptainer/install.sh b/apptainer/install.sh index 4250b5abb..5d84ca34a 100755 --- a/apptainer/install.sh +++ b/apptainer/install.sh @@ -1,11 +1,21 @@ #!/bin/sh -- -# This will install the Singularity container version of the swiftest_driver in place of the native compiled version into ../bin. -# In order to use the executable script, the SWIFTEST_SIF environment variable must be set to point to the location of swiftest.sif, which requires this script to be called via source: -# $ . ./install.sh +# This will install the Apptainer version of the swiftest_driver in place of the native compiled version into ../bin as +# well as the swiftest_python script that is used to execute a Python input file. 
+# The swiftest.sif file will be copied to the SIF_DIR directory. The default location is ${HOME}/.apptainer. +# To change this, just set environment variable SIF_DIR prior to running this script. +# +# The script takes an optional argument "tag" if you want to pull a container other than "latest". # -tag=${1:-latest} -echo "Installing swiftest.sif Singularity container and executable script from swiftest:${tag} Docker container" -apptainer pull --force swiftest.sif docker://daminton/swiftest:${tag} +# In order to use one executable script, the SWIFTEST_SIF environment variable must be set to point to the location of swiftest.sif, +# which requires this script to be called via source: +# $ source ./install.sh +# or +# $ . ./install.sh +TAG=${1:-latest} + +SIF_DIR=${SIF_DIR:-${HOME}/.apptainer} +echo "Installing ${SIF_DIR}/swiftest.sif container from mintongroup/swiftest:${TAG} Docker container" +apptainer pull --force ${SIF_DIR}/swiftest.sif docker://mintongroup/swiftest:${TAG} +cp -rf bin/swiftest ../bin/ cp -rf bin/swiftest_driver ../bin/ -cp -rf bin/swiftest_python ../bin/ -source ./setenv.sh \ No newline at end of file +export SWIFTEST_SIF=${SIF_DIR}/swiftest.sif \ No newline at end of file diff --git a/apptainer/setenv.sh b/apptainer/setenv.sh deleted file mode 100755 index d9f215d7c..000000000 --- a/apptainer/setenv.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/sh -- -# This will set the SWIFTEST_SIF environment variable as long as it is executed by source. -# $ . ./setenv.sh -export SWIFTEST_SIF="${PWD}/swiftest.sif" \ No newline at end of file diff --git a/docker/.gitignore b/docker/.gitignore index 772f5a9d9..5c73deb60 100644 --- a/docker/.gitignore +++ b/docker/.gitignore @@ -2,5 +2,5 @@ !.gitignore !install.sh !bin -!bin/swiftest_python +!bin/swiftest !bin/swiftest_driver diff --git a/docker/bin/swiftest b/docker/bin/swiftest new file mode 100755 index 000000000..8c985e065 --- /dev/null +++ b/docker/bin/swiftest @@ -0,0 +1,4 @@ +#!/bin/sh -- +OMP_NUM_THREADS=${OMP_NUM_THREADS:-`nproc --all`} +FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES:-1} +docker run -v $(pwd):$(pwd) -w $(pwd) --user "$(id -u):$(id -g)" -ti -e OMP_NUM_THREADS -e FOR_COARRAY_NUM_IMAGES mintongroup/swiftest "$@" \ No newline at end of file diff --git a/docker/bin/swiftest_driver b/docker/bin/swiftest_driver index 0f7d8b5dd..146509c36 100755 --- a/docker/bin/swiftest_driver +++ b/docker/bin/swiftest_driver @@ -1,2 +1,4 @@ #!/bin/sh -- -docker run -v $(pwd):$(pwd) -w $(pwd) --user "$(id -u):$(id -g)" -ti --entrypoint /usr/local/bin/swiftest_driver -e OMP_NUM_THREADS -e FOR_COARRAY_NUM_IMAGES swiftest:1.0.0 "$@" \ No newline at end of file +OMP_NUM_THREADS=${OMP_NUM_THREADS:-`nproc --all`} +FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES:-1} +docker run -v $(pwd):$(pwd) -w $(pwd) --user "$(id -u):$(id -g)" -ti --entrypoint /usr/local/bin/swiftest_driver -e OMP_NUM_THREADS -e FOR_COARRAY_NUM_IMAGES MintonGroup/swiftest "$@" \ No newline at end of file diff --git a/docker/bin/swiftest_python b/docker/bin/swiftest_python deleted file mode 100755 index fead7f338..000000000 --- a/docker/bin/swiftest_python +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -- -docker run -v $(pwd):$(pwd) -w $(pwd) --user "$(id -u):$(id -g)" -ti -e OMP_NUM_THREADS -e FOR_COARRAY_NUM_IMAGES swiftest:1.0.0 "$@" \ No newline at end of file diff --git a/docker/install.sh b/docker/install.sh index 1a0b2b20c..296223149 100755 --- a/docker/install.sh +++ b/docker/install.sh @@ -1,5 +1,6 @@ #!/bin/sh -- tag=${1:-latest} echo "Installing swiftest:${tag} 
Docker container and executable script" -docker pull daminton/swiftest:${tag} -cp -rf bin/swiftest ../bin/ \ No newline at end of file +docker pull mintongroup/swiftest:${tag} +cp -rf bin/swiftest ../bin/ +cp -rf bin/swiftest_driver ../bin/ \ No newline at end of file diff --git a/python/swiftest/swiftest/simulation_class.py b/python/swiftest/swiftest/simulation_class.py index 8971b897a..399ab0d38 100644 --- a/python/swiftest/swiftest/simulation_class.py +++ b/python/swiftest/swiftest/simulation_class.py @@ -470,6 +470,9 @@ def _type_scrub(output_data): sys.exit() except: warnings.warn(f"Error executing main swiftest_driver program", stacklevel=2) + res = p.communicate() + for line in res[1]: + print(line, end='') sys.exit() pbar.close() @@ -502,11 +505,9 @@ def run(self,dask: bool = False, **kwargs): if not self.binary_source.exists(): msg = "Path to swiftest_driver has not been set!" - msg += f"\nMake sure swiftest_driver is compiled and the executable is in {str(self.binary_path)}" + msg += f"\nMake sure swiftest_driver is compiled and the executable is in {str(self.binary_source.parent)}" warnings.warn(msg,stacklevel=2) return - else: - shutil.copy2(self.binary_source, self.driver_executable) if not self.restart: self.clean() @@ -942,16 +943,15 @@ def set_integrator(self, update_list.append("codename") if self.codename == "Swiftest": self.binary_source = Path(_pyfile).parent.parent.parent.parent / "bin" / "swiftest_driver" - self.binary_path = self.simdir.resolve() - self.driver_executable = self.binary_path / "swiftest_driver" + self.driver_executable = self.binary_source if not self.binary_source.exists(): - warnings.warn(f"Cannot find the Swiftest driver in {str(self.binary_path)}",stacklevel=2) + warnings.warn(f"Cannot find the Swiftest driver at {str(self.binary_source)}",stacklevel=2) self.driver_executable = None else: - if self.binary_path.exists(): + if self.binary_source.exists(): self.driver_executable.resolve() else: - self.binary_path = "NOT IMPLEMENTED FOR THIS CODE" + self.binary_source = "NOT IMPLEMENTED FOR THIS CODE" self.driver_executable = None update_list.append("driver_executable") @@ -1200,8 +1200,6 @@ def set_feature(self, msg = f"Cannot create the {self.simdir.resolve()} directory: File exists." msg += "\nDelete the file or change the location of param_file" raise NotADirectoryError(msg) - self.binary_path = self.simdir.resolve() - self.driver_executable = self.binary_path / "swiftest_driver" self.param_file = Path(kwargs.pop("param_file","param.in")) if self.codename == "Swiftest": @@ -2754,7 +2752,6 @@ def write_param(self, self.driver_script = os.path.join(self.simdir, "swiftest_driver.sh") with open(self.driver_script, 'w') as f: f.write(f"#{self._shell_full}\n") - #f.write(f"source ~/.{self._shell}rc\n") f.write(f"cd {self.simdir}\n") f.write(f"{str(self.driver_executable)} {self.integrator} {str(self.param_file)} compact\n") @@ -2991,11 +2988,9 @@ def save(self, self.write_param(param_file=param_file,**kwargs) if not self.binary_source.exists(): msg = "Path to swiftest_driver has not been set!" 
- msg += f"\nMake sure swiftest_driver is compiled and the executable is in {str(self.binary_path)}" + msg += f"\nMake sure swiftest_driver is compiled and the executable is in {str(self.binary_source.parent)}" warnings.warn(msg,stacklevel=2) return - else: - shutil.copy2(self.binary_source, self.driver_executable) elif codename == "Swifter": swifter_param = io.swiftest2swifter_param(param) if "rhill" in self.data: diff --git a/singularity/.gitignore b/singularity/.gitignore index 6ddbdba9f..b34dd2df5 100644 --- a/singularity/.gitignore +++ b/singularity/.gitignore @@ -2,4 +2,3 @@ !bin/swiftest_driver !bin/swiftest_python !install.sh -!setenv.sh diff --git a/singularity/bin/swiftest_driver b/singularity/bin/swiftest_driver index 03876195b..024e93115 100755 --- a/singularity/bin/swiftest_driver +++ b/singularity/bin/swiftest_driver @@ -1,2 +1,4 @@ #!/bin/sh -- +OMP_NUM_THREADS=${OMP_NUM_THREADS:-`nproc --all`} +FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES:-1} singularity exec --bind $(pwd):$(pwd) --env OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} swiftest_driver "$@" \ No newline at end of file diff --git a/singularity/bin/swiftest_python b/singularity/bin/swiftest_python deleted file mode 100755 index 2a7b13735..000000000 --- a/singularity/bin/swiftest_python +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -- -singularity run --bind $(pwd):$(pwd) --env OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} "$@" \ No newline at end of file diff --git a/singularity/install.sh b/singularity/install.sh index e7c47e2b3..233ee187f 100755 --- a/singularity/install.sh +++ b/singularity/install.sh @@ -1,11 +1,21 @@ #!/bin/sh -- -# This will install the Singularity container version of the swiftest_driver in place of the native compiled version into ../bin. -# In order to use the executable script, the SWIFTEST_SIF environment variable must be set to point to the location of swiftest.sif, which requires this script to be called via source: -# $ . ./install.sh +# This will install the Singularity version of the swiftest_driver in place of the native compiled version into ../bin as +# well as the swiftest_python script that is used to execute a Python input file. +# The swiftest.sif file will be copied to the SIF_DIR directory. The default location is ${HOME}/.singularity. +# To change this, just set environment variable SIF_DIR prior to running this script. +# +# The script takes an optional argument "tag" if you want to pull a container other than "latest". # -tag=${1:-latest} -echo "Installing swiftest.sif Singularity container and executable script from swiftest:${tag} Docker container" -singularity pull --force swiftest.sif docker://daminton/swiftest:${tag} +# In order to use one executable script, the SWIFTEST_SIF environment variable must be set to point to the location of swiftest.sif, +# which requires this script to be called via source: +# $ source ./install.sh +# or +# $ . 
./install.sh +TAG=${1:-latest} + +SIF_DIR=${SIF_DIR:-${HOME}/.singularity} +echo "Installing ${SIF_DIR}/swiftest.sif container from mintongroup/swiftest:${TAG} Docker container" +singularity pull --force ${SIF_DIR}/swiftest.sif docker://mintongroup/swiftest:${TAG} +cp -rf bin/swiftest ../bin/ cp -rf bin/swiftest_driver ../bin/ -cp -rf bin/swiftest_python ../bin/ -source ./setenv.sh \ No newline at end of file +export SWIFTEST_SIF=${SIF_DIR}/swiftest.sif \ No newline at end of file diff --git a/singularity/setenv.sh b/singularity/setenv.sh deleted file mode 100755 index d9f215d7c..000000000 --- a/singularity/setenv.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/sh -- -# This will set the SWIFTEST_SIF environment variable as long as it is executed by source. -# $ . ./setenv.sh -export SWIFTEST_SIF="${PWD}/swiftest.sif" \ No newline at end of file From 49e77a3cd714fe63c7ca67035c47b6b81f9924da Mon Sep 17 00:00:00 2001 From: David Minton Date: Wed, 7 Jun 2023 19:11:11 -0400 Subject: [PATCH 18/25] Added singularity swiftest script --- singularity/.gitignore | 2 +- singularity/bin/swiftest | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100755 singularity/bin/swiftest diff --git a/singularity/.gitignore b/singularity/.gitignore index b34dd2df5..baed94e88 100644 --- a/singularity/.gitignore +++ b/singularity/.gitignore @@ -1,4 +1,4 @@ !bin/ !bin/swiftest_driver -!bin/swiftest_python +!bin/swiftest !install.sh diff --git a/singularity/bin/swiftest b/singularity/bin/swiftest new file mode 100755 index 000000000..8a5849e51 --- /dev/null +++ b/singularity/bin/swiftest @@ -0,0 +1,4 @@ +#!/bin/sh -- +OMP_NUM_THREADS=${OMP_NUM_THREADS:-`nproc --all`} +FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES:-1} +singularity run --bind $(pwd):$(pwd) --env OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} "$@" \ No newline at end of file From afb2c9f036439711a9cdf003755c7b7d4e1077a1 Mon Sep 17 00:00:00 2001 From: David Minton Date: Wed, 7 Jun 2023 19:29:32 -0400 Subject: [PATCH 19/25] Added flags to ensure that host environment variables don't get passed down to the container in Apptainer/Singularity (which wreaks havoc on the RCAC cluster) --- apptainer/bin/swiftest | 2 +- singularity/bin/swiftest | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/apptainer/bin/swiftest b/apptainer/bin/swiftest index ca0fcfadd..5d91b80d6 100755 --- a/apptainer/bin/swiftest +++ b/apptainer/bin/swiftest @@ -1,4 +1,4 @@ #!/bin/sh -- OMP_NUM_THREADS=${OMP_NUM_THREADS:-`nproc --all`} FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES:-1} -apptainer run --bind $(pwd):$(pwd) --env OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} "$@" \ No newline at end of file +apptainer run --bind $(pwd):$(pwd) --cleanenv --env OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} "$@" \ No newline at end of file diff --git a/singularity/bin/swiftest b/singularity/bin/swiftest index 8a5849e51..a384860fe 100755 --- a/singularity/bin/swiftest +++ b/singularity/bin/swiftest @@ -1,4 +1,4 @@ #!/bin/sh -- OMP_NUM_THREADS=${OMP_NUM_THREADS:-`nproc --all`} FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES:-1} -singularity run --bind $(pwd):$(pwd) --env OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} "$@" \ No newline at end of file +singularity run --bind $(pwd):$(pwd) --cleanenv --env 
OMP_NUM_THREADS=${OMP_NUM_THREADS},FOR_COARRAY_NUM_IMAGES=${FOR_COARRAY_NUM_IMAGES} ${SWIFTEST_SIF} "$@" \ No newline at end of file From 1bd3846742e67358a47c171c712b00eda9c0a72d Mon Sep 17 00:00:00 2001 From: David Minton Date: Thu, 8 Jun 2023 09:06:13 -0400 Subject: [PATCH 20/25] Updated usage message to be more accurate. --- src/base/base_module.f90 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/base/base_module.f90 b/src/base/base_module.f90 index 502a42515..51266238f 100644 --- a/src/base/base_module.f90 +++ b/src/base/base_module.f90 @@ -600,8 +600,8 @@ subroutine base_util_exit(code) character(*), parameter :: BAR = '("------------------------------------------------")' character(*), parameter :: SUCCESS_MSG = '(/, "Normal termination of Swiftest (version ", f3.1, ")")' character(*), parameter :: FAIL_MSG = '(/, "Terminating Swiftest (version ", f3.1, ") due to error!!")' - character(*), parameter :: USAGE_MSG = '("Usage: swiftest [bs|helio|ra15|rmvs|symba|tu4|whm] ' // & - '[standard|compact|progress|NONE]")' + character(*), parameter :: USAGE_MSG = '("Usage: swiftest ' // & + '[{standard}|compact|progress]")' character(*), parameter :: HELP_MSG = USAGE_MSG select case(code) From 5e6a200839487c0e53d3d65f16f0ea12b0ad8acf Mon Sep 17 00:00:00 2001 From: David Minton Date: Thu, 8 Jun 2023 09:20:47 -0400 Subject: [PATCH 21/25] Added a Dockerfile just for the driver --- docker/.gitignore | 1 + docker/Dockerfile.swiftest_driver | 176 ++++++++++++++++++++++++++++++ 2 files changed, 177 insertions(+) create mode 100644 docker/Dockerfile.swiftest_driver diff --git a/docker/.gitignore b/docker/.gitignore index 5c73deb60..c876f8c26 100644 --- a/docker/.gitignore +++ b/docker/.gitignore @@ -4,3 +4,4 @@ !bin !bin/swiftest !bin/swiftest_driver +!Dockerfile.swiftest_driver diff --git a/docker/Dockerfile.swiftest_driver b/docker/Dockerfile.swiftest_driver new file mode 100644 index 000000000..8414f9016 --- /dev/null +++ b/docker/Dockerfile.swiftest_driver @@ -0,0 +1,176 @@ +FROM ubuntu:20.04 as build + +# kick everything off +RUN apt-get update && apt-get upgrade -y && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ + ca-certificates curl git wget gpg-agent software-properties-common build-essential gnupg pkg-config && \ + rm -rf /var/lib/apt/lists/* && \ + mkdir -p cmake/build && \ + cd cmake/build && \ + curl -LO https://github.com/Kitware/CMake/releases/download/v3.26.2/cmake-3.26.2-linux-x86_64.sh && \ + /bin/bash cmake-3.26.2-linux-x86_64.sh --prefix=/usr/local --skip-license && \ + wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB \ + | gpg --dearmor | tee /usr/share/keyrings/oneapi-archive-keyring.gpg > /dev/null && \ + echo "deb [signed-by=/usr/share/keyrings/oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main" | tee /etc/apt/sources.list.d/oneAPI.list && \ + apt-get -y update && apt-get upgrade -y && \ + apt-get install -y intel-hpckit + +# Set Intel compiler environment variables +ENV INTEL_DIR="/opt/intel/oneapi" +ENV LANG=C.UTF-8 +ENV ACL_BOARD_VENDOR_PATH='/opt/Intel/OpenCLFPGA/oneAPI/Boards' +ENV ADVISOR_2023_DIR='/opt/intel/oneapi/advisor/2023.1.0' +ENV APM='/opt/intel/oneapi/advisor/2023.1.0/perfmodels' +ENV CCL_CONFIGURATION='cpu_gpu_dpcpp' +ENV CCL_ROOT='/opt/intel/oneapi/ccl/2021.9.0' +ENV CLASSPATH='/opt/intel/oneapi/mpi/2021.9.0//lib/mpi.jar:/opt/intel/oneapi/dal/2023.1.0/lib/onedal.jar' +ENV CLCK_ROOT='/opt/intel/oneapi/clck/2021.7.3' +ENV 
CMAKE_PREFIX_PATH='/opt/intel/oneapi/tbb/2021.9.0/env/..:/opt/intel/oneapi/dnnl/2023.1.0/cpu_dpcpp_gpu_dpcpp/../lib/cmake:/opt/intel/oneapi/dal/2023.1.0:/opt/intel/oneapi/compiler/2023.1.0/linux/IntelDPCPP:/opt/intel/oneapi/ccl/2021.9.0/lib/cmake/oneCCL' +ENV CMPLR_ROOT='/opt/intel/oneapi/compiler/2023.1.0' +ENV CPATH='/opt/intel/oneapi/tbb/2021.9.0/env/../include:/opt/intel/oneapi/mpi/2021.9.0//include:/opt/intel/oneapi/mkl/2023.1.0/include:/opt/intel/oneapi/ippcp/2021.7.0/include:/opt/intel/oneapi/ipp/2021.8.0/include:/opt/intel/oneapi/dpl/2022.1.0/linux/include:/opt/intel/oneapi/dnnl/2023.1.0/cpu_dpcpp_gpu_dpcpp/include:/opt/intel/oneapi/dev-utilities/2021.9.0/include:/opt/intel/oneapi/dal/2023.1.0/include:/opt/intel/oneapi/ccl/2021.9.0/include/cpu_gpu_dpcpp' +ENV CPLUS_INCLUDE_PATH='/opt/intel/oneapi/clck/2021.7.3/include' +ENV DAALROOT='/opt/intel/oneapi/dal/2023.1.0' +ENV DALROOT='/opt/intel/oneapi/dal/2023.1.0' +ENV DAL_MAJOR_BINARY='1' +ENV DAL_MINOR_BINARY='1' +ENV DIAGUTIL_PATH='/opt/intel/oneapi/vtune/2023.1.0/sys_check/vtune_sys_check.py:/opt/intel/oneapi/debugger/2023.1.0/sys_check/debugger_sys_check.py:/opt/intel/oneapi/compiler/2023.1.0/sys_check/sys_check.sh:/opt/intel/oneapi/advisor/2023.1.0/sys_check/advisor_sys_check.py:' +ENV DNNLROOT='/opt/intel/oneapi/dnnl/2023.1.0/cpu_dpcpp_gpu_dpcpp' +ENV DPL_ROOT='/opt/intel/oneapi/dpl/2022.1.0' +ENV FI_PROVIDER_PATH='/opt/intel/oneapi/mpi/2021.9.0//libfabric/lib/prov:/usr/lib64/libfabric' +ENV FPGA_VARS_ARGS='' +ENV FPGA_VARS_DIR='/opt/intel/oneapi/compiler/2023.1.0/linux/lib/oclfpga' +ENV GDB_INFO='/opt/intel/oneapi/debugger/2023.1.0/documentation/info/' +ENV INFOPATH='/opt/intel/oneapi/debugger/2023.1.0/gdb/intel64/lib' +ENV INSPECTOR_2023_DIR='/opt/intel/oneapi/inspector/2023.1.0' +ENV INTELFPGAOCLSDKROOT='/opt/intel/oneapi/compiler/2023.1.0/linux/lib/oclfpga' +ENV INTEL_LICENSE_FILE='/opt/intel/licenses:/root/intel/licenses:/opt/intel/oneapi/clck/2021.7.3/licensing:/opt/intel/licenses:/root/intel/licenses:/Users/Shared/Library/Application Support/Intel/Licenses' +ENV INTEL_PYTHONHOME='/opt/intel/oneapi/debugger/2023.1.0/dep' +ENV IPPCP_TARGET_ARCH='intel64' +ENV IPPCRYPTOROOT='/opt/intel/oneapi/ippcp/2021.7.0' +ENV IPPROOT='/opt/intel/oneapi/ipp/2021.8.0' +ENV IPP_TARGET_ARCH='intel64' +ENV I_MPI_ROOT='/opt/intel/oneapi/mpi/2021.9.0' +ENV LD_LIBRARY_PATH='/opt/intel/oneapi/tbb/2021.9.0/env/../lib/intel64/gcc4.8:/opt/intel/oneapi/mpi/2021.9.0//libfabric/lib:/opt/intel/oneapi/mpi/2021.9.0//lib/release:/opt/intel/oneapi/mpi/2021.9.0//lib:/opt/intel/oneapi/mkl/2023.1.0/lib/intel64:/opt/intel/oneapi/itac/2021.9.0/slib:/opt/intel/oneapi/ippcp/2021.7.0/lib/intel64:/opt/intel/oneapi/ipp/2021.8.0/lib/intel64:/opt/intel/oneapi/dnnl/2023.1.0/cpu_dpcpp_gpu_dpcpp/lib:/opt/intel/oneapi/debugger/2023.1.0/gdb/intel64/lib:/opt/intel/oneapi/debugger/2023.1.0/libipt/intel64/lib:/opt/intel/oneapi/debugger/2023.1.0/dep/lib:/opt/intel/oneapi/dal/2023.1.0/lib/intel64:/opt/intel/oneapi/compiler/2023.1.0/linux/lib:/opt/intel/oneapi/compiler/2023.1.0/linux/lib/x64:/opt/intel/oneapi/compiler/2023.1.0/linux/lib/oclfpga/host/linux64/lib:/opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin:/opt/intel/oneapi/ccl/2021.9.0/lib/cpu_gpu_dpcpp' +ENV 
LIBRARY_PATH='/opt/intel/oneapi/tbb/2021.9.0/env/../lib/intel64/gcc4.8:/opt/intel/oneapi/mpi/2021.9.0//libfabric/lib:/opt/intel/oneapi/mpi/2021.9.0//lib/release:/opt/intel/oneapi/mpi/2021.9.0//lib:/opt/intel/oneapi/mkl/2023.1.0/lib/intel64:/opt/intel/oneapi/ippcp/2021.7.0/lib/intel64:/opt/intel/oneapi/ipp/2021.8.0/lib/intel64:/opt/intel/oneapi/dnnl/2023.1.0/cpu_dpcpp_gpu_dpcpp/lib:/opt/intel/oneapi/dal/2023.1.0/lib/intel64:/opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin:/opt/intel/oneapi/compiler/2023.1.0/linux/lib:/opt/intel/oneapi/clck/2021.7.3/lib/intel64:/opt/intel/oneapi/ccl/2021.9.0/lib/cpu_gpu_dpcpp' +ENV MANPATH='/opt/intel/oneapi/mpi/2021.9.0/man:/opt/intel/oneapi/itac/2021.9.0/man:/opt/intel/oneapi/debugger/2023.1.0/documentation/man:/opt/intel/oneapi/compiler/2023.1.0/documentation/en/man/common:/opt/intel/oneapi/clck/2021.7.3/man::' +ENV MKLROOT='/opt/intel/oneapi/mkl/2023.1.0' +ENV NLSPATH='/opt/intel/oneapi/mkl/2023.1.0/lib/intel64/locale/%l_%t/%N:/opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/locale/%l_%t/%N' +ENV OCL_ICD_FILENAMES='libintelocl_emu.so:libalteracl.so:/opt/intel/oneapi/compiler/2023.1.0/linux/lib/x64/libintelocl.so' +ENV ONEAPI_ROOT='/opt/intel/oneapi' +ENV PATH='/opt/intel/oneapi/vtune/2023.1.0/bin64:/opt/intel/oneapi/mpi/2021.9.0//libfabric/bin:/opt/intel/oneapi/mpi/2021.9.0//bin:/opt/intel/oneapi/mkl/2023.1.0/bin/intel64:/opt/intel/oneapi/itac/2021.9.0/bin:/opt/intel/oneapi/inspector/2023.1.0/bin64:/opt/intel/oneapi/dev-utilities/2021.9.0/bin:/opt/intel/oneapi/debugger/2023.1.0/gdb/intel64/bin:/opt/intel/oneapi/compiler/2023.1.0/linux/lib/oclfpga/bin:/opt/intel/oneapi/compiler/2023.1.0/linux/bin/intel64:/opt/intel/oneapi/compiler/2023.1.0/linux/bin:/opt/intel/oneapi/clck/2021.7.3/bin/intel64:/opt/intel/oneapi/advisor/2023.1.0/bin64:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin' +ENV PKG_CONFIG_PATH='/opt/intel/oneapi/vtune/2023.1.0/include/pkgconfig/lib64:/opt/intel/oneapi/tbb/2021.9.0/env/../lib/pkgconfig:/opt/intel/oneapi/mpi/2021.9.0/lib/pkgconfig:/opt/intel/oneapi/mkl/2023.1.0/lib/pkgconfig:/opt/intel/oneapi/ippcp/2021.7.0/lib/pkgconfig:/opt/intel/oneapi/inspector/2023.1.0/include/pkgconfig/lib64:/opt/intel/oneapi/dpl/2022.1.0/lib/pkgconfig:/opt/intel/oneapi/dnnl/2023.1.0/cpu_dpcpp_gpu_dpcpp/../lib/pkgconfig:/opt/intel/oneapi/dal/2023.1.0/lib/pkgconfig:/opt/intel/oneapi/compiler/2023.1.0/lib/pkgconfig:/opt/intel/oneapi/ccl/2021.9.0/lib/pkgconfig:/opt/intel/oneapi/advisor/2023.1.0/include/pkgconfig/lib64:' +ENV PYTHONPATH='/opt/intel/oneapi/advisor/2023.1.0/pythonapi' +ENV SETVARS_COMPLETED='1' +ENV TBBROOT='/opt/intel/oneapi/tbb/2021.9.0/env/..' 
+ENV VTUNE_PROFILER_2023_DIR='/opt/intel/oneapi/vtune/2023.1.0' +ENV VTUNE_PROFILER_DIR='/opt/intel/oneapi/vtune/2023.1.0' +ENV VT_ADD_LIBS='-ldwarf -lelf -lvtunwind -lm -lpthread' +ENV VT_LIB_DIR='/opt/intel/oneapi/itac/2021.9.0/lib' +ENV VT_MPI='impi4' +ENV VT_ROOT='/opt/intel/oneapi/itac/2021.9.0' +ENV VT_SLIB_DIR='/opt/intel/oneapi/itac/2021.9.0/slib' + +# Set HDF5 and NetCDF-specific Environment variables +ENV INSTALL_DIR="/usr/local" +ENV LIB_DIR="${INSTALL_DIR}/lib" +ENV LD_LIBRARY_PATH=${LIB_DIR}:${LD_LIBRARY_PATH} +RUN mkdir -p ${LIB_DIR} + +ENV CC="${INTEL_DIR}/compiler/latest/linux/bin/icx-cc" +ENV FC="${INTEL_DIR}/compiler/latest/linux/bin/ifx" +ENV CXX="${INTEL_DIR}/compiler/latest/linux/bin/icpx" +ENV LDFLAGS="-L${LIB_DIR}" +ENV NCDIR="${INSTALL_DIR}" +ENV NFDIR="${INSTALL_DIR}" +ENV HDF5_ROOT="${INSTALL_DIR}" +ENV HDF5_LIBDIR="${HDF5_ROOT}/lib" +ENV HDF5_INCLUDE_DIR="${HDF5_ROOT}/include" +ENV HDF5_PLUGIN_PATH="${HDF5_LIBDIR}/plugin" + +# Get the HDF5, NetCDF-C and NetCDF-Fortran libraries +RUN wget -qO- https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.14/hdf5-1.14.1/bin/unix/hdf5-1.14.1-2-Std-ubuntu2004_64-Intel.tar.gz | tar xvz && \ + wget -qO- https://github.com/Unidata/netcdf-c/archive/refs/tags/v4.9.2.tar.gz | tar xvz && \ + wget -qO- https://github.com/Unidata/netcdf-fortran/archive/refs/tags/v4.6.1.tar.gz | tar xvz && \ + wget -qO- https://www.zlib.net/zlib-1.2.13.tar.gz | tar xvz && \ + apt-get update && apt-get upgrade -y && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ + libxml2-dev libcurl4-gnutls-dev libzstd-dev libbz2-dev libaec-dev m4 && \ + rm -rf /var/lib/apt/lists/* && \ + cd hdf && \ + ./HDF5-1.14.1-Linux.sh --skip-license && \ + cp -R HDF_Group/HDF5/1.14.1/lib/*.a ${HDF5_ROOT}/lib/ && \ + cp -R HDF_Group/HDF5/1.14.1/include/* ${HDF5_ROOT}/include/ && \ + cp /zlib-1.2.13/zlib.h ${HDF5_INCLUDE_DIR}/ + +ENV LD_LIBRARY_PATH="/lib/x86_64-linux-gnu:${LD_LIBRARY_PATH}" +ENV LDFLAGS="-static-intel -lhdf5_hl -lhdf5 -lsz -lm -lz -lzstd -lbz2 -lcurl -lxml2" +RUN cd netcdf-c-4.9.2 && \ + cmake -S . 
-B build -DCMAKE_PREFIX_PATH="${INSTALL_DIR}" \ + -DCMAKE_INSTALL_PREFIX=${INSTALL_DIR} \ + -DCMAKE_POSITION_INDEPENDENT_CODE=ON \ + -DBUILD_SHARED_LIBS=OFF && \ + cmake --build build && \ + cmake --install build + +# NetCDF-Fortran library +ENV F77=${FC} +ENV CFLAGS="-fPIC" +ENV FCFLAGS="${CFLAGS} -standard-semantics" +ENV FFLAGS=${CFLAGS} +ENV CPPFLAGS="-I${INSTALL_DIR}/include -I/usr/include -I/usr/include/x86_64-linux-gnu/curl" +ENV LDFLAGS="-static-intel" +ENV LIBS="-L/usr/local/lib -L/usr/lib/x86_64-linux-gnu -lnetcdf -lhdf5_hl -lhdf5 -lsz -lm -lz -lzstd -lbz2 -lcurl -lxml2" +RUN cd netcdf-fortran-4.6.1 && \ + ./configure --disable-shared --prefix=${NFDIR} && \ + make && \ + make install + +# Swiftest +ENV NETCDF_HOME=${INSTALL_DIR} +ENV NETCDF_FORTRAN_HOME=${NETCDF_HOME} +ENV NETCDF_LIBRARY=${NETCDF_HOME} +ENV FOR_COARRAY_NUM_IMAGES=1 +ENV OMP_NUM_THREADS=1 +ENV FC="${INTEL_DIR}/mpi/latest/bin/mpiifort" +ENV FFLAGS="-fPIC -standard-semantics" +ENV LDFLAGS="-L/usr/local/lib -L/usr/lib/x86_64-linux-gnu -lnetcdff -lnetcdf -lhdf5_hl -lhdf5 -lsz -lz -lzstd -lbz2 -lcurl -lxml2" +COPY ./cmake/ /swiftest/cmake/ +COPY ./src/ /swiftest/src/ +COPY ./CMakeLists.txt /swiftest/ +RUN echo 'find_path(NETCDF_INCLUDE_DIR NAMES netcdf.mod HINTS ENV NETCDF_FORTRAN_HOME)\n' \ + 'find_library(NETCDF_FORTRAN_LIBRARY NAMES netcdff HINTS ENV LD_LIBRARY_PATH)\n' \ + 'find_library(NETCDF_LIBRARY NAMES netcdf HINTS ENV LD_LIBRARY_PATH)\n' \ + 'find_library(HDF5_HL_LIBRARY NAMES libhdf5_hl.a HINTS ENV LD_LIBRARY_PATH)\n' \ + 'find_library(HDF5_LIBRARY NAMES libhdf5.a HINTS ENV LD_LIBRARY_PATH)\n' \ + 'find_library(Z_LIBRARY NAMES libz.a HINTS ENV LD_LIBRARY_PATH)\n' \ + 'find_library(ZSTD_LIBRARY NAMES libzstd.a HINTS ENV LD_LIBRARY_PATH)\n' \ + 'find_library(SZ_LIBRARY NAMES libsz.a HINTS ENV LD_LIBRARY_PATH)\n' \ + 'find_library(BZ2_LIBRARY NAMES libbz2.a HINTS ENV LD_LIBRARY_PATH)\n' \ + 'find_library(CURL_LIBRARY NAMES libcurl.a HINTS ENV LD_LIBRARY_PATH)\n' \ + 'find_library(XML2_LIBRARY NAMES libxml2.a HINTS ENV LD_LIBRARY_PATH)\n' \ + 'set(NETCDF_FOUND TRUE)\n' \ + 'set(NETCDF_INCLUDE_DIRS ${NETCDF_INCLUDE_DIR})\n' \ + 'set(NETCDF_LIBRARIES ${NETCDF_FORTRAN_LIBRARY} ${NETCDF_LIBRARY} ${HDF5_HL_LIBRARY} ${HDF5_LIBRARY} ${SZ_LIBRARY} ${Z_LIBRARY} ${ZSTD_LIBRARY} ${BZ2_LIBRARY} ${CURL_LIBRARY} ${XML2_LIBRARY} )\n' \ + 'mark_as_advanced(NETCDF_LIBRARY NETCDF_FORTRAN_LIBRARY NETCDF_INCLUDE_DIR)\n' > /swiftest/cmake/Modules/FindNETCDF.cmake && \ + cd swiftest && \ + cmake -S . 
-B build -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" -DCONTAINERIZE=ON -DUSE_COARRAY=OFF -DCMAKE_BUILD_TYPE=RELEASE -DBUILD_SHARED_LIBS=OFF &&\ + cmake --build build && \ + cmake --install build + +# Production container +FROM ubuntu:20.04 + +RUN apt-get update && apt-get upgrade -y && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ + libsz2 libcurl3-gnutls libxml2 && \ + rm -rf /var/lib/apt/lists/* + +COPY --from=build /usr/local/bin/swiftest_driver /usr/local/bin/ +# COPY --from=build /usr/local/bin/swiftest_driver_caf /usr/local/bin/ +COPY --from=build /opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/libicaf.so /usr/local/lib/ +COPY --from=build /opt/intel/oneapi/mpi/2021.9.0//lib/release/libmpi.so.12 /usr/local/lib/ +COPY --from=build /opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/libintlc.so.5 /usr/local/lib/ +COPY --from=build /opt/intel/oneapi/mpi/latest/bin/mpiexec.hydra /usr/local/bin/ + + +ENTRYPOINT ["/usr/local/bin/swiftest_driver"] \ No newline at end of file From bfe55af08d47bb1f602a71e01f1f2e01e9fab818 Mon Sep 17 00:00:00 2001 From: David Minton Date: Fri, 9 Jun 2023 17:16:06 -0400 Subject: [PATCH 22/25] Made a clean, streamlined Dockerfile that contains almost no dependencies in the end. It can also spit out the binary executables so it could be used as a build tool rather than running from inside the container --- CMakeLists.txt | 8 - Dockerfile | 241 +++++++++++----------------- cmake/Modules/SetFortranFlags.cmake | 52 +++--- docker/Dockerfile.swiftest_driver | 197 +++++++---------------- src/CMakeLists.txt | 2 +- 5 files changed, 173 insertions(+), 327 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 08ec7c9e2..961683f3e 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -34,13 +34,6 @@ OPTION(USE_SIMD "Use SIMD vectorization" ON) OPTION(CONTAINERIZE "Compiling for use in a Docker/Singularity container" OFF) OPTION(BUILD_SHARED_LIBS "Build using shared libraries" ON) -# Locate and set external libraries. There are some CMake peculiarities -# taken care of here, such as the fact that the FindOpenMP routine doesn't know -# about Fortran. -IF (NOT BUILD_SHARED_LIBS) - set(CMAKE_FIND_LIBRARY_SUFFIXES ".a" ".so") -ENDIF () - INCLUDE(${CMAKE_MODULE_PATH}/SetParallelizationLibrary.cmake) INCLUDE(${CMAKE_MODULE_PATH}/SetUpNetCDF.cmake) INCLUDE(${CMAKE_MODULE_PATH}/SetMKL.cmake) @@ -51,7 +44,6 @@ INCLUDE(${CMAKE_MODULE_PATH}/SetFortranFlags.cmake) INCLUDE_DIRECTORIES($ENV{NETCDF_FORTRAN_HOME}/include;$ENV{NETCDF_HOME}/include) - # There is an error in CMAKE with this flag for pgf90. 
Unset it GET_FILENAME_COMPONENT(FCNAME ${CMAKE_Fortran_COMPILER} NAME) IF(FCNAME STREQUAL "pgf90") diff --git a/Dockerfile b/Dockerfile index b53f8dc8c..33c82f700 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,85 +1,73 @@ -FROM ubuntu:20.04 as build - -# kick everything off -RUN apt-get update && apt-get upgrade -y && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ - ca-certificates curl git wget gpg-agent software-properties-common build-essential gnupg pkg-config && \ - rm -rf /var/lib/apt/lists/* && \ - mkdir -p cmake/build && \ - cd cmake/build && \ - curl -LO https://github.com/Kitware/CMake/releases/download/v3.26.2/cmake-3.26.2-linux-x86_64.sh && \ - /bin/bash cmake-3.26.2-linux-x86_64.sh --prefix=/usr/local --skip-license && \ - wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB \ - | gpg --dearmor | tee /usr/share/keyrings/oneapi-archive-keyring.gpg > /dev/null && \ - echo "deb [signed-by=/usr/share/keyrings/oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main" | tee /etc/apt/sources.list.d/oneAPI.list && \ - apt-get -y update && apt-get upgrade -y && \ - apt-get install -y intel-hpckit - -# Set Intel compiler environment variables -ENV INTEL_DIR="/opt/intel/oneapi" -ENV LANG=C.UTF-8 -ENV ACL_BOARD_VENDOR_PATH='/opt/Intel/OpenCLFPGA/oneAPI/Boards' -ENV ADVISOR_2023_DIR='/opt/intel/oneapi/advisor/2023.1.0' -ENV APM='/opt/intel/oneapi/advisor/2023.1.0/perfmodels' -ENV CCL_CONFIGURATION='cpu_gpu_dpcpp' -ENV CCL_ROOT='/opt/intel/oneapi/ccl/2021.9.0' -ENV CLASSPATH='/opt/intel/oneapi/mpi/2021.9.0//lib/mpi.jar:/opt/intel/oneapi/dal/2023.1.0/lib/onedal.jar' -ENV CLCK_ROOT='/opt/intel/oneapi/clck/2021.7.3' -ENV CMAKE_PREFIX_PATH='/opt/intel/oneapi/tbb/2021.9.0/env/..:/opt/intel/oneapi/dnnl/2023.1.0/cpu_dpcpp_gpu_dpcpp/../lib/cmake:/opt/intel/oneapi/dal/2023.1.0:/opt/intel/oneapi/compiler/2023.1.0/linux/IntelDPCPP:/opt/intel/oneapi/ccl/2021.9.0/lib/cmake/oneCCL' -ENV CMPLR_ROOT='/opt/intel/oneapi/compiler/2023.1.0' -ENV CPATH='/opt/intel/oneapi/tbb/2021.9.0/env/../include:/opt/intel/oneapi/mpi/2021.9.0//include:/opt/intel/oneapi/mkl/2023.1.0/include:/opt/intel/oneapi/ippcp/2021.7.0/include:/opt/intel/oneapi/ipp/2021.8.0/include:/opt/intel/oneapi/dpl/2022.1.0/linux/include:/opt/intel/oneapi/dnnl/2023.1.0/cpu_dpcpp_gpu_dpcpp/include:/opt/intel/oneapi/dev-utilities/2021.9.0/include:/opt/intel/oneapi/dal/2023.1.0/include:/opt/intel/oneapi/ccl/2021.9.0/include/cpu_gpu_dpcpp' -ENV CPLUS_INCLUDE_PATH='/opt/intel/oneapi/clck/2021.7.3/include' -ENV DAALROOT='/opt/intel/oneapi/dal/2023.1.0' -ENV DALROOT='/opt/intel/oneapi/dal/2023.1.0' -ENV DAL_MAJOR_BINARY='1' -ENV DAL_MINOR_BINARY='1' -ENV DIAGUTIL_PATH='/opt/intel/oneapi/vtune/2023.1.0/sys_check/vtune_sys_check.py:/opt/intel/oneapi/debugger/2023.1.0/sys_check/debugger_sys_check.py:/opt/intel/oneapi/compiler/2023.1.0/sys_check/sys_check.sh:/opt/intel/oneapi/advisor/2023.1.0/sys_check/advisor_sys_check.py:' -ENV DNNLROOT='/opt/intel/oneapi/dnnl/2023.1.0/cpu_dpcpp_gpu_dpcpp' -ENV DPL_ROOT='/opt/intel/oneapi/dpl/2022.1.0' -ENV FI_PROVIDER_PATH='/opt/intel/oneapi/mpi/2021.9.0//libfabric/lib/prov:/usr/lib64/libfabric' -ENV FPGA_VARS_ARGS='' -ENV FPGA_VARS_DIR='/opt/intel/oneapi/compiler/2023.1.0/linux/lib/oclfpga' -ENV GDB_INFO='/opt/intel/oneapi/debugger/2023.1.0/documentation/info/' -ENV INFOPATH='/opt/intel/oneapi/debugger/2023.1.0/gdb/intel64/lib' -ENV INSPECTOR_2023_DIR='/opt/intel/oneapi/inspector/2023.1.0' -ENV 
INTELFPGAOCLSDKROOT='/opt/intel/oneapi/compiler/2023.1.0/linux/lib/oclfpga' -ENV INTEL_LICENSE_FILE='/opt/intel/licenses:/root/intel/licenses:/opt/intel/oneapi/clck/2021.7.3/licensing:/opt/intel/licenses:/root/intel/licenses:/Users/Shared/Library/Application Support/Intel/Licenses' -ENV INTEL_PYTHONHOME='/opt/intel/oneapi/debugger/2023.1.0/dep' -ENV IPPCP_TARGET_ARCH='intel64' -ENV IPPCRYPTOROOT='/opt/intel/oneapi/ippcp/2021.7.0' -ENV IPPROOT='/opt/intel/oneapi/ipp/2021.8.0' -ENV IPP_TARGET_ARCH='intel64' -ENV I_MPI_ROOT='/opt/intel/oneapi/mpi/2021.9.0' -ENV LD_LIBRARY_PATH='/opt/intel/oneapi/tbb/2021.9.0/env/../lib/intel64/gcc4.8:/opt/intel/oneapi/mpi/2021.9.0//libfabric/lib:/opt/intel/oneapi/mpi/2021.9.0//lib/release:/opt/intel/oneapi/mpi/2021.9.0//lib:/opt/intel/oneapi/mkl/2023.1.0/lib/intel64:/opt/intel/oneapi/itac/2021.9.0/slib:/opt/intel/oneapi/ippcp/2021.7.0/lib/intel64:/opt/intel/oneapi/ipp/2021.8.0/lib/intel64:/opt/intel/oneapi/dnnl/2023.1.0/cpu_dpcpp_gpu_dpcpp/lib:/opt/intel/oneapi/debugger/2023.1.0/gdb/intel64/lib:/opt/intel/oneapi/debugger/2023.1.0/libipt/intel64/lib:/opt/intel/oneapi/debugger/2023.1.0/dep/lib:/opt/intel/oneapi/dal/2023.1.0/lib/intel64:/opt/intel/oneapi/compiler/2023.1.0/linux/lib:/opt/intel/oneapi/compiler/2023.1.0/linux/lib/x64:/opt/intel/oneapi/compiler/2023.1.0/linux/lib/oclfpga/host/linux64/lib:/opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin:/opt/intel/oneapi/ccl/2021.9.0/lib/cpu_gpu_dpcpp' -ENV LIBRARY_PATH='/opt/intel/oneapi/tbb/2021.9.0/env/../lib/intel64/gcc4.8:/opt/intel/oneapi/mpi/2021.9.0//libfabric/lib:/opt/intel/oneapi/mpi/2021.9.0//lib/release:/opt/intel/oneapi/mpi/2021.9.0//lib:/opt/intel/oneapi/mkl/2023.1.0/lib/intel64:/opt/intel/oneapi/ippcp/2021.7.0/lib/intel64:/opt/intel/oneapi/ipp/2021.8.0/lib/intel64:/opt/intel/oneapi/dnnl/2023.1.0/cpu_dpcpp_gpu_dpcpp/lib:/opt/intel/oneapi/dal/2023.1.0/lib/intel64:/opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin:/opt/intel/oneapi/compiler/2023.1.0/linux/lib:/opt/intel/oneapi/clck/2021.7.3/lib/intel64:/opt/intel/oneapi/ccl/2021.9.0/lib/cpu_gpu_dpcpp' -ENV MANPATH='/opt/intel/oneapi/mpi/2021.9.0/man:/opt/intel/oneapi/itac/2021.9.0/man:/opt/intel/oneapi/debugger/2023.1.0/documentation/man:/opt/intel/oneapi/compiler/2023.1.0/documentation/en/man/common:/opt/intel/oneapi/clck/2021.7.3/man::' -ENV MKLROOT='/opt/intel/oneapi/mkl/2023.1.0' -ENV NLSPATH='/opt/intel/oneapi/mkl/2023.1.0/lib/intel64/locale/%l_%t/%N:/opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/locale/%l_%t/%N' -ENV OCL_ICD_FILENAMES='libintelocl_emu.so:libalteracl.so:/opt/intel/oneapi/compiler/2023.1.0/linux/lib/x64/libintelocl.so' -ENV ONEAPI_ROOT='/opt/intel/oneapi' -ENV PATH='/opt/intel/oneapi/vtune/2023.1.0/bin64:/opt/intel/oneapi/mpi/2021.9.0//libfabric/bin:/opt/intel/oneapi/mpi/2021.9.0//bin:/opt/intel/oneapi/mkl/2023.1.0/bin/intel64:/opt/intel/oneapi/itac/2021.9.0/bin:/opt/intel/oneapi/inspector/2023.1.0/bin64:/opt/intel/oneapi/dev-utilities/2021.9.0/bin:/opt/intel/oneapi/debugger/2023.1.0/gdb/intel64/bin:/opt/intel/oneapi/compiler/2023.1.0/linux/lib/oclfpga/bin:/opt/intel/oneapi/compiler/2023.1.0/linux/bin/intel64:/opt/intel/oneapi/compiler/2023.1.0/linux/bin:/opt/intel/oneapi/clck/2021.7.3/bin/intel64:/opt/intel/oneapi/advisor/2023.1.0/bin64:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin' -ENV 
PKG_CONFIG_PATH='/opt/intel/oneapi/vtune/2023.1.0/include/pkgconfig/lib64:/opt/intel/oneapi/tbb/2021.9.0/env/../lib/pkgconfig:/opt/intel/oneapi/mpi/2021.9.0/lib/pkgconfig:/opt/intel/oneapi/mkl/2023.1.0/lib/pkgconfig:/opt/intel/oneapi/ippcp/2021.7.0/lib/pkgconfig:/opt/intel/oneapi/inspector/2023.1.0/include/pkgconfig/lib64:/opt/intel/oneapi/dpl/2022.1.0/lib/pkgconfig:/opt/intel/oneapi/dnnl/2023.1.0/cpu_dpcpp_gpu_dpcpp/../lib/pkgconfig:/opt/intel/oneapi/dal/2023.1.0/lib/pkgconfig:/opt/intel/oneapi/compiler/2023.1.0/lib/pkgconfig:/opt/intel/oneapi/ccl/2021.9.0/lib/pkgconfig:/opt/intel/oneapi/advisor/2023.1.0/include/pkgconfig/lib64:' -ENV PYTHONPATH='/opt/intel/oneapi/advisor/2023.1.0/pythonapi' -ENV SETVARS_COMPLETED='1' -ENV TBBROOT='/opt/intel/oneapi/tbb/2021.9.0/env/..' -ENV VTUNE_PROFILER_2023_DIR='/opt/intel/oneapi/vtune/2023.1.0' -ENV VTUNE_PROFILER_DIR='/opt/intel/oneapi/vtune/2023.1.0' -ENV VT_ADD_LIBS='-ldwarf -lelf -lvtunwind -lm -lpthread' -ENV VT_LIB_DIR='/opt/intel/oneapi/itac/2021.9.0/lib' -ENV VT_MPI='impi4' -ENV VT_ROOT='/opt/intel/oneapi/itac/2021.9.0' -ENV VT_SLIB_DIR='/opt/intel/oneapi/itac/2021.9.0/slib' - -# Set HDF5 and NetCDF-specific Environment variables +# Copyright 2023 - David Minton +# This file is part of Swiftest. +# Swiftest is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. +# Swiftest is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty +# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# You should have received a copy of the GNU General Public License along with Swiftest. +# If not, see: https://www.gnu.org/licenses. +# +# This Dockerfile will build the Swiftest driver program with minimal external dependencies using the Intel Oneapi toolkit. +# This is done by building static versions of a minimal set of libraries that NetCDF-Fortran needs (Netcdf-C, HDF5, and Zlib). +# These, along with the Intel runtime libraries, are linked statically to the executable. Only the OS-specific libraries are linked +# dynamically. + +# This build target compiles all dependencies and the swiftest driver itself +FROM intel/oneapi-hpckit:2023.1.0-devel-ubuntu22.04 as build + +# The MACHINE_CODE_VALUE argument is a string that is used when compiling the swiftest_driver. It is appended to the "-x" compiler +# option: (-x${MACHINE_CODE_VALUE}). The default value is set to "sse2" which allows for certain SIMD instructions to be used while +# remaining # compatible with a wide range of CPUs. To get the highest performance, you can pass "host" as an argument, but the +# compiled binary # would only run on a CPU with an architecture compatible with the one that the build was performed on. +# For more details and other options, see: +# https://www.intel.com/content/www/us/en/docs/fortran-compiler/developer-guide-reference/2023-1/x-qx.html +ARG MACHINE_CODE_VALUE="sse2" + +# Build type options are DEBUG, RELEASE, PROFILE, or TESTING. 
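+# As a rough illustration (not part of the build itself; the image name and tag are
+# placeholders), a host-optimized release image could be built with:
+#   docker build --build-arg MACHINE_CODE_VALUE=host --build-arg BUILD_TYPE=RELEASE -t mintongroup/swiftest:latest .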
+ARG BUILD_TYPE="RELEASE" + ENV INSTALL_DIR="/usr/local" -ENV LIB_DIR="${INSTALL_DIR}/lib" -ENV LD_LIBRARY_PATH=${LIB_DIR}:${LD_LIBRARY_PATH} -RUN mkdir -p ${LIB_DIR} - -ENV CC="${INTEL_DIR}/compiler/latest/linux/bin/icx-cc" -ENV FC="${INTEL_DIR}/compiler/latest/linux/bin/ifx" -ENV CXX="${INTEL_DIR}/compiler/latest/linux/bin/icpx" -ENV LDFLAGS="-L${LIB_DIR}" +ENV CC="${ONEAPI_ROOT}/compiler/latest/linux/bin/icx" +ENV FC="${ONEAPI_ROOT}/compiler/latest/linux/bin/ifx" +ENV CXX="${ONEAPI_ROOT}/compiler/latest/linux/bin/icpx" +ENV F77="${FC}" + +# Get the HDF5, NetCDF-C, and NetCDF-Fortran libraries +RUN wget -qO- https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.14/hdf5-1.14.1/src/hdf5-1.14.1-2.tar.gz | tar xvz && \ + wget -qO- https://github.com/Unidata/netcdf-c/archive/refs/tags/v4.9.2.tar.gz | tar xvz && \ + wget -qO- https://github.com/Unidata/netcdf-fortran/archive/refs/tags/v4.6.1.tar.gz | tar xvz && \ + wget -qO- https://www.zlib.net/zlib-1.2.13.tar.gz | tar xvz + +RUN apt-get update && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ + m4 && \ + rm -rf /var/lib/apt/lists/* + +RUN cd zlib-1.2.13 && \ + ./configure --prefix=${INSTALL_DIR} --static && \ + make && \ + make install + +RUN cd hdf5-1.14.1-2 && \ + ./configure --disable-shared \ + --enable-build-mode=production \ + --disable-fortran \ + --disable-java \ + --disable-cxx \ + --prefix=${INSTALL_DIR} \ + --with-zlib=${INSTALL_DIR} && \ + make && \ + make install + +RUN cd netcdf-c-4.9.2 && \ + ./configure --disable-shared \ + --disable-dap \ + --disable-libxml2 \ + --disable-byterange \ + --prefix=${INSTALL_DIR} && \ + make && \ + make install + ENV NCDIR="${INSTALL_DIR}" ENV NFDIR="${INSTALL_DIR}" ENV HDF5_ROOT="${INSTALL_DIR}" @@ -87,39 +75,12 @@ ENV HDF5_LIBDIR="${HDF5_ROOT}/lib" ENV HDF5_INCLUDE_DIR="${HDF5_ROOT}/include" ENV HDF5_PLUGIN_PATH="${HDF5_LIBDIR}/plugin" -# Get the HDF5, NetCDF-C and NetCDF-Fortran libraries -RUN wget -qO- https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.14/hdf5-1.14.1/bin/unix/hdf5-1.14.1-2-Std-ubuntu2004_64-Intel.tar.gz | tar xvz && \ - wget -qO- https://github.com/Unidata/netcdf-c/archive/refs/tags/v4.9.2.tar.gz | tar xvz && \ - wget -qO- https://github.com/Unidata/netcdf-fortran/archive/refs/tags/v4.6.1.tar.gz | tar xvz && \ - wget -qO- https://www.zlib.net/zlib-1.2.13.tar.gz | tar xvz && \ - apt-get update && apt-get upgrade -y && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ - libxml2-dev libcurl4-gnutls-dev libzstd-dev libbz2-dev libaec-dev m4 && \ - rm -rf /var/lib/apt/lists/* && \ - cd hdf && \ - ./HDF5-1.14.1-Linux.sh --skip-license && \ - cp -R HDF_Group/HDF5/1.14.1/lib/*.a ${HDF5_ROOT}/lib/ && \ - cp -R HDF_Group/HDF5/1.14.1/include/* ${HDF5_ROOT}/include/ && \ - cp /zlib-1.2.13/zlib.h ${HDF5_INCLUDE_DIR}/ - -ENV LD_LIBRARY_PATH="/lib/x86_64-linux-gnu:${LD_LIBRARY_PATH}" -ENV LDFLAGS="-static-intel -lhdf5_hl -lhdf5 -lsz -lm -lz -lzstd -lbz2 -lcurl -lxml2" -RUN cd netcdf-c-4.9.2 && \ - cmake -S . 
-B build -DCMAKE_PREFIX_PATH="${INSTALL_DIR}" \ - -DCMAKE_INSTALL_PREFIX=${INSTALL_DIR} \ - -DCMAKE_POSITION_INDEPENDENT_CODE=ON \ - -DBUILD_SHARED_LIBS=OFF && \ - cmake --build build && \ - cmake --install build - # NetCDF-Fortran library -ENV F77=${FC} ENV CFLAGS="-fPIC" ENV FCFLAGS="${CFLAGS} -standard-semantics" ENV FFLAGS=${CFLAGS} -ENV CPPFLAGS="-I${INSTALL_DIR}/include -I/usr/include -I/usr/include/x86_64-linux-gnu/curl" -ENV LDFLAGS="-static-intel" -ENV LIBS="-L/usr/local/lib -L/usr/lib/x86_64-linux-gnu -lnetcdf -lhdf5_hl -lhdf5 -lsz -lm -lz -lzstd -lbz2 -lcurl -lxml2" +ENV CPPFLAGS="-I${INSTALL_DIR}/include" +ENV LIBS="-L/usr/local/lib -L/usr/lib/x86_64-linux-gnu -lnetcdf -lhdf5_hl -lhdf5 -lm -lz" RUN cd netcdf-fortran-4.6.1 && \ ./configure --disable-shared --prefix=${NFDIR} && \ make && \ @@ -131,38 +92,34 @@ ENV NETCDF_FORTRAN_HOME=${NETCDF_HOME} ENV NETCDF_LIBRARY=${NETCDF_HOME} ENV FOR_COARRAY_NUM_IMAGES=1 ENV OMP_NUM_THREADS=1 -ENV FC="${INTEL_DIR}/mpi/latest/bin/mpiifort" +ENV FC="${ONEAPI_ROOT}/mpi/latest/bin/mpiifort" ENV FFLAGS="-fPIC -standard-semantics" -ENV LDFLAGS="-L/usr/local/lib -L/usr/lib/x86_64-linux-gnu -lnetcdff -lnetcdf -lhdf5_hl -lhdf5 -lsz -lz -lzstd -lbz2 -lcurl -lxml2" +ENV LDFLAGS="-L/usr/local/lib" +ENV LIBS="-lhdf5_hl -lhdf5 -lz" COPY ./cmake/ /swiftest/cmake/ COPY ./src/ /swiftest/src/ COPY ./CMakeLists.txt /swiftest/ -RUN echo 'find_path(NETCDF_INCLUDE_DIR NAMES netcdf.mod HINTS ENV NETCDF_FORTRAN_HOME)\n' \ - 'find_library(NETCDF_FORTRAN_LIBRARY NAMES netcdff HINTS ENV LD_LIBRARY_PATH)\n' \ - 'find_library(NETCDF_LIBRARY NAMES netcdf HINTS ENV LD_LIBRARY_PATH)\n' \ - 'find_library(HDF5_HL_LIBRARY NAMES libhdf5_hl.a HINTS ENV LD_LIBRARY_PATH)\n' \ - 'find_library(HDF5_LIBRARY NAMES libhdf5.a HINTS ENV LD_LIBRARY_PATH)\n' \ - 'find_library(Z_LIBRARY NAMES libz.a HINTS ENV LD_LIBRARY_PATH)\n' \ - 'find_library(ZSTD_LIBRARY NAMES libzstd.a HINTS ENV LD_LIBRARY_PATH)\n' \ - 'find_library(SZ_LIBRARY NAMES libsz.a HINTS ENV LD_LIBRARY_PATH)\n' \ - 'find_library(BZ2_LIBRARY NAMES libbz2.a HINTS ENV LD_LIBRARY_PATH)\n' \ - 'find_library(CURL_LIBRARY NAMES libcurl.a HINTS ENV LD_LIBRARY_PATH)\n' \ - 'find_library(XML2_LIBRARY NAMES libxml2.a HINTS ENV LD_LIBRARY_PATH)\n' \ - 'set(NETCDF_FOUND TRUE)\n' \ - 'set(NETCDF_INCLUDE_DIRS ${NETCDF_INCLUDE_DIR})\n' \ - 'set(NETCDF_LIBRARIES ${NETCDF_FORTRAN_LIBRARY} ${NETCDF_LIBRARY} ${HDF5_HL_LIBRARY} ${HDF5_LIBRARY} ${SZ_LIBRARY} ${Z_LIBRARY} ${ZSTD_LIBRARY} ${BZ2_LIBRARY} ${CURL_LIBRARY} ${XML2_LIBRARY} )\n' \ - 'mark_as_advanced(NETCDF_LIBRARY NETCDF_FORTRAN_LIBRARY NETCDF_INCLUDE_DIR)\n' > /swiftest/cmake/Modules/FindNETCDF.cmake && \ - cd swiftest && \ - cmake -S . -B build -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" -DCONTAINERIZE=ON -DUSE_COARRAY=ON -DCMAKE_BUILD_TYPE=RELEASE -DBUILD_SHARED_LIBS=OFF &&\ - cmake --build build && \ - cp bin/swiftest_driver /usr/local/bin/swiftest_driver_caf && \ - rm -rf build && \ - cmake -S . -B build -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" -DCONTAINERIZE=ON -DUSE_COARRAY=OFF -DCMAKE_BUILD_TYPE=RELEASE -DBUILD_SHARED_LIBS=OFF &&\ +RUN cd swiftest && \ + cmake -S . 
-B build -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" \
+        -DMACHINE_CODE_VALUE=${MACHINE_CODE_VALUE} \
+        -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
+        -DUSE_COARRAY=OFF \
+        -DBUILD_SHARED_LIBS=OFF && \
     cmake --build build && \
     cmake --install build
 
-# Production container
-FROM continuumio/miniconda3
+# This build target creates a container that executes just the driver program
+FROM ubuntu:22.04 as driver
+COPY --from=build /usr/local/bin/swiftest_driver /usr/local/bin/
+ENTRYPOINT ["/usr/local/bin/swiftest_driver"]
+
+# This build target exports the binary to the host
+FROM scratch AS export_driver
+COPY --from=build /usr/local/bin/swiftest_driver /
+
+# This build target creates a container with a conda environment with all dependencies needed to run the Python front end and
+# analysis tools
+FROM continuumio/miniconda3 as python
 SHELL ["/bin/bash", "--login", "-c"]
 ENV SHELL="/bin/bash"
 ENV PATH="/opt/conda/bin:${PATH}"
@@ -170,11 +127,7 @@ ENV LD_LIBRARY_PATH="/usr/local/lib"
 
 COPY environment.yml .
 
-RUN apt-get update && apt-get upgrade -y && \
-    DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
-    libsz2 libcurl3-gnutls libxml2 && \
-    rm -rf /var/lib/apt/lists/* && \
-    conda update --all -y && \
+RUN conda update --all -y && \
     conda install conda-libmamba-solver -y && \
     conda config --set solver libmamba && \
     conda env create -f environment.yml && \
@@ -183,12 +136,6 @@ RUN apt-get update && apt-get upgrade -y && \
 
 COPY ./python/. /opt/conda/pkgs/
 COPY --from=build /usr/local/bin/swiftest_driver /opt/conda/envs/swiftest-env/bin/
-COPY --from=build /usr/local/bin/swiftest_driver /opt/conda/bin/
-COPY --from=build /usr/local/bin/swiftest_driver_caf /opt/conda/envs/swiftest-env/bin/
-COPY --from=build /opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/libicaf.so /opt/conda/envs/swiftest-env/lib/
-COPY --from=build /opt/intel/oneapi/mpi/2021.9.0//lib/release/libmpi.so.12 /opt/conda/envs/swiftest-env/lib/
-COPY --from=build /opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/libintlc.so.5 /opt/conda/envs/swiftest-env/lib/
-COPY --from=build /opt/intel/oneapi/mpi/latest/bin/mpiexec.hydra /opt/conda/envs/swiftest-env/bin/
 
 # Start new shell to activate the environment and install Swiftest
 RUN cd /opt/conda/pkgs/swiftest && conda develop . && \
@@ -199,6 +146,6 @@ RUN cd /opt/conda/pkgs/swiftest && conda develop . && \
     mkdir -p /.config/matplotlib && \
     chmod -R 777 /.cache/matplotlib && \
     chmod -R 777 /.config/matplotlib && \
-    ln -s /opt/conda/bin/swiftest_driver /opt/conda/bin/driver
+    ln -s /opt/conda/envs/swiftest-env/bin/swiftest_driver /opt/conda/bin/driver
 
 ENTRYPOINT ["conda", "run", "--no-capture-output", "-n", "swiftest-env"]
\ No newline at end of file
diff --git a/cmake/Modules/SetFortranFlags.cmake b/cmake/Modules/SetFortranFlags.cmake
index 47361de08..4c8cc9b85 100644
--- a/cmake/Modules/SetFortranFlags.cmake
+++ b/cmake/Modules/SetFortranFlags.cmake
@@ -46,6 +46,16 @@ ELSE()
    MESSAGE(FATAL_ERROR "CMAKE_BUILD_TYPE not valid! 
${BUILD_TYPE_MSG}") ENDIF(BT STREQUAL "RELEASE") +IF (CMAKE_Fortran_COMPILER_ID STREQUAL "GNU") + IF (APPLE) + SET(MACHINE_CODE_VALUE "tune=native" CACHE STRING "Tells the compiler which processor features it may target, including which instruction sets and optimizations it may generate.") + ELSE () + SET(MACHINE_CODE_VALUE "arch=native" CACHE STRING "Tells the compiler which processor features it may target, including which instruction sets and optimizations it may generate.") + ENDIF () +ELSE () + SET(MACHINE_CODE_VALUE "host" CACHE STRING "Tells the compiler which processor features it may target, including which instruction sets and optimizations it may generate.") +ENDIF () + ######################################################### # If the compiler flags have already been set, return now ######################################################### @@ -105,14 +115,7 @@ SET_COMPILE_FLAG(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS}" ) -IF (CONTAINERIZE) - # There is some bug where -march=native doesn't work on Mac - IF(APPLE) - SET(GNUNATIVE "-mtune=generic") - ELSE() - SET(GNUNATIVE "-march=generic") - ENDIF() - +IF (NOT BUILD_SHARED_LIBS) # Use static Intel libraries SET_COMPILE_FLAG(CMAKE_Fortran_LINK_FLAGS "${CMAKE_Fortran_LINK_FLAGS}" Fortran "-static-intel" # Intel @@ -127,17 +130,7 @@ IF (CONTAINERIZE) Fortran "-qopenmp-link=static" # Intel ) ENDIF (USE_OPENMP) - -ELSE () - # There is some bug where -march=native doesn't work on Mac - IF(APPLE) - SET(GNUNATIVE "-mtune=native") - ELSE() - SET(GNUNATIVE "-march=native") - ENDIF() -ENDIF (CONTAINERIZE) - - +ENDIF () IF (USE_SIMD) # Enables OpenMP SIMD compilation when OpenMP parallelization is disabled. @@ -148,21 +141,12 @@ IF (USE_SIMD) ) ENDIF (NOT USE_OPENMP) - IF (CONTAINERIZE) - # Optimize for an old enough processor that it should run on most computers - SET_COMPILE_FLAG(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS}" - Fortran "-xSSE2" # Intel - "/QxSSE2" # Intel Windows - ${GNUNATIVE} # GNU - ) - ELSE () - # Optimize for the host's architecture - SET_COMPILE_FLAG(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS}" - Fortran "-xhost" # Intel - "/QxHost" # Intel Windows - ${GNUNATIVE} # GNU - ) - ENDIF (CONTAINERIZE) + # Optimize for an old enough processor that it should run on most computers + SET_COMPILE_FLAG(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS}" + Fortran "-x${MACHINE_CODE_VALUE}" # Intel + "/Qx${MACHINE_CODE_VALUE}" # Intel Windows + "-m${MACHINE_CODE_VALUE}" # GNU + ) # Generate an extended set of vector functions SET_COMPILE_FLAG(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS}" diff --git a/docker/Dockerfile.swiftest_driver b/docker/Dockerfile.swiftest_driver index 8414f9016..08480c058 100644 --- a/docker/Dockerfile.swiftest_driver +++ b/docker/Dockerfile.swiftest_driver @@ -1,85 +1,60 @@ -FROM ubuntu:20.04 as build +# Copyright 2023 - David Minton +# This file is part of Swiftest. +# Swiftest is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. +# Swiftest is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty +# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# You should have received a copy of the GNU General Public License along with Swiftest. +# If not, see: https://www.gnu.org/licenses. 
+# +# This Dockerfile will build the Swiftest driver program with minimal external dependencies using the Intel Oneapi toolkit. +# This is done by building static versions of a minimal set of libraries that NetCDF-Fortran needs (Netcdf-C, HDF5, and Zlib). +# These, along with the Intel runtime libraries, are linked statically to the executable. Only the OS-specific libraries are linked +# dynamically. +FROM intel/oneapi-hpckit:2023.1.0-devel-ubuntu20.04 as build -# kick everything off -RUN apt-get update && apt-get upgrade -y && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ - ca-certificates curl git wget gpg-agent software-properties-common build-essential gnupg pkg-config && \ - rm -rf /var/lib/apt/lists/* && \ - mkdir -p cmake/build && \ - cd cmake/build && \ - curl -LO https://github.com/Kitware/CMake/releases/download/v3.26.2/cmake-3.26.2-linux-x86_64.sh && \ - /bin/bash cmake-3.26.2-linux-x86_64.sh --prefix=/usr/local --skip-license && \ - wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB \ - | gpg --dearmor | tee /usr/share/keyrings/oneapi-archive-keyring.gpg > /dev/null && \ - echo "deb [signed-by=/usr/share/keyrings/oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main" | tee /etc/apt/sources.list.d/oneAPI.list && \ - apt-get -y update && apt-get upgrade -y && \ - apt-get install -y intel-hpckit +ENV INSTALL_DIR="/usr/local" +ENV CC="${ONEAPI_ROOT}/compiler/latest/linux/bin/icx" +ENV FC="${ONEAPI_ROOT}/compiler/latest/linux/bin/ifx" +ENV CXX="${ONEAPI_ROOT}/compiler/latest/linux/bin/icpx" +ENV F77="${FC}" -# Set Intel compiler environment variables -ENV INTEL_DIR="/opt/intel/oneapi" -ENV LANG=C.UTF-8 -ENV ACL_BOARD_VENDOR_PATH='/opt/Intel/OpenCLFPGA/oneAPI/Boards' -ENV ADVISOR_2023_DIR='/opt/intel/oneapi/advisor/2023.1.0' -ENV APM='/opt/intel/oneapi/advisor/2023.1.0/perfmodels' -ENV CCL_CONFIGURATION='cpu_gpu_dpcpp' -ENV CCL_ROOT='/opt/intel/oneapi/ccl/2021.9.0' -ENV CLASSPATH='/opt/intel/oneapi/mpi/2021.9.0//lib/mpi.jar:/opt/intel/oneapi/dal/2023.1.0/lib/onedal.jar' -ENV CLCK_ROOT='/opt/intel/oneapi/clck/2021.7.3' -ENV CMAKE_PREFIX_PATH='/opt/intel/oneapi/tbb/2021.9.0/env/..:/opt/intel/oneapi/dnnl/2023.1.0/cpu_dpcpp_gpu_dpcpp/../lib/cmake:/opt/intel/oneapi/dal/2023.1.0:/opt/intel/oneapi/compiler/2023.1.0/linux/IntelDPCPP:/opt/intel/oneapi/ccl/2021.9.0/lib/cmake/oneCCL' -ENV CMPLR_ROOT='/opt/intel/oneapi/compiler/2023.1.0' -ENV CPATH='/opt/intel/oneapi/tbb/2021.9.0/env/../include:/opt/intel/oneapi/mpi/2021.9.0//include:/opt/intel/oneapi/mkl/2023.1.0/include:/opt/intel/oneapi/ippcp/2021.7.0/include:/opt/intel/oneapi/ipp/2021.8.0/include:/opt/intel/oneapi/dpl/2022.1.0/linux/include:/opt/intel/oneapi/dnnl/2023.1.0/cpu_dpcpp_gpu_dpcpp/include:/opt/intel/oneapi/dev-utilities/2021.9.0/include:/opt/intel/oneapi/dal/2023.1.0/include:/opt/intel/oneapi/ccl/2021.9.0/include/cpu_gpu_dpcpp' -ENV CPLUS_INCLUDE_PATH='/opt/intel/oneapi/clck/2021.7.3/include' -ENV DAALROOT='/opt/intel/oneapi/dal/2023.1.0' -ENV DALROOT='/opt/intel/oneapi/dal/2023.1.0' -ENV DAL_MAJOR_BINARY='1' -ENV DAL_MINOR_BINARY='1' -ENV DIAGUTIL_PATH='/opt/intel/oneapi/vtune/2023.1.0/sys_check/vtune_sys_check.py:/opt/intel/oneapi/debugger/2023.1.0/sys_check/debugger_sys_check.py:/opt/intel/oneapi/compiler/2023.1.0/sys_check/sys_check.sh:/opt/intel/oneapi/advisor/2023.1.0/sys_check/advisor_sys_check.py:' -ENV DNNLROOT='/opt/intel/oneapi/dnnl/2023.1.0/cpu_dpcpp_gpu_dpcpp' -ENV DPL_ROOT='/opt/intel/oneapi/dpl/2022.1.0' -ENV 
FI_PROVIDER_PATH='/opt/intel/oneapi/mpi/2021.9.0//libfabric/lib/prov:/usr/lib64/libfabric' -ENV FPGA_VARS_ARGS='' -ENV FPGA_VARS_DIR='/opt/intel/oneapi/compiler/2023.1.0/linux/lib/oclfpga' -ENV GDB_INFO='/opt/intel/oneapi/debugger/2023.1.0/documentation/info/' -ENV INFOPATH='/opt/intel/oneapi/debugger/2023.1.0/gdb/intel64/lib' -ENV INSPECTOR_2023_DIR='/opt/intel/oneapi/inspector/2023.1.0' -ENV INTELFPGAOCLSDKROOT='/opt/intel/oneapi/compiler/2023.1.0/linux/lib/oclfpga' -ENV INTEL_LICENSE_FILE='/opt/intel/licenses:/root/intel/licenses:/opt/intel/oneapi/clck/2021.7.3/licensing:/opt/intel/licenses:/root/intel/licenses:/Users/Shared/Library/Application Support/Intel/Licenses' -ENV INTEL_PYTHONHOME='/opt/intel/oneapi/debugger/2023.1.0/dep' -ENV IPPCP_TARGET_ARCH='intel64' -ENV IPPCRYPTOROOT='/opt/intel/oneapi/ippcp/2021.7.0' -ENV IPPROOT='/opt/intel/oneapi/ipp/2021.8.0' -ENV IPP_TARGET_ARCH='intel64' -ENV I_MPI_ROOT='/opt/intel/oneapi/mpi/2021.9.0' -ENV LD_LIBRARY_PATH='/opt/intel/oneapi/tbb/2021.9.0/env/../lib/intel64/gcc4.8:/opt/intel/oneapi/mpi/2021.9.0//libfabric/lib:/opt/intel/oneapi/mpi/2021.9.0//lib/release:/opt/intel/oneapi/mpi/2021.9.0//lib:/opt/intel/oneapi/mkl/2023.1.0/lib/intel64:/opt/intel/oneapi/itac/2021.9.0/slib:/opt/intel/oneapi/ippcp/2021.7.0/lib/intel64:/opt/intel/oneapi/ipp/2021.8.0/lib/intel64:/opt/intel/oneapi/dnnl/2023.1.0/cpu_dpcpp_gpu_dpcpp/lib:/opt/intel/oneapi/debugger/2023.1.0/gdb/intel64/lib:/opt/intel/oneapi/debugger/2023.1.0/libipt/intel64/lib:/opt/intel/oneapi/debugger/2023.1.0/dep/lib:/opt/intel/oneapi/dal/2023.1.0/lib/intel64:/opt/intel/oneapi/compiler/2023.1.0/linux/lib:/opt/intel/oneapi/compiler/2023.1.0/linux/lib/x64:/opt/intel/oneapi/compiler/2023.1.0/linux/lib/oclfpga/host/linux64/lib:/opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin:/opt/intel/oneapi/ccl/2021.9.0/lib/cpu_gpu_dpcpp' -ENV LIBRARY_PATH='/opt/intel/oneapi/tbb/2021.9.0/env/../lib/intel64/gcc4.8:/opt/intel/oneapi/mpi/2021.9.0//libfabric/lib:/opt/intel/oneapi/mpi/2021.9.0//lib/release:/opt/intel/oneapi/mpi/2021.9.0//lib:/opt/intel/oneapi/mkl/2023.1.0/lib/intel64:/opt/intel/oneapi/ippcp/2021.7.0/lib/intel64:/opt/intel/oneapi/ipp/2021.8.0/lib/intel64:/opt/intel/oneapi/dnnl/2023.1.0/cpu_dpcpp_gpu_dpcpp/lib:/opt/intel/oneapi/dal/2023.1.0/lib/intel64:/opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin:/opt/intel/oneapi/compiler/2023.1.0/linux/lib:/opt/intel/oneapi/clck/2021.7.3/lib/intel64:/opt/intel/oneapi/ccl/2021.9.0/lib/cpu_gpu_dpcpp' -ENV MANPATH='/opt/intel/oneapi/mpi/2021.9.0/man:/opt/intel/oneapi/itac/2021.9.0/man:/opt/intel/oneapi/debugger/2023.1.0/documentation/man:/opt/intel/oneapi/compiler/2023.1.0/documentation/en/man/common:/opt/intel/oneapi/clck/2021.7.3/man::' -ENV MKLROOT='/opt/intel/oneapi/mkl/2023.1.0' -ENV NLSPATH='/opt/intel/oneapi/mkl/2023.1.0/lib/intel64/locale/%l_%t/%N:/opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/locale/%l_%t/%N' -ENV OCL_ICD_FILENAMES='libintelocl_emu.so:libalteracl.so:/opt/intel/oneapi/compiler/2023.1.0/linux/lib/x64/libintelocl.so' -ENV ONEAPI_ROOT='/opt/intel/oneapi' -ENV 
PATH='/opt/intel/oneapi/vtune/2023.1.0/bin64:/opt/intel/oneapi/mpi/2021.9.0//libfabric/bin:/opt/intel/oneapi/mpi/2021.9.0//bin:/opt/intel/oneapi/mkl/2023.1.0/bin/intel64:/opt/intel/oneapi/itac/2021.9.0/bin:/opt/intel/oneapi/inspector/2023.1.0/bin64:/opt/intel/oneapi/dev-utilities/2021.9.0/bin:/opt/intel/oneapi/debugger/2023.1.0/gdb/intel64/bin:/opt/intel/oneapi/compiler/2023.1.0/linux/lib/oclfpga/bin:/opt/intel/oneapi/compiler/2023.1.0/linux/bin/intel64:/opt/intel/oneapi/compiler/2023.1.0/linux/bin:/opt/intel/oneapi/clck/2021.7.3/bin/intel64:/opt/intel/oneapi/advisor/2023.1.0/bin64:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin' -ENV PKG_CONFIG_PATH='/opt/intel/oneapi/vtune/2023.1.0/include/pkgconfig/lib64:/opt/intel/oneapi/tbb/2021.9.0/env/../lib/pkgconfig:/opt/intel/oneapi/mpi/2021.9.0/lib/pkgconfig:/opt/intel/oneapi/mkl/2023.1.0/lib/pkgconfig:/opt/intel/oneapi/ippcp/2021.7.0/lib/pkgconfig:/opt/intel/oneapi/inspector/2023.1.0/include/pkgconfig/lib64:/opt/intel/oneapi/dpl/2022.1.0/lib/pkgconfig:/opt/intel/oneapi/dnnl/2023.1.0/cpu_dpcpp_gpu_dpcpp/../lib/pkgconfig:/opt/intel/oneapi/dal/2023.1.0/lib/pkgconfig:/opt/intel/oneapi/compiler/2023.1.0/lib/pkgconfig:/opt/intel/oneapi/ccl/2021.9.0/lib/pkgconfig:/opt/intel/oneapi/advisor/2023.1.0/include/pkgconfig/lib64:' -ENV PYTHONPATH='/opt/intel/oneapi/advisor/2023.1.0/pythonapi' -ENV SETVARS_COMPLETED='1' -ENV TBBROOT='/opt/intel/oneapi/tbb/2021.9.0/env/..' -ENV VTUNE_PROFILER_2023_DIR='/opt/intel/oneapi/vtune/2023.1.0' -ENV VTUNE_PROFILER_DIR='/opt/intel/oneapi/vtune/2023.1.0' -ENV VT_ADD_LIBS='-ldwarf -lelf -lvtunwind -lm -lpthread' -ENV VT_LIB_DIR='/opt/intel/oneapi/itac/2021.9.0/lib' -ENV VT_MPI='impi4' -ENV VT_ROOT='/opt/intel/oneapi/itac/2021.9.0' -ENV VT_SLIB_DIR='/opt/intel/oneapi/itac/2021.9.0/slib' +# Get the HDF5, NetCDF-C, and NetCDF-Fortran libraries +RUN wget -qO- https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.14/hdf5-1.14.1/src/hdf5-1.14.1-2.tar.gz | tar xvz && \ + wget -qO- https://github.com/Unidata/netcdf-c/archive/refs/tags/v4.9.2.tar.gz | tar xvz && \ + wget -qO- https://github.com/Unidata/netcdf-fortran/archive/refs/tags/v4.6.1.tar.gz | tar xvz && \ + wget -qO- https://www.zlib.net/zlib-1.2.13.tar.gz | tar xvz -# Set HDF5 and NetCDF-specific Environment variables -ENV INSTALL_DIR="/usr/local" -ENV LIB_DIR="${INSTALL_DIR}/lib" -ENV LD_LIBRARY_PATH=${LIB_DIR}:${LD_LIBRARY_PATH} -RUN mkdir -p ${LIB_DIR} +RUN apt-get update && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ + m4 && \ + rm -rf /var/lib/apt/lists/* + +RUN cd zlib-1.2.13 && \ + ./configure --prefix=${INSTALL_DIR} --static && \ + make && \ + make install + +RUN cd hdf5-1.14.1-2 && \ + ./configure --disable-shared \ + --enable-build-mode=production \ + --disable-fortran \ + --disable-java \ + --disable-cxx \ + --prefix=${INSTALL_DIR} \ + --with-zlib=${INSTALL_DIR} && \ + make && \ + make install + +RUN cd netcdf-c-4.9.2 && \ + ./configure --disable-shared \ + --disable-dap \ + --disable-libxml2 \ + --disable-byterange \ + --prefix=${INSTALL_DIR} && \ + make && \ + make install -ENV CC="${INTEL_DIR}/compiler/latest/linux/bin/icx-cc" -ENV FC="${INTEL_DIR}/compiler/latest/linux/bin/ifx" -ENV CXX="${INTEL_DIR}/compiler/latest/linux/bin/icpx" -ENV LDFLAGS="-L${LIB_DIR}" ENV NCDIR="${INSTALL_DIR}" ENV NFDIR="${INSTALL_DIR}" ENV HDF5_ROOT="${INSTALL_DIR}" @@ -87,39 +62,12 @@ ENV HDF5_LIBDIR="${HDF5_ROOT}/lib" ENV HDF5_INCLUDE_DIR="${HDF5_ROOT}/include" ENV HDF5_PLUGIN_PATH="${HDF5_LIBDIR}/plugin" -# Get the HDF5, NetCDF-C 
and NetCDF-Fortran libraries -RUN wget -qO- https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.14/hdf5-1.14.1/bin/unix/hdf5-1.14.1-2-Std-ubuntu2004_64-Intel.tar.gz | tar xvz && \ - wget -qO- https://github.com/Unidata/netcdf-c/archive/refs/tags/v4.9.2.tar.gz | tar xvz && \ - wget -qO- https://github.com/Unidata/netcdf-fortran/archive/refs/tags/v4.6.1.tar.gz | tar xvz && \ - wget -qO- https://www.zlib.net/zlib-1.2.13.tar.gz | tar xvz && \ - apt-get update && apt-get upgrade -y && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ - libxml2-dev libcurl4-gnutls-dev libzstd-dev libbz2-dev libaec-dev m4 && \ - rm -rf /var/lib/apt/lists/* && \ - cd hdf && \ - ./HDF5-1.14.1-Linux.sh --skip-license && \ - cp -R HDF_Group/HDF5/1.14.1/lib/*.a ${HDF5_ROOT}/lib/ && \ - cp -R HDF_Group/HDF5/1.14.1/include/* ${HDF5_ROOT}/include/ && \ - cp /zlib-1.2.13/zlib.h ${HDF5_INCLUDE_DIR}/ - -ENV LD_LIBRARY_PATH="/lib/x86_64-linux-gnu:${LD_LIBRARY_PATH}" -ENV LDFLAGS="-static-intel -lhdf5_hl -lhdf5 -lsz -lm -lz -lzstd -lbz2 -lcurl -lxml2" -RUN cd netcdf-c-4.9.2 && \ - cmake -S . -B build -DCMAKE_PREFIX_PATH="${INSTALL_DIR}" \ - -DCMAKE_INSTALL_PREFIX=${INSTALL_DIR} \ - -DCMAKE_POSITION_INDEPENDENT_CODE=ON \ - -DBUILD_SHARED_LIBS=OFF && \ - cmake --build build && \ - cmake --install build - # NetCDF-Fortran library -ENV F77=${FC} ENV CFLAGS="-fPIC" ENV FCFLAGS="${CFLAGS} -standard-semantics" ENV FFLAGS=${CFLAGS} -ENV CPPFLAGS="-I${INSTALL_DIR}/include -I/usr/include -I/usr/include/x86_64-linux-gnu/curl" -ENV LDFLAGS="-static-intel" -ENV LIBS="-L/usr/local/lib -L/usr/lib/x86_64-linux-gnu -lnetcdf -lhdf5_hl -lhdf5 -lsz -lm -lz -lzstd -lbz2 -lcurl -lxml2" +ENV CPPFLAGS="-I${INSTALL_DIR}/include" +ENV LIBS="-L/usr/local/lib -L/usr/lib/x86_64-linux-gnu -lnetcdf -lhdf5_hl -lhdf5 -lm -lz" RUN cd netcdf-fortran-4.6.1 && \ ./configure --disable-shared --prefix=${NFDIR} && \ make && \ @@ -131,46 +79,21 @@ ENV NETCDF_FORTRAN_HOME=${NETCDF_HOME} ENV NETCDF_LIBRARY=${NETCDF_HOME} ENV FOR_COARRAY_NUM_IMAGES=1 ENV OMP_NUM_THREADS=1 -ENV FC="${INTEL_DIR}/mpi/latest/bin/mpiifort" +ENV FC="${ONEAPI_ROOT}/mpi/latest/bin/mpiifort" ENV FFLAGS="-fPIC -standard-semantics" -ENV LDFLAGS="-L/usr/local/lib -L/usr/lib/x86_64-linux-gnu -lnetcdff -lnetcdf -lhdf5_hl -lhdf5 -lsz -lz -lzstd -lbz2 -lcurl -lxml2" +ENV LDFLAGS="-L/usr/local/lib" +ENV LIBS="-lhdf5_hl -lhdf5 -lz" COPY ./cmake/ /swiftest/cmake/ COPY ./src/ /swiftest/src/ COPY ./CMakeLists.txt /swiftest/ -RUN echo 'find_path(NETCDF_INCLUDE_DIR NAMES netcdf.mod HINTS ENV NETCDF_FORTRAN_HOME)\n' \ - 'find_library(NETCDF_FORTRAN_LIBRARY NAMES netcdff HINTS ENV LD_LIBRARY_PATH)\n' \ - 'find_library(NETCDF_LIBRARY NAMES netcdf HINTS ENV LD_LIBRARY_PATH)\n' \ - 'find_library(HDF5_HL_LIBRARY NAMES libhdf5_hl.a HINTS ENV LD_LIBRARY_PATH)\n' \ - 'find_library(HDF5_LIBRARY NAMES libhdf5.a HINTS ENV LD_LIBRARY_PATH)\n' \ - 'find_library(Z_LIBRARY NAMES libz.a HINTS ENV LD_LIBRARY_PATH)\n' \ - 'find_library(ZSTD_LIBRARY NAMES libzstd.a HINTS ENV LD_LIBRARY_PATH)\n' \ - 'find_library(SZ_LIBRARY NAMES libsz.a HINTS ENV LD_LIBRARY_PATH)\n' \ - 'find_library(BZ2_LIBRARY NAMES libbz2.a HINTS ENV LD_LIBRARY_PATH)\n' \ - 'find_library(CURL_LIBRARY NAMES libcurl.a HINTS ENV LD_LIBRARY_PATH)\n' \ - 'find_library(XML2_LIBRARY NAMES libxml2.a HINTS ENV LD_LIBRARY_PATH)\n' \ - 'set(NETCDF_FOUND TRUE)\n' \ - 'set(NETCDF_INCLUDE_DIRS ${NETCDF_INCLUDE_DIR})\n' \ - 'set(NETCDF_LIBRARIES ${NETCDF_FORTRAN_LIBRARY} ${NETCDF_LIBRARY} ${HDF5_HL_LIBRARY} ${HDF5_LIBRARY} ${SZ_LIBRARY} 
${Z_LIBRARY} ${ZSTD_LIBRARY} ${BZ2_LIBRARY} ${CURL_LIBRARY} ${XML2_LIBRARY} )\n' \ - 'mark_as_advanced(NETCDF_LIBRARY NETCDF_FORTRAN_LIBRARY NETCDF_INCLUDE_DIR)\n' > /swiftest/cmake/Modules/FindNETCDF.cmake && \ - cd swiftest && \ - cmake -S . -B build -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" -DCONTAINERIZE=ON -DUSE_COARRAY=OFF -DCMAKE_BUILD_TYPE=RELEASE -DBUILD_SHARED_LIBS=OFF &&\ +RUN cd swiftest && \ + cmake -S . -B build -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" -DMACHINE_CODE_VALUE="sse2" -DUSE_COARRAY=OFF -DCMAKE_BUILD_TYPE=RELEASE -DBUILD_SHARED_LIBS=OFF && \ cmake --build build && \ cmake --install build # Production container FROM ubuntu:20.04 -RUN apt-get update && apt-get upgrade -y && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ - libsz2 libcurl3-gnutls libxml2 && \ - rm -rf /var/lib/apt/lists/* - COPY --from=build /usr/local/bin/swiftest_driver /usr/local/bin/ -# COPY --from=build /usr/local/bin/swiftest_driver_caf /usr/local/bin/ -COPY --from=build /opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/libicaf.so /usr/local/lib/ -COPY --from=build /opt/intel/oneapi/mpi/2021.9.0//lib/release/libmpi.so.12 /usr/local/lib/ -COPY --from=build /opt/intel/oneapi/compiler/2023.1.0/linux/compiler/lib/intel64_lin/libintlc.so.5 /usr/local/lib/ -COPY --from=build /opt/intel/oneapi/mpi/latest/bin/mpiexec.hydra /usr/local/bin/ - ENTRYPOINT ["/usr/local/bin/swiftest_driver"] \ No newline at end of file diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index cd7567afc..f360bcdbc 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -108,7 +108,7 @@ SET_SOURCE_FILES_PROPERTIES(${SWIFTEST_src} PROPERTIES Fortran_PREPROCESS ON) # Add the needed libraries and special compiler flags ##################################################### -TARGET_LINK_LIBRARIES(${SWIFTEST_DRIVER} PRIVATE ${NETCDF_FORTRAN_LIBRARIES} ${NETCDF_LIBRARIES}) +TARGET_LINK_LIBRARIES(${SWIFTEST_DRIVER} PRIVATE ${NETCDF_FORTRAN_LIBRARIES} ${NETCDF_LIBRARIES} $ENV{LIBS}) IF(USE_OPENMP) SET_PROPERTY(TARGET ${SWIFTEST_DRIVER} APPEND_STRING PROPERTY COMPILE_FLAGS "${OpenMP_Fortran_FLAGS} ") From 14537731d598b6f6ff4d7a3474948f5208b30c1b Mon Sep 17 00:00:00 2001 From: David Minton Date: Fri, 9 Jun 2023 18:05:04 -0400 Subject: [PATCH 23/25] Addedn environment.yml file to repo so that the conda environment can be created --- .gitignore | 1 + Dockerfile | 8 +-- docker/.gitignore | 1 - docker/Dockerfile.swiftest_driver | 99 ------------------------------- environment.yml | 22 +++++++ 5 files changed, 27 insertions(+), 104 deletions(-) delete mode 100644 docker/Dockerfile.swiftest_driver create mode 100644 environment.yml diff --git a/.gitignore b/.gitignore index e5c25df47..3ec1398e3 100644 --- a/.gitignore +++ b/.gitignore @@ -35,6 +35,7 @@ dump* !singularity/ !Dockerfile !swiftest.def +!environment.yml bin/ build/* diff --git a/Dockerfile b/Dockerfile index 33c82f700..4ac07ec7f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -125,12 +125,12 @@ ENV SHELL="/bin/bash" ENV PATH="/opt/conda/bin:${PATH}" ENV LD_LIBRARY_PATH="/usr/local/lib" -COPY environment.yml . - RUN conda update --all -y && \ conda install conda-libmamba-solver -y && \ - conda config --set solver libmamba && \ - conda env create -f environment.yml && \ + conda config --set solver libmamba + +COPY environment.yml . 
+RUN conda env create -f environment.yml && \ conda init bash && \ echo "conda activate swiftest-env" >> ~/.bashrc diff --git a/docker/.gitignore b/docker/.gitignore index c876f8c26..5c73deb60 100644 --- a/docker/.gitignore +++ b/docker/.gitignore @@ -4,4 +4,3 @@ !bin !bin/swiftest !bin/swiftest_driver -!Dockerfile.swiftest_driver diff --git a/docker/Dockerfile.swiftest_driver b/docker/Dockerfile.swiftest_driver deleted file mode 100644 index 08480c058..000000000 --- a/docker/Dockerfile.swiftest_driver +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright 2023 - David Minton -# This file is part of Swiftest. -# Swiftest is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. -# Swiftest is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty -# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# You should have received a copy of the GNU General Public License along with Swiftest. -# If not, see: https://www.gnu.org/licenses. -# -# This Dockerfile will build the Swiftest driver program with minimal external dependencies using the Intel Oneapi toolkit. -# This is done by building static versions of a minimal set of libraries that NetCDF-Fortran needs (Netcdf-C, HDF5, and Zlib). -# These, along with the Intel runtime libraries, are linked statically to the executable. Only the OS-specific libraries are linked -# dynamically. -FROM intel/oneapi-hpckit:2023.1.0-devel-ubuntu20.04 as build - -ENV INSTALL_DIR="/usr/local" -ENV CC="${ONEAPI_ROOT}/compiler/latest/linux/bin/icx" -ENV FC="${ONEAPI_ROOT}/compiler/latest/linux/bin/ifx" -ENV CXX="${ONEAPI_ROOT}/compiler/latest/linux/bin/icpx" -ENV F77="${FC}" - -# Get the HDF5, NetCDF-C, and NetCDF-Fortran libraries -RUN wget -qO- https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.14/hdf5-1.14.1/src/hdf5-1.14.1-2.tar.gz | tar xvz && \ - wget -qO- https://github.com/Unidata/netcdf-c/archive/refs/tags/v4.9.2.tar.gz | tar xvz && \ - wget -qO- https://github.com/Unidata/netcdf-fortran/archive/refs/tags/v4.6.1.tar.gz | tar xvz && \ - wget -qO- https://www.zlib.net/zlib-1.2.13.tar.gz | tar xvz - -RUN apt-get update && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ - m4 && \ - rm -rf /var/lib/apt/lists/* - -RUN cd zlib-1.2.13 && \ - ./configure --prefix=${INSTALL_DIR} --static && \ - make && \ - make install - -RUN cd hdf5-1.14.1-2 && \ - ./configure --disable-shared \ - --enable-build-mode=production \ - --disable-fortran \ - --disable-java \ - --disable-cxx \ - --prefix=${INSTALL_DIR} \ - --with-zlib=${INSTALL_DIR} && \ - make && \ - make install - -RUN cd netcdf-c-4.9.2 && \ - ./configure --disable-shared \ - --disable-dap \ - --disable-libxml2 \ - --disable-byterange \ - --prefix=${INSTALL_DIR} && \ - make && \ - make install - -ENV NCDIR="${INSTALL_DIR}" -ENV NFDIR="${INSTALL_DIR}" -ENV HDF5_ROOT="${INSTALL_DIR}" -ENV HDF5_LIBDIR="${HDF5_ROOT}/lib" -ENV HDF5_INCLUDE_DIR="${HDF5_ROOT}/include" -ENV HDF5_PLUGIN_PATH="${HDF5_LIBDIR}/plugin" - -# NetCDF-Fortran library -ENV CFLAGS="-fPIC" -ENV FCFLAGS="${CFLAGS} -standard-semantics" -ENV FFLAGS=${CFLAGS} -ENV CPPFLAGS="-I${INSTALL_DIR}/include" -ENV LIBS="-L/usr/local/lib -L/usr/lib/x86_64-linux-gnu -lnetcdf -lhdf5_hl -lhdf5 -lm -lz" -RUN cd netcdf-fortran-4.6.1 && \ - ./configure --disable-shared 
--prefix=${NFDIR} && \ - make && \ - make install - -# Swiftest -ENV NETCDF_HOME=${INSTALL_DIR} -ENV NETCDF_FORTRAN_HOME=${NETCDF_HOME} -ENV NETCDF_LIBRARY=${NETCDF_HOME} -ENV FOR_COARRAY_NUM_IMAGES=1 -ENV OMP_NUM_THREADS=1 -ENV FC="${ONEAPI_ROOT}/mpi/latest/bin/mpiifort" -ENV FFLAGS="-fPIC -standard-semantics" -ENV LDFLAGS="-L/usr/local/lib" -ENV LIBS="-lhdf5_hl -lhdf5 -lz" -COPY ./cmake/ /swiftest/cmake/ -COPY ./src/ /swiftest/src/ -COPY ./CMakeLists.txt /swiftest/ -RUN cd swiftest && \ - cmake -S . -B build -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" -DMACHINE_CODE_VALUE="sse2" -DUSE_COARRAY=OFF -DCMAKE_BUILD_TYPE=RELEASE -DBUILD_SHARED_LIBS=OFF && \ - cmake --build build && \ - cmake --install build - -# Production container -FROM ubuntu:20.04 - -COPY --from=build /usr/local/bin/swiftest_driver /usr/local/bin/ - -ENTRYPOINT ["/usr/local/bin/swiftest_driver"] \ No newline at end of file diff --git a/environment.yml b/environment.yml new file mode 100644 index 000000000..0e1dbb387 --- /dev/null +++ b/environment.yml @@ -0,0 +1,22 @@ +name: swiftest-env + +channels: + - conda-forge + - defaults + +dependencies: + - numpy + - scipy + - matplotlib + - pandas + - xarray + - h5netcdf + - netcdf4 + - dask + - bottleneck + - astropy + - astroquery + - tqdm + - x264 + - ffmpeg + - conda-build \ No newline at end of file From a0e5edc55ed7a781708ed09d06420f96ae677351 Mon Sep 17 00:00:00 2001 From: David Minton Date: Fri, 9 Jun 2023 19:03:52 -0400 Subject: [PATCH 24/25] Fixed issue that was leading to an incompatibility between glibc versions due to the miniconda3 container being based on an older Ubuntu. --- Dockerfile | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index 4ac07ec7f..55c22e793 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,7 +13,7 @@ # dynamically. # This build target compiles all dependencies and the swiftest driver itself -FROM intel/oneapi-hpckit:2023.1.0-devel-ubuntu22.04 as build +FROM intel/oneapi-hpckit:2023.1.0-devel-ubuntu20.04 as build # The MACHINE_CODE_VALUE argument is a string that is used when compiling the swiftest_driver. It is appended to the "-x" compiler # option: (-x${MACHINE_CODE_VALUE}). The default value is set to "sse2" which allows for certain SIMD instructions to be used while @@ -135,7 +135,7 @@ RUN conda env create -f environment.yml && \ echo "conda activate swiftest-env" >> ~/.bashrc COPY ./python/. /opt/conda/pkgs/ -COPY --from=build /usr/local/bin/swiftest_driver /opt/conda/envs/swiftest-env/bin/ +COPY --from=build /usr/local/bin/swiftest_driver /opt/conda/bin/swiftest_driver # Start new shell to activate the environment and install Swiftest RUN cd /opt/conda/pkgs/swiftest && conda develop . && \ @@ -145,7 +145,6 @@ RUN cd /opt/conda/pkgs/swiftest && conda develop . 
&& \ mkdir -p /.cache/matplotlib && \ mkdir -p /.config/matplotlib && \ chmod -R 777 /.cache/matplotlib && \ - chmod -R 777 /.config/matplotlib && \ - ln -s /opt/conda/envs/swiftest-env/bin/swiftest_driver /opt/conda/bin/driver + chmod -R 777 /.config/matplotlib ENTRYPOINT ["conda", "run", "--no-capture-output", "-n", "swiftest-env"] \ No newline at end of file From 9a60cee064927b98ba53943872962eaa85a14dc2 Mon Sep 17 00:00:00 2001 From: David Minton Date: Fri, 9 Jun 2023 20:06:24 -0400 Subject: [PATCH 25/25] Minor tweaks to the Dockerfile to isolate the dependency build from the driver build --- .gitignore | 1 - Dockerfile | 40 ++++++++++++++++++++++++---------------- 2 files changed, 24 insertions(+), 17 deletions(-) diff --git a/.gitignore b/.gitignore index 3ec1398e3..5056a889a 100644 --- a/.gitignore +++ b/.gitignore @@ -34,7 +34,6 @@ dump* !docker/ !singularity/ !Dockerfile -!swiftest.def !environment.yml bin/ diff --git a/Dockerfile b/Dockerfile index 55c22e793..ce3e615c3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,18 +13,7 @@ # dynamically. # This build target compiles all dependencies and the swiftest driver itself -FROM intel/oneapi-hpckit:2023.1.0-devel-ubuntu20.04 as build - -# The MACHINE_CODE_VALUE argument is a string that is used when compiling the swiftest_driver. It is appended to the "-x" compiler -# option: (-x${MACHINE_CODE_VALUE}). The default value is set to "sse2" which allows for certain SIMD instructions to be used while -# remaining # compatible with a wide range of CPUs. To get the highest performance, you can pass "host" as an argument, but the -# compiled binary # would only run on a CPU with an architecture compatible with the one that the build was performed on. -# For more details and other options, see: -# https://www.intel.com/content/www/us/en/docs/fortran-compiler/developer-guide-reference/2023-1/x-qx.html -ARG MACHINE_CODE_VALUE="sse2" - -# Build type options are DEBUG, RELEASE, PROFILE, or TESTING. -ARG BUILD_TYPE="RELEASE" +FROM intel/oneapi-hpckit:2023.1.0-devel-ubuntu20.04 as build_deps ENV INSTALL_DIR="/usr/local" ENV CC="${ONEAPI_ROOT}/compiler/latest/linux/bin/icx" @@ -86,6 +75,25 @@ RUN cd netcdf-fortran-4.6.1 && \ make && \ make install +FROM intel/oneapi-hpckit:2023.1.0-devel-ubuntu20.04 as build_driver +COPY --from=build_deps /usr/local/. /usr/local/ +ENV INSTALL_DIR="/usr/local" +ENV CC="${ONEAPI_ROOT}/compiler/latest/linux/bin/icx" +ENV FC="${ONEAPI_ROOT}/compiler/latest/linux/bin/ifx" +ENV CXX="${ONEAPI_ROOT}/compiler/latest/linux/bin/icpx" +ENV F77="${FC}" + +# The MACHINE_CODE_VALUE argument is a string that is used when compiling the swiftest_driver. It is appended to the "-x" compiler +# option: (-x${MACHINE_CODE_VALUE}). The default value is set to "sse2" which allows for certain SIMD instructions to be used while +# remaining # compatible with a wide range of CPUs. To get the highest performance, you can pass "host" as an argument, but the +# compiled binary # would only run on a CPU with an architecture compatible with the one that the build was performed on. +# For more details and other options, see: +# https://www.intel.com/content/www/us/en/docs/fortran-compiler/developer-guide-reference/2023-1/x-qx.html +ARG MACHINE_CODE_VALUE="sse2" + +# Build type options are DEBUG, RELEASE, PROFILE, or TESTING. 
+ARG BUILD_TYPE="RELEASE" + # Swiftest ENV NETCDF_HOME=${INSTALL_DIR} ENV NETCDF_FORTRAN_HOME=${NETCDF_HOME} @@ -109,13 +117,13 @@ RUN cd swiftest && \ cmake --install build # This build target creates a container that executes just the driver program -FROM ubuntu:22.04 as driver -COPY --from=build /usr/local/bin/swiftest_driver /usr/local/bin/ +FROM ubuntu:20.04 as driver +COPY --from=build_driver /usr/local/bin/swiftest_driver /usr/local/bin/ ENTRYPOINT ["/usr/local/bin/swiftest_driver"] # This build target exports the binary to the host FROM scratch AS export_driver -COPY --from=build /usr/local/bin/swiftest_driver / +COPY --from=build_driver /usr/local/bin/swiftest_driver / # This build target creates a container with a conda environment with all dependencies needed to run the Python front end and # analysis tools @@ -135,7 +143,7 @@ RUN conda env create -f environment.yml && \ echo "conda activate swiftest-env" >> ~/.bashrc COPY ./python/. /opt/conda/pkgs/ -COPY --from=build /usr/local/bin/swiftest_driver /opt/conda/bin/swiftest_driver +COPY --from=build_driver /usr/local/bin/swiftest_driver /opt/conda/bin/swiftest_driver # Start new shell to activate the environment and install Swiftest RUN cd /opt/conda/pkgs/swiftest && conda develop . && \