diff --git a/.azure-pipelines/azure-pipelines-osx.yml b/.azure-pipelines/azure-pipelines-osx.yml
index e7684a4f5c..acfd133642 100755
--- a/.azure-pipelines/azure-pipelines-osx.yml
+++ b/.azure-pipelines/azure-pipelines-osx.yml
@@ -5,7 +5,7 @@
 jobs:
 - job: osx
   pool:
-    vmImage: macOS-12
+    vmImage: macOS-13
   strategy:
     matrix:
       osx_64_:
diff --git a/.ci_support/linux_64_.yaml b/.ci_support/linux_64_.yaml
index 3aacb735f4..6bc80589c6 100644
--- a/.ci_support/linux_64_.yaml
+++ b/.ci_support/linux_64_.yaml
@@ -5,7 +5,7 @@ c_compiler:
 c_compiler_version:
 - '13'
 cdt_name:
-- cos7
+- conda
 channel_sources:
 - conda-forge
 channel_targets:
@@ -15,7 +15,7 @@ cxx_compiler:
 cxx_compiler_version:
 - '13'
 docker_image:
-- quay.io/condaforge/linux-anvil-cos7-x86_64
+- quay.io/condaforge/linux-anvil-x86_64:alma9
 hdf5:
 - 1.14.3
 linux_clang_version:
diff --git a/.ci_support/osx_64_.yaml b/.ci_support/osx_64_.yaml
index 5b8851063d..689a527954 100644
--- a/.ci_support/osx_64_.yaml
+++ b/.ci_support/osx_64_.yaml
@@ -1,13 +1,13 @@
 MACOSX_DEPLOYMENT_TARGET:
-- '11.1'
+- '14.5'
 MACOSX_SDK_VERSION:
-- '11.1'
+- '14.5'
 boost:
 - '1.78'
 c_compiler:
 - clang
 c_compiler_version:
-- '17'
+- '18'
 channel_sources:
 - conda-forge
 channel_targets:
@@ -15,7 +15,7 @@ channel_targets:
 cxx_compiler:
 - clangxx
 cxx_compiler_version:
-- '17'
+- '18'
 hdf5:
 - 1.14.3
 macos_machine:
diff --git a/.ci_support/osx_arm64_.yaml b/.ci_support/osx_arm64_.yaml
index 31881fcccc..97b9a26a21 100644
--- a/.ci_support/osx_arm64_.yaml
+++ b/.ci_support/osx_arm64_.yaml
@@ -1,13 +1,13 @@
 MACOSX_DEPLOYMENT_TARGET:
-- '11.1'
+- '14.5'
 MACOSX_SDK_VERSION:
-- '11.1'
+- '14.5'
 boost:
 - '1.78'
 c_compiler:
 - clang
 c_compiler_version:
-- '17'
+- '18'
 channel_sources:
 - conda-forge
 channel_targets:
@@ -15,7 +15,7 @@ channel_targets:
 cxx_compiler:
 - clangxx
 cxx_compiler_version:
-- '17'
+- '18'
 hdf5:
 - 1.14.3
 macos_machine:
diff --git a/.circleci/config.yml b/.circleci/config.yml
index 15a7db477e..d4c78769cc 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -12,7 +12,7 @@ jobs:
     environment:
       - CONFIG: "linux_64_"
       - UPLOAD_PACKAGES: "False"
-      - DOCKER_IMAGE: "quay.io/condaforge/linux-anvil-cos7-x86_64"
+      - DOCKER_IMAGE: "quay.io/condaforge/linux-anvil-x86_64:alma9"
     steps:
       - checkout
       - run:
diff --git a/.scripts/build_steps.sh b/.scripts/build_steps.sh
index 705694c60f..3c2f6176dc 100755
--- a/.scripts/build_steps.sh
+++ b/.scripts/build_steps.sh
@@ -31,18 +31,20 @@ pkgs_dirs:
 solver: libmamba
 CONDARC

+mv /opt/conda/conda-meta/history /opt/conda/conda-meta/history.$(date +%Y-%m-%d-%H-%M-%S)
+echo > /opt/conda/conda-meta/history
+micromamba install --root-prefix ~/.conda --prefix /opt/conda \
+    --yes --override-channels --channel conda-forge --strict-channel-priority \
+    pip python=3.12 conda-build conda-forge-ci-setup=4 "conda-build>=24.1"

 export CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED=1

-mamba install --update-specs --yes --quiet --channel conda-forge --strict-channel-priority \
-    pip mamba conda-build conda-forge-ci-setup=4 "conda-build>=24.1"
-mamba update --update-specs --yes --quiet --channel conda-forge --strict-channel-priority \
-    pip mamba conda-build conda-forge-ci-setup=4 "conda-build>=24.1"
-
 # set up the condarc
 setup_conda_rc "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}"

 source run_conda_forge_build_setup

+
+
 # make the build number clobber
 make_build_number "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}"
@@ -71,8 +73,8 @@ else
         --extra-meta flow_run_id="${flow_run_id:-}" remote_url="${remote_url:-}" sha="${sha:-}"

startgroup "Inspecting artifacts" ) 2> /dev/null - # inspect_artifacts was only added in conda-forge-ci-setup 4.6.0 - command -v inspect_artifacts >/dev/null 2>&1 && inspect_artifacts || echo "inspect_artifacts needs conda-forge-ci-setup >=4.6.0" + # inspect_artifacts was only added in conda-forge-ci-setup 4.9.4 + command -v inspect_artifacts >/dev/null 2>&1 && inspect_artifacts --recipe-dir "${RECIPE_ROOT}" -m "${CONFIG_FILE}" || echo "inspect_artifacts needs conda-forge-ci-setup >=4.9.4" ( endgroup "Inspecting artifacts" ) 2> /dev/null diff --git a/.scripts/run_osx_build.sh b/.scripts/run_osx_build.sh index 5d33eb88d7..d2fc5890a8 100755 --- a/.scripts/run_osx_build.sh +++ b/.scripts/run_osx_build.sh @@ -7,28 +7,39 @@ source .scripts/logging_utils.sh set -xe MINIFORGE_HOME=${MINIFORGE_HOME:-${HOME}/miniforge3} +MINIFORGE_HOME=${MINIFORGE_HOME%/} # remove trailing slash -( startgroup "Installing a fresh version of Miniforge" ) 2> /dev/null - -MINIFORGE_URL="https://github.com/conda-forge/miniforge/releases/latest/download" -MINIFORGE_FILE="Miniforge3-MacOSX-$(uname -m).sh" -curl -L -O "${MINIFORGE_URL}/${MINIFORGE_FILE}" -rm -rf ${MINIFORGE_HOME} -bash $MINIFORGE_FILE -b -p ${MINIFORGE_HOME} - -( endgroup "Installing a fresh version of Miniforge" ) 2> /dev/null +( startgroup "Provisioning base env with micromamba" ) 2> /dev/null +MICROMAMBA_VERSION="1.5.10-0" +if [[ "$(uname -m)" == "arm64" ]]; then + osx_arch="osx-arm64" +else + osx_arch="osx-64" +fi +MICROMAMBA_URL="https://github.com/mamba-org/micromamba-releases/releases/download/${MICROMAMBA_VERSION}/micromamba-${osx_arch}" +MAMBA_ROOT_PREFIX="${MINIFORGE_HOME}-micromamba-$(date +%s)" +echo "Downloading micromamba ${MICROMAMBA_VERSION}" +micromamba_exe="$(mktemp -d)/micromamba" +curl -L -o "${micromamba_exe}" "${MICROMAMBA_URL}" +chmod +x "${micromamba_exe}" +echo "Creating environment" +"${micromamba_exe}" create --yes --root-prefix "${MAMBA_ROOT_PREFIX}" --prefix "${MINIFORGE_HOME}" \ + --channel conda-forge \ + pip python=3.12 conda-build conda-forge-ci-setup=4 "conda-build>=24.1" +echo "Moving pkgs cache from ${MAMBA_ROOT_PREFIX} to ${MINIFORGE_HOME}" +mv "${MAMBA_ROOT_PREFIX}/pkgs" "${MINIFORGE_HOME}" +echo "Cleaning up micromamba" +rm -rf "${MAMBA_ROOT_PREFIX}" "${micromamba_exe}" || true +( endgroup "Provisioning base env with micromamba" ) 2> /dev/null ( startgroup "Configuring conda" ) 2> /dev/null - -source ${MINIFORGE_HOME}/etc/profile.d/conda.sh +echo "Activating environment" +source "${MINIFORGE_HOME}/etc/profile.d/conda.sh" conda activate base export CONDA_SOLVER="libmamba" export CONDA_LIBMAMBA_SOLVER_NO_CHANNELS_FROM_INSTALLED=1 -mamba install --update-specs --quiet --yes --channel conda-forge --strict-channel-priority \ - pip mamba conda-build conda-forge-ci-setup=4 "conda-build>=24.1" -mamba update --update-specs --yes --quiet --channel conda-forge --strict-channel-priority \ - pip mamba conda-build conda-forge-ci-setup=4 "conda-build>=24.1" + @@ -84,8 +95,8 @@ else ( startgroup "Inspecting artifacts" ) 2> /dev/null - # inspect_artifacts was only added in conda-forge-ci-setup 4.6.0 - command -v inspect_artifacts >/dev/null 2>&1 && inspect_artifacts || echo "inspect_artifacts needs conda-forge-ci-setup >=4.6.0" + # inspect_artifacts was only added in conda-forge-ci-setup 4.9.4 + command -v inspect_artifacts >/dev/null 2>&1 && inspect_artifacts --recipe-dir ./conda.recipe -m ./.ci_support/${CONFIG}.yaml || echo "inspect_artifacts needs conda-forge-ci-setup >=4.9.4" ( endgroup "Inspecting artifacts" ) 2> /dev/null diff 
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 5baab3e5b8..a13f0ae283 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -2,5 +2,30 @@
 # update the conda-forge.yml and/or the recipe/meta.yaml.
 # -*- mode: yaml -*-

-jobs:
-  - template: ./.azure-pipelines/azure-pipelines-osx.yml
\ No newline at end of file
+stages:
+- stage: Check
+  jobs:
+  - job: Skip
+    pool:
+      vmImage: 'ubuntu-22.04'
+    variables:
+      DECODE_PERCENTS: 'false'
+      RET: 'true'
+    steps:
+    - checkout: self
+      fetchDepth: '2'
+    - bash: |
+        git_log=`git log --max-count=1 --skip=1 --pretty=format:"%B" | tr "\n" " "`
+        echo "##vso[task.setvariable variable=log]$git_log"
+      displayName: Obtain commit message
+    - bash: echo "##vso[task.setvariable variable=RET]false"
+      condition: and(eq(variables['Build.Reason'], 'PullRequest'), or(contains(variables.log, '[skip azp]'), contains(variables.log, '[azp skip]'), contains(variables.log, '[skip ci]'), contains(variables.log, '[ci skip]')))
+      displayName: Skip build?
+    - bash: echo "##vso[task.setvariable variable=start_main;isOutput=true]$RET"
+      name: result
+      displayName: Export result
+- stage: Build
+  condition: and(succeeded(), eq(dependencies.Check.outputs['Skip.result.start_main'], 'true'))
+  dependsOn: Check
+  jobs:
+  - template: ./.azure-pipelines/azure-pipelines-osx.yml
\ No newline at end of file
diff --git a/conda.recipe/conda_build_config.yaml b/conda.recipe/conda_build_config.yaml
index 91e134c23f..e686a9efd8 100644
--- a/conda.recipe/conda_build_config.yaml
+++ b/conda.recipe/conda_build_config.yaml
@@ -25,7 +25,7 @@ macos_machine:  # [osx]
   - arm64-apple-darwin20.0.0  # [osx and arm64]

 MACOSX_DEPLOYMENT_TARGET:  # [osx]
-  - 11.1  # [osx]
+  - 14.5  # [osx]

 # linux_clang_version is not a part of zip_keys
 # from conda-forge, if you want to, you can
diff --git a/sparta/sparta/functional/Register.hpp b/sparta/sparta/functional/Register.hpp
index 4190d3918d..230b4171d1 100644
--- a/sparta/sparta/functional/Register.hpp
+++ b/sparta/sparta/functional/Register.hpp
@@ -675,6 +675,11 @@ class RegisterBase : public TreeNode
          */
         typedef uint16_t RegDomainT;
         const RegDomainT regdomain;
+
+        /*!
+         * \brief Writable flag, taken from the register definition.
+         */
+        const bool writable = true;
     };

     //! Represents an invalid Register ID
@@ -1005,6 +1010,7 @@ class RegisterBase : public TreeNode
     template <typename T>
     void write(T val, index_type idx=0)
     {
+        if (!isWritable()) return;
         write(&val, sizeof(val), idx * sizeof(val));
     }

@@ -1014,6 +1020,7 @@ class RegisterBase : public TreeNode
     template <typename T>
     bool writeWithCheck(T val)
     {
+        if (!isWritable()) return false;
         if (hasWriteCB())
         {
             static_assert((sizeof(T)==4) || (sizeof(T)==8), "write callback only support for 4- and 8-byte registers");
@@ -1031,6 +1038,7 @@ class RegisterBase : public TreeNode
     template <typename T>
     void writeUnmasked(T val, index_type idx=0)
     {
+        if (!isWritable()) return;
         writeUnmasked(&val, sizeof(val), idx * sizeof(val));
     }

@@ -1049,6 +1057,7 @@ class RegisterBase : public TreeNode
     template <typename T>
     void poke(T val, index_type idx=0)
     {
+        if (!isWritable()) return;
         poke(&val, sizeof(val), idx * sizeof(val));
     }

@@ -1060,6 +1069,7 @@ class RegisterBase : public TreeNode
     template <typename T>
     void pokeUnmasked(T val, index_type idx=0)
     {
+        if (!isWritable()) return;
         pokeUnmasked(&val, sizeof(val), idx * sizeof(val));
     }

@@ -1081,6 +1091,7 @@ class RegisterBase : public TreeNode
     template <typename T>
     void dmiWrite(T val, index_type idx = 0)
     {
+        if (!isWritable()) return;
         dmiWrite_(&val, sizeof(val), sizeof(val) * idx);
     }

@@ -1098,6 +1109,14 @@ class RegisterBase : public TreeNode
         return *(reinterpret_cast<const T *>(mask_.data()) + idx);
     }

+    /*!
+     * \brief Query whether this register is writable
+     */
+    bool isWritable() const
+    {
+        return def_.writable;
+    }
+
     ////////////////////////////////////////////////////////////////////////
     //! @}

@@ -1195,6 +1214,7 @@ class RegisterBase : public TreeNode

     void write(const void *buf, size_t size, size_t offset)
     {
+        if (!isWritable()) return;
         sparta_assert(offset + size <= getNumBytes(), "Access out of bounds");
         RegisterBits val(reinterpret_cast<const uint8_t *>(buf), size);
         RegisterBits mask = mask_ >> 8 * offset;
@@ -1204,12 +1224,14 @@ class RegisterBase : public TreeNode

     void writeUnmasked(const void *buf, size_t size, size_t offset)
     {
+        if (!isWritable()) return;
         sparta_assert(offset + size <= getNumBytes(), "Access out of bounds");
         write_(buf, size, offset);
     }

     void poke(const void *buf, size_t size, size_t offset)
     {
+        if (!isWritable()) return;
         sparta_assert(offset + size <= getNumBytes(), "Access out of bounds");
         RegisterBits val(reinterpret_cast<const uint8_t *>(buf), size);
         RegisterBits mask = mask_ >> 8 * offset;
@@ -1219,6 +1241,7 @@ class RegisterBase : public TreeNode

     void pokeUnmasked(const void *buf, size_t size, size_t offset)
     {
+        if (!isWritable()) return;
         sparta_assert(offset + size <= getNumBytes(), "Access out of bounds");
         poke_(buf, size, offset);
     }
@@ -1351,7 +1374,6 @@ class RegisterBase : public TreeNode

                 write_mask |= ((partial_mask >> shift_down) << shift_up);
             }
-
         }

         return ~write_mask;
@@ -1534,6 +1556,7 @@ class Register : public RegisterBase
     template <typename T>
     inline void dmiWrite(T val, index_type idx = 0)
     {
+        if (!isWritable()) return;
         dmiWriteImpl_(&val, sizeof(val), sizeof(val) * idx);
     }

@@ -1546,6 +1569,7 @@ class Register : public RegisterBase
     template <typename T>
     inline void writeUnmasked(T val, index_type idx = 0)
     {
+        if (!isWritable()) return;
         dmiWriteImpl_(&val, sizeof(T), idx);
     }

@@ -1583,6 +1607,7 @@ class Register : public RegisterBase

     void write_(const void *buf, size_t size, size_t offset=0) override final
     {
+        if (!isWritable()) return;
         auto &post_write_noti = getPostWriteNotificationSource();
         if (SPARTA_EXPECT_FALSE(post_write_noti.observed()))
         {
@@ -1596,12 +1621,14 @@ class Register : public RegisterBase

     void poke_(const void *buf, size_t size, size_t offset=0) override final
     {
+        if (!isWritable()) return;
         dview_.getLine()->write(
             dview_.getOffset() + offset, size,
             static_cast<const uint8_t *>(buf));
     }

     void dmiWrite_(const void *buf, size_t size, size_t offset = 0) override final
     {
+        if (!isWritable()) return;
         dmiWriteImpl_(buf, size, offset);
     }

@@ -1612,6 +1639,7 @@ class Register : public RegisterBase

     inline void dmiWriteImpl_(const void *buf, size_t size, size_t offset = 0)
     {
+        if (!isWritable()) return;
         memcpy(raw_data_ptr_ + offset, buf, size);
         dview_.getLine()->flagDirty();
     }
diff --git a/sparta/sparta/simulation/State.hpp b/sparta/sparta/simulation/State.hpp
index 9b4351878a..be1cc0cf1d 100644
--- a/sparta/sparta/simulation/State.hpp
+++ b/sparta/sparta/simulation/State.hpp
@@ -113,7 +113,7 @@ namespace sparta
      * only value at a time.
      *
      * State classes take two template parameters: the Enum type (enum
-     * class or just standard enum) and the anticapted maximum number
+     * class or just standard enum) and the anticipated maximum number
      * of markers that will be requested -- default is 13. Because
      * the State class is in the critical path for most performance
      * modeling applications, use of STL types is discouraged.
diff --git a/sparta/src/Simulation.cpp b/sparta/src/Simulation.cpp
index 0d5000a802..b021caba46 100644
--- a/sparta/src/Simulation.cpp
+++ b/sparta/src/Simulation.cpp
@@ -335,14 +335,6 @@ Simulation::Simulation(const std::string& sim_name,
     // Watch for created nodes to which we will apply taps
     root_.getNodeAttachedNotification().REGISTER_FOR_THIS(rootDescendantAdded_);

-    // Handle illegal signals.
-    // Note: Update documentation if these signals are modified
-    backtrace_.setAsHandler(SIGSEGV);
-    backtrace_.setAsHandler(SIGFPE);
-    backtrace_.setAsHandler(SIGILL);
-    backtrace_.setAsHandler(SIGABRT);
-    backtrace_.setAsHandler(SIGBUS);
-
     report_repository_.reset(new sparta::ReportRepository(this));

     // Sanity check - simulations cannot exist without a scheduler
@@ -486,6 +478,16 @@ void Simulation::configure(const int argc,
         throw SpartaException("Logging ennoblement is currently not supported with debug-roi. Use --debug or --debug-on-icount");
     }

+    if (SimulationConfiguration::SignalMode::ENABLE_BACKTRACE_SIGNALS == sim_config_->signal_mode) {
+        // Handle illegal signals.
+        // Note: Update documentation if these signals are modified
+        backtrace_.setAsHandler(SIGSEGV);
+        backtrace_.setAsHandler(SIGFPE);
+        backtrace_.setAsHandler(SIGILL);
+        backtrace_.setAsHandler(SIGABRT);
+        backtrace_.setAsHandler(SIGBUS);
+    }
+
     // If there are nodes already existing in the tree (e.g. root or "") then
     // there are no notifications for these TreeNodes since they already exist.
     // Install taps immediately instead of through rootDescendantAdded_
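
The Register.hpp changes above all apply the same guard: every mutating entry point (write, writeUnmasked, poke, pokeUnmasked, dmiWrite and their internal counterparts) first checks isWritable(), which reads the new writable flag from the register definition, and silently drops the access when the register is read-only. Below is a minimal standalone sketch of that semantics, under the assumption that a silent no-op (rather than an assertion) is the intended behavior; the names ToyRegister and RegisterDefinition are illustrative only and are not the sparta API.

// Standalone illustration of the read-only guard pattern from the patch.
#include <cstdint>
#include <iostream>

struct RegisterDefinition {
    const char* name;
    uint64_t    reset_value;
    bool        writable = true;   // mirrors the new Definition::writable flag
};

class ToyRegister {
public:
    explicit ToyRegister(const RegisterDefinition& def)
        : def_(def), value_(def.reset_value) {}

    bool isWritable() const { return def_.writable; }

    // Front-door write path: becomes a no-op on read-only registers.
    void write(uint64_t val) {
        if (!isWritable()) return;
        value_ = val;
    }

    // Backdoor poke path gets the same guard in the patch.
    void poke(uint64_t val) {
        if (!isWritable()) return;
        value_ = val;
    }

    uint64_t read() const { return value_; }

private:
    RegisterDefinition def_;
    uint64_t value_;
};

int main() {
    ToyRegister rw{{"scratch", 0x0, true}};
    ToyRegister ro{{"hartid",  0x7, false}};

    rw.write(0x1234);
    ro.write(0x1234);   // silently ignored
    ro.poke(0xdead);    // also ignored

    // prints "1234 7": the read-only register keeps its reset value
    std::cout << std::hex << rw.read() << " " << ro.read() << "\n";
    return 0;
}

Dropping the access instead of raising an error matches how read-only hardware registers typically behave, and it means existing callers need no changes; models that want a hard failure could instead assert on !isWritable() in these paths.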