diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml
new file mode 100644
index 0000000..53ca733
--- /dev/null
+++ b/.github/workflows/build_wheels.yml
@@ -0,0 +1,41 @@
+name: Build
+
+on: [push, pull_request]
+
+jobs:
+  build_wheels:
+    name: Build wheels on ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        # macos-13 is an intel runner, macos-14 is apple silicon
+        os: [ubuntu-latest, ubuntu-24.04-arm, windows-latest, macos-13, macos-latest]
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Install flex and bison (platform-specific)
+        shell: bash
+        run: |
+          if [[ "$RUNNER_OS" == "macOS" ]]; then
+            brew install bison flex
+            echo 'PATH="/opt/homebrew/opt/bison/bin:/opt/homebrew/opt/flex/bin:$PATH"' >> $GITHUB_ENV
+          elif [[ "$RUNNER_OS" == "Windows" ]]; then
+            choco install winflexbison3
+            echo 'C:\ProgramData\chocolatey\bin' >> $GITHUB_PATH
+          fi
+
+      - name: Build wheels
+        uses: pypa/cibuildwheel@v3.0.0
+        # env:
+        #   CIBW_SOME_OPTION: value
+        #   ...
+        with:
+          # package-dir: .
+          # output-dir: wheelhouse
+          config-file: "{package}/pyproject.toml"
+
+      - uses: actions/upload-artifact@v4
+        with:
+          name: cibw-wheels-${{ matrix.os }}-${{ strategy.job-index }}
+          path: ./wheelhouse/*.whl
diff --git a/.gitignore b/.gitignore
index b06888a..5fd2476 100644
--- a/.gitignore
+++ b/.gitignore
@@ -33,3 +33,5 @@ build/
 CMakeFiles/
 Makefile
 compile_commands.json
+test_venv/
+*.so
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 5dc2988..b07c370 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -5,6 +5,9 @@ set(CMAKE_CXX_STANDARD 17)
 set(CMAKE_CXX_STANDARD_REQUIRED ON)
 set(CMAKE_CXX_EXTENSIONS OFF)
 set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
+
+set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR})
+
 if(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libstdc++")
 endif()
@@ -27,11 +30,16 @@ if (NOT DEFINED PYTHON_BUILD_ENABLED)
     set(PYTHON_BUILD_ENABLED ON)
 endif()
 
+
+if (NOT DEFINED ENABLE_TESTING)
+    set(ENABLE_TESTING OFF)
+endif()
+
 # Ensure that at least one build target is enabled
-if(NOT STANDALONE_BUILD_ENABLED AND NOT PYTHON_BUILD_ENABLED)
+if(NOT STANDALONE_BUILD_ENABLED AND NOT PYTHON_BUILD_ENABLED AND NOT ENABLE_TESTING)
     message(
         FATAL_ERROR
-        "No build target enabled: Either STANDALONE_BUILD_ENABLED or PYTHON_BUILD_ENABLED must be ON."
+        "No build target enabled: At least one of STANDALONE_BUILD_ENABLED, PYTHON_BUILD_ENABLED, or ENABLE_TESTING must be ON."
     )
 endif()
 
@@ -183,6 +191,8 @@ FetchContent_MakeAvailable(ryml)
 set(RYML_DEFAULT_CALLBACK_USES_EXCEPTIONS ON)
 ##### END OF rapid-yaml
 
+message(STATUS "got ryml")
+
 ############## booksim2
 project(booksim2-quickstart LANGUAGES CXX)
 set(BOOKSIM_VERSION "110ad1b80e493241f6e57587bc11354ac84f91f8")
@@ -246,6 +256,9 @@ if(PYTHON_BUILD_ENABLED)
         ${SOURCE_FILES}
     )
 
+    set_target_properties(sanafecpp PROPERTIES
+        LIBRARY_OUTPUT_DIRECTORY ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/sanafe
+    )
     target_link_libraries(sanafecpp PRIVATE ${PYTHON_LIBRARIES})
     target_link_libraries(sanafecpp PRIVATE pybind11::pybind11)
     target_link_libraries(sanafecpp PUBLIC ryml::ryml)
@@ -263,6 +276,8 @@ if(PYTHON_BUILD_ENABLED)
             target_compile_definitions(sanafecpp PRIVATE ENABLE_SOURCE_INFO)
         endif()
     endif()
+
+    install(TARGETS sanafecpp DESTINATION sanafe)
 endif()
 
 if(STANDALONE_BUILD_ENABLED)
@@ -286,3 +301,25 @@ if(STANDALONE_BUILD_ENABLED)
         endif()
     endif()
 endif()
+
+if(ENABLE_TESTING)
+    list(FILTER SOURCE_FILES EXCLUDE REGEX "pymodule.cpp")
+    add_library(sanafe SHARED ${SOURCE_FILES} ${HEADER_FILES})
+    target_link_libraries(sanafe PUBLIC ryml::ryml)
+    target_link_libraries(sanafe PUBLIC booksim)
+    target_link_libraries(sanafe PRIVATE ${CMAKE_DL_LIBS})
+    if (OpenMP_CXX_FOUND AND ENABLE_OPENMP)
+        target_link_libraries(sanafe PRIVATE OpenMP::OpenMP_CXX)
+        target_compile_definitions(sanafe PRIVATE HAVE_OPENMP)
+    endif()
+    target_link_libraries(sanafe PRIVATE Threads::Threads)
+    target_include_directories(sanafe PUBLIC
+        ${CMAKE_SOURCE_DIR}/src
+    )
+    message(STATUS "Testing enabled")
+    find_program(MEMORYCHECK_COMMAND valgrind)
+    set(MEMORYCHECK_COMMAND_OPTIONS "--leak-check=full --show-leak-kinds=all --track-origins=yes --error-exitcode=10")
+
+    include(CTest)
+    add_subdirectory(tests)
+endif()
\ No newline at end of file
diff --git a/ci/check_build.rb b/ci/check_build.rb
new file mode 100755
index 0000000..57c724c
--- /dev/null
+++ b/ci/check_build.rb
@@ -0,0 +1,102 @@
+#!/usr/bin/env ruby
+
+require 'fileutils'
+
+log_dir = ENV["SANAFE_CI_LOG_DIR"] || "logs/commit-latest"
+log_file = "#{log_dir}/build.log"
+FileUtils.mkdir_p(log_dir)
+
+
+#build standalone sim
+def build_cpp(label:, build_dir:, compiler: nil, log_file:)
+  puts "[#{label}] Building standalone simulator..."
+
+  cmake_cmd = "cmake -S . -B #{build_dir}" #construct cmake config command using source and build dirs
+  cmake_cmd += " -DCMAKE_CXX_COMPILER=#{compiler}" if compiler #add compiler option if provided
+  cmake_ok = system("#{cmake_cmd} > #{log_file} 2>&1") #run cmake, direct output to log file
+
+  if cmake_ok
+    make_ok = system("cmake --build #{build_dir} --parallel 8 >> #{log_file} 2>&1") #if cmake works, build and append output to log
+  end
+
+  if cmake_ok && make_ok
+    puts "[#{label}] Simulator build: PASS"
+    true
+  else
+    puts "[#{label}] Simulator build: FAIL (see #{log_file})"
+    false
+  end
+end
+
+#build and install python .so
+def build_python(label:, build_dir:, log_file:)
+  puts "[#{label}] Building python extension..."
+
+  FileUtils.rm_f("CMakeCache.txt")
+  FileUtils.mkdir_p(build_dir)
+  FileUtils.mkdir_p(File.dirname(log_file))
+
+  full_log_path = File.expand_path(log_file)
+
+  #create venv name
+  unique_id = ENV["SANAFE_CI_ID"]
+  venv_path = File.expand_path("#{File.dirname(log_file)}/venv")
+  venv_python = File.join(venv_path, "bin", "python")
+
+  FileUtils.mkdir_p("venvs")
+  unless system("python3 -m venv #{venv_path}")
+    puts "[#{label}] Failed to create virtualenv at #{venv_path}"
+    return false
+  end
+
+  install_ok = false
+  import_ok = false
+
+  File.open(full_log_path, "w") do |log|
+    Dir.chdir(build_dir) do
+      IO.popen("#{venv_python} -m pip install .. 2>&1") do |io|
+        io.each { |line| log.puts line }
+      end
+      install_ok = $?.success?
+    end
+  end
+
+  puts "[#{label}] Python build: #{install_ok ? 'PASS' : "FAIL (see #{log_file})"}"
+  install_ok
+end
+
+#set clang and gcc path
+clang_path = ENV["CLANG_PATH"]
+gcc_path = ENV["GCC_PATH"]
+
+#gcc builds
+gcc_sim_ok = build_cpp(
+  label: "GCC",
+  build_dir: "build_gcc",
+  compiler: gcc_path,
+  log_file: "#{log_dir}/build_gcc_sim.log"
+)
+gcc_py_ok = build_python(
+  label: "GCC",
+  build_dir: "build_gcc_py",
+  log_file: "#{log_dir}/build_gcc_py.log"
+)
+
+#clang builds
+clang_sim_ok = build_cpp(
+  label: "Clang",
+  build_dir: "build_clang",
+  compiler: clang_path,
+  log_file: "#{log_dir}/build_clang_sim.log"
+)
+clang_py_ok = build_python(
+  label: "Clang",
+  build_dir: "build_clang_py",
+  log_file: "#{log_dir}/build_clang_py.log"
+)
+
+#final summary
+all_ok = gcc_sim_ok && gcc_py_ok && clang_sim_ok && clang_py_ok
+puts all_ok ? "All builds succeeded." : "One or more builds failed."
+exit(all_ok ? 0 : 1)
+
diff --git a/ci/check_coverity.rb b/ci/check_coverity.rb
new file mode 100644
index 0000000..e69de29
diff --git a/ci/check_cppcheck.rb b/ci/check_cppcheck.rb
new file mode 100644
index 0000000..7784fd6
--- /dev/null
+++ b/ci/check_cppcheck.rb
@@ -0,0 +1,23 @@
+#!/usr/bin/env ruby
+
+require 'fileutils'
+
+log_dir = ENV["SANAFE_CI_LOG_DIR"] || "logs/commit-latest"
+log_file = "#{log_dir}/cppcheck.log"
+FileUtils.mkdir_p(log_dir)
+
+cpp_files = Dir.glob("src/*.cpp") + Dir.glob("plugins/*.cpp")
+failed = false
+
+File.open(log_file, "w") do |log|
+  log.puts "Running CPPCheck..."
+  cpp_files.each do |file|
+    log.puts "----- #{file} -----"
+    result = `cppcheck --enable=all --inconclusive --quiet --std=c++20 --language=c++ --suppress=missingIncludeSystem #{file} 2>&1`
+    log.puts result
+    failed ||= !result.strip.empty?
+  end
+end
+
+puts failed ? "CPPCheck: FAIL (see #{log_file})" : "CPPCheck: PASS"
+exit(failed ? 1 : 0)
diff --git a/ci/check_dynamic.rb b/ci/check_dynamic.rb
new file mode 100644
index 0000000..fa335ef
--- /dev/null
+++ b/ci/check_dynamic.rb
@@ -0,0 +1,78 @@
+#!/usr/bin/env ruby
+
+require 'fileutils'
+
+log_dir = ENV["SANAFE_CI_LOG_DIR"] || "logs/commit-latest"
+build_log_file = "#{log_dir}/dynamic_build.log"
+test_log_file = "#{log_dir}/dynamic_test.log"
+FileUtils.mkdir_p(log_dir)
+
+# timeout after 30 seconds
+timeout = 30
+
+puts "Running dynamic tests..."
+
+puts "Building the project with testing enabled..."
+
+cmake = system("cmake -DENABLE_TESTING=ON -DPYTHON_BUILD_ENABLED=OFF -DSTANDALONE_BUILD_ENABLED=OFF -DCMAKE_BUILD_TYPE=Debug -S . -B build > #{build_log_file} 2>&1")
+
+if !cmake
+  puts "CMake configuration failed. See #{build_log_file} for details."
+  puts "Dynamic Tests: FAIL"
+  exit 3
+end
+
+build = system("cmake --build build -j 10 >> #{build_log_file} 2>&1")
+
+if !build
+  puts "Build failed. See #{build_log_file} for details."
+  puts "Dynamic Tests: FAIL"
+  exit 3
+end
+
+puts "Build successful. Running tests..."
+
+test = system("ctest -T memcheck --test-dir build --output-on-failure --timeout #{timeout} > #{test_log_file} 2>&1")
+exitcode = $?.exitstatus
+
+puts "Tests completed, now checking for memory leaks..."
+
+build_dir = "build"
+mem_log_dir = "#{build_dir}/Testing/Temporary"
+pattern = "MemoryChecker.*\\.log$"
+summary_log = File.join(mem_log_dir, "LastMemCheck.log")
+
+unless Dir.exist?(mem_log_dir)
+  puts "Memory log directory #{mem_log_dir} does not exist. Something went wrong."
+  exit 2
+end
+
+log_files = Dir.entries(mem_log_dir).select { |f| f.match?(pattern) }
+puts "Found #{log_files.size} Valgrind log(s)."
+
+leaks_found = false
+
+log_files.each do |filename|
+  path = File.join(mem_log_dir, filename)
+  contents = File.read(path)
+
+  if contents.include?("definitely lost")
+    puts "Leak detected in #{filename}"
+    leaks_found = true
+  end
+end
+
+if leaks_found
+  puts "Memory leaks detected in one or more Valgrind logs. See #{mem_log_dir} for details."
+  puts "Dynamic Tests: FAIL"
+  exit 2
+end
+if exitcode != 0
+  puts "One or more tests failed. See #{test_log_file} for details."
+  puts "Dynamic Tests: FAIL"
+  exit 1
+else
+  puts "All tests passed successfully. Hooray! Check build/Testing/Temporary for memory leak reports."
+  puts "Dynamic Tests: PASS"
+  exit 0
+end
\ No newline at end of file
diff --git a/ci/check_format.rb b/ci/check_format.rb
new file mode 100755
index 0000000..4cf5da0
--- /dev/null
+++ b/ci/check_format.rb
@@ -0,0 +1,39 @@
+#!/usr/bin/env ruby
+#TODO: break format
+require 'fileutils'
+
+log_dir = ENV["SANAFE_CI_LOG_DIR"] || "logs/commit-latest"
+log_file = "#{log_dir}/format.log"
+FileUtils.mkdir_p(log_dir)
+
+#find cpp files
+cpp_files = Dir.glob("src/*.cpp") + Dir.glob("plugins/*.cpp")
+
+failed_files = []
+
+File.open(log_file, "w") do |log|
+  cpp_files.each do |file|
+    result = `clang-format --dry-run --Werror #{file} 2>&1`
+    if $?.exitstatus != 0
+      failed_files << file
+      log.puts "[FAIL] #{file}"
+      log.puts result
+    else
+      log.puts "[PASS] #{file}"
+    end
+  end
+
+  if failed_files.empty?
+    log.puts "\nAll files passed clang-format check."
+  else
+    log.puts "\n#{failed_files.size} file(s) failed formatting."
+  end
+end
+
+if failed_files.empty?
+  puts "Format Check: PASS"
+else
+  puts "Format Check: FAIL (#{failed_files.size} file(s) failed, see #{log_file})"
+end
+
+exit(failed_files.empty? ? 0 : 1)
diff --git a/ci/check_perf.rb b/ci/check_perf.rb
new file mode 100644
index 0000000..401964d
--- /dev/null
+++ b/ci/check_perf.rb
@@ -0,0 +1,54 @@
+#!/usr/bin/env ruby
+
+require 'fileutils'
+
+log_dir = ENV["SANAFE_CI_LOG_DIR"] || "logs/commit-latest"
+log_file = "#{log_dir}/perf.log"
+FileUtils.mkdir_p(log_dir)
+
+#test parameters
+sim_path = "./sim"
+arch_file = "arch/example.yaml"
+snn_file = "snn/example.yaml"
+steps = 100000
+
+perf_output = `(/usr/bin/time -f "%e" #{sim_path} #{arch_file} #{snn_file} #{steps} > /dev/null) 2>&1`
+runtime = perf_output.to_f
+
+baseline_file = "ci/perf_baseline.txt"
+commit_perf_file = "#{log_dir}/perf_runtime.txt"
+
+#first time
+if !File.exist?(baseline_file)
+  File.write(baseline_file, "#{runtime.round(3)}\n")
+  File.write(log_file, <<~LOG)
+    Baseline not found. Created a new baseline:
+    Baseline Runtime: #{runtime.round(3)} sec
+
+    To reset baseline manually later:
+    cp #{commit_perf_file} #{baseline_file}
+  LOG
+  puts "Performance Check: Baseline Created (#{runtime.round(3)} sec)"
+  File.write(commit_perf_file, "#{runtime.round(3)}\n")
+  exit 0
+end
+
+#compare with baseline
+baseline = File.read(baseline_file).to_f
+delta = (runtime - baseline).abs / baseline
+warning = delta > 0.10
+
+#write runtime
+File.write(commit_perf_file, "#{runtime.round(3)}\n")
+
+#write to log
+File.open(log_file, "w") do |f|
+  f.puts "Baseline Runtime: #{baseline.round(3)} sec"
+  f.puts "Current Runtime: #{runtime.round(3)} sec"
+  f.puts "Delta: #{(delta * 100).round(2)}%"
+  f.puts warning ? "Performance Check: WARNING (runtime changed > 10%)" : "Performance Check: PASS"
+end
+
+#results
+puts warning ? "Performance Check: WARNING (see #{log_file})" : "Performance Check: PASS"
+exit(warning ? 1 : 0)
diff --git a/ci/check_tidy.rb b/ci/check_tidy.rb
new file mode 100644
index 0000000..3a178dd
--- /dev/null
+++ b/ci/check_tidy.rb
@@ -0,0 +1,36 @@
+#!/usr/bin/env ruby
+
+require 'fileutils'
+
+log_dir = ENV["SANAFE_CI_LOG_DIR"] || "logs/commit-latest"
+log_file = "#{log_dir}/tidy.log"
+FileUtils.mkdir_p(log_dir)
+
+cpp_files = Dir.glob("src/*.cpp")
+failed_files = []
+
+File.open(log_file, "w") do |log|
+  cpp_files.each do |file|
+    log.puts "----- #{file} -----"
+    result = `clang-tidy #{file} -- -I./src 2>&1`
+    log.puts result
+
+    if result.include?("warning:") || result.include?("error:")
+      failed_files << file
+    end
+  end
+
+  if failed_files.empty?
+    log.puts "\nAll files passed clang-tidy check."
+  else
+    log.puts "\n#{failed_files.size} file(s) had warnings/errors."
+  end
+end
+
+if failed_files.empty?
+  puts "Tidy Check: PASS"
+else
+  puts "Tidy Check: FAIL (#{failed_files.size} file(s) had issues, see #{log_file})"
+end
+
+exit(failed_files.empty? ? 0 : 1)
diff --git a/ci/perf_baseline.txt b/ci/perf_baseline.txt
new file mode 100644
index 0000000..53aa115
--- /dev/null
+++ b/ci/perf_baseline.txt
@@ -0,0 +1 @@
+106.41
diff --git a/ci/run.rb b/ci/run.rb
new file mode 100755
index 0000000..a181aad
--- /dev/null
+++ b/ci/run.rb
@@ -0,0 +1,29 @@
+#!/usr/bin/env ruby
+
+require 'fileutils'
+
+timestamp = Time.now.strftime("%Y%m%d-%H%M%S")
+commit_hash = `git rev-parse --short HEAD`.strip
+unique_id = "#{timestamp}-#{commit_hash}"
+log_dir = "logs/commit-#{unique_id}"
+FileUtils.mkdir_p(log_dir)
+
+ENV["SANAFE_CI_LOG_DIR"] = log_dir
+ENV["SANAFE_CI_ID"] = unique_id
+
+puts "Running SANA-FE CI for commit #{commit_hash}"
+puts "Logs will be saved to #{log_dir}/"
+puts "-------------------------------"
+
+build_status = system("ruby ci/check_build.rb")
+format_status = system("ruby ci/check_format.rb")
+tidy_status = system("ruby ci/check_tidy.rb")
+cppcheck_status = system("ruby ci/check_cppcheck.rb")
+#TODO: Optional coverity integration
+dynamic_status = system("ruby ci/check_dynamic.rb")
+perf_status = system("ruby ci/check_perf.rb")
+
+all_ok = build_status && format_status && tidy_status && cppcheck_status && dynamic_status && perf_status
+puts "-------------------------------"
+puts all_ok ? "All CI checks passed." : "One or more CI checks failed."
+exit(all_ok ? 0 : 1)
diff --git a/docker/sana-fe-python/Dockerfile b/docker/sana-fe-python/Dockerfile
new file mode 100644
index 0000000..70cdd0a
--- /dev/null
+++ b/docker/sana-fe-python/Dockerfile
@@ -0,0 +1,30 @@
+FROM ubuntu:latest
+
+# Add Docker's official GPG key (for docker-ce-cli installation):
+RUN apt-get update && apt-get install -y \
+    ca-certificates \
+    curl \
+    gnupg \
+    lsb-release && \
+    mkdir -p /etc/apt/keyrings && \
+    curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc && \
+    chmod a+r /etc/apt/keyrings/docker.asc && \
+    echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \
+    $(. /etc/os-release && echo ${UBUNTU_CODENAME:-focal}) stable" > /etc/apt/sources.list.d/docker.list && \
+    apt-get update
+
+RUN apt-get update && apt-get install -y \
+    python3 \
+    python3-pip \
+    python3-dev \
+    docker-ce-cli
+
+WORKDIR /app
+COPY . /app
+
+RUN rm -rf build
+
+RUN pip3 install --break-system-packages cibuildwheel pybind11
+
+# workaround for cibuildwheel issue with flex
+CMD ["bash", "-c", "cibuildwheel --output-dir wheelhouse"]
\ No newline at end of file
diff --git a/docker/sana-fe-python/build_wheels.sh b/docker/sana-fe-python/build_wheels.sh
new file mode 100644
index 0000000..ec505af
--- /dev/null
+++ b/docker/sana-fe-python/build_wheels.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+
+ROOTLESS_OR_NOT=$(docker info | grep -i "rootless")
+if [[ -z "$ROOTLESS_OR_NOT" ]]; then
+    SOCKET_PATH="/var/run/docker.sock"
+    echo "You are not running Docker in rootless mode. Using default socket path: $SOCKET_PATH"
+else
+    SOCKET_PATH="$XDG_RUNTIME_DIR/docker.sock"
+    echo "You are running Docker in rootless mode. Using socket path: $SOCKET_PATH"
+fi
+
+if [[ ! -S "$SOCKET_PATH" ]]; then
+    echo "Docker socket not found at $SOCKET_PATH. Make sure that Docker is running and accessible."
+    exit 1
+else
+    echo "Using Docker socket at $SOCKET_PATH."
+fi
+
+docker build -t sana-fe-python .
+
+if [[ $? -ne 0 ]]; then
+    echo "Docker build failed."
+    exit 1
+fi
+
+mkdir -p wheelhouse
+
+docker run --rm -v "$(pwd)/wheelhouse:/app/wheelhouse" -v "$SOCKET_PATH:/var/run/docker.sock" sana-fe-python
\ No newline at end of file
diff --git a/docker/sana-fe/Dockerfile b/docker/sana-fe/Dockerfile
new file mode 100644
index 0000000..5f7ead7
--- /dev/null
+++ b/docker/sana-fe/Dockerfile
@@ -0,0 +1,15 @@
+FROM ubuntu:latest AS build
+
+WORKDIR /opt/app
+COPY . .
+
+RUN apt-get update && apt-get install -y python3 python3-pip python3-dev make gcc g++ \
+    git flex bison cmake pybind11-dev && \
+    apt-get clean
+
+RUN mkdir build && \
+    cd build && \
+    cmake -DPYTHON_BUILD_ENABLED=OFF -DSTANDALONE_BUILD_ENABLED=ON .. && \
+    make
+
+ENTRYPOINT [ "/opt/app/build/sim" ]
\ No newline at end of file
diff --git a/docker/sana-fe/script.sh b/docker/sana-fe/script.sh
new file mode 100644
index 0000000..f51f559
--- /dev/null
+++ b/docker/sana-fe/script.sh
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+IMAGE_NAME="sana-fe"
+
+docker build -t $IMAGE_NAME .
+if [ $? -eq 0 ]; then
+    echo "Docker image '$IMAGE_NAME' built successfully."
+else
+    echo "Failed to build Docker image '$IMAGE_NAME'."
+    exit 1
+fi
+
+docker run --rm -v "$(pwd)/arch":/data/arch -v "$(pwd)/snn":/data/snn $IMAGE_NAME "$@"
\ No newline at end of file
diff --git a/plugins/hodgkin_huxley.cpp b/plugins/hodgkin_huxley.cpp
index 588a62a..dd83ca0 100644
--- a/plugins/hodgkin_huxley.cpp
+++ b/plugins/hodgkin_huxley.cpp
@@ -24,7 +24,8 @@ class HodgkinHuxley : public sanafe::SomaUnit
     // HodgkinHuxley specific
 public:
     // system variables
-    double C_m{10.0}; // Effective capacitance per area of membrane; default is 1
+    double C_m{
+            10.0}; // Effective capacitance per area of membrane; default is 1
     double g_Na{1200.0}; // Conductance of sodium
     double g_K{360.0}; // Conductance of potassium
     double g_L{3.0}; // Conductance of leak channel
diff --git a/pyproject.toml b/pyproject.toml
index f5ba0e9..ccc0c78 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,3 +6,10 @@ requires = [
     "pybind11>=2.6.0"
 ]
 build-backend = "setuptools.build_meta"
+
+[tool.cibuildwheel.linux]
+before-all = "yum install -y flex"
+
+[[tool.cibuildwheel.overrides]]
+select = "*-musllinux*"
+before-all = "apk add flex"
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 4a3c5c3..2348c84 100644
--- a/setup.py
+++ b/setup.py
@@ -48,6 +48,7 @@ def build_extension(self, ext):
                       "-DPYTHON_EXECUTABLE=" + sys.executable,
                       "-DPYTHON_INCLUDE_DIRS=" + sysconfig.get_path('include'),
                       "-DSTANDALONE_BUILD_ENABLED=OFF",
+                      "-DPYTHON_BUILD_ENABLED=ON",
                       "-DPYTHON_FROM_SETUP=ON"]
         print(f"CMake Arguments: {cmake_args}")
         cfg = "Debug" if self.debug else "Release"
@@ -66,6 +67,9 @@
             env["CMAKE_BUILD_PARALLEL_LEVEL"] = jobs
         if not os.path.exists(self.build_temp):
             os.makedirs(self.build_temp)
+        env["PYTHON_EXECUTABLE"] = sys.executable
+        env["PYTHON_INCLUDE_DIRS"] = sysconfig.get_path('include')
+
         subprocess.check_call(["cmake", ext.sourcedir] + cmake_args, cwd=self.build_temp, env=env)
         subprocess.check_call(["cmake", "--build", "."] + build_args,
                               cwd=self.build_temp)
diff --git a/src/chip.cpp b/src/chip.cpp
index b6e614d..8a76821 100644
--- a/src/chip.cpp
+++ b/src/chip.cpp
@@ -437,10 +437,8 @@ void sanafe::SpikingChip::update_run_data(
 
 sanafe::RunData sanafe::SpikingChip::sim(const long int timesteps,
         const TimingModel timing_model, const int scheduler_thread_count,
-        const bool record_spikes,
-        const bool record_potentials,
-        const bool record_perf,
-        const bool record_messages,
+        const bool record_spikes, const bool record_potentials,
+        const bool record_perf, const bool record_messages,
         std::string output_dir)
 {
     RunData rd(total_timesteps + 1);
diff --git a/src/core.cpp b/src/core.cpp
index 6b5b7ad..b80e64c 100644
--- a/src/core.cpp
+++ b/src/core.cpp
@@ -136,8 +136,8 @@ void sanafe::Core::map_neuron(
 
     // Map the neuron to the core and its hardware units
     const size_t address = neurons.size();
-    neurons.emplace_back(neuron_to_map, neuron_id, this,
-            mapped_soma, address, mapped_axon_out, mapped_dendrite);
+    neurons.emplace_back(neuron_to_map, neuron_id, this, mapped_soma, address,
+            mapped_axon_out, mapped_dendrite);
 }
 
 sanafe::AxonInUnit &sanafe::Core::create_axon_in(
diff --git a/src/main.cpp b/src/main.cpp
index 3fe1a4a..d4dab99 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -253,8 +253,7 @@ std::vector program_args_to_vector(
 // NOLINTNEXTLINE(cppcoreguidelines-avoid-c-arrays,modernize-avoid-c-arrays,readability-function-size)
 int main(int argc, const char *argv[])
 {
-    const std::vector arg_vec =
-            program_args_to_vector(argc, argv);
+    const std::vector arg_vec = program_args_to_vector(argc, argv);
     const OptionalProgramFlags optional_flags =
             parse_command_line_flags(arg_vec);
     argc -= optional_flags.total_args_parsed;
diff --git a/src/models.cpp b/src/models.cpp
index df88618..9d3407d 100644
--- a/src/models.cpp
+++ b/src/models.cpp
@@ -275,8 +275,8 @@ void sanafe::MultiTapModel1D::print_taps()
     }
 }
 
-void sanafe::MultiTapModel1D::input_current(const double current,
-        const std::optional synapse_address)
+void sanafe::MultiTapModel1D::input_current(
+        const double current, const std::optional synapse_address)
 {
     constexpr int default_tap = 0;
 
diff --git a/src/pipeline.cpp b/src/pipeline.cpp
index 3cc1097..5455e19 100644
--- a/src/pipeline.cpp
+++ b/src/pipeline.cpp
@@ -54,7 +54,6 @@ void sanafe::PipelineUnit::check_implemented(
         INFO("Error: %s\n", error.c_str());
         throw std::runtime_error(error);
     }
-
 }
 
 void sanafe::PipelineUnit::check_outputs(
@@ -176,8 +175,7 @@ void sanafe::PipelineUnit::soma_set_default_attributes()
     {
         if (!key_exists(metric))
         {
-            const std::string error =
-                    "Metric not defined: " + metric;
+            const std::string error = "Metric not defined: " + metric;
             INFO("Error: %s\n", error.c_str());
             throw std::invalid_argument(error);
         }
@@ -203,8 +201,7 @@ void sanafe::PipelineUnit::soma_set_default_attributes()
     {
         if (!key_exists(metric))
         {
-            const std::string error =
-                    "Missing metric: " + metric;
+            const std::string error = "Missing metric: " + metric;
             INFO("Error: %s\n", error.c_str());
             throw std::invalid_argument(error);
         }
diff --git a/src/schedule.cpp b/src/schedule.cpp
index 56aa0b2..b2b744f 100644
--- a/src/schedule.cpp
+++ b/src/schedule.cpp
@@ -62,8 +62,7 @@ void sanafe::schedule_messages(TimestepHandle &ts, Scheduler &scheduler,
     }
 }
 
-void sanafe::schedule_messages_simple(
-        TimestepHandle &ts, Scheduler &scheduler)
+void sanafe::schedule_messages_simple(TimestepHandle &ts, Scheduler &scheduler)
 {
     // Simple analytical model, that takes the maximum of either neuron or
     // message processing for each core, and takes the maximum latency of
@@ -181,7 +180,8 @@ void sanafe::schedule_create_threads(
 
 // **** Detailed scheduler implementation ****
 
-void sanafe::schedule_messages_detailed(TimestepHandle &ts, Scheduler &scheduler)
+void sanafe::schedule_messages_detailed(
+        TimestepHandle &ts, Scheduler &scheduler)
 {
     if (scheduler.scheduler_threads.empty())
     {
@@ -284,8 +284,8 @@ double sanafe::schedule_messages_timestep(
     return ts_data.sim_time;
 }
 
-std::vector
-sanafe::schedule_init_message_queues(const Timestep &ts, NocInfo &noc)
+std::vector sanafe::schedule_init_message_queues(
+        const Timestep &ts, NocInfo &noc)
 {
     const size_t total_links = noc.noc_height_in_tiles *
             noc.noc_width_in_tiles *
@@ -588,7 +588,6 @@ double sanafe::NocInfo::calculate_route_congestion(const Message &m) const
     return flow_density;
 }
 
-
 std::pair sanafe::NocInfo::get_route_xy_increments(
         const Message &m) noexcept
 {
@@ -598,7 +597,6 @@ std::pair sanafe::NocInfo::get_route_xy_increments(
     return std::make_pair(x_increment, y_increment);
 }
 
-
 // **** Thread management ****
 // TODO: make this agnostic to scheduling algorithm, so it can be applied to
 // the simple and cycle accurate models in future
diff --git a/src/yaml_arch.cpp b/src/yaml_arch.cpp
index 9b595a1..f2dfcf6 100644
--- a/src/yaml_arch.cpp
+++ b/src/yaml_arch.cpp
@@ -395,8 +395,8 @@ void sanafe::description_parse_tile_section_yaml(const ryml::Parser &parser,
 
     for (int t = range.first; t <= range.second; t++)
    {
-        std::string name = tile_name.substr(0, tile_name.find('[')) +
-                "[" + std::to_string(t) + "]";
+        std::string name = tile_name.substr(0, tile_name.find('[')) + "[" +
+                std::to_string(t) + "]";
         const TilePowerMetrics power_metrics =
                 description_parse_tile_metrics_yaml(
                         parser, tile_node["attributes"]);
diff --git a/src/yaml_common.cpp b/src/yaml_common.cpp
index f8ae7e6..af8b29c 100644
--- a/src/yaml_common.cpp
+++ b/src/yaml_common.cpp
@@ -118,8 +118,7 @@ sanafe::description_parse_model_attributes_yaml( // NOLINT(misc-no-recursion)
         if (unit_specific_keys.find(key_str) == unit_specific_keys.end())
         {
             //INFO("Parsing attribute: %s\n", key.c_str());
-            model_attributes[key_str] =
-                    yaml_parse_attribute(parser, node);
+            model_attributes[key_str] = yaml_parse_attribute(parser, node);
         }
     }
 }
@@ -140,13 +139,11 @@ sanafe::ModelAttribute sanafe::yaml_parse_attribute(
 
     if (attribute_node.is_seq())
     {
-        attribute.value =
-                yaml_parse_attribute_list(parser, attribute_node);
+        attribute.value = yaml_parse_attribute_list(parser, attribute_node);
     }
     else if (attribute_node.is_map())
     {
-        attribute.value =
-                yaml_parse_attribute_map(parser, attribute_node);
+        attribute.value = yaml_parse_attribute_map(parser, attribute_node);
     }
     else
     {
@@ -260,8 +257,7 @@ sanafe::AttributeVariant sanafe::yaml_parse_attribute_scalar(
 }
 // NOLINTEND(readability-function-cognitive-complexity)
 
-std::pair sanafe::yaml_parse_range(
-        const std::string &range_str)
+std::pair sanafe::yaml_parse_range(const std::string &range_str)
 {
     constexpr std::string_view range_delimiter = "..";
     const size_t delimiter_pos = range_str.find(range_delimiter);
diff --git a/src/yaml_snn.cpp b/src/yaml_snn.cpp
index 2359996..5ebdf11 100644
--- a/src/yaml_snn.cpp
+++ b/src/yaml_snn.cpp
@@ -663,8 +663,7 @@ void sanafe::yaml_parse_conv2d(NeuronGroup &source_group,
         std::vector attribute_list;
         for (const auto &el : attribute)
         {
-            ModelAttribute value =
-                    yaml_parse_attribute(parser, el);
+            ModelAttribute value = yaml_parse_attribute(parser, el);
             attribute_list.push_back(std::move(value));
         }
         std::string attribute_name;
@@ -707,8 +706,7 @@ void sanafe::yaml_parse_dense(NeuronGroup &source_group,
         attribute >> ryml::key(attribute_name);
         for (const auto &el : attribute)
         {
-            ModelAttribute value =
-                    yaml_parse_attribute(parser, el);
+            ModelAttribute value = yaml_parse_attribute(parser, el);
             attribute_list.push_back(std::move(value));
         }
         attribute_lists[attribute_name] = std::move(attribute_list);
@@ -1052,8 +1050,7 @@ ryml::NodeRef sanafe::yaml_serialize_network(ryml::NodeRef root,
         edge_node |= ryml::FLOW_SL; // NOLINT(misc-include-cleaner)
         // For now assume there are no default connection attributes
         const std::map default_attributes{};
-        yaml_serialize_model_attributes(
-                default_attributes, edge_node,
+        yaml_serialize_model_attributes(default_attributes, edge_node,
                 connection.synapse_attributes);
 
         // TODO: support synapse-specific attributes
@@ -1079,9 +1076,8 @@ ryml::NodeRef sanafe::yaml_serialize_network(ryml::NodeRef root,
     return network_node;
 }
 
-ryml::NodeRef sanafe::yaml_serialize_neuron_group(
-        ryml::NodeRef parent, const sanafe::NeuronGroup &group,
-        std::list &strings)
+ryml::NodeRef sanafe::yaml_serialize_neuron_group(ryml::NodeRef parent,
+        const sanafe::NeuronGroup &group, std::list &strings)
 {
     auto group_node = parent.append_child();
     group_node |= ryml::MAP;
@@ -1095,8 +1091,8 @@ ryml::NodeRef sanafe::yaml_serialize_neuron_group(
     const std::map no_default_attributes{};
     if (!group.default_neuron_config.model_attributes.empty())
     {
-        yaml_serialize_model_attributes(no_default_attributes,
-                attr_node, group.default_neuron_config.model_attributes);
+        yaml_serialize_model_attributes(no_default_attributes, attr_node,
+                group.default_neuron_config.model_attributes);
     }
 
     // Add neurons
@@ -1124,16 +1120,15 @@ ryml::NodeRef sanafe::yaml_serialize_neuron_group(
 
     for (const auto &neuron_run : neuron_runs)
     {
-        yaml_serialize_neuron_run(
-                neurons_node, neuron_run, group, strings);
+        yaml_serialize_neuron_run(neurons_node, neuron_run, group, strings);
     }
 
     return group_node;
 }
 
-ryml::NodeRef sanafe::yaml_serialize_neuron_run(
-        ryml::NodeRef neurons_node, const std::tuple &neuron_run,
-        const NeuronGroup &group, std::list &strings)
+ryml::NodeRef sanafe::yaml_serialize_neuron_run(ryml::NodeRef neurons_node,
+        const std::tuple &neuron_run, const NeuronGroup &group,
+        std::list &strings)
 {
     auto [start_offset, end_offset] = neuron_run;
 
diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt
new file mode 100644
index 0000000..141c762
--- /dev/null
+++ b/tests/CMakeLists.txt
@@ -0,0 +1,26 @@
+include(FetchContent)
+
+FetchContent_Declare(
+    googletest
+    URL https://github.com/google/googletest/archive/03597a01ee50ed33e9dfd640b249b4be3799d395.zip
+)
+set(gtest_force_shared_crt ON CACHE BOOL "" FORCE)
+FetchContent_MakeAvailable(googletest)
+
+add_executable(
+    sanafe_tests
+    tests.cpp
+)
+
+target_link_libraries(
+    sanafe_tests
+    GTest::gtest_main
+)
+
+target_link_libraries(
+    sanafe_tests
+    sanafe
+)
+
+include(GoogleTest)
+gtest_discover_tests(sanafe_tests)
\ No newline at end of file
diff --git a/tests/tests.cpp b/tests/tests.cpp
new file mode 100644
index 0000000..40f4af2
--- /dev/null
+++ b/tests/tests.cpp
@@ -0,0 +1,13 @@
+#include <gtest/gtest.h>
+#include "chip.hpp"
+
+
+TEST(InitialTest, CheckTestFunctionality) {
+    // making sure that the test framework is working correctly
+    EXPECT_EQ(0, 0);
+}
+
+TEST(InitialTest, CheckChipInitialization) {
+    char *a = new char[100];
+    (void)a;
+}