pax_global_header00006660000000000000000000000064147020552320014512gustar00rootroot0000000000000052 comment=54d30f26f72ce62f5dcb5a5258f632f84858714f z3-z3-4.13.3/000077500000000000000000000000001470205523200124705ustar00rootroot00000000000000z3-z3-4.13.3/.dockerignore000066400000000000000000000000471470205523200151450ustar00rootroot00000000000000**/*.swp **/*.pyc .git **/*.Dockerfile z3-z3-4.13.3/.gitattributes000066400000000000000000000002121470205523200153560ustar00rootroot00000000000000# Set default behaviour, in case users don't have core.autocrlf set. * text=auto src/api/dotnet/Properties/AssemblyInfo.cs text eol=crlf z3-z3-4.13.3/.github/000077500000000000000000000000001470205523200140305ustar00rootroot00000000000000z3-z3-4.13.3/.github/dependabot.yml000066400000000000000000000001661470205523200166630ustar00rootroot00000000000000version: 2 updates: - package-ecosystem: "github-actions" directory: "/" schedule: interval: "weekly" z3-z3-4.13.3/.github/workflows/000077500000000000000000000000001470205523200160655ustar00rootroot00000000000000z3-z3-4.13.3/.github/workflows/Windows.yml000066400000000000000000000040771470205523200202520ustar00rootroot00000000000000name: Windows on: push: branches: [ master ] jobs: build: strategy: matrix: arch : [x86,x64,amd64_arm64] include: - arch : x86 - arch : amd64_arm64 - arch : x64 cmd1 : 'julia -e "using Pkg; Pkg.add(PackageSpec(name=\"libcxxwrap_julia_jll\"))"' cmd2 : 'julia -e "using libcxxwrap_julia_jll; print(dirname(libcxxwrap_julia_jll.libcxxwrap_julia_path))" > tmp.env' cmd3 : 'set /P JlCxxDir=> $GITHUB_OUTPUT - uses: actions/upload-artifact@v4 with: name: coverage-${{steps.date.outputs.date}} path: ${{github.workspace}}/coverage.html retention-days: 4 - uses: actions/upload-artifact@v4 with: name: coverage-details-${{steps.date.outputs.date}} path: ${{env.COV_DETAILS_PATH}} retention-days: 4 z3-z3-4.13.3/.github/workflows/cross-build.yml000066400000000000000000000013051470205523200210350ustar00rootroot00000000000000name: RISC V and PowerPC 64 on: push: pull_request: permissions: contents: read jobs: build: runs-on: ubuntu-latest container: ubuntu:jammy strategy: fail-fast: false matrix: arch: [ aarch64, riscv64, powerpc64 ] steps: - name: Checkout code uses: actions/checkout@v4 - name: Install cross build tools run: apt update && apt install -y ninja-build cmake python3 g++-11-${{ matrix.arch }}-linux-gnu env: DEBIAN_FRONTEND: noninteractive - name: Configure CMake and build run: | mkdir build && cd build cmake -DCMAKE_CXX_COMPILER=${{ matrix.arch }}-linux-gnu-g++-11 ../ make -j$(nproc) z3-z3-4.13.3/.github/workflows/docker-image.yml000066400000000000000000000035771470205523200211530ustar00rootroot00000000000000name: Publish Docker image on: schedule: - cron: "0 1 * * 0" # every Sunday at 1 am workflow_dispatch: # on button click permissions: contents: read jobs: push_to_registry: name: Push Docker image to GitHub Docker registry runs-on: ubuntu-latest steps: - name: Check out the repo uses: actions/checkout@v4 - name: Log in to GitHub Docker registry uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} # ------- # BARE Z3 # ------- - name: Extract metadata (tags, labels) for Bare Z3 Docker Image id: meta uses: docker/metadata-action@v5 with: images: ghcr.io/z3prover/z3 flavor: | latest=auto prefix=ubuntu-20.04-bare-z3- tags: | type=schedule,pattern={{date 'YYYYMMDD'}} type=ref,event=tag type=edge type=sha,prefix=ubuntu-20.04-bare-z3-sha- - name: Build and push 
Bare Z3 Docker Image uses: docker/build-push-action@v6.9.0 with: context: . push: true target: bare-z3 file: ./docker/ubuntu-20-04.Dockerfile tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} # ------------------------------ # Repo description on GHCR # ------------------------------ # - name: Update repo description # uses: peter-evans/dockerhub-description@v2 # with: # registry: ghcr.io # repository: z3prover/z3 # username: ${{ secrets.DOCKER_USERNAME }} # password: ${{ secrets.DOCKER_PASSWORD }} # short-description: ${{ github.event.repository.description }} # readme-filepath: ./docker/README.md z3-z3-4.13.3/.github/workflows/msvc-static-build-clang-cl.yml000066400000000000000000000010761470205523200236240ustar00rootroot00000000000000name: MSVC Clang-CL Static Build on: push: pull_request: permissions: contents: read # to fetch code (actions/checkout) jobs: build: runs-on: windows-2019 env: BUILD_TYPE: Release steps: - name: Checkout Repo uses: actions/checkout@v4 - name: Build run: | cmake -B build -DCMAKE_BUILD_TYPE=${{ env.BUILD_TYPE }} -DZ3_BUILD_LIBZ3_SHARED=OFF -DZ3_BUILD_LIBZ3_MSVC_STATIC=ON -T ClangCL -DCMAKE_C_FLAGS="/EHsc" -DCMAKE_CXX_FLAGS="/EHsc" cmake --build build --config ${{ env.BUILD_TYPE }} --parallel z3-z3-4.13.3/.github/workflows/msvc-static-build.yml000066400000000000000000000007701470205523200221460ustar00rootroot00000000000000name: MSVC Static Build on: push: pull_request: permissions: contents: read # to fetch code (actions/checkout) jobs: build: runs-on: windows-2019 env: BUILD_TYPE: Release steps: - name: Checkout Repo uses: actions/checkout@v4 - name: Build run: | cmake -B build -DCMAKE_BUILD_TYPE=${{ env.BUILD_TYPE }} -DZ3_BUILD_LIBZ3_SHARED=OFF -DZ3_BUILD_LIBZ3_MSVC_STATIC=ON cmake --build build --config ${{ env.BUILD_TYPE }} --parallel z3-z3-4.13.3/.github/workflows/wasm-release.yml000066400000000000000000000027611470205523200212030ustar00rootroot00000000000000name: WebAssembly Publish on: workflow_dispatch: release: types: [published] defaults: run: working-directory: src/api/js env: EM_VERSION: 3.1.15 permissions: contents: read # to fetch code (actions/checkout) jobs: publish: name: Publish runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v4 - name: Setup node uses: actions/setup-node@v4 with: node-version: "lts/*" registry-url: "https://registry.npmjs.org" - name: Prepare for publish run: | npm version $(node -e 'console.log(fs.readFileSync("../../../scripts/release.yml", "utf8").match(/ReleaseVersion:\s*\x27(\S+)\x27/)[1])') mv PUBLISHED_README.md README.md cp ../../../LICENSE.txt . 
- name: Setup emscripten uses: mymindstorm/setup-emsdk@v14 with: no-install: true version: ${{env.EM_VERSION}} actions-cache-folder: "emsdk-cache" - name: Install dependencies run: npm ci - name: Build TypeScript run: npm run build:ts - name: Build wasm run: | emsdk install ${EM_VERSION} emsdk activate ${EM_VERSION} source $(dirname $(which emsdk))/emsdk_env.sh which node which clang++ npm run build:wasm - name: Test run: npm test - name: Publish run: npm publish env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} z3-z3-4.13.3/.github/workflows/wasm.yml000066400000000000000000000020501470205523200175540ustar00rootroot00000000000000name: WebAssembly Build on: push: branches: [master] pull_request: defaults: run: working-directory: src/api/js env: EM_VERSION: 3.1.15 permissions: contents: read # to fetch code (actions/checkout) jobs: check: name: Check runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v4 - name: Setup node uses: actions/setup-node@v4 with: node-version: "lts/*" - name: Setup emscripten uses: mymindstorm/setup-emsdk@v14 with: no-install: true version: ${{env.EM_VERSION}} actions-cache-folder: "emsdk-cache" - name: Install dependencies run: npm ci - name: Build TypeScript run: npm run build:ts - name: Build wasm run: | emsdk install ${EM_VERSION} emsdk activate ${EM_VERSION} source $(dirname $(which emsdk))/emsdk_env.sh which node which clang++ npm run build:wasm - name: Test run: npm test z3-z3-4.13.3/.github/workflows/wip.yml000066400000000000000000000012741470205523200174130ustar00rootroot00000000000000name: Open Issues on: push: branches: [ master ] env: BUILD_TYPE: Debug permissions: contents: read jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Configure CMake run: cmake -B ${{github.workspace}}/build -DCMAKE_BUILD_TYPE=${{env.BUILD_TYPE}} - name: Build # Build your program with the given configuration run: cmake --build ${{github.workspace}}/build --config ${{env.BUILD_TYPE}} - name: Clone z3test run: git clone https://github.com/z3prover/z3test z3test - name: Run regressions run: python z3test/scripts/test_benchmarks.py build/z3 z3test/regressions/issues z3-z3-4.13.3/.gitignore000066400000000000000000000033011470205523200144550ustar00rootroot00000000000000*~ rebase.cmd *.pyc *.pyo # Ignore callgrind files callgrind.out.* # .hpp files are automatically generated *.hpp .z3-trace .env .genaiscript package-lock.json package.json node_modules # OCaml generated files *.a *.o *.cma *.cmo *.cmi *.cmx *.byte *.cmxa ocamlz3 # Java generated files *.class *.jar # Emacs temp files \#*\# # Directories with generated code and documentation release/* build/* trace/* build-dist/* dist/* src/out/* doc/html/* # GTAGS generated files src/GPATH src/GRTAGS src/GSYMS src/GTAGS src/HTML/* # CSCOPE files src/cscope.in.out src/cscope.out src/cscope.po.out ncscope.out # CEDET files .cproject .project # Commonly used directories for code bld_dbg/* bld_rel/* bld_dbg_x64/* bld_rel_x64/* .vscode *build*/** # Auto generated files. 
config.log config.status install_tactic.cpp mem_initializer.cpp gparams_register_modules.cpp scripts/config-debug.mk scripts/config-release.mk src/api/api_commands.cpp src/api/api_log_macros.h src/api/api_log_macros.cpp src/api/dll/api_dll.def src/api/dotnet/Enumerations.cs src/api/dotnet/Native.cs src/api/dotnet/Properties/AssemblyInfo.cs src/api/dotnet/Microsoft.Z3.xml src/api/python/z3/z3consts.py src/api/python/z3/z3core.py src/ast/pattern/database.h src/util/version.h src/util/z3_version.h src/api/java/Native.cpp src/api/java/Native.java src/api/java/enumerations/*.java src/api/ml/z3native_stubs.c src/api/ml/z3native.ml src/api/ml/z3enums.ml src/api/ml/z3native.mli src/api/ml/z3enums.mli src/api/ml/z3.mllib src/api/js/node_modules/ src/api/js/build/ src/api/js/**/*.__GENERATED__.* debug/* examples/python/z3 examples/python/libz3.dll out/** *.bak doc/api doc/code .vs examples/**/obj CMakeSettings.json # Editor temp files *.swp .DS_Store dbg/** *.wsp z3-z3-4.13.3/.gitignore.genai000066400000000000000000000000661470205523200155440ustar00rootroot00000000000000**/genaiscript.d.ts **/package-lock.json **/yarn.lock z3-z3-4.13.3/CMakeLists.txt000066400000000000000000000614511470205523200152370ustar00rootroot00000000000000# Enforce some CMake policies cmake_minimum_required(VERSION 3.16) set(CMAKE_USER_MAKE_RULES_OVERRIDE_CXX "${CMAKE_CURRENT_SOURCE_DIR}/cmake/cxx_compiler_flags_overrides.cmake") project(Z3 VERSION 4.13.3.0 LANGUAGES CXX) ################################################################################ # Project version ################################################################################ set(Z3_FULL_VERSION_STR "${Z3_VERSION}") # Note this might be modified message(STATUS "Z3 version ${Z3_VERSION}") ################################################################################ # Message for polluted source tree sanity checks ################################################################################ set(z3_polluted_tree_msg " should not exist and is polluting the source tree." " It is likely that this file came from the Python build system which" " generates files inside the source tree. This is bad practice and the CMake" " build system is setup to make sure that the source tree is clean during" " its configure step. If you are using git you can remove all untracked files" " using ``git clean -fx``. Be careful when doing this. You should probably use" " this with ``-n`` first to check which file(s) would be removed." ) ################################################################################ # Sanity check - Disallow building in source ################################################################################ if (PROJECT_SOURCE_DIR STREQUAL PROJECT_BINARY_DIR) message(FATAL_ERROR "In source builds are not allowed. 
You should invoke " "CMake from a different directory.") endif() ################################################################################ # Add our CMake module directory to the list of module search directories ################################################################################ list(APPEND CMAKE_MODULE_PATH "${PROJECT_SOURCE_DIR}/cmake/modules") ################################################################################ # Handle git hash and description ################################################################################ include(${PROJECT_SOURCE_DIR}/cmake/git_utils.cmake) macro(disable_git_describe) if(Z3_INCLUDE_GIT_DESCRIBE) message(WARNING "Disabling Z3_INCLUDE_GIT_DESCRIBE") set(Z3_INCLUDE_GIT_DESCRIBE OFF CACHE BOOL "Include git describe output in version output" FORCE) endif() endmacro() macro(disable_git_hash) if(Z3_INCLUDE_GIT_HASH) message(WARNING "Disabling Z3_INCLUDE_GIT_HASH") set(Z3_INCLUDE_GIT_HASH OFF CACHE BOOL "Include git hash in version output" FORCE) endif() endmacro() option(Z3_INCLUDE_GIT_HASH "Include git hash in version output" ON) option(Z3_INCLUDE_GIT_DESCRIBE "Include git describe output in version output" ON) set(GIT_DIR "${PROJECT_SOURCE_DIR}/.git") if ((Z3_INCLUDE_GIT_HASH OR Z3_INCLUDE_GIT_HASH) AND EXISTS "${GIT_DIR}") # Try to make CMake configure depend on the current git HEAD so that # a re-configure is triggered when the HEAD changes. add_git_dir_dependency("${GIT_DIR}" ADD_GIT_DEP_SUCCESS) if (ADD_GIT_DEP_SUCCESS) if (Z3_INCLUDE_GIT_HASH) get_git_head_hash("${GIT_DIR}" Z3GITHASH) if (NOT Z3GITHASH) message(WARNING "Failed to get Git hash") disable_git_hash() else() message(STATUS "Using Git hash in version output: ${Z3GITHASH}") # This mimics the behaviour of the old build system. set(Z3_FULL_VERSION_STR "${Z3_FULL_VERSION_STR} ${Z3GITHASH}") endif() else() message(STATUS "Not using Git hash in version output") endif() if (Z3_INCLUDE_GIT_DESCRIBE) get_git_head_describe("${GIT_DIR}" Z3_GIT_DESCRIPTION) if (NOT Z3_GIT_DESCRIPTION) message(WARNING "Failed to get Git description") disable_git_describe() endif() message(STATUS "Using Git description in version output: ${Z3_GIT_DESCRIPTION}") # This mimics the behaviour of the old build system. set(Z3_FULL_VERSION_STR "${Z3_FULL_VERSION_STR} ${Z3_GIT_DESCRIPTION}") else() message(STATUS "Not including git description in version") endif() else() message(WARNING "Failed to add git dependency.") disable_git_describe() disable_git_hash() endif() else() message(STATUS "Failed to find git directory.") disable_git_describe() disable_git_hash() endif() if(NOT Z3_INCLUDE_GIT_HASH) unset(Z3GITHASH) # Used in configure_file() endif() ################################################################################ # Useful CMake functions/Macros ################################################################################ include(CheckCXXSourceCompiles) include(CMakeDependentOption) ################################################################################ # Compiler flags for Z3 components. 
# Subsequent commands will append to this ################################################################################ set(Z3_COMPONENT_CXX_DEFINES "") set(Z3_COMPONENT_CXX_FLAGS "") set(Z3_COMPONENT_EXTRA_INCLUDE_DIRS "") set(Z3_DEPENDENT_LIBS "") set(Z3_DEPENDENT_EXTRA_CXX_LINK_FLAGS "") ################################################################################ # Build type ################################################################################ message(STATUS "CMake generator: ${CMAKE_GENERATOR}") set(available_build_types Debug Release RelWithDebInfo MinSizeRel) if (DEFINED CMAKE_CONFIGURATION_TYPES) # Multi-configuration build (e.g. Visual Studio and Xcode). Here # CMAKE_BUILD_TYPE doesn't matter message(STATUS "Available configurations: ${CMAKE_CONFIGURATION_TYPES}") else() # Single configuration generator (e.g. Unix Makefiles, Ninja) if(NOT CMAKE_BUILD_TYPE) message(STATUS "CMAKE_BUILD_TYPE is not set. Setting default") message(STATUS "The available build types are: ${available_build_types}") set(CMAKE_BUILD_TYPE RelWithDebInfo CACHE STRING "Options are ${available_build_types}" FORCE) # Provide drop down menu options in cmake-gui set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS ${available_build_types}) endif() message(STATUS "Build type: ${CMAKE_BUILD_TYPE}") # Check the selected build type is valid list(FIND available_build_types "${CMAKE_BUILD_TYPE}" _build_type_index) if ("${_build_type_index}" EQUAL "-1") message(FATAL_ERROR "\"${CMAKE_BUILD_TYPE}\" is an invalid build type.\n" "Use one of the following build types ${available_build_types}") endif() endif() # CMAKE_BUILD_TYPE has no meaning for multi-configuration generators # (e.g. Visual Studio) so use generator expressions instead to add # the right definitions when doing a particular build type. 
# # Note for some reason we have to leave off ``-D`` here otherwise # we get ``-D-DZ3DEBUG`` passed to the compiler list(APPEND Z3_COMPONENT_CXX_DEFINES $<$:Z3DEBUG>) list(APPEND Z3_COMPONENT_CXX_DEFINES $<$:_EXTERNAL_RELEASE>) list(APPEND Z3_COMPONENT_CXX_DEFINES $<$:_EXTERNAL_RELEASE>) ################################################################################ # Find Python ################################################################################ find_package(Python3 REQUIRED COMPONENTS Interpreter) message(STATUS "Python3_EXECUTABLE: ${Python3_EXECUTABLE}") ################################################################################ # Target architecture detection ################################################################################ include(${PROJECT_SOURCE_DIR}/cmake/target_arch_detect.cmake) detect_target_architecture(TARGET_ARCHITECTURE) message(STATUS "Detected target architecture: ${TARGET_ARCHITECTURE}") ################################################################################ # Function for detecting C++ compiler flag support ################################################################################ include(${PROJECT_SOURCE_DIR}/cmake/z3_add_cxx_flag.cmake) ################################################################################ # C++ language version ################################################################################ set(CMAKE_CXX_STANDARD 20) set(CMAKE_CXX_STANDARD_REQUIRED ON) ################################################################################ # Platform detection ################################################################################ if (CMAKE_SYSTEM_NAME STREQUAL "Darwin") if (TARGET_ARCHITECTURE STREQUAL "arm64") set(CMAKE_OSX_ARCHITECTURES "arm64") endif() elseif (WIN32) message(STATUS "Platform: Windows") list(APPEND Z3_COMPONENT_CXX_DEFINES "-D_WINDOWS") elseif (EMSCRIPTEN) message(STATUS "Platform: Emscripten") list(APPEND Z3_DEPENDENT_EXTRA_CXX_LINK_FLAGS "-Os" "-s ALLOW_MEMORY_GROWTH=1" "-s ASSERTIONS=0" "-s DISABLE_EXCEPTION_CATCHING=0" "-s ERROR_ON_UNDEFINED_SYMBOLS=1" ) endif() list(APPEND Z3_COMPONENT_EXTRA_INCLUDE_DIRS "${PROJECT_BINARY_DIR}/src" "${PROJECT_SOURCE_DIR}/src" ) ################################################################################ # GNU multiple precision library support ################################################################################ option(Z3_USE_LIB_GMP "Use GNU Multiple Precision Library" OFF) if (Z3_USE_LIB_GMP) # Because this is off by default we will make the configure fail if libgmp # can't be found find_package(GMP REQUIRED) message(STATUS "Using libgmp") list(APPEND Z3_DEPENDENT_LIBS GMP::GMP) list(APPEND Z3_COMPONENT_CXX_DEFINES "-D_MP_GMP") else() list(APPEND Z3_COMPONENT_CXX_DEFINES "-D_MP_INTERNAL") message(STATUS "Not using libgmp") endif() ################################################################################ # API Log sync ################################################################################ option(Z3_API_LOG_SYNC "Use locking when logging Z3 API calls (experimental)" OFF ) if (Z3_API_LOG_SYNC) list(APPEND Z3_COMPONENT_CXX_DEFINES "-DZ3_LOG_SYNC") message(STATUS "Using Z3_API_LOG_SYNC") else() message(STATUS "Not using Z3_API_LOG_SYNC") endif() ################################################################################ # Thread safe or not? 
################################################################################ option(Z3_SINGLE_THREADED "Non-thread-safe build" OFF ) if (Z3_SINGLE_THREADED) list(APPEND Z3_COMPONENT_CXX_DEFINES "-DSINGLE_THREAD") message(STATUS "Non-thread-safe build") else() message(STATUS "Thread-safe build") endif() ################################################################################ # FP math ################################################################################ # FIXME: Support ARM "-mfpu=vfp -mfloat-abi=hard" if ((TARGET_ARCHITECTURE STREQUAL "x86_64") OR (TARGET_ARCHITECTURE STREQUAL "i686")) if ((CMAKE_CXX_COMPILER_ID MATCHES "GNU") OR (CMAKE_CXX_COMPILER_ID MATCHES "Clang") OR (CMAKE_CXX_COMPILER_ID MATCHES "Intel")) set(SSE_FLAGS "-mfpmath=sse" "-msse" "-msse2") elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC") set(SSE_FLAGS "/arch:SSE2") else() message(FATAL_ERROR "Unknown compiler ${CMAKE_CXX_COMPILER_ID}") endif() CHECK_CXX_COMPILER_FLAG("${SSE_FLAGS}" HAS_SSE2) if (HAS_SSE2) list(APPEND Z3_COMPONENT_CXX_FLAGS ${SSE_FLAGS}) endif() unset(SSE_FLAGS) endif() ################################################################################ # Threading support ################################################################################ set(THREADS_PREFER_PTHREAD_FLAG TRUE) find_package(Threads) list(APPEND Z3_DEPENDENT_LIBS Threads::Threads) ################################################################################ # Compiler warnings ################################################################################ include(${PROJECT_SOURCE_DIR}/cmake/compiler_warnings.cmake) ################################################################################ # Save Clang optimization records ################################################################################ option(Z3_SAVE_CLANG_OPTIMIZATION_RECORDS "Enable saving Clang optimization records." OFF) if (Z3_SAVE_CLANG_OPTIMIZATION_RECORDS) z3_add_cxx_flag("-fsave-optimization-record" REQUIRED) endif() ################################################################################ # If using Ninja, force color output for Clang (and gcc, disabled to check build). ################################################################################ if (UNIX AND CMAKE_GENERATOR STREQUAL "Ninja") if (CMAKE_CXX_COMPILER_ID STREQUAL "Clang") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fcolor-diagnostics") set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fcolor-diagnostics") endif() # if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") # set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fdiagnostics-color") # set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fdiagnostics-color") # endif() endif() ################################################################################ # Option to control what type of library we build ################################################################################ option(Z3_BUILD_LIBZ3_SHARED "Build libz3 as a shared library if true, otherwise build a static library" ON) option(Z3_BUILD_LIBZ3_MSVC_STATIC "Build libz3 as a statically-linked runtime library" OFF) ################################################################################ # Tracing ################################################################################ option(Z3_ENABLE_TRACING_FOR_NON_DEBUG "Enable tracing in non-debug builds." 
OFF) if (Z3_ENABLE_TRACING_FOR_NON_DEBUG) list(APPEND Z3_COMPONENT_CXX_DEFINES "-D_TRACE") else() # Tracing is always enabled in debug builds list(APPEND Z3_COMPONENT_CXX_DEFINES $<$:_TRACE>) endif() ################################################################################ # Link time optimization ################################################################################ include(${PROJECT_SOURCE_DIR}/cmake/compiler_lto.cmake) ################################################################################ # Control flow integrity ################################################################################ option(Z3_ENABLE_CFI "Enable control flow integrity checking" OFF) if (Z3_ENABLE_CFI) set(build_types_with_cfi "RELEASE" "RELWITHDEBINFO") if (NOT Z3_LINK_TIME_OPTIMIZATION) message(FATAL_ERROR "Cannot enable control flow integrity checking without link-time optimization." "You should set Z3_LINK_TIME_OPTIMIZATION to ON or Z3_ENABLE_CFI to OFF.") endif() if (DEFINED CMAKE_CONFIGURATION_TYPES) # Multi configuration generator message(STATUS "Note CFI is only enabled for the following configurations: ${build_types_with_cfi}") # No need for else because this is the same as the set that LTO requires. endif() if ("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang") z3_add_cxx_flag("-fsanitize=cfi" REQUIRED) z3_add_cxx_flag("-fsanitize-cfi-cross-dso" REQUIRED) elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC") z3_add_cxx_flag("/guard:cf" REQUIRED) message(STATUS "Enabling CFI for MSVC") foreach (_build_type ${build_types_with_cfi}) message(STATUS "Enabling CFI for MSVC") string(APPEND CMAKE_EXE_LINKER_FLAGS_${_build_type} " /GUARD:CF") string(APPEND CMAKE_SHARED_LINKER_FLAGS_${_build_type} " /GUARD:CF") endforeach() else() message(FATAL_ERROR "Can't enable control flow integrity for compiler \"${CMAKE_CXX_COMPILER_ID}\"." "You should set Z3_ENABLE_CFI to OFF or use Clang or MSVC to compile.") endif() endif() ################################################################################ # MSVC specific flags inherited from old build system ################################################################################ if (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC") include(${PROJECT_SOURCE_DIR}/cmake/msvc_legacy_quirks.cmake) endif() ################################################################################ # Pass /RELEASE to the linker so that checksums in PE files are calculated. ################################################################################ if (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC") string(APPEND CMAKE_EXE_LINKER_FLAGS " /RELEASE") string(APPEND CMAKE_SHARED_LINKER_FLAGS " /RELEASE") endif() ################################################################################ # Check atomic linking as needed ################################################################################ include(${PROJECT_SOURCE_DIR}/cmake/check_link_atomic.cmake) ################################################################################ # Report default CMake flags ################################################################################ # This is mainly for debugging. 
message(STATUS "CMAKE_CXX_FLAGS: \"${CMAKE_CXX_FLAGS}\"") message(STATUS "CMAKE_EXE_LINKER_FLAGS: \"${CMAKE_EXE_LINKER_FLAGS}\"") message(STATUS "CMAKE_STATIC_LINKER_FLAGS: \"${CMAKE_STATIC_LINKER_FLAGS}\"") message(STATUS "CMAKE_SHARED_LINKER_FLAGS: \"${CMAKE_SHARED_LINKER_FLAGS}\"") if (DEFINED CMAKE_CONFIGURATION_TYPES) # Multi configuration generator string(TOUPPER "${available_build_types}" build_types_to_report) else() # Single configuration generator string(TOUPPER "${CMAKE_BUILD_TYPE}" build_types_to_report) endif() foreach (_build_type ${build_types_to_report}) message(STATUS "CMAKE_CXX_FLAGS_${_build_type}: \"${CMAKE_CXX_FLAGS_${_build_type}}\"") message(STATUS "CMAKE_EXE_LINKER_FLAGS_${_build_type}: \"${CMAKE_EXE_LINKER_FLAGS_${_build_type}}\"") message(STATUS "CMAKE_SHARED_LINKER_FLAGS_${_build_type}: \"${CMAKE_SHARED_LINKER_FLAGS_${_build_type}}\"") message(STATUS "CMAKE_STATIC_LINKER_FLAGS_${_build_type}: \"${CMAKE_STATIC_LINKER_FLAGS_${_build_type}}\"") endforeach() ################################################################################ # Report Z3_COMPONENT flags ################################################################################ message(STATUS "Z3_COMPONENT_CXX_DEFINES: ${Z3_COMPONENT_CXX_DEFINES}") message(STATUS "Z3_COMPONENT_CXX_FLAGS: ${Z3_COMPONENT_CXX_FLAGS}") message(STATUS "Z3_DEPENDENT_LIBS: ${Z3_DEPENDENT_LIBS}") message(STATUS "Z3_COMPONENT_EXTRA_INCLUDE_DIRS: ${Z3_COMPONENT_EXTRA_INCLUDE_DIRS}") message(STATUS "Z3_DEPENDENT_EXTRA_CXX_LINK_FLAGS: ${Z3_DEPENDENT_EXTRA_CXX_LINK_FLAGS}") ################################################################################ # Z3 installation locations ################################################################################ include(GNUInstallDirs) set(CMAKE_INSTALL_PKGCONFIGDIR "${CMAKE_INSTALL_LIBDIR}/pkgconfig" CACHE PATH "Directory to install pkgconfig files" ) set(CMAKE_INSTALL_Z3_CMAKE_PACKAGE_DIR "${CMAKE_INSTALL_LIBDIR}/cmake/z3" CACHE PATH "Directory to install Z3 CMake package files" ) message(STATUS "CMAKE_INSTALL_LIBDIR: \"${CMAKE_INSTALL_LIBDIR}\"") message(STATUS "CMAKE_INSTALL_BINDIR: \"${CMAKE_INSTALL_BINDIR}\"") message(STATUS "CMAKE_INSTALL_INCLUDEDIR: \"${CMAKE_INSTALL_INCLUDEDIR}\"") message(STATUS "CMAKE_INSTALL_PKGCONFIGDIR: \"${CMAKE_INSTALL_PKGCONFIGDIR}\"") message(STATUS "CMAKE_INSTALL_Z3_CMAKE_PACKAGE_DIR: \"${CMAKE_INSTALL_Z3_CMAKE_PACKAGE_DIR}\"") ################################################################################ # Uninstall rule ################################################################################ configure_file( "${PROJECT_SOURCE_DIR}/cmake/cmake_uninstall.cmake.in" "${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake" @ONLY ) # Target needs to be declared before the components so that they can add # dependencies to this target so they can run their own custom uninstall rules. add_custom_target(uninstall COMMAND "${CMAKE_COMMAND}" -P "${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake" COMMENT "Uninstalling..." 
USES_TERMINAL VERBATIM ) ################################################################################ # CMake build file locations ################################################################################ # To mimic the python build system output these into the root of the build # directory set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}") set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}") set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}") ################################################################################ # Extra dependencies for build rules that use the Python infrastructure to # generate files used for Z3's build. Changes to these files will trigger # a rebuild of all the generated files. ################################################################################ # Note: ``update_api.py`` is deliberately not here because it is not used # to generate every generated file. The targets that need it list it explicitly. set(Z3_GENERATED_FILE_EXTRA_DEPENDENCIES "${PROJECT_SOURCE_DIR}/scripts/mk_genfile_common.py" ) ################################################################################ # Z3 components, library and executables ################################################################################ include(${PROJECT_SOURCE_DIR}/cmake/z3_add_component.cmake) include(${PROJECT_SOURCE_DIR}/cmake/z3_append_linker_flag_list_to_target.cmake) add_subdirectory(src) ################################################################################ # Create `Z3Config.cmake` and related files for the build tree so clients can # use Z3 via CMake. ################################################################################ include(CMakePackageConfigHelpers) export(EXPORT Z3_EXPORTED_TARGETS NAMESPACE z3:: FILE "${PROJECT_BINARY_DIR}/Z3Targets.cmake" ) set(Z3_FIRST_PACKAGE_INCLUDE_DIR "${PROJECT_BINARY_DIR}/src/api") set(Z3_SECOND_PACKAGE_INCLUDE_DIR "${PROJECT_SOURCE_DIR}/src/api") set(Z3_CXX_PACKAGE_INCLUDE_DIR "${PROJECT_SOURCE_DIR}/src/api/c++") set(AUTO_GEN_MSG "Automatically generated. DO NOT EDIT") set(CONFIG_FILE_TYPE "build tree") configure_package_config_file("${PROJECT_SOURCE_DIR}/cmake/Z3Config.cmake.in" "Z3Config.cmake" INSTALL_DESTINATION "${PROJECT_BINARY_DIR}" PATH_VARS Z3_FIRST_PACKAGE_INCLUDE_DIR Z3_SECOND_PACKAGE_INCLUDE_DIR Z3_CXX_PACKAGE_INCLUDE_DIR ) unset(Z3_FIRST_PACKAGE_INCLUDE_DIR) unset(Z3_SECOND_PACKAGE_INCLUDE_DIR) unset(Z3_CXX_PACKAGE_INCLUDE_DIR) unset(AUTO_GEN_MSG) unset(CONFIG_FILE_TYPE) write_basic_package_version_file("${PROJECT_BINARY_DIR}/Z3ConfigVersion.cmake" COMPATIBILITY SameMajorVersion ) configure_file("${CMAKE_CURRENT_SOURCE_DIR}/z3.pc.cmake.in" "${CMAKE_CURRENT_BINARY_DIR}/z3.pc" @ONLY) ################################################################################ # Create `Z3Config.cmake` and related files for install tree so clients can use # Z3 via CMake. ################################################################################ install(EXPORT Z3_EXPORTED_TARGETS FILE "Z3Targets.cmake" NAMESPACE z3:: DESTINATION "${CMAKE_INSTALL_Z3_CMAKE_PACKAGE_DIR}" ) set(Z3_INSTALL_TREE_CMAKE_CONFIG_FILE "${PROJECT_BINARY_DIR}/cmake/Z3Config.cmake") set(Z3_FIRST_PACKAGE_INCLUDE_DIR "${CMAKE_INSTALL_INCLUDEDIR}") set(Z3_SECOND_INCLUDE_DIR "") set(Z3_CXX_PACKAGE_INCLUDE_DIR "") set(AUTO_GEN_MSG "Automatically generated. 
DO NOT EDIT") set(CONFIG_FILE_TYPE "install tree") # We use `configure_package_config_file()` to try and create CMake files # that are re-locatable so that it doesn't matter if the files aren't placed # in the original install prefix. configure_package_config_file("${PROJECT_SOURCE_DIR}/cmake/Z3Config.cmake.in" "${Z3_INSTALL_TREE_CMAKE_CONFIG_FILE}" INSTALL_DESTINATION "${CMAKE_INSTALL_Z3_CMAKE_PACKAGE_DIR}" PATH_VARS Z3_FIRST_PACKAGE_INCLUDE_DIR ) unset(Z3_FIRST_PACKAGE_INCLUDE_DIR) unset(Z3_SECOND_PACKAGE_INCLUDE_DIR) unset(Z3_CXX_PACKAGE_INCLUDE_DIR) unset(AUTO_GEN_MSG) unset(CONFIG_FILE_TYPE) # Add install rule to install ${Z3_INSTALL_TREE_CMAKE_CONFIG_FILE} install( FILES "${Z3_INSTALL_TREE_CMAKE_CONFIG_FILE}" DESTINATION "${CMAKE_INSTALL_Z3_CMAKE_PACKAGE_DIR}" ) # Add install rule to install ${PROJECT_BINARY_DIR}/Z3ConfigVersion.cmake install( FILES "${PROJECT_BINARY_DIR}/Z3ConfigVersion.cmake" DESTINATION "${CMAKE_INSTALL_Z3_CMAKE_PACKAGE_DIR}" ) # Add install rule to install ${PROJECT_BINARY_DIR}/z3.pc install( FILES "${PROJECT_BINARY_DIR}/z3.pc" DESTINATION "${CMAKE_INSTALL_PKGCONFIGDIR}" ) ################################################################################ # Examples ################################################################################ cmake_dependent_option(Z3_ENABLE_EXAMPLE_TARGETS "Build Z3 api examples" ON "CMAKE_SOURCE_DIR STREQUAL PROJECT_SOURCE_DIR" OFF) if (Z3_ENABLE_EXAMPLE_TARGETS) add_subdirectory(examples) endif() ################################################################################ # Documentation ################################################################################ option(Z3_BUILD_DOCUMENTATION "Build API documentation" OFF) if (Z3_BUILD_DOCUMENTATION) message(STATUS "Building documentation enabled") add_subdirectory(doc) else() message(STATUS "Building documentation disabled") endif() z3-z3-4.13.3/LICENSE.txt000066400000000000000000000021101470205523200143050ustar00rootroot00000000000000Z3 Copyright (c) Microsoft Corporation All rights reserved. MIT License Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.z3-z3-4.13.3/README-CMake.md000066400000000000000000000457401470205523200147370ustar00rootroot00000000000000# Z3's CMake build system [CMake](https://cmake.org/) is a "meta build system" that reads a description of the project written in the ``CMakeLists.txt`` files and emits a build system for that project of your choice using one of CMake's "generators". This allows CMake to support many different platforms and build tools. 
You can run ``cmake --help`` to see the list of supported "generators" on your platform. Example generators include "UNIX Makefiles" and "Visual Studio 12 2013". ## Getting started ### Fixing a polluted source tree If you have never used the python build system you can skip this step. The existing Python build system creates generated source files in the source tree. The CMake build system will refuse to work if it detects this so you need to clean your source tree first. To do this run the following in the root of the repository ``` git clean -nx src ``` This will list everything that will be removed. If you are happy with this then run. ``` git clean -fx src ``` which will remove the generated source files. ### Unix Makefiles Run the following in the top level directory of the Z3 repository. ``` mkdir build cd build cmake -G "Unix Makefiles" ../ make -j4 # Replace 4 with an appropriate number ``` Note that on some platforms "Unix Makefiles" is the default generator so on those platforms you don't need to pass ``-G "Unix Makefiles"`` command line option to ``cmake``. Note there is nothing special about the ``build`` directory name here. You can call it whatever you like. Note the "Unix Makefile" generator is a "single" configuration generator which means you pick the build type (e.g. ``Debug``, ``Release``) when you invoke CMake. You can set the build type by passing it to the ``cmake`` invocation like so: ``` cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE=Release ../ ``` See the section on "Build Types" for the different CMake build types. If you wish to use a different compiler set the CXX and CC environment variables passed to cmake. This must be done at the very first invocation to ``cmake`` in the build directory because once configuration has happened the compiler is fixed. If you want to use a different compiler to the one you have already configured you either need to make a new build directory or delete the contents of the current build directory and start again. For example to use clang the ``cmake `` line would be ``` CC=clang CXX=clang++ cmake ../ ``` Note that CMake build will detect the target architecture that compiler is set up to build for and the generated build system will build for that architecture. If there is a way to tell your compiler to build for a different architecture via compiler flags then you can set the ``CFLAGS`` and ``CXXFLAGS`` environment variables to have the build target that architecture. For example if you are on a x86_64 machine and you want to do a 32-bit build and have a multilib version of GCC you can run ``cmake`` like this ``` CFLAGS="-m32" CXXFLAGS="-m32" CC=gcc CXX=g++ cmake ../ ``` Note like with the ``CC`` and ``CXX`` flags this must be done on the very first invocation to CMake in the build directory. ### Adding Z3 as a dependency to a CMAKE Project CMake's [FetchContent](https://cmake.org/cmake/help/latest/module/FetchContent.html) allows the fetching and populating of an external project. This is useful when a certain version of z3 is required that may not match with the system version. 
With the following code in the cmake file of your project, z3 version 4.12.1 is downloaded to the build directory and the cmake targets are added to the project: ``` FetchContent_Declare(z3 GIT_REPOSITORY https://github.com/Z3Prover/z3 GIT_TAG z3-4.12.1 ) FetchContent_MakeAvailable(z3) ``` The header files can be added to the included directories as follows: ``` include_directories( ${z3_SOURCE_DIR}/src/api ) ``` Finally, the z3 library can be linked to a `yourTarget` using ``` target_link_libraries(yourTarget libz3) ``` Note that this is `libz3` not `z3` (`libz3` refers to the library target from `src/CMakeLists.txt`). ### Ninja [Ninja](https://ninja-build.org/) is a simple build system that is built for speed. It can be significantly faster than "UNIX Makefile"s because it is not a recursive build system and thus doesn't create a new process every time it traverses into a directory. Ninja is particularly appropriate if you want fast incremental building. Basic usage is as follows: ``` mkdir build cd build cmake -G "Ninja" ../ ninja ``` Note the discussion of the ``CC``, ``CXX``, ``CFLAGS`` and ``CXXFLAGS`` for "Unix Makefiles" also applies here. Note also that like the "Unix Makefiles" generator, the "Ninja" generator is a single configuration generator so you pick the build type when you invoke ``cmake`` by passing ``CMAKE_BUILD_TYPE=`` to ``cmake``. See the section on "Build Types". Note that Ninja runs in parallel by default. Use the ``-j`` flag to change this. Note that Ninja also runs on Windows. You just need to run ``cmake`` in an environment where the compiler can be found. If you have Visual Studio installed it typically ships with a "Developer Command Prompt Window" that you can use which has the environment variables setup for you. ### NMake NMake is a build system that ships with Visual Studio. You are advised to use Ninja instead which is significantly faster due to supporting concurrent builds. However CMake does support NMake if you wish to use it. Note that NMake is a single configuration generator so you must set ``CMAKE_BUILD_TYPE`` to set the build type. Basic usage: 1. Launch the "Developer Command Prompt Windows" 2. Change to the root of the Z3 repository ``` mkdir build cd build cmake -G "NMake Makefiles" ../ nmake ``` ### Visual Studio Visual Studio 19 comes with integrated support for CMake. It suffices to open the (z3) folder where this file and the Z3 project CMakeLists.txt resides, and Visual Studio does the rest. For legacy versions of Visual Studio a process is as follows: For the Visual Studio generators you need to know which version of Visual Studio you wish to use and also what architecture you want to build for. We'll use the ``cmake-gui`` here as it is easier to pick the right generator but this can be scripted if need be. Here are the basic steps: 1. Create an empty build directory 2. Start the cmake-gui program 3. Set "where is the source code" to the root of the Z3 project repository. You can do this by pressing the "Browse Source..." button and picking the directory. 4. Set "where to build the binaries" to the empty build directory you just created. You can do this by pressing the "Browse build..." button and picking the directory. 5. Press the "Configure" button 6. A window will appear asking you to pick the generator to use. Pick the generator that matches the version of Visual Studio you are using. Note also that some of the generator names contain ``Win64`` (e.g. ``Visual Studio 12 2013 Win64``) this indicates a x86 64-bit build. 
Generator names without this (e.g. ``Visual Studio 12 2013``) are x86 32-bit build. 7. Press the "Finish" button and wait for CMake to finish it's first configure. 8. A set of configuration options will appear which will affect various aspects of the build. Change them as you desire. If you change a set of options press the "Configure" again. Additional options may appear when you do this. 9. When you have finished changing configuration options press the "Generate" button. 10. When generation is done close cmake-gui. 11. In the build directory open the generated ``Z3.sln`` solution file created by CMake with Visual Studio. 12. In Visual Studio pick the build type (e.g. ``Debug``, ``Release``) you want. 13. Click "BUILD > Build Solution". Note that unlike the "Unix Makefile" and "Ninja" generators the Visual Studio generators are multi-configuration generators which means you don't set the build type when invoking CMake. Instead you set the build type inside Visual Studio. See the "Build Type" section for more information. ### General workflow The general workflow when using CMake is the following 1. Create a new build directory 2. Configure the project 3. Generate the build system 4. Invoke the build system to build the project To perform steps 2 and 3 you can choose from three different tools * cmake * ccmake * cmake-gui ``cmake`` is a command line tool and is what you should use if you are writing a script to build Z3. This tool performs steps 1 and 2 in one go without user interaction. The ``ccmake`` and ``cmake-gui`` tools are more interactive and allow you to change various options. In both these tools the basic steps to follow are: 1. Configure. 2. Change any options you wish. Every time you change a set of options You should configure again. This may cause new options to appear 3. Generate. For information see https://cmake.org/runningcmake/ Note when invoking CMake you give it the path to the source directory. This is the top-level directory in the Z3 repository containing a ``CMakeLists.txt``. That file should contain the line ``project(Z3 C CXX)``. If you give it a path deeper into the Z3 repository (e.g. the ``src`` directory) the configure step will fail. ## Build Types Several build types are supported. * Release * Debug * RelWithDebInfo * MinSizeRel For the single configuration generators (e.g. "Unix Makefile" and "Ninja") you set the build type when invoking ``cmake`` by passing ``-DCMAKE_BUILD_TYPE=`` where ```` is one of the build types specified above. For multi-configuration generators (e.g. Visual Studio) you don't set the build type when invoking CMake and instead set the build type within Visual Studio itself. ## Useful options The following useful options can be passed to CMake whilst configuring. * ``CMAKE_BUILD_TYPE`` - STRING. The build type to use. Only relevant for single configuration generators (e.g. "Unix Makefile" and "Ninja"). * ``CMAKE_INSTALL_BINDIR`` - STRING. The path to install z3 binaries (relative to ``CMAKE_INSTALL_PREFIX``), e.g. ``bin``. * ``CMAKE_INSTALL_INCLUDEDIR`` - STRING. The path to install z3 include files (relative to ``CMAKE_INSTALL_PREFIX``), e.g. ``include``. * ``CMAKE_INSTALL_LIBDIR`` - STRING. The path to install z3 libraries (relative to ``CMAKE_INSTALL_PREFIX``), e.g. ``lib``. * ``CMAKE_INSTALL_PREFIX`` - STRING. The install prefix to use (e.g. ``/usr/local/``). * ``CMAKE_INSTALL_PKGCONFIGDIR`` - STRING. The path to install pkgconfig files. * ``CMAKE_INSTALL_PYTHON_PKG_DIR`` - STRING. The path to install the z3 python bindings. 
This can be relative (to ``CMAKE_INSTALL_PREFIX``) or absolute. * ``CMAKE_INSTALL_Z3_CMAKE_PACKAGE_DIR`` - STRING. The path to install CMake package files (e.g. ``/usr/lib/cmake/z3``). * ``CMAKE_INSTALL_API_BINDINGS_DOC`` - STRING. The path to install documentation for API bindings. * ``Python3_EXECUTABLE`` - STRING. The python executable to use during the build. * ``Z3_ENABLE_TRACING_FOR_NON_DEBUG`` - BOOL. If set to ``TRUE`` enable tracing in non-debug builds, if set to ``FALSE`` disable tracing in non-debug builds. Note in debug builds tracing is always enabled. * ``Z3_BUILD_LIBZ3_SHARED`` - BOOL. If set to ``TRUE`` build libz3 as a shared library otherwise build as a static library. * ``Z3_ENABLE_EXAMPLE_TARGETS`` - BOOL. If set to ``TRUE`` add the build targets for building the API examples. * ``Z3_USE_LIB_GMP`` - BOOL. If set to ``TRUE`` use the GNU multiple precision library. If set to ``FALSE`` use an internal implementation. * ``Z3_BUILD_PYTHON_BINDINGS`` - BOOL. If set to ``TRUE`` then Z3's python bindings will be built. * ``Z3_INSTALL_PYTHON_BINDINGS`` - BOOL. If set to ``TRUE`` and ``Z3_BUILD_PYTHON_BINDINGS`` is ``TRUE`` then running the ``install`` target will install Z3's Python bindings. * ``Z3_BUILD_DOTNET_BINDINGS`` - BOOL. If set to ``TRUE`` then Z3's .NET bindings will be built. * ``Z3_INSTALL_DOTNET_BINDINGS`` - BOOL. If set to ``TRUE`` and ``Z3_BUILD_DOTNET_BINDINGS`` is ``TRUE`` then running the ``install`` target will install Z3's .NET bindings. * ``Z3_DOTNET_CSC_EXECUTABLE`` - STRING. The path to the C# compiler to use. Only relevant if ``Z3_BUILD_DOTNET_BINDINGS`` is set to ``TRUE``. * ``Z3_DOTNET_GACUTIL_EXECUTABLE`` - STRING. The path to the gacutil program to use. Only relevant if ``BUILD_DOTNET_BINDINGS`` is set to ``TRUE``. * ``Z3_BUILD_JAVA_BINDINGS`` - BOOL. If set to ``TRUE`` then Z3's Java bindings will be built. * ``Z3_INSTALL_JAVA_BINDINGS`` - BOOL. If set to ``TRUE`` and ``Z3_BUILD_JAVA_BINDINGS`` is ``TRUE`` then running the ``install`` target will install Z3's Java bindings. * ``Z3_JAVA_JAR_INSTALLDIR`` - STRING. The path to directory to install the Z3 Java ``.jar`` file. This path should be relative to ``CMAKE_INSTALL_PREFIX``. * ``Z3_JAVA_JNI_LIB_INSTALLDIRR`` - STRING. The path to directory to install the Z3 Java JNI bridge library. This path should be relative to ``CMAKE_INSTALL_PREFIX``. * ``Z3_INCLUDE_GIT_DESCRIBE`` - BOOL. If set to ``TRUE`` and the source tree of Z3 is a git repository then the output of ``git describe`` will be included in the build. * ``Z3_INCLUDE_GIT_HASH`` - BOOL. If set to ``TRUE`` and the source tree of Z3 is a git repository then the git hash will be included in the build. * ``Z3_BUILD_DOCUMENTATION`` - BOOL. If set to ``TRUE`` then documentation for the API bindings can be built by invoking the ``api_docs`` target. * ``Z3_INSTALL_API_BINDINGS_DOCUMENTATION`` - BOOL. If set to ``TRUE`` and ``Z3_BUILD_DOCUMENTATION` is ``TRUE`` then documentation for API bindings will be installed when running the ``install`` target. * ``Z3_ALWAYS_BUILD_DOCS`` - BOOL. If set to ``TRUE`` and ``Z3_BUILD_DOCUMENTATION`` is ``TRUE`` then documentation for API bindings will always be built. Disabling this is useful for faster incremental builds. The documentation can be manually built by invoking the ``api_docs`` target. * ``Z3_LINK_TIME_OPTIMIZATION`` - BOOL. If set to ``TRUE`` link time optimization will be enabled. * ``Z3_ENABLE_CFI`` - BOOL. If set to ``TRUE`` will enable Control Flow Integrity security checks. 
This is only supported by MSVC and Clang and will fail on other compilers. This requires Z3_LINK_TIME_OPTIMIZATION to also be enabled. * ``Z3_API_LOG_SYNC`` - BOOL. If set to ``TRUE`` will enable experimental API log sync feature. * ``WARNINGS_AS_ERRORS`` - STRING. If set to ``ON`` compiler warnings will be treated as errors. If set to ``OFF`` compiler warnings will not be treated as errors. If set to ``SERIOUS_ONLY`` a subset of compiler warnings will be treated as errors. * ``Z3_C_EXAMPLES_FORCE_CXX_LINKER`` - BOOL. If set to ``TRUE`` the C API examples will request that the C++ linker is used rather than the C linker. * ``Z3_BUILD_EXECUTABLE`` - BOOL. If set to ``TRUE`` build the z3 executable. Defaults to ``TRUE`` unless z3 is being built as a submodule in which case it defaults to ``FALSE``. * ``Z3_BUILD_TEST_EXECUTABLES`` - BOOL. If set to ``TRUE`` build the z3 test executables. Defaults to ``TRUE`` unless z3 is being built as a submodule in which case it defaults to ``FALSE``. * ``Z3_SAVE_CLANG_OPTIMIZATION_RECORDS`` - BOOL. If set to ``TRUE`` saves Clang optimization records by setting the compiler flag ``-fsave-optimization-record``. * ``Z3_SINGLE_THREADED`` - BOOL. If set to ``TRUE`` compiles Z3 for single threaded mode. On the command line these can be passed to ``cmake`` using the ``-D`` option. In ``ccmake`` and ``cmake-gui`` these can be set in the user interface. Example ``` cmake -DCMAKE_BUILD_TYPE=Release -DZ3_ENABLE_TRACING_FOR_NON_DEBUG=FALSE ../ ``` ## Z3 API Bindings Z3 exposes various language bindings for its API. Below are some notes on building and/or installing these bindings when building Z3 with CMake. ### Java bindings The CMake build uses the ``FindJava`` and ``FindJNI`` cmake modules to detect the installation of Java. If CMake fails to find your installation of Java set the ``JAVA_HOME`` environment variable when invoking CMake so that it points at the correct location. For example ``` JAVA_HOME=/usr/lib/jvm/default cmake -DZ3_BUILD_JAVA_BINDINGS=ON ../ ``` Note that the built ``.jar`` file is named ``com.microsoft.z3-VERSION.jar`` where ``VERSION`` is the Z3 version. Under non Windows systems a symbolic link named ``com.microsoft.z3.jar`` is provided. This symbolic link is not created when building under Windows. ## Developer/packager notes These notes are help developers and packagers of Z3. ### Install/Uninstall Install and uninstall targets are supported. Use ``CMAKE_INSTALL_PREFIX`` to set the install prefix. If you also need to control which directories are used for install set the documented ``CMAKE_INSTALL_*`` options. To install run ``` make install ``` To uninstall run ``` make uninstall ``` Note that ``DESTDIR`` is supported for [staged installs](https://www.gnu.org/prep/standards/html_node/DESTDIR.html). To install ``` mkdir staged make install DESTDIR=/full/path/to/staged/ ``` to uninstall ``` make uninstall DESTDIR=/full/path/to/staged ``` The above also works for Ninja but ``DESTDIR`` must be an environment variable instead. ### Examining invoked commands If you are using the "UNIX Makefiles" generator and want to see exactly the commands that are being run you can pass ``VERBOSE=1`` to make. ``` make VERBOSE=1 ``` If you are using Ninja you can use the ``-v`` flag. 
### Additional targets

To see the list of targets run

```
make help
```

There are a few special targets:

* ``clean`` removes all the built targets in the current directory and below
* ``edit_cache`` will invoke one of the CMake tools (depending on which is available) to let you change configuration options.
* ``rebuild_cache`` will reinvoke ``cmake`` for the project.
* ``api_docs`` will build the documentation for the API bindings.

### Setting build type specific flags

The build system supports single configuration and multi-configuration generators. This means it is not possible to know the build type at configure time and therefore ``${CMAKE_BUILD_TYPE}`` should not be conditionally used to set compiler flags or definitions. Instead you should use generator expressions, which are evaluated by the generator. For example

```
$<$<CONFIG:Debug>:Z3DEBUG>
```

If the build type at build time is ``Debug`` this evaluates to ``Z3DEBUG`` but evaluates to nothing for all other configurations. You can see examples of this in the ``CMakeLists.txt`` files.

### File-globbing

It is tempting to use file-globbing in ``CMakeLists.txt`` to find a set of files matching a pattern and use them as the sources to build a target. This however is a bad idea because it prevents CMake from knowing when it needs to rerun itself. This is why source file names are explicitly listed in the ``CMakeLists.txt`` files, so that changes to the set of source files used to build a target automatically trigger a rerun of CMake.

Long story short. Don't use file globbing.

### Serious warning flags

By default the `WARNINGS_AS_ERRORS` flag is set to `SERIOUS_ONLY` which means some warnings will be treated as errors. These warnings are controlled by the relevant `*_WARNINGS_AS_ERRORS` list defined in `cmake/compiler_warnings.cmake`. Additional warnings should only be added here if the warning has no false positives.

z3-z3-4.13.3/README.md

# Z3

Z3 is a theorem prover from Microsoft Research. It is licensed under the [MIT license](LICENSE.txt).

If you are not familiar with Z3, you can start [here](https://github.com/Z3Prover/z3/wiki#background).

Pre-built binaries for stable and nightly releases are available from [here](https://github.com/Z3Prover/z3/releases).

Z3 can be built using [Visual Studio][1], a [Makefile][2], or [CMake][3]. It provides [bindings for several programming languages][4].

See the [release notes](RELEASE_NOTES.md) for notes on various stable releases of Z3.
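As a quick illustration of the kind of constraints Z3 solves, here is a minimal sketch using the Python bindings (the ``z3-solver`` package, covered in the Z3 bindings section below); the specific constraints and the printed model are illustrative only:

```python
from z3 import Int, Solver, sat

x, y = Int('x'), Int('y')
s = Solver()
s.add(x > 2, y < 10, x + 2 * y == 7)
if s.check() == sat:
    # One satisfying assignment, e.g. [y = 0, x = 7]
    print(s.model())
```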
## Build status

| Azure Pipelines | Code Coverage | Open Bugs | Android Build | WASM Build | Windows Build |
| --------------- | ------------- | --------- | ------------- | ---------- | ------------- |
| [![Build Status](https://dev.azure.com/Z3Public/Z3/_apis/build/status/Z3Prover.z3?branchName=master)](https://dev.azure.com/Z3Public/Z3/_build/latest?definitionId=1&branchName=master) | [![CodeCoverage](https://github.com/Z3Prover/z3/actions/workflows/coverage.yml/badge.svg)](https://github.com/Z3Prover/z3/actions/workflows/coverage.yml) | [![Open Issues](https://github.com/Z3Prover/z3/actions/workflows/wip.yml/badge.svg)](https://github.com/Z3Prover/z3/actions/workflows/wip.yml) | [![Android Build](https://github.com/Z3Prover/z3/actions/workflows/android-build.yml/badge.svg)](https://github.com/Z3Prover/z3/actions/workflows/android-build.yml) | [![WASM Build](https://github.com/Z3Prover/z3/actions/workflows/wasm.yml/badge.svg)](https://github.com/Z3Prover/z3/actions/workflows/wasm.yml) | [![Windows](https://github.com/Z3Prover/z3/actions/workflows/Windows.yml/badge.svg)](https://github.com/Z3Prover/z3/actions/workflows/Windows.yml) |

Docker image.

[1]: #building-z3-on-windows-using-visual-studio-command-prompt
[2]: #building-z3-using-make-and-gccclang
[3]: #building-z3-using-cmake
[4]: #z3-bindings

## Building Z3 on Windows using Visual Studio Command Prompt

For 32-bit builds, start with:

```bash
python scripts/mk_make.py
```

or instead, for a 64-bit build:

```bash
python scripts/mk_make.py -x
```

then:

```bash
cd build
nmake
```

Z3 uses C++17. The recommended version of Visual Studio is therefore VS2019.

## Building Z3 using make and GCC/Clang

Execute:

```bash
python scripts/mk_make.py
cd build
make
sudo make install
```

Note that by default ``g++`` is used as the C++ compiler if it is available. If you would prefer to use Clang, change the ``mk_make.py`` invocation to:

```bash
CXX=clang++ CC=clang python scripts/mk_make.py
```

Note that Clang < 3.7 does not support OpenMP.

You can also build Z3 for Windows using Cygwin and the Mingw-w64 cross-compiler. To configure that case correctly, make sure to use Cygwin's own python and not some Windows installation of Python. For a 64-bit build (from Cygwin64), configure Z3's sources with:

```bash
CXX=x86_64-w64-mingw32-g++ CC=x86_64-w64-mingw32-gcc AR=x86_64-w64-mingw32-ar python scripts/mk_make.py
```

A 32-bit build should work similarly (but is untested); the same is true for 32/64-bit builds from within Cygwin32.

By default, it will install the z3 executable at ``PREFIX/bin``, libraries at ``PREFIX/lib``, and include files at ``PREFIX/include``, where the ``PREFIX`` installation prefix is inferred by the ``mk_make.py`` script. It is usually ``/usr`` for most Linux distros, and ``/usr/local`` for FreeBSD and macOS. Use the ``--prefix=`` command line option to change the install prefix. For example:

```bash
python scripts/mk_make.py --prefix=/home/leo
cd build
make
make install
```

To uninstall Z3, use:

```bash
sudo make uninstall
```

To clean Z3 you can delete the build directory and run the ``mk_make.py`` script again.

## Building Z3 using CMake

Z3 has a build system using CMake. Read the [README-CMake.md](README-CMake.md) file for details. It is recommended for most build tasks, except for building OCaml bindings.

## Building Z3 using vcpkg

vcpkg is a full platform package manager. You can easily install z3 with vcpkg.
Execute:

```bash
git clone https://github.com/microsoft/vcpkg.git
./bootstrap-vcpkg.bat # For powershell
./bootstrap-vcpkg.sh  # For bash
./vcpkg install z3
```

## Dependencies

Z3 itself has few dependencies. It uses C++ runtime libraries, including pthreads for multi-threading. It is optionally possible to use GMP for multi-precision integers, but Z3 contains its own self-contained multi-precision functionality. Python is required to build Z3. Building the Java, .NET, OCaml, and Julia APIs requires installing the relevant tool chains.

## Z3 bindings

Z3 has bindings for various programming languages.

### ``.NET``

You can install a nuget package for the latest release of Z3 from [nuget.org](https://www.nuget.org/packages/Microsoft.Z3/).

Use the ``--dotnet`` command line flag with ``mk_make.py`` to enable building these.

See [``examples/dotnet``](examples/dotnet) for examples.

### ``C``

These are always enabled.

See [``examples/c``](examples/c) for examples.

### ``C++``

These are always enabled.

See [``examples/c++``](examples/c++) for examples.

### ``Java``

Use the ``--java`` command line flag with ``mk_make.py`` to enable building these.

See [``examples/java``](examples/java) for examples.

### ``OCaml``

Use the ``--ml`` command line flag with ``mk_make.py`` to enable building these.

See [``examples/ml``](examples/ml) for examples.

### ``Python``

You can install the Python wrapper for Z3 for the latest release from pypi using the command:

```bash
pip install z3-solver
```

Use the ``--python`` command line flag with ``mk_make.py`` to enable building these.

Note that it is required on certain platforms that the Python package directory (``site-packages`` on most distributions and ``dist-packages`` on Debian-based distributions) live under the install prefix. If you use a non-standard prefix you can use the ``--pypkgdir`` option to change the Python package directory used for installation. For example:

```bash
python scripts/mk_make.py --prefix=/home/leo --python --pypkgdir=/home/leo/lib/python-2.7/site-packages
```

If you do need to install to a non-standard prefix, a better approach is to use a [Python virtual environment](https://virtualenv.readthedocs.org/en/latest/) and install Z3 there. Python packages also work for Python3. Under Windows, remember to build inside the Visual C++ native command build environment. Note that the ``build/python/z3`` directory should be accessible from where python is used with Z3, and ``libz3.dll`` must be in the path.

```bash
virtualenv venv
source venv/bin/activate
python scripts/mk_make.py --python
cd build
make
make install
# You will find Z3 and the Python bindings installed in the virtual environment
venv/bin/z3 -h
...
python -c 'import z3; print(z3.get_version_string())'
...
```

See [``examples/python``](examples/python) for examples.

### ``Julia``

The Julia package [Z3.jl](https://github.com/ahumenberger/Z3.jl) wraps the C API of Z3. A previous version of it wrapped the C++ API. Information about updating and building the Julia bindings can be found in [src/api/julia](src/api/julia).

### ``Web Assembly`` / ``TypeScript`` / ``JavaScript``

A WebAssembly build with associated TypeScript typings is published on npm as [z3-solver](https://www.npmjs.com/package/z3-solver). Information about building these bindings can be found in [src/api/js](src/api/js).

### Smalltalk (``Pharo`` / ``Smalltalk/X``)

Project [MachineArithmetic](https://github.com/shingarov/MachineArithmetic) provides a Smalltalk interface to Z3's C API.
For more information, see [MachineArithmetic/README.md](https://github.com/shingarov/MachineArithmetic/blob/pure-z3/MachineArithmetic/README.md).

## System Overview

![System Diagram](https://github.com/Z3Prover/doc/blob/master/programmingz3/images/Z3Overall.jpg)

## Interfaces

* Default input format is [SMTLIB2](http://smtlib.cs.uiowa.edu)
* Other native foreign function interfaces:
  * [C++ API](https://z3prover.github.io/api/html/namespacez3.html)
  * [.NET API](https://z3prover.github.io/api/html/namespace_microsoft_1_1_z3.html)
  * [Java API](https://z3prover.github.io/api/html/namespacecom_1_1microsoft_1_1z3.html)
  * [Python API](https://z3prover.github.io/api/html/namespacez3py.html) (also available in [pydoc format](https://z3prover.github.io/api/html/z3.html))
  * [Rust](https://github.com/prove-rs/z3.rs)
  * C
  * OCaml
  * [Julia](https://github.com/ahumenberger/Z3.jl)
  * [Smalltalk](https://github.com/shingarov/MachineArithmetic/blob/pure-z3/MachineArithmetic/README.md) (supports Pharo and Smalltalk/X)

## Power Tools

* The [Axiom Profiler](https://github.com/viperproject/axiom-profiler-2), currently developed by ETH Zurich

z3-z3-4.13.3/RELEASE_NOTES.md

RELEASE NOTES

Version 4.next
================
- Planned features
  - sat.euf - a new CDCL core for SMT queries. It extends the SAT engine with theory solver plugins.
    The current state is unstable. It lacks efficient E-matching.
  - polysat - native word-level bit-vector solving.
  - introduction of simple induction lemmas to handle a limited repertoire of induction proofs.

Version 4.13.3
==============
- Fixes, including #7363
- Fix paths to Java binaries in release
- Remove internal build names from pypi wheels

Version 4.13.2
==============
- Performance regression fix. #7404

Version 4.13.1
==============
- Single-sample cell projection in nlsat was designed by Haokun Li and Bican Xia.
- Using the simple-checker together with variable ordering supported by qfnra_tactic was developed by Mengyu Zhao (Linxi) and Shaowei Cai.
  The projection is described in the paper by Haokun Li and Bican Xia, [Solving Satisfiability of Polynomial Formulas By Sample-Cell Projection](https://arxiv.org/abs/2003.00409).
  The code is ported from https://github.com/hybridSMT/hybridSMT.git
- Add API for providing hints to the solver/optimize contexts for which initial values to attempt to use for variables.
  The new API functions are Z3_solver_set_initial_value and Z3_optimize_set_initial_value, respectively. Supply these functions with a Boolean or numeric variable and a value.
  The solver will then attempt to use these values in the initial phase of search. The feature is aimed at resolving nearly similar problems, or problems with a predicted model, and the intent is that restarting the solver based on a near solution can avoid pruning the space of constraints that are initially infeasible.
  The SMTLIB front-end contains the new command (set-initial-value var value). For example,
  ```
  (declare-const x Int)
  (set-initial-value x 10)
  (push)
  (assert (> x 0))
  (check-sat)
  (get-model)
  ```
  produces a model where x = 10. We use (push) to ensure that z3 doesn't run a specialized pre-processor that eliminates x, which would render the initialization without effect.
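  The same hint mechanism can be used programmatically. A minimal sketch from Python, assuming the `z3-solver` package (>= 4.13.1) exposes a `Solver.set_initial_value` method mirroring `Z3_solver_set_initial_value`:

  ```python
  # Value-hint sketch; assumes Solver.set_initial_value is available in z3py,
  # mirroring the Z3_solver_set_initial_value C API function.
  from z3 import Int, Solver, sat

  x = Int('x')
  s = Solver()
  s.set_initial_value(x, 10)   # hint: try x = 10 in the initial search phase
  s.add(x > 0)

  if s.check() == sat:
      print(s.model())         # expected to report x = 10
  ```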
Version 4.13.0
==============
- add ARM64 wheels for Python, thanks to Steven Moy, smoy

Version 4.12.6
==============
- remove expensive rewrite that coalesces adjacent stores
- improved Java use of reference queues thanks to Thomas Haas #7131
- fixes to conditional import of python library thanks to Cal Jacobson #7116
- include universe for constants that get removed during pre-processing #7121
- code improvements, Bruce Mitchener #7119
- fix nested callback handling for user propagators
- include ARM64 binaries in distribution
- added Julia API, thanks to Yisu Remy Yang #7108

Version 4.12.5
==============
- Fixes to pypi setup and build for MacOS distributions
- A new theory solver "int-blast" enabled by using:
  - sat.smt=true smt.bv.solver=2
  - It solves a few bit-vector problems not handled by bit-blasting, especially if the bit-widths are large.
  - It is based on encoding bit-vector constraints to non-linear integer arithmetic.
- Optimizations to the arithmetic solver. Description: https://github.com/Z3Prover/doc/tree/master/arithmetic

Version 4.12.4
==============
- Re-release fixing a few issues with 4.12:
  - The Python dependency on importlib.resources vs importlib_resources broke automatic pypi installations. Supposedly fixed by conditioning the dependency on Python 3.9, where the feature is built in.
  - Missing release of arm64 for Ubuntu.
  - Futile attempt to streamline adding a readme.md file as part of the Nuget distribution. Nuget.org now requires a readme file. I was able to integrate the readme with the cmake build, but the cross-platform repackage in scripts/mk_nuget_task.py does not ingest a similar readme file with the CI pipelines.

Version 4.12.3
==============
- Alpha support for polymorphism.
  - SMTLIB3-ish, C, Python
  It adds the new command `(declare-type-var A)` that declares a symbol (in this case `A`) globally as a polymorphic type variable. The C API contains a new function `Z3_mk_type_variable` and a new enumeration case `Z3_TYPE_VAR` as a kind associated with sorts. All occurrences of `A` are treated as type variables. A function declaration whose signature uses `A` is treated as a shorthand for declarations of all functions that use instances of `A`. Assertions that use type variables are shorthands for assertions covering all instantiations.
- Various (ongoing) performance fixes and improvements to smt.arith.solver=6
- A working version of the solver.proof.trim=true option. Proof logs created when using sat.smt=true may be trimmed by running z3 on the generated proof log using the option solver.proof.trim=true.
- Optimizations to LIA and NIA (linear integer arithmetic and non-linear integer (and real) arithmetic reasoning).
  smt.arith.solver=6 is the default for most use cases. It trails smt.arith.solver=2 in some scenarios and the gap has been either removed or reduced.
  smt.arith.solver=6 is complete for integrations of non-linear real arithmetic and theories; smt.arith.solver=2 is not.
- qel: Light quantifier elimination based on term graphs (egraphs), and corresponding Model Based Projection for arrays and ADTs. Used by Spacer and QSAT.
- added real-closed field features to the C API, exposed more RCF over the OCaml API
- fixes to FP

Version 4.12.2
==============
- remove MSF (Microsoft Solver Foundation) plugin
- updated the propagate-ineqs tactic, implementing it as a simplifier, bound_simplifier. It now eliminates occurrences of "mod" operators when bounds information implies that the modulus is redundant.
This tactic is useful for benchmarks created by converting bit-vector semantics to integer reasoning. - add API function Z3_mk_real_int64 to take two int64 as arguments. The Z3_mk_real function takes integers. - Add _simplifiers_ as optional incremental pre-processing to solvers. They are exposed over the SMTLIB API using the command [`set-simplifier`](https://microsoft.github.io/z3guide/docs/strategies/simplifiers). Simplifiers are similar to tactics, but they operate on solver state that can be incrementally updated. The exposed simplifiers cover all the pre-processing techniques used internally with some additional simplifiers, such as `solve-eqs` and `elim-predicates` that go beyond incremental pre-processing used internally. The advantage of using `solve-eqs` during pre-processing can be significant. Incremental pre-processing simplification using `solve-eqs` and other simplifiers that change interpretations was not possible before. - Optimize added to JS API, thanks to gbagan - SMTLIB2 proposal for bit-vector overflow predicates added, thanks to aehyvari - bug fixes, thanks to Clemens Eisenhofer, hgvk94, Lev Nachmanson, and others Version 4.12.1 ============== - change macos build to use explicit reference to Macos version 11. Hosted builds are migrating to macos-12 and it broke a user Issue #6539. Version 4.12.0 ============== - add clause logging API. - The purpose of logging API and self-checking is to enable an array of use cases. - proof mining (what instantiations did Z3 use)? - A refresh of the AxiomProfiler could use the logging API. The (brittle) trace feature should be deprecated. - debugging - a built-in self certifier implements a custom proof checker for the format used by the new solver (sat.euf=true). - other potential options: - integration into certified tool chains - interpolation - Z3_register_on_clause (also exposed over C++, Python and .Net) - it applies to z3's main CDCL(T) core and a new CDCL(T) core (sat.euf=true). - The added API function allows to register a callback for when clauses are inferred. More precisely, when clauses are assumed (as part of input), deleted, or deduced. Clauses that are deduced by the CDCL SAT engine using standard inferences are marked as 'rup'. Clauses that are deduced by theories are marked by default by 'smt', and when more detailed information is available with proof hints or proof objects. Instantiations are considered useful to track so they are logged using terms of the form (inst (not (forall (x) body)) body[t/x] (bind t)), where 'inst' is a name of a function that produces a proof term representing the instantiation. - add options for proof logging, trimming, and checking for the new core. - sat.smt.proof (symbol) add SMT proof to file (default: ) - sat.smt.proof.check (bool) check SMT proof while it is created (default: false) - it applies a custom self-validator. The self-validator comprises of several small checkers and represent a best-effort validation mechanism. If there are no custom validators associated with inferences, or the custom validators fail to certify inferences, the self-validator falls back to invoking z3 (SMT) solving on the lemma. - euf - propagations and conflicts from congruence closure (theory of equality and uninterpreted functions) are checked based on a proof format that tracks uses of congruence closure and equalities. It only performs union find operations. - tseitin - clausification steps are checked for Boolean operators. 
- farkas, bound, implies_eq - arithmetic inferences that can be justified using a combination of Farkas lemma and cuts are checked. Note: the arithmetic solver may produce proof hints that the proof checker cannot check. It is mainly a limitation of the arithmetic solver not pulling relevant information. Ensuring a tight coupling with proof hints and the validator capabilities is open ended future work and good material for theses. - bit-vector inferences - are treated as trusted (there is no validation, it always blindly succeeds) - arrays, datatypes - there is no custom validation for other theories at present. Lemmas are validated using SMT. - sat.smt.proof.check_rup (bool) apply forward RUP proof checking (default: true) - this option can incur significant runtime overhead. Effective proof checking relies on first trimming proofs into a format where dependencies are tracked and then checking relevant inferences. Turn this option off if you just want to check theory inferences. - add options to validate proofs offline. It applies to proofs saved when sat.smt.proof is set to a valid file name. - solver.proof.check (bool) check proof logs (default: true) - the option sat.smt.proof_check_rup can be used to control what is checked - solver.proof.save (bool) save proof log into a proof object that can be extracted using (get-proof) (default: false) - experimental: saves a proof log into a term - solver.proof.trim (bool) trim the offline proof and print the trimmed proof to the console - experimental: performs DRUP trimming to reduce the set of hypotheses and inferences relevant to derive the empty clause. - JS support for Arrays, thanks to Walden Yan - More portable memory allocation, thanks to Nuno Lopes (avoid custom handling to calculate memory usage) - clause logging and proofs: many open-ended directions. Many directions and functionality features remain in an open-ended state, subject to fixes, improvements, and contributions. We list a few of them here: - comprehensive efficient self-validators for arithmetic, and other theories - an efficient proof checker when several theory solvers cooperate in a propagation or conflict. The theory combination case is currently delegated to the smt solver. The proper setup for integrating theory lemmas is in principle not complicated, but the implementation requires some changes. - external efficient proof validators (based on certified tool chains) can be integrated over the API. - dampening repeated clauses: A side-effect of conflict resolution is to log theory lemmas. It often happens that the theory lemma becomes the conflict clause, that is then logged as rup. Thus, two clauses are logged. - support for online trim so that proofs generated using clause logging can be used for SPACER - SPACER also would benefit from more robust proof hints for arithmetic lemmas (bounds and implied equalities are sometimes not logged correctly) - integration into axiom profiling through online and/or offline interfaces. - an online interface attaches a callback with a running solver. This is available. - an offline interface saves a clause proof to a file (currently just supported for sat.euf) and then reads the file in a separate process The separate process attaches a callback on inferred clauses. This is currently not available but a relatively small feature. - more detailed proof hints for the legacy solver clause logger. Other than quantifier instantiations, no detailed information is retained for theory clauses. 
- integration of pre-processing proofs with logging proofs. There is currently no supported bridge to create a end-to-end proof objects. - experimental API for accessing E-graphs. Exposed over Python. This API should be considered temporary and subject to be changed depending on use cases or removed. The functions are `Z3_solver_congruence_root`, `Z3_solver_congruence_next`. Version 4.11.2 ============== - add error handling to fromString method in JavaScript - fix regression in default parameters for CDCL, thanks to Nuno Lopes - fix model evaluation bugs for as-array nested under functions (data-type constructors) - add rewrite simplifications for datatypes with a single constructor - add "Global Guidance" capability to SPACER, thanks to Arie Gurfinkel and Hari Gorvind. The commit logs related to Global Guidance contain detailed information. - change proof logging format for the new core to use SMTLIB commands. The format was so far an extension of DRAT used by SAT solvers, but not well compatible with SMT format that is extensible. The resulting format is a mild extension of SMTLIB with three extra commands assume, learn, del. They track input clauses, generated clauses and deleted clauses. They are optionally augmented by proof hints. Two proof hints are used in the current version: "rup" and "farkas". "rup" is used when the generated clause can be justified by reverse unit propagation. "farkas" is used when the clause can be justified by a combination of Farkas cutting planes. There is a built-in proof checker for the format. Quantifier instantiations are also tracked as proof hints. Other proof hints are to be added as the feature set is tested and developed. The fallback, built-in, self-checker uses z3 to check that the generated clause is a consequence. Note that this is generally insufficient as generated clauses are in principle required to only be satisfiability preserving. Proof checking and transformation operations is overall open ended. The log for the first commit introducing this change contains further information on the format. - fix to re-entrancy bug in user propagator (thanks to Clemens Eisenhofer). - handle _toExpr for quantified formulas in JS bindings Version 4.11.1 ============== - skipped Version 4.11.0 ============== - remove `Z3_bool`, `Z3_TRUE`, `Z3_FALSE` from the API. Use `bool`, `true`, `false` instead. - z3++.h no longer includes `` as it did not use it. - add solver.axioms2files - prints negated theory axioms to files. Each file should be unsat - add solver.lemmas2console - prints lemmas to the console. - remove option smt.arith.dump_lemmas. It is replaced by solver.axioms2files - add option smt.bv.reduce_size. - it allows to apply incremental pre-processing of bit-vectors by identifying ranges that are known to be constant. This rewrite is beneficial, for instance, when bit-vectors are constrained to have many high-level bits set to 0. - add feature to model-based projection for arithmetic to handle integer division. - add fromString method to JavaScript solver object. Version 4.10.2 ============== - fix regression #6194. It broke mod/rem/div reasoning. - fix user propagator scope management for equality callbacks. Version 4.10.1 ============== - fix implementation of mk_fresh in user propagator for Python API Version 4.10.0 ============== - Added API Z3_enable_concurrent_dec_ref to be set by interfaces that use concurrent GC to manage reference counts. 
This feature is integrated with the OCaml bindings and fixes a regression introduced when OCaml transitioned to concurrent GC. Use of this feature for .Net and Java bindings is not integrated for this release. They use external queues that are unnecessarily complicated. - Added pre-declared abstract datatype declarations to the context so that Z3_eval_smtlib2_string works with List examples. - Fixed Java linking for MacOS Arm64. - Added missing callback handlers in tactics for user-propagator, Thanks to Clemens Eisenhofer - Tuning to Grobner arithmetic reasoning for smt.arith.solver=6 (currently the default in most cases). The check for consistency modulo multiplication was updated in the following way: - polynomial equalities are extracted from Simplex tableau rows using a cone of influence algorithm. Rows where the basic variables were unbounded were previously included. Following the legacy implementation such rows are not included when building polynomial equations. - equations are pre-solved if they are linear and can be split into two groups one containing a single variable that has a lower (upper) bound, the other with more than two variables with upper (lower) bounds. This avoids losing bounds information during completion. - After (partial) completion, perform constant propagation for equalities of the form x = 0 - After (partial) completion, perform factorization for factors of the form x*y*p = 0 where x, are variables, p is linear. - Added support for declaring algebraic datatypes from the C++ interface. Version 4.9.1 ============= - Bugfix release to ensure npm package works Version 4.9.0 ============= - Native M1 (Mac ARM64) binaries and pypi distribution. - thanks to Isabel Garcia Contreras and Arie Gurfinkel for testing and fixes - API for incremental parsing of assertions. A description of the feature is given by example here: https://github.com/Z3Prover/z3/commit/815518dc026e900392bf0d08ed859e5ec42d1e43 It also allows incrementality at the level of adding assertions to the solver object. - Fold/map for sequences: https://microsoft.github.io/z3guide/docs/guide/Sequences#map-and-fold At this point these functions are only exposed over the SMTLIB2 interface (and not programmatic API) maxdiff/mindiff on arrays are more likely to be deprecated - User Propagator: - Add functions and callbacks for external control over branching thanks to Clemens Eisenhofer - A functioning dotnet API for the User Propagator https://github.com/Z3Prover/z3/blob/master/src/api/dotnet/UserPropagator.cs - Java Script API - higher level object wrappers are available thanks to Kevin Gibbons and Olaf Tomalka - Totalizers and RC2 - The MaxSAT engine now allows to run RC2 with totalizer encoding. Totalizers are on by default as preliminary tests suggest this solves already 10% more problems on standard benchmarks. The option opt.rc2.totalizer (which by default is true) is used to control whether to use totalizer encoding or built-in cardinality constraints. The default engine is still maxres, so you have to set opt.maxsat_engine=rc2 to enable the rc2 option at this point. The engines maxres-bin and rc2bin are experimental should not be used (they are inferior to default options). - Incremental constraints during optimization set option opt.incremental = true - The interface `Z3_optimize_register_model_eh` allows to monitor incremental results during optimization. It is now possible to also add constraints to the optimization context during search. 
You have to set the option opt.incremental=true to be able to add constraints. The option disables some pre-processing functionality that removes variables from the constraints. Version 4.8.17 ============== - fix breaking bug in python interface for user propagator pop - integrate fixes to z3str3 - initial support for nested algebraic datatypes with sequences - initiate map/fold operators on sequences - full integration for next releases - initiate maxdiff/mindiff on arrays - full integration for next releases Examples: ``` (declare-sort Expr) (declare-sort Var) (declare-datatypes ((Stmt 0)) (((Assignment (lval Var) (rval Expr)) (If (cond Expr) (th Stmt) (el Stmt)) (Seq (stmts (Seq Stmt)))))) (declare-const s Stmt) (declare-const t Stmt) (assert ((_ is Seq) t)) (assert ((_ is Seq) s)) (assert (= s (seq.nth (stmts t) 2))) (assert (>= (seq.len (stmts s)) 5)) (check-sat) (get-model) (assert (= s (Seq (seq.unit s)))) (check-sat) ``` Version 4.8.16 ============== - initial support for Darwin Arm64 (for M1, M2, .. users) thanks to zwimer and Anja Petkovi'c Komel for updates. Java is not yet supported, pypi native arm64 distributions are not yet supported. cmake dependency added to enable users to build for not-yet-supported platforms directly; specifically M1 seems to come up. - added functionality to user propagator decisions. Thanks to Clemens Eisenhofer. - added options for rc2 and maxres-bin to maxsat (note that there was no real difference measured from maxres on MaxSAT unweighted so default option is unchanged) - improved search for mutex constraints (at-most-1 constraints) among soft constraints for maxsat derived from approach used in rc2 sample. - multiple merges Version 4.8.15 ============== - elaborate user propagator API. Change id based scheme to expressions - includes a Web Assembly ffi API thanks to Kevin Gibbons Version 4.8.14 ============== - fixes Antimirov derivatives for intersections and unions required required for solving non-emptiness constraints. - includes x86 dll in nuget package for Windows. - exposes additional user propagator functionality Version 4.8.13 ============== The release integrates various bug fixes and tuning. Version 4.8.12 ============== Release provided to fix git tag discrepancy issues with 4.8.11 Version 4.8.11 ============== - self-contained character theory, direct support for UTF8, Unicode character sets. Characters are by default unicode with an 18 bit range. - support for incremental any-time MaxSAT using the option opt.enable_lns. The API allows registering a callback function that is invoked on each incremental improvement to objectives. Version 4.8.10 ============== - rewritten arithmetic solver replacing legacy arithmetic solver and on by default Version 4.8.9 ============= - New features - significant improvements to regular expression solving - expose user theory plugin. It is a leaner user theory plugin that was once available. It allows for registering callbacks that react to when bit-vector and Boolean variables receive fixed values. - Bug fixes - many - Notes - the new arithmetic theory is turned on by default. It _does_ introduce regressions on several scenarios, but has its own advantages. Users can turn on the old solver by setting smt.arith.solver=2. Depending on feedback, we may turn toggle this default setting again back to smt.arith.solver=2. Version 4.8.8 ============= - New features - rewritten NIA (non-linear integer arithmetic) core solver It is enabled in selected theories as default. 
The legacy arithmetic solver remains the overall default in this release as the rewritten solver shows regressions (on mainly NRA problems). - recursive function representation without hoisting ite expressions. Issue #2601 - model-based interpolation for quantifier-free UF, arithmetic - Julia bindings over the C++ API, thanks to ahumenberger - Bug fixes - numerous, many based on extensive fuzz testing. Thanks to 5hadowblad3, muchang, numairmansur, rainoftime, wintered - Notes - recursive functions are unfolded with separate increments based on unsat core analysis of blocking literals that are separate for different recursive functions. - the seq (string) solver has been revised in several ways and likely shows some regressions in this release. Version 4.8.7 ============= - New features - setting parameter on solver over the API by solver.smtlib2_log= enables tracing calls into the solver as SMTLIB2 commands. It traces, assert, push, pop, check_sat, get_consequences. - Notes - various bug fixes - remove model_compress. Use model.compact - print weights with quantifiers when weight is != 1 Version 4.8.6 ============= - Notes - various bug fixes - built in support for PIP, thanks to Audrey Dutcher - VS compilation mode including misc flags for managed packages Version 4.8.5 ============= - Notes - various bug fixes Version 4.8.4 ============= - Notes - fixes bugs - a substantial update to how the seq theory solver handles regular expressions. Other performance improvements to the seq solver. - Managed .NET DLLs include dotnet standard 1.4 on supported platforms. - Windows Managed DLLs are strong signed in the released binaries. Version 4.8.3 ============= - New features - Native handling of recursive function definitions, thanks to Simon Cruanes - PB rounding based option for conflict resolution when reasoning about PB constraints. - Access to numeral constants as a double from the native API. - Notes - fixes several bugs discovered since the 4.8.1 release. Version 4.8.2 ============= - Post-Release. Version 4.8.1 ============= - Release. Bug-fix for 4.8.0 Version 4.8.0 ============= - New requirements: - A breaking change to the API is that parsers for SMT-LIB2 formulas return a vector of formulas as opposed to a conjunction of formulas. The vector of formulas correspond to the set of "assert" instructions in the SMT-LIB input. - New features - A parallel mode is available for select theories, including QF_BV. By setting parallel.enable=true Z3 will spawn a number of worker threads proportional to the number of available CPU cores to apply cube and conquer solving on the goal. - The SAT solver by default handle cardinality and PB constraints using a custom plugin that operates directly on cardinality and PB constraints. - A "cube" interface is exposed over the solver API. - Model conversion is first class over the textual API, such that subgoals created from running a solver can be passed in text files and a model for the original formula can be recreated from the result. - This has also led to changes in how models are tracked over tactic subgoals. The API for extracting models from apply_result have been replaced. - An optional mode handles xor constraints using a custom xor propagator. It is off by default and its value not demonstrated. - The SAT solver includes new inprocessing techniques that are available during simplification. It performs asymmetric tautology elimination by default, and one can turn on more powerful inprocessing techniques (known as ACCE, ABCE, CCE). 
Asymmetric branching also uses features introduced in Lingeling by exploiting binary implication graphs. Use sat.acce=true to enable the full repertoire of inprocessing methods. By default, clauses that are "eliminated" by acce are tagged as lemmas (redundant) and are garbage collected if their glue level is high. - Substantial overhaul of the spacer horn clause engine. - Added basic features to support Lambda bindings. - Added model compression to eliminate local function definitions in models when inlining them does not incur substantial overhead. The old behavior, where models are left uncompressed can be replayed by setting the top-level parameter model_compress=false. - Integration of a new solver for linear integer arithmetic and mixed linear integer arithmetic by Lev Nachmanson. It incorporates several improvements to QF_LIA solving based on . using a better LP engine, which is already the foundation for QF_LRA . including cuts based on Hermite Normal Form (thanks to approaches described in "cuts from proofs" and "cutting the mix"). . extracting integer solutions from LP solutions by tightening bounds selectively. We use a generalization of Bromberger and Weidenbach that allows avoiding selected bounds tightenings (https://easychair.org/publications/paper/qGfG). It solves significantly more problems in the QF_LIA category and may at this point also be the best solver for your problem as well. The new solver is enabled only for select SMT-LIB logics. These include QF_LIA, QF_IDL, and QF_UFLIA. Other theories (still) use the legacy solver for arithmetic. You can enable the new solver by setting the parameter smt.arith.solver=6 to give it a spin. - Removed features: - interpolation API - duality engine for constrained Horn clauses. - pdr engine for constrained Horn clauses. The engine's functionality has been folded into spacer as one of optional strategies. - long deprecated API functions have been removed from z3_api.h Version 4.7.1 ============= - New requirements: - uses stdbool and stdint as part of z3. - New features: - none - Removed features: - none - Notes: This is a minor release prior to a set of planned major updates. It uses minor version 7 to indicate that the use of stdbool and stdint are breaking changes to consumers of the C-based API. Version 4.6.0 ============= - New requirements: - C++11 capable compiler to build Z3. - C++ API now requires C++11 or newer. - New features (including): - A new string solver from University of Waterloo - A new linear real arithmetic solver - Changed behavior for optimization commands from the SMT2 command-line interface. Objective values are no longer printed by default. They can be retrieved by issuing the command (get-objectives). Pareto front objectives are accessed by issuing multiple (check-sat) calls until it returns unsat. - Removed features: - Removed support for SMT-LIB 1.x Version 4.5.0 ============= - New features: - New theories of strings and sequences. - Consequence finding API "get-consequences" to compute set of consequences modulo hard constraints and set of assumptions. Optimized implementations provided for finite domains (QF_FD) and for most SMT logics. - CMake build system (thanks @delcypher). - New API functions, including accessing assertions, parsing SMT-LIB benchmarks. - Updated and improved OCaml API (thanks @martin-neuhaeusser). - Updated and improved Java API (thanks @cheshire). - New resource limit facilities to avoid non-deterministic timeout behaviour. 
You can enable it from the command-line using the switch rlimit=. - New bit-vector simplification and ackermannization tactics (thanks @MikolasJanota, @nunoplopes). - QSAT: a new solver for satisfiability of quantified arithmetic formulas. See: Bjorner, Janota: Playing with Quantified Satisfaction, LPAR 2016. This is the new default solver for logics LIA, LRA, NRA. It furthermore can be applied as a tactic on quantified formulas using algebraic data-types (but excluding selector sub-terms because Z3 does not specify the semantics of applying a selector to a non-matching constructor term). - A specialized logic QF_FD and associated incremental solver (that supports push/pop). The QF_FD domain comprises of bit-vectors, enumeration data-types used only in equalities, and bounded integers: Integers used in QF_FD problems have to be constrained by a finite bound. - Queries in the fixedpoint engine are now function symbols and not formulas with free variables. This makes the association of free variables in the answers unambiguous. To emulate queries over compound formulas, introduce a fresh predicate whose arguments are the relevant free variables in the formula and add a rule that uses the fresh predicate in the head and formula in the body. - Minimization of unsat cores is available as an option for the SAT and SMT cores. By setting smt.core.minimize=true resp. sat.core.minimize=true cores produced by these modules are minimized. - A multitude of bugs has been fixed. Version 4.4.1 ============= - This release marks the transition to the new GitHub fork & pull model; the unstable and contrib branches will be retired with all new contributions going into the master branch directly. - A multitude of bugs has been fixed. - New Feature: Support for optimization queries. The SMT-LIB2 command language is augmented by three commands (maximize ), (minimize [:weight ] [:id ]). The programmatic API also contains a dedicated context for solving optimization queries. The TACAS 2015 tool paper by Bjorner, Dung and Fleckenstein describes additional details and the online tutorial on http://rise4fun.com/z3opt illustrates some uses. Version 4.4.0 ============= - New feature: Support for the theory of floating-point numbers. This comes in the form of logics (QF_FP and QF_FPBV), tactics (qffp and qffpbv), as well as a theory plugin that allows theory combinations. Z3 supports the official SMT theory definition of FP (see http://smtlib.cs.uiowa.edu/theories/FloatingPoint.smt2) in SMT2 files, as well as all APIs. - New feature: Stochastic local search engine for bit-vector formulas (see the qfbv-sls tactic). See also: Froehlich, Biere, Wintersteiger, Hamadi: Stochastic Local Search for Satisfiability Modulo Theories, AAAI 2015. - Upgrade: This release includes a brand new OCaml/ML API that is much better integrated with the build system, and hopefully also easier to use than the previous one. - Fixed various bugs reported by Marc Brockschmidt, Venkatesh-Prasad Ranganath, Enric Carbonell, Morgan Deters, Tom Ball, Malte Schwerhoff, Amir Ebrahimi, Codeplex users rsas, clockish, Heizmann, susmitj, steimann, and Stackoverflow users user297886. Version 4.3.2 ============= - Added preliminary support for the theory of floating point numbers (tactics qffpa, qffpabv, and logics QF_FPA, QF_FPABV). - Added the interpolation features of iZ3, which are now integrated into Z3. - Fixed a multitude of bugs and inconsistencies that were reported to us either in person, by email, or on Codeplex. 
Of those that we do have records of, we would like to express our gratitude to: Vladimir Klebanov, Konrad Jamrozik, Nuno Lopes, Carsten Ruetz, Esteban Pavese, Tomer Weiss, Ilya Mironov, Gabriele Paganelli, Levent Erkok, Fabian Emmes, David Cok, Etienne Kneuss, Arlen Cox, Matt Lewis, Carsten Otto, Paul Jackson, David Monniaux, Markus Rabe, Martin Pluecker, Jasmin Blanchette, Jules Villard, Andrew Gacek, George Karpenkov, Joerg Pfaehler, and Pablo Aledo as well as the following Codeplex users that either reported bugs or took part in discussions: xor88, parno, gario, Bauna, GManNickG, hanwentao, dinu09, fhowar, Cici, chinissai, barak_cohen, tvalentyn, krikunts, sukyoung, daramos, snedunuri, rajtendulkar, sonertari, nick8325, dvitek, amdragon, Beatgodes, dmonniaux, nickolai, DameNingen, mangpo, ttsiodras, blurium, sbrickey, pcodemod, indranilsaha, apanda, hougaardj, yoff, EfForEffort, Ansotegui, scottgw, viorelpreoteasa, idudka, c2855337, gario, jnfoster, omarmrivas, switicus, vosandi, foens, yzwwf, Heizmann, znajem, ilyagri, hougaardj, cliguda, rgrig, 92c849c1ccc707173, edmcman, cipher1024, MichaelvW, hellok, n00b42, ic3guy, Adorf, tvcsantos, zilongwang, Elarnon, immspw, jbridge99, danliew, zverlov, petross, jmh93, dradorf, fniksic, Heyji, cxcfan, henningg, wxlfrank, rvprasad, MovGP0, jackie1015, cowang, ffaghih, sanpra1989, gzchenyin, baitman, xjtulixiangyang, andreis, trucnguyenlam, erizzi, hanhchi, qsp, windypan, vadave, gradanne, SamWot, gsingh93, manjeetdahiya, zverlov, RaLa, and regehr. - New parameter setting infrastructure. Now, it is possible to set parameter for Z3 internal modules. Several parameter names changed. Execute `z3 -p` for the new parameter list. - Added get_version() and get_version_string() to Z3Py - Added support for FreeBSD. Z3 can be compiled on FreeBSD using g++. - Added support for Python 3.x. - Reverted to `(set-option :global-decls false)` as the default. In Z3 4.3.0 and Z3 4.3.1, this option was set to true. Thanks to Julien Henry for reporting this problem. - Added `doc` directory and scripts for automatically generating the API documentation. - Removed 'autoconf' dependency. We do not need to execute 'autoconf' and './configure' anymore to build Z3. - Fixed incorrect result returned by Z3_solver_get_num_scopes. (Thanks to Herman Venter). This bug was introduced in Z3 4.3.0 - Java bindings. To enable them, we must use the option `--java` when executing the `mk_make.py` script. Example: `python scripts/mk_make.py --java` - Fixed crash when parsing incorrect formulas. The crash was introduced when support for "arithmetic coercions" was added in Z3 4.3.0. - Added new option to mk_make to allow users to specify where python bindings (Z3Py) will be installed. (Thanks to Dejan Jovanovic for reporting the problem). - Fixed crash reported at http://z3.codeplex.com/workitem/10 - Removed auxiliary constants created by the nnf tactic from Z3 models. - Fixed problem in the pretty printer. It was not introducing quotes for attribute names such as |foo:10|. - Fixed bug when using assumptions (Thanks to Philippe Suter and Etienne Kneuss) Consider the following example: (assert F) (check-sat a) (check-sat) If 'F' is unsatisfiable independently of the assumption 'a', and the inconsistency can be detected by just performing propagation, Then, version <= 4.3.1 may return unsat sat instead of unsat unsat We say may because 'F' may have other unsatisfiable cores. 
- Fixed bug reported at http://stackoverflow.com/questions/13923316/unprintable-solver-model - Fixed timers on Linux and FreeBSD. - Fixed crash reported at http://z3.codeplex.com/workitem/11. - Fixed bug reported at http://stackoverflow.com/questions/14307692/unknown-when-using-defs - Relax check_logic procedure. Now, it accepts coercions (to_real) automatically introduced by Z3. (Thanks to Paul Jackson). This is a fix for http://z3.codeplex.com/workitem/19. - Fixed http://stackoverflow.com/questions/14524316/z3-4-3-get-complete-model. - Fixed bugs in the C++ API (Thanks to Andrey Kupriyanov). - Fixed bug reported at http://z3.codeplex.com/workitem/23 (Thanks to Paul Jackson). - Fixed bug reported at http://stackoverflow.com/questions/15226944/segmentation-fault-in-z3 (Thanks to Tianhai Liu). Version 4.3.1 ============= - Added support for compiling Z3 using clang++ on Linux and OSX - Added missing compilation option (-D _EXTERNAL_RELEASE) in release mode. Version 4.3.0 ============= - Fixed bug during model construction reported by Heizmann (http://z3.codeplex.com/workitem/5) - Remark: We skipped version 4.2 due to a mistake when releasing 4.1.2. Version 4.1.2 was accidentally tagged as 4.2. Thanks to Claude Marche for reporting this issue. From now on, we are also officially moving to a 3 number naming convention for version numbers. The idea is to have more frequent releases containing bug fixes. - The Z3 codebase was reorganized, we also have a new build system. In all platforms, we need Python 2.7.x installed. On Windows, you can build using Visual Studio Command Prompt. On Linux, OSX, Cygwin, you can build using g++. See README for compilation instructions. - Removed tactic mip. It was based on code that was deleted during the code reorganization. - Fixed compilation problems with clang/llvm. Many thanks to Xi Wang for finding the problem, and suggesting the fix. - Now, Z3 automatically adds arithmetic coercions: to_real and to_int. Option (set-option :int-real-coercions false) disables this feature. If SMTLIB2_COMPLIANT=true in the command line, then :int-real-coercions is also set to false. - SMTLIB2_COMPLIANT is false by default. Use command line option SMTLIB2_COMPLIANT=true to enable it back. - Added "make install" and "make uninstall" to Makefile.in. - Added "make install-z3py" and "make uninstall-z3py" to Makefile.in. - Fixed crash/bug in the simplifier. The crash occurred when option ":sort-sums true" was used. - Added "--with-python=" option to configure script. - Cleaned c++, maxsat, test_mapi examples. - Move RELEASE_NOTES files to source code distribution. - Removed unnecessary files from source code distribution. - Removed unnecessary compilation modes from z3-prover.sln. - Added Xor procedure to Z3Py. - Z3 by default switches to an incremental solver when a Solver object is used to solve many queries. In the this version, we switch back to the tactic framework if the incremental solver returns "unknown". - Allow negative numerals in the SMT 2.0 frontend. That is, Z3 SMT 2.0 parser now accepts numerals such as "-2". It is not needed to encode them as "(- 2)" anymore. The parser still accepts -foo as a symbol. That is, it is *not* a shorthand for (- foo). This feature is disabled when SMTLIB2_COMPLIANT=true is set in the command line. - Now, Z3 can be compiled inside cygwin using gcc. - Fixed bug in the unsat core generation. First source code release (October 2, 2012) =========================================== - Fixed bug in Z3Py. 
The method that builds Z3 applications could crash if one of the arguments have to be "casted" into the correct sort (Thanks to Dennis Yurichev). - Fixed bug in datatype theory (Thanks to Ayrat). - Fixed bug in the definition of MkEmptySet and MkFullSet in the .Net API. - Display warning message and ignore option CASE_SPLIT=3,4 or 5 when auto configuration is enabled (AUTO_CONFIG=true) (Thanks Tobias from StackOverflow). - Made the predicates <, <=, > and >= chainable as defined in the SMT 2.0 standard (Thanks to Matthias Weiler). - Added missing Z3_decl_kind's for datatypes: Z3_OP_DT_CONSTRUCTOR, Z3_OP_DT_ACCESSOR, Z3_OP_DT_RECOGNISER. - Added support for numbers in scientific notation at Z3_ast Z3_mk_numeral(__in Z3_context c, __in Z3_string numeral, __in Z3_sort ty). - New builtin symbols in the arithmetic theory: pi, euler, sin, cos, tan, asin, acos, atan, sinh, cosh, tanh, asinh, acosh, atanh. The first two are constants, and the others are unary functions. These symbols are not available if a SMT 2.0 logic is specified (e.g., QF_LRA, QF_NRA, QF_LIA, etc) because these symbols are not defined in these logics. That is, the new symbols are only available if the logic is not specified. Version 4.1 =========== - New OCAML API (Many thanks to Josh Berdine) - CodeContracts in the .NET API (Many thanks to Francesco Logozzo). Users can now check whether they are using the .NET API correctly using Clousot. - Added option :error-behavior. The default value is continued-execution. Now, users can force the Z3 SMT 2.0 frontend to exit whenever an error is reported. They just have to use the command (set-option :error-behavior immediate-exit). - Fixed bug in term-if-then-else elimination (Thanks to Artur Niewiadomski). - Fixed bug in difference logic detection code (Thanks to Dejan Jovanovic). - Fixed bug in the pseudo-boolean preprocessor (Thanks to Adrien Champion). - Fixed bug in bvsmod preprocessing rules (Thanks to Dejan Jovanovic). - Fixed bug in Tactic tseitin-cnf (Thanks to Georg Hofferek). - Added missing simplification step in nlsat. - Fixed bug in model construction for linear real arithmetic (Thanks to Marcello Bersani). - Fixed bug in preprocessor that eliminated rational powers (e.g., (^ x (/ 1.0 2.0))), the bug affected only problems where the denominator was even (Thanks to Johannes Eriksson). - Fixed bug in the k-th root operation in the algebraic number package. The result was correct, but the resulting polynomial could be incorrectly tagged as minimal and trigger nontermination on comparison operations. (Thanks to Johannes Eriksson). - Fixed bug affecting problems containing patterns with n-ary arithmetic terms such as (p (+ x y 2)). This bug was introduced in Z3 4.0. (Thanks to Paul Jackson). - Fixed crash when running out of memory. - Fixed crash reported by Alex Summers. The crash was happening on scripts that contain quantifiers, and use boolean formulas inside terms. - Fixed crash in the MBQI module (Thanks to Stephan Falke). - Fixed bug in the E-matching engine. It was missing instances of multi-patterns (Thanks Alex Summers). - Fixed bug in Z3Py pretty printer. - The pattern inference module does not generate warning messages by default anymore. This module was responsible for producing messages such as: "WARNING: failed to find a pattern for quantifier (quantifier id: k!199)". The option PI_WARNINGS=true can be used to enable these warning messages. - Added missing return statements in z3++.h (Thanks to Daniel Neider). 
- Removed support for TPTP5 and Simplify input formats. - Removed support for Z3 (low-level) input format. It is still available in the API. - Removed support for "SMT 1.5" input format (aka .smtc files). This was a hybrid input format that was implemented while the SMT 2.0 standard was being designed. Users should move to SMT 2.0 format. Note that SMT 1.0 format is still available. - Made tseitin-cnf tactic more "user friendly". It automatically applies required transformations needed to eliminate operators such as: and, distinct, etc. - Implemented new PSC (principal subresultant coefficient) algorithm. This was one of the bottlenecks in the new nlsat solver/tactic. Version 4.0 =========== Z3 4.0 is a major release. The main new features are: - New C API, and it is backwards compatible, but several methods are marked as deprecated. In the new API, many solvers can be created in the same context. It also includes support for user defined strategies using Tactics. It also exposes a new interface for browsing models. - A thin C++ layer around the C API that illustrates how to leverage reference counting of ast objects. Several examples can be found in the directory 'examples/c++'. - New .NET API together with updated version of the legacy .NET API. The new .NET API supports the new features, Tactics, Solvers, Goals, and integration of with reference counting. Terms and sorts life-times no longer requires a scoping discipline. - Z3Py: Python interface for Z3. It covers all main features in the Z3 API. - NLSAT solver for nonlinear arithmetic. - The PDR algorithm in muZ. - iZ3: an interpolating theorem prover built on top of Z3 (\ref iz3documentation). iZ3 is only available for Windows and Linux. - New logging infrastructure. Z3 logs are used to record every Z3 API call performed by your application. If you find a bug, just the log need to be sent to the Z3 team. The following APIs were removed: Z3_trace_to_file, Z3_trace_to_stderr, Z3_trace_to_stdout, Z3_trace_off. The APIs: Z3_open_log, Z3_append_log and Z3_close_log do not receive a Z3_context anymore. When creating a log, you must invoke Z3_open_log before any other Z3 function call. The new logs are much more precise. However, they still have two limitations. They are not useful for logging applications that use callbacks (e.g., theory plugins) because the log interpreter does not have access to these callbacks. They are not precise for applications that are using multiple threads for processing multiple Z3 contexts. - Z3 (for Linux and OSX) does not depend on GMP anymore. - Z3 1.x backwards compatibility macros are defined in z3_v1.h. If you still use them, you have to explicitly include this file. - Fixed all bugs reported at Stackoverflow. Temporarily disabled features: - User theories cannot be used with the new Solver API yet. Users may still use them with the deprecated solver API. - Parallel Z3 is also disabled in this release. However, we have parallel combinators for creating strategies (See tutorial). The two features above will return in future releases. Here is a list of all deprecated functions. Version 3.2 =========== This is a bug-fix refresh that fixes reported problems with 3.1. - Added support for chainable and right associative attributes. - Fixed model generation for QBVF (aka UFBV) logic. Now, Z3 officially supports the logics BV and UFBV. These are essentially QF_BV and QF_UFBV with quantifiers. - Fixed bug in eval and get-value commands. Thanks to Levent Erkok. 
- Fixed performance bug that was affecting VCC and Slayer. Thanks to Michal Moskal. - Fixed time measurement on Linux. Thanks to Ayrat Khalimov. - Fixed bug in destructive equality resolution (DER=true). - Fixed bug in map operator in the theory of arrays. Thanks to Shaz Quadeer. - Improved OCaml build scripts for Windows. Thanks to Josh Berdine. - Fixed crash in MBQI (when Real variables were used). - Fixed bugs in quantifier elimination. Thanks to Josh Berdine. - Fixed crash when an invalid datatype declaration is used. - Fixed bug in the SMT2 parser. - Fixed crash in quick checker for quantified formulas. Thanks to Swen Jacobs. - Fixed bug in the bvsmod simplifier. Thanks to Trevor Hansen. - New APIs: \c Z3_substitute and \c Z3_substitute_vars. - Fixed crash in MBQI. Thanks to Dejan Jovanovic. Version 3.1 =========== This is a bug-fix refresh that fixes reported problems with 3.0. - Fixed a bug in model generation. Thanks to Arlen Cox and Gordon Fraser. - Fixed a bug in Z3_check_assumptions that prevented it from being used between satisfiable instances. Thanks to Krystof Hoder. - Fixed two bugs in quantifier elimination. Thanks to Josh Berdine. - Fixed bugs in the preprocessor. - Fixed performance bug in MBQI. Thanks to Kathryn Stolee. - Improved strategy for QBVF (aka UFBV) logic. - Added support for negative assumptions in the check-sat command. Version 3.0 =========== - Fully compliant SMT-LIB 2.0 (SMT2) front-end. The old front-end is still available (command line option -smtc). The Z3 Guide describes the new front-end. - Parametric inductive datatypes, and parametric user defined types. - New SAT solver. Z3 can also read dimacs input formulas. - New Bitvector (QF_BV) solver. The new solver is only available when using the new SMT2 front-end. - Major performance improvements. - New preprocessing stack. - Performance improvements for linear and nonlinear arithmetic. The improvements are only available when using the SMT2 front-end. - Added API for parsing SMT2 files. - Fixed bug in AUTO_CONFIG=true. Thanks to Alberto Griggio. - Fixed bug in the Z3 simplifier cache. It was not being reset during backtracking. Thanks to Alberto Griggio. - Fixed many other bugs reported by users. - Improved model-based quantifier instantiation (MBQI). - New solver for Quantified Bitvector Logic (QBVF). - Z3 checks the user specified logic. - TPTP 5 front-end. Version 2.19 ============ - In the SMT-LIB 1.0 frontend, Z3 will only display the model when requested by the user (MODEL=true). - Fixed bug in the variable elimination preprocessor. Thanks to Alberto Griggio. - Fixed bug in the expression strong simplifier. Thanks to Marko. - Fixed bug in the Z3 auto configuration mode. Thanks to Vladimir Klebanov. - Fixed bug when model generation is used in the context of user-defined-theories. Thanks to Philippe Suter. - Fixed bug in quantifier elimination procedure. Thanks to Mikkel Larsen Pedersen. - Improved speed of Z3 lexer for SMT-LIB frontend. - Added a sample under examples/fixedpoints to illustrate using the API for pluggable relations. - Added an API method \c Z3_get_param_value for retrieving a configuration value given a configuration parameter name. Version 2.18 ============ - Z3 has a new mode for solving fixed-point queries. It allows formulating Datalogish queries combined with constraints. Try it online. - Fixed bug that affects the array theory over the API using RELEVANCY=0. Thanks to Josh Berdine. Version 2.17 ============ - Z3 has new model finding capabilities for Quantified SMT formulas. 
The new features are enabled with MBQI=true. (Model Based Quantifier Instantiation). MBQI implements a counter-example based refinement loop, where candidate models are built and checked. When the model checking step fails, it creates new quantifier instantiations. The models are returned as simple functional programs. The new feature is also a decision procedure for many known decidable fragments such as: EPR (Effectively Propositional), Bradley&Manna&Sipma's Array Property Fragment (VMCAI'06), Almost Uninterpreted Fragment (Complete instantiation for quantified SMT formulas, CAV'09), McPeak&Necula's list fragment (CAV'05), QBVF (Quantified Bit-Vector Formulas FMCAD'10), to cite a few. MBQI is useful for checking the consistency of background axiomatizations, synthesizing functions, and building real counterexamples for verification tools. Users can constrain the search space by providing templates for function symbols, and constraints on the size of the universe and range of functions. - Fixed bug in the command (simplify [expr]) SMT-LIB 2.0 frontend. - New model pretty printer. The old style is still available (option MODEL_V2=true). Z3 1.x style is also available (option MODEL_V1=true). - Removed \c ARRAY_PROPERTY option. It is subsumed by MBQI=true. - Z3 uses the (set-logic [name]) to configure itself. - Assumptions can be provided to the \c check-sat command. The command (check-sat [assumptions]) checks the satisfiability of the logical context modulo the given set of assumptions. The assumptions must be Boolean constants or the negation of Boolean constants. When the logical context is unsatisfiable modulo the given assumptions, Z3 will display a subset of the \c assumptions that contributed to the conflict. Lemmas learned during the execution of \c check-sat are preserved. - Added command (echo [string]) to the SMT-LIB 2.0 frontend. - Z3 models explicitly include an interpretation for uninterpreted sorts. The interpretation is presented using the \c define-sort primitive. For example, \code (define-sort S e_1 ... e_n) \endcode states that the interpretation of the uninterpreted sort S is finite, and its universe is composed by values \c e_1, ..., \c e_n. - Options \c WARNING and \c VERBOSE can be set in the SMT-LIB 2.0 frontend using the commands (set-option WARNING ) (set-option VERBOSE ). - Fixed unintentional side-effects in the Z3 pretty printer. Thanks to Swen Jacobs. - Added interpreted constants of the form as-array[f]. The constants are used in models produced by Z3 to encode the interpretation of arrays. The following axiom scheme axiomatizes the new constants: \code (forall (x1 S1) ... (xn Sn) (= (select as-array[f] x1 ... xn) (f x1 ... xn))) \endcode - Fixed bug in the option MACRO_FINDER=true. - Fixed bug in the (eval [expr]) command in the SMT-LIB 2.0 frontend. - Soundness bug in solver for array property fragment. Thanks to Trevor Hansen. Version 2.16 ============ The following bugs are fixed in this release: - Bugs in quantifier elimination. Thanks to Mikkel Larsen Pedersen. - Crash in non-linear arithmetic. Thanks to Trevor Hansen. - Unsoundness in mixed integer-linear version using to_real. Thanks to Hirai. - A crash and bugs in check_assumptions feature. Thanks to Akash Lal and Shaz Qadeer. Version 2.15 ============ The following bugs are fixed in this release: - A bug in the quantifier elimination that affects nested alternating quantifiers that cannot be fully eliminated. - A crash in proof generation. Thanks to Sascha Boehme. 
Version 2.14 ============ The following bugs are fixed in this release: - A crash in arithmetic simplification. Thanks to Trevor Hansen. - An unsoundness bug in the quantifier elimination. It affects the equivalence of answers that are computed in some cases. - Incorrect printing of parameters and other values in SMT-LIB2 mode. Thanks to Tjark Weber. Version 2.13 ============ The following bugs are fixed in this release: - Soundness bug in solver for array property fragment. Thanks to Trevor Hansen. - Soundness bug introduced in macro expansion utilities. Thanks to Wintersteiger. - Incorrect handling of QF_NRA. Thanks to Trevor Hansen. - Mixup between SMT2 and SMT1 pretty printing formats. Thanks to Alvin Cheung and Tjark Weber. Version 2.12 ============ News: - Philippe Suter made a JNI binding available. There is also an existing Python binding by Sascha Boehme. See \ref contrib. The following features are added in this release: - Enable check_assumptions without enclosing push/pop. This resolves the limitation described in \ref sub_release_limitations_2_0. - Expose coefficients used in arithmetical proofs. - Allow quantified theory axioms. The following bugs are fixed in this release: - Fixes to the SMT-LIB 2.0 pretty printing mode. - Detect miss-annotated SMT-LIB benchmarks to avoid crashes when using the wrong solvers. Thanks to Trevor Hansen. - A digression in the managed API from 2.10 when passing null parameters. - Crash/incorrect handling of inequalities over the reals during quantifier elimination. Thanks to Mikkel Larsen Pedersen. - Bug in destructive equality resolution. Thanks to Sascha Boehme. - Bug in initialization for x64_mt executable on SMT benchmarks. Thanks to Alvin Cheung. Version 2.11 ============ The following features are added in this release: - SMT-LIB 2.0 parsing support for (! ..) in quantifiers and (_ ..). - Allow passing strings to function and sort declarations in the .NET Theory builders. - Add a parameter to the proof construct for theory lemmas to indicate which theory provided the lemma. - More detailed proof production in rewrite steps. The following bugs are fixed in this release: - A bug in BV propagation. Thanks to Trevor Hansen. Version 2.10 ============ The following bugs are fixed in this release: - Inconsistent printing of integer and real types from the low level and SMT-LIB pretty printers. Thanks to Sascha Boehme. - Missing relevancy propagation and memory smash in user-theory plugins. Thanks to Stan Rosenberg. Version 2.9 =========== The following bugs are fixed in this release: - Incorrect constant folding of extraction for large bit-vectors. Thanks to Alvin. - Z3 crashed when using patterns that are variables. Thanks to Michael Emmi. - Unsound array property fragment handling of non-integer types. Thanks to Juergen Christ. - The quantifier elimination procedure for data-types has been replaced. Thanks to Josh Berdine. - Refresh 2.9.1: Add missing AssumeEq to the .NET managed API. Thanks to Stan Rosenberg. Version 2.8 =========== The following features have been added: - User theories: The user can add theory solvers that get invoked by Z3's core during search. See also \ref theory_plugin_ex. - SMT2 features: parse smt2 let bindings. The following bugs are fixed in this release: - Incorrect semantics of constant folding for (bvsmod 0 x), where x is positive, incorrect constant folding for bvsdiv, incorrect simplification of bvnor, bvnand, incorrect compilation of bvshl when using a shift amount that evaluates to the length of the bit-vector. 
Thanks to Trevor Hansen and Robert Brummayer. - Incorrect NNF conversion in linear quantifier elimination routines. Thanks to Josh Berdine. - Missing constant folding of extraction for large bit-vectors. Thanks to Alvin. - Missing APIs for bvredand and bvredor. Version 2.7 =========== The following features have been added: - Partial support for SMT-LIB 2.0 format: Added declare-fun, define-fun, declare-sort, define-sort, get-value - Added coercion function to_int and testing function is_int. To coerce from reals to integers and to test whether a real is an integer. The function to_real was already supported. - Added Z3_repeat to create the repetition of bit-vectors. The following bugs are fixed in this release: - Incorrect semantics of constant folding for bvsmod. - Incorrect semantics of constant folding for div/mod. Thanks to Sascha Boehme. - Non-termination problem associated with option LOOKAHEAD=true. It gets set for QF_UF in auto-configuration mode. Thanks to Pierre-Christophe Bué. - Incorrect axioms created for injective functions. Thanks to Sascha Boehme. - Stack overflow during simplification of large nested bit-vector terms. Thanks to David Molnar. - Crash in unsat-core generation when enabling SOLVER=true. Thanks to Lucas Cordeiro. - Unlimited cache growth while simplifying bit-vectors. Thanks to Eric Landers. - Crash when solving array property formulas using non-standard array operators. Thanks to Sascha Boehme. Version 2.6 =========== This release fixes a few bugs. Thanks to Marko Kääramees for reporting a bug in the strong context simplifier and to Josh Berdine. This release also introduces some new preprocessing features: - More efficient destructive equality resolution DER=true. - DISTRIBUTE_FORALL=true (distributes universal quantifiers over conjunctions, this transformation may affect pattern inference). - Rewriter that uses universally quantified equations PRE_DEMODULATOR=true (yes, the option name is not good, we will change it in a future release). - REDUCE_ARGS=true (this transformation is essentially a partial ackermannization for functions where a particular argument is always an interpreted value). - Better support for macro detection (a macro is a universally quantified formula of the form Forall X. F(X) = T[X]). We also change the option name, now it is called MACRO_FINDER=true. - ELIM_QUANTIFIERS=true enables quantifier elimination methods. Previous variants called QUANT_ARITH are deprecated. Version 2.5 =========== This release introduces the following features: - STRONG_CONTEXT_SIMPLIFIER=true allows simplifying sub-formulas to true/false depending on context-dependent information. The approach that we use is described on the Microsoft Z3 forum. - Some parameter values can be updated over the API. This functionality is called Z3_update_param_value in the C API. This is particularly useful for turning the strong context simplifier on and off. It also fixes bugs reported by Enric Rodríguez Carbonell, Nuno Lopes, Josh Berdine, Ethan Jackson, Rob Quigley and Lucas Cordeiro. Version 2.4 =========== This release introduces the following features: - Labeled literals for the SMT-LIB format. The Simplify format has supported labeled formulas to simplify displaying counter-examples. Section \ref smtlib_labels explains how labels are now supported in the SMT-LIB format. - Preliminary support for SMT-LIB2 It fixes the following bugs: - Bug in non-linear arithmetic routines. - Crash observed a class of modular integer arithmetic formulas. 
- Incomplete saturation leading to incorrectly sat labeling. - Crash in the bit-vector procedure when using int2bv and bv2int. Thanks to Michal Moskal, Sascha Boehme and Ethan Jackson. Version 2.3 =========== This release introduces the following features: - F# Quotation utilities. The release contains a new directory 'utils'. It contains utilities built on top of Z3. The main one is support for translating F# quoted expressions into Z3 formulas. - QUANT_ARITH configuration. Complete quantifier-elimination simplification for linear real and linear integer arithmetic. QUANT_ARITH=1 uses Ferrante/Rackhoff for reals and Cooper's method for integers. QUANT_ARITH=2 uses Fourier-Motzkin for reals and the Omega test for integers. It fixes the following bugs: - Incorrect simplification of map over store in the extended array theory. Reported by Catalin Hritcu. - Incomplete handling of equality propagation with constant arrays. Reported by Catalin Hritcu. - Crash in bit-vector theory. - Incorrectness in proof reconstruction for quantifier manipulation. Thanks to Catalin Hritcu, Nikolai Tillmann and Sascha Boehme. Version 2.2 =========== This release fixes minor bugs. It introduces some additional features in the SMT-LIB front-end to make it easier to parse new operators in the theory of arrays. These are described in \ref smtlibext. Version 2.1 =========== This is a bug fix release. Many thanks to Robert Brummayer, Carine Pascal, François Remy, Rajesh K Karmani, Roberto Lublinerman and numerous others for their feedback and bug reports. Version 2.0 =========== - Parallel Z3. Thanks to Christoph Wintersteiger there is a binary supporting running multiple instances of Z3 from different threads, but more interestingly, also making use of multiple cores for a single formula. - Check Assumptions. The binary API exposes a new call #Z3_check_assumptions, which allows passing in additional assumptions while checking for consistency of the already asserted formulas. The API function returns a subset of the assumptions that were used in an unsatisfiable core. It also returns an optional proof object. - Proof Objects. The #Z3_check_assumptions returns a proof object if the configuration flag PROOF_MODE is set to 1 or 2. - Partial support for non-linear arithmetic. The support uses support for computing Groebner bases. It allows solving some, but far from all, formulas using polynomials over the reals. Uses should be aware that the support for non-linear arithmetic (over the reals) is not complete in Z3. - Recursive data-types. The theory of well-founded recursive data-types is supported over the binary APIs. It supports ground satisfiability checking for tuples, enumeration types (scalars), lists and mutually recursive data-types. 
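The ground satisfiability checking for recursive data-types described in the Version 2.0 notes above can be exercised with a small list problem. The snippet below is only an illustrative sketch and uses present-day SMT-LIB 2 syntax (the 2.0 release itself predated the SMT2 front-end); the names \c IntList, \c nil, \c cons, \c hd, \c tl and \c l are hypothetical and chosen for the example. \code (declare-datatypes ((IntList 0)) (((nil) (cons (hd Int) (tl IntList))))) (declare-const l IntList) ; ground constraints over a recursive data-type (assert (not (= l nil))) (assert (= (hd l) 5)) (assert (= (tl l) nil)) (check-sat) ; sat, e.g. l = (cons 5 nil) \endcode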
z3-z3-4.13.3/azure-pipelines.yml000066400000000000000000000242471470205523200163400ustar00rootroot00000000000000 variables: cmakeJulia: '-DZ3_BUILD_JULIA_BINDINGS=True' cmakeJava: '-DZ3_BUILD_JAVA_BINDINGS=True' cmakeNet: '-DZ3_BUILD_DOTNET_BINDINGS=True' cmakePy: '-DZ3_BUILD_PYTHON_BINDINGS=True' cmakeStdArgs: '-DZ3_BUILD_DOTNET_BINDINGS=True -DZ3_BUILD_JAVA_BINDINGS=True -DZ3_BUILD_PYTHON_BINDINGS=True -G "Ninja" ../' asanEnv: 'CXXFLAGS="${CXXFLAGS} -fsanitize=address -fno-omit-frame-pointer" CFLAGS="${CFLAGS} -fsanitize=address -fno-omit-frame-pointer"' ubsanEnv: 'CXXFLAGS="${CXXFLAGS} -fsanitize=undefined" CFLAGS="${CFLAGS} -fsanitize=undefined"' msanEnv: 'CC=clang LDFLAGS="-L../libcxx/libcxx_msan/lib -lc++abi -Wl,-rpath=../libcxx/libcxx_msan/lib" CXX=clang++ CXXFLAGS="${CXXFLAGS} -stdlib=libc++ -fsanitize-memory-track-origins -fsanitize=memory -fPIE -fno-omit-frame-pointer -g -O2" CFLAGS="${CFLAGS} -stdlib=libc -fsanitize=memory -fsanitize-memory-track-origins -fno-omit-frame-pointer -g -O2"' # TBD: # test python bindings # build documentation # Asan, ubsan, msan # Disabled pending clang dependencies for std::unordered_map jobs: - job: "LinuxPythonDebug" displayName: "Ubuntu build - python make - debug" pool: vmImage: "ubuntu-latest" strategy: matrix: MT: cmdLine: 'python scripts/mk_make.py -d --java --dotnet' runRegressions: 'True' ST: cmdLine: './configure --single-threaded' runRegressions: 'False' steps: - script: $(cmdLine) - script: | set -e cd build make -j3 make -j3 examples make -j3 test-z3 cd .. - template: scripts/test-z3.yml - ${{if eq(variables['runRegressions'], 'True')}}: - template: scripts/test-regressions.yml - job: "ManylinuxPythonBuildAmd64" displayName: "Python bindings (manylinux Centos AMD64) build" pool: vmImage: "ubuntu-latest" container: "quay.io/pypa/manylinux2014_x86_64:latest" steps: - script: "/opt/python/cp38-cp38/bin/python -m venv $PWD/env" - script: 'echo "##vso[task.prependpath]$PWD/env/bin"' - script: "pip install build git+https://github.com/rhelmot/auditwheel" - script: "cd src/api/python && python -m build && AUDITWHEEL_PLAT= auditwheel repair --best-plat dist/*.whl && cd ../../.." - script: "pip install ./src/api/python/wheelhouse/*.whl && python - ` to the CMake invocation where # `` is the path to the directory containing this file. # # This file was built for the @CONFIG_FILE_TYPE@. ################################################################################ # Handle dependencies (necessary when compiling the static library) if(NOT @Z3_BUILD_LIBZ3_SHARED@) include(CMakeFindDependencyMacro) # Threads::Threads set(THREADS_PREFER_PTHREAD_FLAG TRUE) find_dependency(Threads) # GMP::GMP if(@Z3_USE_LIB_GMP@) find_dependency(GMP) endif() endif() # Exported targets include("${CMAKE_CURRENT_LIST_DIR}/Z3Targets.cmake") @PACKAGE_INIT@ # Version information set(Z3_VERSION_MAJOR @Z3_VERSION_MAJOR@) set(Z3_VERSION_MINOR @Z3_VERSION_MINOR@) set(Z3_VERSION_PATCH @Z3_VERSION_PATCH@) set(Z3_VERSION_TWEAK @Z3_VERSION_TWEAK@) set(Z3_VERSION_STRING "${Z3_VERSION_MAJOR}.${Z3_VERSION_MINOR}.${Z3_VERSION_PATCH}.${Z3_VERSION_TWEAK}") # NOTE: We can't use `set_and_check()` here because this a list of paths. 
# List of include directories set(Z3_C_INCLUDE_DIRS @PACKAGE_Z3_FIRST_PACKAGE_INCLUDE_DIR@ @PACKAGE_Z3_SECOND_PACKAGE_INCLUDE_DIR@) set(Z3_CXX_INCLUDE_DIRS @PACKAGE_Z3_CXX_PACKAGE_INCLUDE_DIR@ ${Z3_C_INCLUDE_DIRS}) # List of libraries to link against set(Z3_LIBRARIES "z3::libz3") z3-z3-4.13.3/cmake/check_link_atomic.cmake000066400000000000000000000011221470205523200201740ustar00rootroot00000000000000set(ATOMIC_TEST_SOURCE " #include std::atomic x; std::atomic y; std::atomic z; std::atomic w; int main() { ++z; ++y; ++w; return ++x; }") CHECK_CXX_SOURCE_COMPILES("${ATOMIC_TEST_SOURCE}" BUILTIN_ATOMIC) if (NOT BUILTIN_ATOMIC) set(CMAKE_REQUIRED_LIBRARIES atomic) CHECK_CXX_SOURCE_COMPILES("${ATOMIC_TEST_SOURCE}" ATOMICS_REQUIRE_LIBATOMIC) unset(CMAKE_REQUIRED_LIBRARIES) if (ATOMICS_REQUIRE_LIBATOMIC) list(APPEND Z3_DEPENDENT_LIBS atomic) else() message(FATAL_ERROR "Host compiler must support std::atomic!") endif() endif() z3-z3-4.13.3/cmake/cmake_uninstall.cmake.in000066400000000000000000000016561470205523200203400ustar00rootroot00000000000000if(NOT EXISTS "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt") message(FATAL_ERROR "Cannot find install manifest: " "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt") endif() file(READ "@CMAKE_CURRENT_BINARY_DIR@/install_manifest.txt" files) string(REGEX REPLACE "\n" ";" files "${files}") foreach(file ${files}) set(_full_file_path "$ENV{DESTDIR}${file}") message(STATUS "Uninstalling ${_full_file_path}") if(IS_SYMLINK "${_full_file_path}" OR EXISTS "${_full_file_path}") # We could use ``file(REMOVE ...)`` here but then we wouldn't # know if the removal failed. execute_process(COMMAND "@CMAKE_COMMAND@" "-E" "remove" "${_full_file_path}" RESULT_VARIABLE rm_retval ) if(NOT "${rm_retval}" STREQUAL 0) message(FATAL_ERROR "Problem when removing \"${_full_file_path}\"") endif() else() message(STATUS "File \"${_full_file_path}\" does not exist.") endif() endforeach() z3-z3-4.13.3/cmake/compiler_lto.cmake000066400000000000000000000044271470205523200172510ustar00rootroot00000000000000option(Z3_LINK_TIME_OPTIMIZATION "Use link time optimiziation" OFF) if (Z3_LINK_TIME_OPTIMIZATION) message(STATUS "LTO enabled") set(build_types_with_lto "RELEASE" "RELWITHDEBINFO") if (DEFINED CMAKE_CONFIGURATION_TYPES) # Multi configuration generator message(STATUS "Note LTO is only enabled for the following configurations: ${build_types_with_lto}") else() # Single configuration generator string(TOUPPER "${CMAKE_BUILD_TYPE}" _build_type_upper) list(FIND build_types_with_lto "${_build_type_upper}" _index) if ("${_index}" EQUAL -1) message(FATAL_ERROR "Configuration ${CMAKE_BUILD_TYPE} does not support LTO." "You should set Z3_LINK_TIME_OPTIMIZATION to OFF.") endif() endif() set(_lto_compiler_flag "") set(_lto_linker_flag "") if ((CMAKE_CXX_COMPILER_ID MATCHES "Clang") OR (CMAKE_CXX_COMPILER_ID MATCHES "GNU")) set(_lto_compiler_flag "-flto") set(_lto_linker_flag "-flto") elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC") set(_lto_compiler_flag "/GL") set(_lto_linker_flag "/LTCG") else() message(FATAL_ERROR "Can't enable LTO for compiler \"${CMAKE_CXX_COMPILER_ID}\"." "You should set Z3_LINK_TIME_OPTIMIZATION to OFF.") endif() CHECK_CXX_COMPILER_FLAG("${_lto_compiler_flag}" HAS_LTO) if (NOT HAS_LTO) message(FATAL_ERROR "Compiler does not support LTO") endif() foreach (_config ${build_types_with_lto}) # Set flags compiler and linker flags globally rather than using # `Z3_COMPONENT_CXX_FLAGS` and `Z3_DEPENDENT_EXTRA_CXX_LINK_FLAGS` # respectively. 
We need per configuration compiler and linker flags. The # `LINK_FLAGS` property (which we populate with # `Z3_DEPENDENT_EXTRA_CXX_LINK_FLAGS`) doesn't seem to support generator # expressions so we can't do `$<$:${_lto_linker_flag}>`. set(CMAKE_CXX_FLAGS_${_config} "${CMAKE_CXX_FLAGS_${_config}} ${_lto_compiler_flag}") set(CMAKE_EXE_LINKER_FLAGS_${_config} "${CMAKE_EXE_LINKER_FLAGS_${_config}} ${_lto_linker_flag}") set(CMAKE_SHARED_LINKER_FLAGS_${_config} "${CMAKE_SHARED_LINKER_FLAGS_${_config}} ${_lto_linker_flag}") set(CMAKE_STATIC_LINKER_FLAGS_${_config} "${CMAKE_STATIC_LINKER_FLAGS_${_config}} ${_lto_linker_flag}") endforeach() else() message(STATUS "LTO disabled") endif() z3-z3-4.13.3/cmake/compiler_warnings.cmake000066400000000000000000000136041470205523200203000ustar00rootroot00000000000000################################################################################ # Compiler warning flags ################################################################################ # These are passed to relevant compiler provided they are supported set(GCC_AND_CLANG_WARNINGS "-Wall" ) set(GCC_ONLY_WARNINGS "") set(CLANG_ONLY_WARNINGS "") set(MSVC_WARNINGS "/W3") ################################################################################ # Serious warnings ################################################################################ # This declares the flags that are passed to the compiler when # `WARNINGS_AS_ERRORS` is set to `SERIOUS_ONLY`. Only flags that are supported # by the compiler are used. # # In effect this a "whitelist" approach where we explicitly tell the compiler # which warnings we want to be treated as errors. The alternative would be a # "blacklist" approach where we ask the compiler to treat all warnings are # treated as errors but then we explicitly list which warnings which should be # allowed. # # The "whitelist" approach seems simpiler because we can incrementally add # warnings we "think are serious". # TODO: Add more warnings that are considered serious enough that we should # treat them as errors. set(GCC_AND_CLANG_WARNINGS_AS_ERRORS # https://clang.llvm.org/docs/DiagnosticsReference.html#wodr "-Werror=odr" # https://clang.llvm.org/docs/DiagnosticsReference.html#wreturn-type "-Werror=return-type" ) set(GCC_WARNINGS_AS_ERRORS "" ) set(CLANG_WARNINGS_AS_ERRORS # https://clang.llvm.org/docs/DiagnosticsReference.html#wdelete-non-virtual-dtor "-Werror=delete-non-virtual-dtor" # https://clang.llvm.org/docs/DiagnosticsReference.html#woverloaded-virtual "-Werror=overloaded-virtual" # warn the user if a class with virtual functions has a # non-virtual destructor. 
This helps catch hard to # track down memory errors "-Werror=non-virtual-dtor" # warn if a null dereference is detected "-Werror=null-dereference" # warn for potential performance problem casts # "-Werror=cast-align" # warn if float is implicit promoted to double # "-Werror=double-promotion" "-Werror=no-unreachable-code-return" # warn the user if a variable declaration shadows one from a parent context # "-Werror=shadow" # warn for c-style casts # "-Werror=old-style-cast" # warn on sign conversions # "-Werror=sign-conversion" # warn on type conversions that may lose data # "-Werror=conversion" # warn on anything being unused # "-Werror=unused" ) ################################################################################ # Test warning/error flags ################################################################################ set(WARNING_FLAGS_TO_CHECK "") set(WARNING_AS_ERROR_FLAGS_TO_CHECK "") if (CMAKE_CXX_COMPILER_ID MATCHES "GNU") list(APPEND WARNING_FLAGS_TO_CHECK ${GCC_AND_CLANG_WARNINGS}) list(APPEND WARNING_FLAGS_TO_CHECK ${GCC_ONLY_WARNINGS}) list(APPEND WARNING_AS_ERROR_FLAGS_TO_CHECK ${GCC_AND_CLANG_WARNINGS_AS_ERRORS}) list(APPEND WARNING_AS_ERROR_FLAGS_TO_CHECK ${GCC_WARNINGS_AS_ERRORS}) elseif (CMAKE_CXX_COMPILER_ID MATCHES "Clang") list(APPEND WARNING_FLAGS_TO_CHECK ${GCC_AND_CLANG_WARNINGS}) list(APPEND WARNING_FLAGS_TO_CHECK ${CLANG_ONLY_WARNINGS}) list(APPEND WARNING_AS_ERROR_FLAGS_TO_CHECK ${GCC_AND_CLANG_WARNINGS_AS_ERRORS}) list(APPEND WARNING_AS_ERROR_FLAGS_TO_CHECK ${CLANG_WARNINGS_AS_ERRORS}) elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC") list(APPEND WARNING_FLAGS_TO_CHECK ${MSVC_WARNINGS}) # CMake's default flags include /W3 already so remove them if # they already exist. if (CMAKE_CXX_FLAGS MATCHES "/W3") string(REPLACE "/W3" "" _cmake_cxx_flags_remove_w3 "${CMAKE_CXX_FLAGS}") set(CMAKE_CXX_FLAGS "${_cmake_cxx_flags_remove_w3}" CACHE STRING "" FORCE) endif() else() message(AUTHOR_WARNING "Unknown compiler") endif() # Loop through flags and use the ones which the compiler supports foreach (flag ${WARNING_FLAGS_TO_CHECK}) z3_add_cxx_flag("${flag}") endforeach() # TODO: Remove this eventually. # Detect legacy `WARNINGS_AS_ERRORS` boolean option and covert to new # to new option type. get_property( WARNINGS_AS_ERRORS_CACHE_VAR_TYPE CACHE WARNINGS_AS_ERRORS PROPERTY TYPE ) if (WARNINGS_AS_ERRORS_CACHE_VAR_TYPE STREQUAL "BOOL") message(WARNING "Detected legacy WARNINGS_AS_ERRORS option. Upgrading") set(WARNINGS_AS_ERRORS_DEFAULT "${WARNINGS_AS_ERRORS}") # Delete old entry unset(WARNINGS_AS_ERRORS CACHE) else() set(WARNINGS_AS_ERRORS_DEFAULT "SERIOUS_ONLY") endif() set(WARNINGS_AS_ERRORS ${WARNINGS_AS_ERRORS_DEFAULT} CACHE STRING "Treat warnings as errors. ON, OFF, or SERIOUS_ONLY" ) # Set GUI options set_property( CACHE WARNINGS_AS_ERRORS PROPERTY STRINGS "ON;OFF;SERIOUS_ONLY" ) if (WARNINGS_AS_ERRORS STREQUAL "ON") message(STATUS "Treating compiler warnings as errors") if ((CMAKE_CXX_COMPILER_ID MATCHES "Clang") OR (CMAKE_CXX_COMPILER_ID MATCHES "GNU")) list(APPEND Z3_COMPONENT_CXX_FLAGS "-Werror") elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC") list(APPEND Z3_COMPONENT_CXX_FLAGS "/WX") else() message(AUTHOR_WARNING "Unknown compiler") endif() elseif (WARNINGS_AS_ERRORS STREQUAL "SERIOUS_ONLY") message(STATUS "Treating only serious compiler warnings as errors") # Loop through the flags foreach (flag ${WARNING_AS_ERROR_FLAGS_TO_CHECK}) # Add globally because some flags need to be passed at link time. 
z3_add_cxx_flag("${flag}" GLOBAL) endforeach() elseif (WARNINGS_AS_ERRORS STREQUAL "OFF") message(STATUS "Not treating compiler warnings as errors") if (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC") # Warnings as errors is off by default for MSVC so setting this # is not necessary but this duplicates the behaviour of the old # build system. list(APPEND Z3_COMPONENT_CXX_FLAGS "/WX-") endif() else() message(FATAL_ERROR "WARNINGS_AS_ERRORS set to unsupported value \"${WARNINGS_AS_ERRORS}\"" ) endif() z3-z3-4.13.3/cmake/cxx_compiler_flags_overrides.cmake000066400000000000000000000014031470205523200225020ustar00rootroot00000000000000# This file overrides the default compiler flags for CMake's built-in # configurations (CMAKE_BUILD_TYPE). Most compiler flags should not be set here. # The main purpose is to have very fine grained control of the compiler flags. # We only override the defaults for Clang and GCC right now. # CMake's MSVC logic is complicated so for now it's better to just inherit CMake's defaults. if (("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang") OR ("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU")) # Taken from Modules/Compiler/GNU.cmake set(CMAKE_CXX_FLAGS_INIT "") set(CMAKE_CXX_FLAGS_DEBUG_INIT "-g -O0") set(CMAKE_CXX_FLAGS_MINSIZEREL_INIT "-Os -DNDEBUG") set(CMAKE_CXX_FLAGS_RELEASE_INIT "-O3 -DNDEBUG") set(CMAKE_CXX_FLAGS_RELWITHDEBINFO_INIT "-O2 -g -DNDEBUG") endif() z3-z3-4.13.3/cmake/git_utils.cmake000066400000000000000000000203611470205523200165570ustar00rootroot00000000000000# add_git_dir_dependency(GIT_DIR SUCCESS_VAR) # # Adds a configure time dependency on the git directory such that if the HEAD # of the git directory changes CMake will be forced to re-run. This useful # for fetching the current git hash and including it in the build. # # `GIT_DOT_FILE` is the path to the git directory (i.e. the `.git` directory) or # `.git` file used by a git worktree. # `SUCCESS_VAR` is the name of the variable to set. It will be set to TRUE # if the dependency was successfully added and FALSE otherwise. function(add_git_dir_dependency GIT_DOT_FILE SUCCESS_VAR) if (NOT "${ARGC}" EQUAL 2) message(FATAL_ERROR "Invalid number (${ARGC}) of arguments") endif() if (NOT IS_ABSOLUTE "${GIT_DOT_FILE}") message(FATAL_ERROR "GIT_DOT_FILE (\"${GIT_DOT_FILE}\") is not an absolute path") endif() if (NOT EXISTS "${GIT_DOT_FILE}") message(FATAL_ERROR "GIT_DOT_FILE (\"${GIT_DOT_FILE}\") does not exist") endif() if (NOT IS_DIRECTORY "${GIT_DOT_FILE}") # Might be a git worktree. 
In this case we need parse out the worktree # git directory file(READ "${GIT_DOT_FILE}" GIT_DOT_FILE_DATA LIMIT 512) string(STRIP "${GIT_DOT_FILE_DATA}" GIT_DOT_FILE_DATA_STRIPPED) if (GIT_DOT_FILE_DATA_STRIPPED MATCHES "^gitdir:[ ]*(.+)$") # Git worktree message(STATUS "Found git worktree") set(GIT_WORKTREE_DIR "${CMAKE_MATCH_1}") set(GIT_HEAD_FILE "${GIT_WORKTREE_DIR}/HEAD") # Figure out where real git directory lives set(GIT_COMMON_DIR_FILE "${GIT_WORKTREE_DIR}/commondir") if (NOT EXISTS "${GIT_COMMON_DIR_FILE}") get_filename_component(GIT_WORKTREE_PARENT "${GIT_WORKTREE_DIR}" DIRECTORY) get_filename_component(GIT_WORKTREE_PARENT "${GIT_WORKTREE_PARENT}" NAME) if (EXISTS "${Z3_SOURCE_DIR}/${GIT_HEAD_FILE}" AND EXISTS "${Z3_SOURCE_DIR}/${GIT_WORKTREE_DIR}") # Z3 is a git submodule set(GIT_HEAD_FILE "${Z3_SOURCE_DIR}/${GIT_HEAD_FILE}") set(GIT_DIR "${Z3_SOURCE_DIR}/${GIT_WORKTREE_DIR}") else() message(FATAL_ERROR "Found git worktree dir but could not find \"${GIT_COMMON_DIR_FILE}\"") endif() else() file(READ "${GIT_COMMON_DIR_FILE}" GIT_COMMON_DIR_FILE_DATA LIMIT 512) string(STRIP "${GIT_COMMON_DIR_FILE_DATA}" GIT_COMMON_DIR_FILE_DATA_STRIPPED) get_filename_component(GIT_DIR "${GIT_WORKTREE_DIR}/${GIT_COMMON_DIR_FILE_DATA_STRIPPED}" ABSOLUTE) endif() if (NOT IS_DIRECTORY "${GIT_DIR}") message(FATAL_ERROR "Failed to compute path to git directory from git worktree") endif() else() message(FATAL_ERROR "GIT_DOT_FILE (\"${GIT_DOT_FILE}\") is not a directory or a pointer to git worktree directory") endif() else() # Just a normal `.git` directory message(STATUS "Found simple git working directory") set(GIT_HEAD_FILE "${GIT_DOT_FILE}/HEAD") set(GIT_DIR "${GIT_DOT_FILE}") endif() message(STATUS "Found git directory \"${GIT_DIR}\"") if (NOT EXISTS "${GIT_HEAD_FILE}") message(AUTHOR_WARNING "Git head file \"${GIT_HEAD_FILE}\" cannot be found") set(${SUCCESS_VAR} FALSE PARENT_SCOPE) return() endif() # List of files in the git tree that CMake configuration should depend on set(GIT_FILE_DEPS "${GIT_HEAD_FILE}") # Examine the HEAD and workout what additional dependencies there are. file(READ "${GIT_HEAD_FILE}" GIT_HEAD_DATA LIMIT 128) string(STRIP "${GIT_HEAD_DATA}" GIT_HEAD_DATA_STRIPPED) if (GIT_HEAD_DATA_STRIPPED MATCHES "^ref:[ ]*(.+)$") # HEAD points at a reference. set(GIT_REF "${CMAKE_MATCH_1}") if (EXISTS "${GIT_DIR}/${GIT_REF}") # Unpacked reference. The file contains the commit hash # so add a dependency on this file so that if we stay on this # reference (i.e. branch) but change commit CMake will be forced # to reconfigure. list(APPEND GIT_FILE_DEPS "${GIT_DIR}/${GIT_REF}") elseif(EXISTS "${GIT_DIR}/packed-refs") # The ref must be packed (see `man git-pack-refs`). list(APPEND GIT_FILE_DEPS "${GIT_DIR}/packed-refs") else() # Fail message(AUTHOR_WARNING "Unhandled git reference") set(${SUCCESS_VAR} FALSE PARENT_SCOPE) return() endif() else() # Detached HEAD. # No other dependencies needed endif() # FIXME: # This is the directory we will copy (via `configure_file()`) git files # into. This is a hack. It would be better to use the # `CMAKE_CONFIGURE_DEPENDS` directory property but that feature is not # available in CMake 2.8.12. So we use `configure_file()` to effectively # do the same thing. When the source file to `configure_file()` changes # it will trigger a re-run of CMake. 
set(GIT_CMAKE_FILES_DIR "${CMAKE_CURRENT_BINARY_DIR}/git_cmake_files") file(MAKE_DIRECTORY "${GIT_CMAKE_FILES_DIR}") foreach (git_dependency ${GIT_FILE_DEPS}) message(STATUS "Adding git dependency \"${git_dependency}\"") configure_file( "${git_dependency}" "${GIT_CMAKE_FILES_DIR}" COPYONLY ) endforeach() set(${SUCCESS_VAR} TRUE PARENT_SCOPE) endfunction() # get_git_head_hash(GIT_DOT_FILE OUTPUT_VAR) # # Retrieve the current commit hash for a git working directory where # `GIT_DOT_FILE` is the `.git` directory or `.git` pointer file in a git # worktree in the root of the git working directory. # # `OUTPUT_VAR` should be the name of the variable to put the result in. If this # function fails then either a fatal error will be raised or `OUTPUT_VAR` will # contain a string with the suffix `NOTFOUND` which can be used in CMake `if()` # commands. function(get_git_head_hash GIT_DOT_FILE OUTPUT_VAR) if (NOT "${ARGC}" EQUAL 2) message(FATAL_ERROR "Invalid number of arguments") endif() if (NOT EXISTS "${GIT_DOT_FILE}") message(FATAL_ERROR "\"${GIT_DOT_FILE}\" does not exist") endif() if (NOT IS_ABSOLUTE "${GIT_DOT_FILE}") message(FATAL_ERROR \""${GIT_DOT_FILE}\" is not an absolute path") endif() find_package(Git) # NOTE: Use `GIT_FOUND` rather than `Git_FOUND` which was only # available in CMake >= 3.5 if (NOT GIT_FOUND) set(${OUTPUT_VAR} "GIT-NOTFOUND" PARENT_SCOPE) return() endif() get_filename_component(GIT_WORKING_DIR "${GIT_DOT_FILE}" DIRECTORY) execute_process( COMMAND "${GIT_EXECUTABLE}" "rev-parse" "-q" # Quiet "HEAD" WORKING_DIRECTORY "${GIT_WORKING_DIR}" RESULT_VARIABLE GIT_EXIT_CODE OUTPUT_VARIABLE Z3_GIT_HASH OUTPUT_STRIP_TRAILING_WHITESPACE ) if (NOT "${GIT_EXIT_CODE}" EQUAL 0) message(WARNING "Failed to execute git") set(${OUTPUT_VAR} NOTFOUND PARENT_SCOPE) return() endif() set(${OUTPUT_VAR} "${Z3_GIT_HASH}" PARENT_SCOPE) endfunction() # get_git_head_describe(GIT_DOT_FILE OUTPUT_VAR) # # Retrieve the output of `git describe` for a git working directory where # `GIT_DOT_FILE` is the `.git` directory or `.git` pointer file in a git # worktree in the root of the git working directory. # # `OUTPUT_VAR` should be the name of the variable to put the result in. If this # function fails then either a fatal error will be raised or `OUTPUT_VAR` will # contain a string with the suffix `NOTFOUND` which can be used in CMake `if()` # commands. 
function(get_git_head_describe GIT_DOT_FILE OUTPUT_VAR) if (NOT "${ARGC}" EQUAL 2) message(FATAL_ERROR "Invalid number of arguments") endif() if (NOT EXISTS "${GIT_DOT_FILE}") message(FATAL_ERROR "\"${GIT_DOT_FILE}\" does not exist") endif() if (NOT IS_ABSOLUTE "${GIT_DOT_FILE}") message(FATAL_ERROR \""${GIT_DOT_FILE}\" is not an absolute path") endif() find_package(Git) # NOTE: Use `GIT_FOUND` rather than `Git_FOUND` which was only # available in CMake >= 3.5 if (NOT GIT_FOUND) set(${OUTPUT_VAR} "GIT-NOTFOUND" PARENT_SCOPE) return() endif() get_filename_component(GIT_WORKING_DIR "${GIT_DOT_FILE}" DIRECTORY) execute_process( COMMAND "${GIT_EXECUTABLE}" "describe" "--long" WORKING_DIRECTORY "${GIT_WORKING_DIR}" RESULT_VARIABLE GIT_EXIT_CODE OUTPUT_VARIABLE Z3_GIT_DESCRIPTION OUTPUT_STRIP_TRAILING_WHITESPACE ) if (NOT "${GIT_EXIT_CODE}" EQUAL 0) message(WARNING "Failed to execute git") set(${OUTPUT_VAR} NOTFOUND PARENT_SCOPE) return() endif() set(${OUTPUT_VAR} "${Z3_GIT_DESCRIPTION}" PARENT_SCOPE) endfunction() z3-z3-4.13.3/cmake/modules/000077500000000000000000000000001470205523200152205ustar00rootroot00000000000000z3-z3-4.13.3/cmake/modules/DotnetImports.props.in000066400000000000000000000004311470205523200215230ustar00rootroot00000000000000 ${_DN_OUTPUT_PATH}/ ${_DN_XPLAT_LIB_DIR}/ ${_DN_VERSION} ${_DN_CUSTOM_BUILDPROPS} z3-z3-4.13.3/cmake/modules/FindDotnet.cmake000066400000000000000000000457701470205523200202750ustar00rootroot00000000000000#.rst # FindDotnet # ---------- # # Find DotNet executable, and initialize functions for adding dotnet projects. # # Results are reported in the following variables:: # # DOTNET_FOUND - True if dotnet executable is found # DOTNET_EXE - Dotnet executable # DOTNET_VERSION - Dotnet version as reported by dotnet executable # NUGET_EXE - Nuget executable (WIN32 only) # NUGET_CACHE_PATH - Nuget package cache path # # The following functions are defined to add dotnet/msbuild projects: # # ADD_DOTNET -- add a project to be built by dotnet. # # ``` # ADD_DOTNET( [RELEASE|DEBUG] [X86|X64|ANYCPU] [NETCOREAPP] # [CONFIG configuration] # [PLATFORM platform] # [PACKAGE output_nuget_packages... ] # [VERSION nuget_package_version] # [DEPENDS depend_nuget_packages... ] # [OUTPUT_PATH output_path relative to cmake binary output dir] # [CUSTOM_BUILDPROPS value....] # [SOURCES additional_file_dependencies... ] # [ARGUMENTS additional_build_args...] # [PACK_ARGUMENTS additional_pack_args...]) # ``` # # RUN_DOTNET -- Run a project with `dotnet run`. The `OUTPUT` argument represents artifacts # produced by running the .NET program, and can be consumed from other build steps. # # ``` # RUN_DOTNET( [RELEASE|DEBUG] [X86|X64|ANYCPU] [NETCOREAPP] # [ARGUMENTS program_args...] # [OUTPUT outputs...] # [CONFIG configuration] # [PLATFORM platform] # [DEPENDS depend_nuget_packages... ] # [OUTPUT_PATH output_path relative to cmake binary output dir] # [CUSTOM_BUILDPROPS value....] # [SOURCES additional_file_dependencies... ]) # ``` # # ADD_MSBUILD -- add a project to be built by msbuild. Windows-only. When building in Unix systems, msbuild targets are skipped. # # ``` # ADD_MSBUILD( [RELEASE|DEBUG] [X86|X64|ANYCPU] [NETCOREAPP] # [CONFIG configuration] # [PLATFORM platform] # [PACKAGE output_nuget_packages... ] # [DEPENDS depend_nuget_packages... ] # [CUSTOM_BUILDPROPS value....] # [SOURCES additional_file_dependencies... ] # [ARGUMENTS additional_build_args...] # [PACK_ARGUMENTS additional_pack_args...]) # ``` # # SMOKETEST_DOTNET -- add a dotnet smoke test project to the build. 
The project will be run during a build, # and if the program fails to build or run, the build fails. Currently only .NET Core App framework is supported. # Multiple smoke tests will be run one-by-one to avoid global resource conflicts. # # SMOKETEST_DOTNET( [RELEASE|DEBUG] [X86|X64|ANYCPU] [NETCOREAPP] # [ARGUMENTS program_args...] # [CONFIG configuration] # [PLATFORM platform] # [DEPENDS depend_nuget_packages... ] # [OUTPUT_PATH output_path relative to cmake binary output dir] # [CUSTOM_BUILDPROPS value....] # [SOURCES additional_file_dependencies... ]) # # For all the above functions, `RELEASE|DEBUG` overrides `CONFIG`, `X86|X64|ANYCPU` overrides PLATFORM. # For Unix systems, the target framework defaults to `netstandard2.0`, unless `NETCOREAPP` is specified. # For Windows, the project is built as-is, allowing multi-targeting. # # # DOTNET_REGISTER_LOCAL_REPOSITORY -- register a local NuGet package repository. # # ``` # DOTNET_REGISTER_LOCAL_REPOSITORY(repo_name repo_path) # ``` # # TEST_DOTNET -- add a dotnet test project to ctest. The project will be run with `dotnet test`, # and trx test reports will be generated in the build directory. For Windows, all target frameworks # are tested against. For other platforms, only .NET Core App is tested against. # Test failures will not fail the build. # Tests are only run with `ctest -C `, not with `cmake --build ...` # # ``` # TEST_DOTNET( # [ARGUMENTS additional_dotnet_test_args...] # [OUTPUT_PATH output_path relative to cmake binary output dir]) # ``` # # GEN_DOTNET_PROPS -- Generates a Directory.Build.props file. The created file is populated with MSBuild properties: # - DOTNET_PACKAGE_VERSION: a version string that can be referenced in the actual project file as $(DOTNET_PACKAGE_VERSION). # The version string value can be set with PACKAGE_VERSION argument, and defaults to '1.0.0'. # - XPLAT_LIB_DIR: points to the cmake build root directory. # - OutputPath: Points to the cmake binary directory (overridden by OUTPUT_PATH, relatively). Therefore, projects built without cmake will consistently output # to the cmake build directory. # - Custom properties can be injected with XML_INJECT argument, which injects an arbitrary string into the project XML file. 
# # ``` # GEN_DOTNET_PROPS( # [PACKAGE_VERSION version] # [XML_INJECT xml_injection]) # ``` # # Require 3.5 for batch copy multiple files cmake_minimum_required(VERSION 3.5.0) IF(DOTNET_FOUND) RETURN() ENDIF() SET(NUGET_CACHE_PATH "~/.nuget/packages") FIND_PROGRAM(DOTNET_EXE dotnet) SET(DOTNET_MODULE_DIR ${CMAKE_CURRENT_LIST_DIR}) IF(NOT DOTNET_EXE) SET(DOTNET_FOUND FALSE) IF(Dotnet_FIND_REQUIRED) MESSAGE(SEND_ERROR "Command 'dotnet' is not found.") ENDIF() RETURN() ENDIF() EXECUTE_PROCESS( COMMAND ${DOTNET_EXE} --version OUTPUT_VARIABLE DOTNET_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE ) IF(WIN32) FIND_PROGRAM(NUGET_EXE nuget PATHS ${PROJECT_BINARY_DIR}/tools) IF(NUGET_EXE) MESSAGE("-- Found nuget: ${NUGET_EXE}") ELSE() SET(NUGET_EXE ${PROJECT_BINARY_DIR}/tools/nuget.exe) MESSAGE("-- Downloading nuget...") FILE(DOWNLOAD https://dist.nuget.org/win-x86-commandline/latest/nuget.exe ${NUGET_EXE}) MESSAGE("nuget.exe downloaded and saved to ${NUGET_EXE}") ENDIF() ENDIF() FUNCTION(DOTNET_REGISTER_LOCAL_REPOSITORY repo_name repo_path) MESSAGE("-- Registering NuGet local repository '${repo_name}' at '${repo_path}'.") GET_FILENAME_COMPONENT(repo_path ${repo_path} ABSOLUTE) IF(WIN32) STRING(REPLACE "/" "\\" repo_path ${repo_path}) EXECUTE_PROCESS(COMMAND ${NUGET_EXE} sources list OUTPUT_QUIET) EXECUTE_PROCESS(COMMAND ${NUGET_EXE} sources Remove -Name "${repo_name}" OUTPUT_QUIET ERROR_QUIET) EXECUTE_PROCESS(COMMAND ${NUGET_EXE} sources Add -Name "${repo_name}" -Source "${repo_path}") ELSE() GET_FILENAME_COMPONENT(nuget_config ~/.nuget/NuGet/NuGet.Config ABSOLUTE) EXECUTE_PROCESS(COMMAND ${DOTNET_EXE} nuget locals all --list OUTPUT_QUIET) EXECUTE_PROCESS(COMMAND sed -i "#${repo_name}#d" "${nuget_config}") EXECUTE_PROCESS(COMMAND sed -i "s## \\n #g" "${nuget_config}") ENDIF() ENDFUNCTION() FUNCTION(DOTNET_GET_DEPS _DN_PROJECT arguments) CMAKE_PARSE_ARGUMENTS( # prefix _DN # options (flags) "RELEASE;DEBUG;X86;X64;ANYCPU;NETCOREAPP" # oneValueArgs "CONFIG;PLATFORM;VERSION;OUTPUT_PATH" # multiValueArgs "PACKAGE;DEPENDS;ARGUMENTS;PACK_ARGUMENTS;OUTPUT;SOURCES;CUSTOM_BUILDPROPS" # the input arguments ${arguments}) GET_FILENAME_COMPONENT(_DN_abs_proj "${_DN_PROJECT}" ABSOLUTE) GET_FILENAME_COMPONENT(_DN_proj_dir "${_DN_abs_proj}" DIRECTORY) GET_FILENAME_COMPONENT(_DN_projname "${_DN_PROJECT}" NAME) STRING(REGEX REPLACE "\\.[^.]*$" "" _DN_projname_noext ${_DN_projname}) FILE(GLOB_RECURSE DOTNET_deps ${_DN_proj_dir}/*.cs ${_DN_proj_dir}/*.fs ${_DN_proj_dir}/*.vb ${_DN_proj_dir}/*.xaml ${_DN_proj_dir}/*.resx ${_DN_proj_dir}/*.xml ${_DN_proj_dir}/*.*proj ${_DN_proj_dir}/*.cs ${_DN_proj_dir}/*.config) LIST(APPEND DOTNET_deps ${_DN_SOURCES}) SET(_DN_deps "") FOREACH(dep ${DOTNET_deps}) IF(NOT dep MATCHES /obj/ AND NOT dep MATCHES /bin/) LIST(APPEND _DN_deps ${dep}) ENDIF() ENDFOREACH() IF(_DN_RELEASE) SET(_DN_CONFIG Release) ELSEIF(_DN_DEBUG) SET(_DN_CONFIG Debug) ENDIF() IF(NOT _DN_CONFIG) SET(_DN_CONFIG "$<$:Debug>$<$>:Release>") ENDIF() # If platform is not specified, do not pass the Platform property. # dotnet will pick the default Platform. IF(_DN_X86) SET(_DN_PLATFORM x86) ELSEIF(_DN_X64) SET(_DN_PLATFORM x64) ELSEIF(_DN_ANYCPU) SET(_DN_PLATFORM "AnyCPU") ENDIF() # If package version is not set, first fallback to DOTNET_PACKAGE_VERSION # If again not set, defaults to 1.0.0 IF(NOT _DN_VERSION) SET(_DN_VERSION ${DOTNET_PACKAGE_VERSION}) ENDIF() IF(NOT _DN_VERSION) SET(_DN_VERSION "1.0.0") ENDIF() # Set the output path to the binary directory. # Build outputs in separated output directories prevent overwriting. 
# Later we then copy the outputs to the destination. IF(NOT _DN_OUTPUT_PATH) SET(_DN_OUTPUT_PATH ${_DN_projname_noext}) ENDIF() GET_FILENAME_COMPONENT(_DN_OUTPUT_PATH ${PROJECT_BINARY_DIR}/${_DN_OUTPUT_PATH} ABSOLUTE) # In a cmake build, the XPLAT libraries are always copied over. # Set the proper directory for .NET projects. SET(_DN_XPLAT_LIB_DIR ${PROJECT_BINARY_DIR}) SET(DOTNET_PACKAGES ${_DN_PACKAGE} PARENT_SCOPE) SET(DOTNET_CONFIG ${_DN_CONFIG} PARENT_SCOPE) SET(DOTNET_PLATFORM ${_DN_PLATFORM} PARENT_SCOPE) SET(DOTNET_DEPENDS ${_DN_DEPENDS} PARENT_SCOPE) SET(DOTNET_PROJNAME ${_DN_projname_noext} PARENT_SCOPE) SET(DOTNET_PROJPATH ${_DN_abs_proj} PARENT_SCOPE) SET(DOTNET_PROJDIR ${_DN_proj_dir} PARENT_SCOPE) SET(DOTNET_ARGUMENTS ${_DN_ARGUMENTS} PARENT_SCOPE) SET(DOTNET_RUN_OUTPUT ${_DN_OUTPUT} PARENT_SCOPE) SET(DOTNET_PACKAGE_VERSION ${_DN_VERSION} PARENT_SCOPE) SET(DOTNET_OUTPUT_PATH ${_DN_OUTPUT_PATH} PARENT_SCOPE) SET(DOTNET_deps ${_DN_deps} PARENT_SCOPE) IF(_DN_PLATFORM) SET(_DN_PLATFORM_PROP "/p:Platform=${_DN_PLATFORM}") ENDIF() IF(_DN_NETCOREAPP) SET(_DN_BUILD_OPTIONS -f netcoreapp2.0) SET(_DN_PACK_OPTIONS /p:TargetFrameworks=netcoreapp2.0) ELSEIF(UNIX) # Unix builds default to netstandard2.0 SET(_DN_BUILD_OPTIONS -f netstandard2.0) SET(_DN_PACK_OPTIONS /p:TargetFrameworks=netstandard2.0) ENDIF() SET(_DN_IMPORT_PROP ${CMAKE_CURRENT_BINARY_DIR}/${_DN_projname}.imports.props) CONFIGURE_FILE(${DOTNET_MODULE_DIR}/DotnetImports.props.in ${_DN_IMPORT_PROP}) SET(_DN_IMPORT_ARGS "/p:DirectoryBuildPropsPath=${_DN_IMPORT_PROP}") SET(DOTNET_IMPORT_PROPERTIES ${_DN_IMPORT_ARGS} PARENT_SCOPE) SET(DOTNET_BUILD_PROPERTIES ${_DN_PLATFORM_PROP} ${_DN_IMPORT_ARGS} PARENT_SCOPE) SET(DOTNET_BUILD_OPTIONS ${_DN_BUILD_OPTIONS} PARENT_SCOPE) SET(DOTNET_PACK_OPTIONS --include-symbols ${_DN_PACK_OPTIONS} ${_DN_PACK_ARGUMENTS} PARENT_SCOPE) ENDFUNCTION() MACRO(ADD_DOTNET_DEPENDENCY_TARGETS tgt) FOREACH(pkg_dep ${DOTNET_DEPENDS}) ADD_DEPENDENCIES(${tgt}_${DOTNET_PROJNAME} PKG_${pkg_dep}) MESSAGE(" ${DOTNET_PROJNAME} <- ${pkg_dep}") ENDFOREACH() FOREACH(pkg ${DOTNET_PACKAGES}) STRING(TOLOWER ${pkg} pkg_lowercase) GET_FILENAME_COMPONENT(cache_path ${NUGET_CACHE_PATH}/${pkg_lowercase} ABSOLUTE) IF(WIN32) SET(rm_command powershell -NoLogo -NoProfile -NonInteractive -Command "Remove-Item -Recurse -Force -ErrorAction Ignore '${cache_path}'\; exit 0") ELSE() SET(rm_command rm -rf ${cache_path}) ENDIF() ADD_CUSTOM_TARGET( DOTNET_PURGE_${pkg} COMMAND ${CMAKE_COMMAND} -E echo "======= [x] Purging nuget package cache for ${pkg}" COMMAND ${rm_command} DEPENDS ${DOTNET_deps} ) ADD_DEPENDENCIES(${tgt}_${DOTNET_PROJNAME} DOTNET_PURGE_${pkg}) # Add a target for the built package -- this can be referenced in # another project. 
ADD_CUSTOM_TARGET(PKG_${pkg}) ADD_DEPENDENCIES(PKG_${pkg} ${tgt}_${DOTNET_PROJNAME}) MESSAGE("==== ${DOTNET_PROJNAME} -> ${pkg}") ENDFOREACH() ENDMACRO() MACRO(DOTNET_BUILD_COMMANDS) IF(${DOTNET_IS_MSBUILD}) SET(build_dotnet_cmds COMMAND ${CMAKE_COMMAND} -E echo "======= Building msbuild project ${DOTNET_PROJNAME} [${DOTNET_CONFIG} ${DOTNET_PLATFORM}]" COMMAND ${NUGET_EXE} restore -Force ${DOTNET_PROJPATH} COMMAND ${DOTNET_EXE} msbuild ${DOTNET_PROJPATH} /t:Clean ${DOTNET_BUILD_PROPERTIES} /p:Configuration="${DOTNET_CONFIG}" COMMAND ${DOTNET_EXE} msbuild ${DOTNET_PROJPATH} /t:Build ${DOTNET_BUILD_PROPERTIES} /p:Configuration="${DOTNET_CONFIG}" ${DOTNET_ARGUMENTS}) SET(build_dotnet_type "msbuild") ELSE() SET(build_dotnet_cmds COMMAND ${CMAKE_COMMAND} -E echo "======= Building .NET project ${DOTNET_PROJNAME} [${DOTNET_CONFIG} ${DOTNET_PLATFORM}]" COMMAND ${DOTNET_EXE} restore ${DOTNET_PROJPATH} ${DOTNET_IMPORT_PROPERTIES} COMMAND ${DOTNET_EXE} clean ${DOTNET_PROJPATH} ${DOTNET_BUILD_PROPERTIES} COMMAND ${DOTNET_EXE} build --no-restore ${DOTNET_PROJPATH} -c ${DOTNET_CONFIG} ${DOTNET_BUILD_PROPERTIES} ${DOTNET_BUILD_OPTIONS} ${DOTNET_ARGUMENTS}) SET(build_dotnet_type "dotnet") ENDIF() # DOTNET_OUTPUTS refer to artifacts produced, that the BUILD_proj_name target depends on. SET(DOTNET_OUTPUTS ${CMAKE_CURRENT_BINARY_DIR}/${DOTNET_PROJNAME}.buildtimestamp) LIST(APPEND build_dotnet_cmds COMMAND ${CMAKE_COMMAND} -E touch ${DOTNET_OUTPUTS}) IF(NOT "${DOTNET_PACKAGES}" STREQUAL "") MESSAGE("-- Adding ${build_dotnet_type} project ${DOTNET_PROJPATH} (version ${DOTNET_PACKAGE_VERSION})") FOREACH(pkg ${DOTNET_PACKAGES}) LIST(APPEND DOTNET_OUTPUTS ${DOTNET_OUTPUT_PATH}/${pkg}.${DOTNET_PACKAGE_VERSION}.nupkg) LIST(APPEND DOTNET_OUTPUTS ${DOTNET_OUTPUT_PATH}/${pkg}.${DOTNET_PACKAGE_VERSION}.symbols.nupkg) LIST(APPEND build_dotnet_cmds COMMAND ${CMAKE_COMMAND} -E remove ${DOTNET_OUTPUT_PATH}/${pkg}.${DOTNET_PACKAGE_VERSION}.nupkg) LIST(APPEND build_dotnet_cmds COMMAND ${CMAKE_COMMAND} -E remove ${DOTNET_OUTPUT_PATH}/${pkg}.${DOTNET_PACKAGE_VERSION}.symbols.nupkg) ENDFOREACH() LIST(APPEND build_dotnet_cmds COMMAND ${DOTNET_EXE} pack --no-build --no-restore ${DOTNET_PROJPATH} -c ${DOTNET_CONFIG} ${DOTNET_BUILD_PROPERTIES} ${DOTNET_PACK_OPTIONS}) ELSE() MESSAGE("-- Adding ${build_dotnet_type} project ${DOTNET_PROJPATH} (no nupkg)") ENDIF() ADD_CUSTOM_COMMAND( OUTPUT ${DOTNET_OUTPUTS} DEPENDS ${DOTNET_deps} ${build_dotnet_cmds} ) ADD_CUSTOM_TARGET( BUILD_${DOTNET_PROJNAME} ALL DEPENDS ${DOTNET_OUTPUTS}) ENDMACRO() FUNCTION(ADD_DOTNET DOTNET_PROJECT) DOTNET_GET_DEPS(${DOTNET_PROJECT} "${ARGN}") SET(DOTNET_IS_MSBUILD FALSE) DOTNET_BUILD_COMMANDS() ADD_DOTNET_DEPENDENCY_TARGETS(BUILD) ENDFUNCTION() FUNCTION(ADD_MSBUILD DOTNET_PROJECT) IF(NOT WIN32) MESSAGE("-- Building non-Win32, skipping ${DOTNET_PROJECT}") RETURN() ENDIF() DOTNET_GET_DEPS(${DOTNET_PROJECT} "${ARGN}") SET(DOTNET_IS_MSBUILD TRUE) DOTNET_BUILD_COMMANDS() ADD_DOTNET_DEPENDENCY_TARGETS(BUILD) ENDFUNCTION() FUNCTION(RUN_DOTNET DOTNET_PROJECT) DOTNET_GET_DEPS(${DOTNET_PROJECT} "${ARGN};NETCOREAPP") MESSAGE("-- Adding dotnet run project ${DOTNET_PROJECT}") FILE(MAKE_DIRECTORY ${DOTNET_OUTPUT_PATH}) ADD_CUSTOM_COMMAND( OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/${DOTNET_PROJNAME}.runtimestamp ${DOTNET_RUN_OUTPUT} DEPENDS ${DOTNET_deps} COMMAND ${DOTNET_EXE} restore ${DOTNET_PROJPATH} ${DOTNET_IMPORT_PROPERTIES} COMMAND ${DOTNET_EXE} clean ${DOTNET_PROJPATH} ${DOTNET_BUILD_PROPERTIES} COMMAND ${DOTNET_EXE} build --no-restore ${DOTNET_PROJPATH} -c ${DOTNET_CONFIG} 
${DOTNET_BUILD_PROPERTIES} ${DOTNET_BUILD_OPTIONS} # XXX tfm COMMAND ${DOTNET_EXE} ${DOTNET_OUTPUT_PATH}/netcoreapp2.0/${DOTNET_PROJNAME}.dll ${DOTNET_ARGUMENTS} COMMAND ${CMAKE_COMMAND} -E touch ${CMAKE_CURRENT_BINARY_DIR}/${DOTNET_PROJNAME}.runtimestamp WORKING_DIRECTORY ${DOTNET_OUTPUT_PATH}) ADD_CUSTOM_TARGET( RUN_${DOTNET_PROJNAME} DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/${DOTNET_PROJNAME}.runtimestamp ${DOTNET_RUN_OUTPUT}) ADD_DOTNET_DEPENDENCY_TARGETS(RUN) ENDFUNCTION() FUNCTION(TEST_DOTNET DOTNET_PROJECT) DOTNET_GET_DEPS(${DOTNET_PROJECT} "${ARGN}") MESSAGE("-- Adding dotnet test project ${DOTNET_PROJECT}") IF(WIN32) SET(test_framework_args "") ELSE() SET(test_framework_args -f netcoreapp2.0) ENDIF() ADD_TEST(NAME ${DOTNET_PROJNAME} COMMAND ${DOTNET_EXE} test ${test_framework_args} --results-directory "${PROJECT_BINARY_DIR}" --logger trx ${DOTNET_ARGUMENTS} WORKING_DIRECTORY ${DOTNET_OUTPUT_PATH}) ENDFUNCTION() SET_PROPERTY(GLOBAL PROPERTY DOTNET_LAST_SMOKETEST "") FUNCTION(SMOKETEST_DOTNET DOTNET_PROJECT) MESSAGE("-- Adding dotnet smoke test project ${DOTNET_PROJECT}") IF(WIN32) RUN_DOTNET(${DOTNET_PROJECT} "${ARGN}") ELSE() RUN_DOTNET(${DOTNET_PROJECT} "${ARGN}") ENDIF() DOTNET_GET_DEPS(${DOTNET_PROJECT} "${ARGN}") ADD_CUSTOM_TARGET( SMOKETEST_${DOTNET_PROJNAME} ALL DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/${DOTNET_PROJNAME}.runtimestamp) ADD_DOTNET_DEPENDENCY_TARGETS(SMOKETEST) GET_PROPERTY(_dn_last_smoketest GLOBAL PROPERTY DOTNET_LAST_SMOKETEST) IF(_dn_last_smoketest) MESSAGE("${_dn_last_smoketest} -> SMOKETEST_${DOTNET_PROJNAME}") ADD_DEPENDENCIES(SMOKETEST_${DOTNET_PROJNAME} ${_dn_last_smoketest}) ENDIF() # Chain the smoke tests together so they are executed sequentially SET_PROPERTY(GLOBAL PROPERTY DOTNET_LAST_SMOKETEST SMOKETEST_${DOTNET_PROJNAME}) ENDFUNCTION() SET(DOTNET_IMPORTS_TEMPLATE ${CMAKE_CURRENT_LIST_DIR}/DotnetImports.props.in) FUNCTION(GEN_DOTNET_PROPS target_props_file) CMAKE_PARSE_ARGUMENTS( # prefix _DNP # options (flags) "" # oneValueArgs "PACKAGE_VERSION;XML_INJECT" # multiValueArgs "" # the input arguments ${ARGN}) IF(NOT _DNP_PACKAGE_VERSION) SET(_DNP_PACKAGE_VERSION 1.0.0) ENDIF() IF(_DNP_XML_INJECT) SET(_DN_CUSTOM_BUILDPROPS ${_DNP_XML_INJECT}) ENDIF() SET(_DN_OUTPUT_PATH ${PROJECT_BINARY_DIR}) SET(_DN_XPLAT_LIB_DIR ${PROJECT_BINARY_DIR}) SET(_DN_VERSION ${_DNP_PACKAGE_VERSION}) CONFIGURE_FILE(${DOTNET_IMPORTS_TEMPLATE} ${target_props_file}) UNSET(_DN_OUTPUT_PATH) UNSET(_DN_XPLAT_LIB_DIR) UNSET(_DN_VERSION) ENDFUNCTION() MESSAGE("-- Found .NET toolchain: ${DOTNET_EXE} (version ${DOTNET_VERSION})") SET(DOTNET_FOUND TRUE) z3-z3-4.13.3/cmake/modules/FindGMP.cmake000066400000000000000000000025641470205523200174550ustar00rootroot00000000000000# Tries to find an install of the GNU multiple precision library # # Once done this will define # GMP_FOUND - BOOL: System has the GMP library installed # GMP_INCLUDE_DIRS - LIST:The GMP include directories # GMP_C_LIBRARIES - LIST:The libraries needed to use GMP via it's C interface # GMP_CXX_LIBRARIES - LIST:The libraries needed to use GMP via it's C++ interface include(FindPackageHandleStandardArgs) # Try to find libraries find_library(GMP_C_LIBRARIES NAMES gmp DOC "GMP C libraries" ) find_library(GMP_CXX_LIBRARIES NAMES gmpxx DOC "GMP C++ libraries" ) # Try to find headers find_path(GMP_C_INCLUDES NAMES gmp.h DOC "GMP C header" ) find_path(GMP_CXX_INCLUDES NAMES gmpxx.h DOC "GMP C++ header" ) # TODO: We should check we can link some simple code against libgmp and libgmpxx # Handle QUIET and REQUIRED and check the necessary 
variables were set and if so # set ``GMP_FOUND`` find_package_handle_standard_args(GMP REQUIRED_VARS GMP_C_LIBRARIES GMP_C_INCLUDES GMP_CXX_LIBRARIES GMP_CXX_INCLUDES) if (GMP_FOUND) set(GMP_INCLUDE_DIRS "${GMP_C_INCLUDES}" "${GMP_CXX_INCLUDES}") list(REMOVE_DUPLICATES GMP_INCLUDE_DIRS) if (NOT TARGET GMP::GMP) add_library(GMP::GMP UNKNOWN IMPORTED) set_target_properties(GMP::GMP PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${GMP_C_INCLUDES}" IMPORTED_LOCATION "${GMP_C_LIBRARIES}") endif() endif() z3-z3-4.13.3/cmake/msvc_legacy_quirks.cmake000066400000000000000000000176511470205523200204560ustar00rootroot00000000000000# This file ether sets or notes various compiler and linker flags for MSVC that # were defined by the old python/Makefile based build system but # don't obviously belong in the other sections in the CMake build system. ################################################################################ # Compiler definitions ################################################################################ # FIXME: All the commented out defines should be removed once # we are confident it is correct to not set them. set(Z3_MSVC_LEGACY_DEFINES # Don't set `_DEBUG`. The old build system sets this but this # is wrong. MSVC will set this depending on which runtime is being used. # See https://msdn.microsoft.com/en-us/library/b0084kay.aspx # _DEBUG # The old build system only set `UNICODE` and `_UNICODE` for x86_64 release. # That seems completely wrong so set it for all configurations. # According to https://blogs.msdn.microsoft.com/oldnewthing/20040212-00/?p=40643/ # `UNICODE` affects Windows headers and `_UNICODE` affects C runtime header files. # There is some discussion of this define at https://msdn.microsoft.com/en-us/library/dybsewaf.aspx UNICODE _UNICODE ) if (TARGET_ARCHITECTURE STREQUAL "x86_64") list(APPEND Z3_MSVC_LEGACY_DEFINES "" # Don't set `_LIB`. The old build system sets this for x86_64 release # build. This flag doesn't seem to be documented but a stackoverflow # post hints that this is usually set when building a static library. # See http://stackoverflow.com/questions/35034683/how-to-tell-if-current-project-is-dll-or-static-lib # This seems wrong give that the old build system set this regardless # whether or not libz3 was static or shared so its probably best # to not set for now. #$<$:_LIB> #$<$:_LIB> # Don't set `_CONSOLE`. The old build system sets for all configurations # except x86_64 release. It seems ( https://codeyarns.com/2010/12/02/visual-c-windows-and-console-subsystems/ ) # that `_CONSOLE` used to be defined by older Visual C++ environments. # Setting this undocumented option seems like a bad idea so let's not do it. #$<$ #$<$ # Don't set `ASYNC_COMMANDS`. The old build system sets this for x86_64 # release but this macro does not appear to be used anywhere and is not # documented so don't set it for now. #$<$:ASYNC_COMMANDS> #$<$:ASYNC_COMMANDS> ) else() list(APPEND Z3_MSVC_LEGACY_DEFINES "" # Don't set `_CONSOLE`. See reasoning above. #_CONSOLE ) endif() # Note we don't set WIN32 or _WINDOWS because # CMake provides that for us. As a sanity check make sure the option # is present. 
if (NOT CMAKE_CXX_FLAGS MATCHES "[-/]D[ ]*WIN32") message(FATAL_ERROR "\"/D WIN32\" is missing") endif() if (NOT CMAKE_CXX_FLAGS MATCHES "[-/]D[ ]*_WINDOWS") message(FATAL_ERROR "\"/D _WINDOWS\" is missing") endif() list(APPEND Z3_COMPONENT_CXX_DEFINES ${Z3_MSVC_LEGACY_DEFINES}) ################################################################################ # Compiler flags ################################################################################ # FIXME: We might want to move this out somewhere else if we decide # we want to set `-fno-omit-frame-pointer` for gcc/clang. # No omit frame pointer set(NO_OMIT_FRAME_POINTER_MSVC_FLAG "/Oy-") CHECK_CXX_COMPILER_FLAG(${NO_OMIT_FRAME_POINTER_MSVC_FLAG} HAS_MSVC_NO_OMIT_FRAME_POINTER) if (NOT HAS_MSVC_NO_OMIT_FRAME_POINTER) message(FATAL_ERROR "${NO_OMIT_FRAME_POINTER_MSVC_FLAG} flag not supported") endif() # FIXME: This doesn't make a huge amount of sense but the old # build system kept the frame pointer for all configurations # except x86_64 release (I don't know why the frame pointer # is kept for i686 release). if (TARGET_ARCHITECTURE STREQUAL "x86_64") list(APPEND Z3_COMPONENT_CXX_FLAGS $<$:${NO_OMIT_FRAME_POINTER_MSVC_FLAG}> $<$:${NO_OMIT_FRAME_POINTER_MSVC_FLAG}> ) else() list(APPEND Z3_COMPONENT_CXX_FLAGS ${NO_OMIT_FRAME_POINTER_MSVC_FLAG}) endif() if ((TARGET_ARCHITECTURE STREQUAL "x86_64") OR (TARGET_ARCHITECTURE STREQUAL "i686")) # Use __cdecl calling convention. Apparently this is MSVC's default # but the old build system set it so for completeness set it too. # See https://msdn.microsoft.com/en-us/library/46t77ak2.aspx z3_add_cxx_flag("/Gd" REQUIRED) endif() z3_add_cxx_flag("/EHsc" REQUIRED) ################################################################################ # Linker flags ################################################################################ # By default CMake enables incremental linking for Debug and RelWithDebInfo # builds. The old build system disables it for all builds so try to do the same # by changing all configurations if necessary string(TOUPPER "${available_build_types}" _build_types_as_upper) foreach (_build_type ${_build_types_as_upper}) foreach (t EXE SHARED STATIC) set(_replacement "/INCREMENTAL:NO") # Remove any existing incremental flags string(REGEX REPLACE "/INCREMENTAL:YES" "${_replacement}" _replaced_linker_flags "${CMAKE_${t}_LINKER_FLAGS_${_build_type}}") string(REGEX REPLACE "(/INCREMENTAL$)|(/INCREMENTAL )" "${_replacement} " _replaced_linker_flags "${_replaced_linker_flags}") if (NOT "${_replaced_linker_flags}" MATCHES "${_replacement}") # Flag not present. Add it string(APPEND _replaced_linker_flags " ${_replacement}") endif() set(CMAKE_${t}_LINKER_FLAGS_${_build_type} "${_replaced_linker_flags}") endforeach() endforeach() # The original build system passes `/STACK:` to the linker. # This size comes from the original build system. # FIXME: What is the rationale behind this? set(STACK_SIZE_MSVC_LINKER 8388608) # MSVC documentation (https://msdn.microsoft.com/en-us/library/35yc2tc3.aspx) # says this only matters for executables which is why this is not being # set for CMAKE_SHARED_LINKER_FLAGS or CMAKE_STATIC_LINKER_FLAGS. string(APPEND CMAKE_EXE_LINKER_FLAGS " /STACK:${STACK_SIZE_MSVC_LINKER}") # The original build system passes `/SUBSYSTEM:` to the linker where `` # depends on what is being linked. Where `` is `CONSOLE` for executables # and `WINDOWS` for shard libraries. 
# We don't need to pass `/SUBSYSTEM:CONSOLE` because CMake will do this for # us when building executables because we don't pass the `WIN32` argument to # `add_executable()`. # FIXME: We probably don't need this. https://msdn.microsoft.com/en-us/library/fcc1zstk.aspx # suggests that `/SUBSYSTEM:` only matters for executables. string(APPEND CMAKE_SHARED_LINKER_FLAGS " /SUBSYSTEM:WINDOWS") # FIXME: The following linker flags are weird. They are set in all configurations # in the old build system except release x86_64. We try to emulate this here but # this is likely the wrong thing to do. foreach (_build_type ${_build_types_as_upper}) if (TARGET_ARCHITECTURE STREQUAL "x86_64" AND (_build_type STREQUAL "RELEASE" OR _build_type STREQUAL "RELWITHDEBINFO") ) message(AUTHOR_WARNING "Skipping legacy linker MSVC options for x86_64 ${_build_type}") else() # Linker optimizations. # See https://msdn.microsoft.com/en-us/library/bxwfs976.aspx string(APPEND CMAKE_EXE_LINKER_FLAGS_${_build_type} " /OPT:REF /OPT:ICF") string(APPEND CMAKE_SHARED_LINKER_FLAGS_${_build_type} " /OPT:REF /OPT:ICF") # FIXME: This is not necessary. This is MSVC's default. # See https://msdn.microsoft.com/en-us/library/b1kw34cb.aspx string(APPEND CMAKE_EXE_LINKER_FLAGS_${_build_type} " /TLBID:1") string(APPEND CMAKE_SHARED_LINKER_FLAGS_${_build_type} " /TLBID:1") # FIXME: This is not necessary. This is MSVC's default. # Indicate that the executable is compatible with DEP # See https://msdn.microsoft.com/en-us/library/ms235442.aspx string(APPEND CMAKE_EXE_LINKER_FLAGS_${_build_type} " /NXCOMPAT") endif() endforeach() z3-z3-4.13.3/cmake/target_arch_detect.cmake000066400000000000000000000016211470205523200203650ustar00rootroot00000000000000############################################################################### # Target detection # # We abuse the compiler preprocessor to work out what target the compiler is # building for. The nice thing about this approach is that we'll detect the # right target even if we are using a cross compiler. ############################################################################### function(detect_target_architecture OUTPUT_VAR) try_run(run_result compile_result "${PROJECT_BINARY_DIR}" "${PROJECT_SOURCE_DIR}/cmake/target_arch_detect.cpp" COMPILE_OUTPUT_VARIABLE compiler_output ) if (compile_result) message(FATAL_ERROR "Expected compile to fail") endif() string(REGEX MATCH "CMAKE_TARGET_ARCH_([a-zA-Z0-9_]+)" arch "${compiler_output}") # Strip out prefix string(REPLACE "CMAKE_TARGET_ARCH_" "" arch "${arch}") set(${OUTPUT_VAR} "${arch}" PARENT_SCOPE) endfunction() z3-z3-4.13.3/cmake/target_arch_detect.cpp000066400000000000000000000006671470205523200201000ustar00rootroot00000000000000// This is used by the CMake build to detect // what architecture the compiler is targeting. 
// TODO: Add more targets here #if defined(__i386__) || defined(_M_IX86) #error CMAKE_TARGET_ARCH_i686 #elif defined(__x86_64__) || defined(_M_X64) #error CMAKE_TARGET_ARCH_x86_64 #elif defined(__ARM_ARCH_ISA_A64) #error CMAKE_TARGET_ARCH_arm64 #elif defined(__ARM_ARCH) #error CMAKE_TARGET_ARCH_arm #else #error CMAKE_TARGET_ARCH_unknown #endif z3-z3-4.13.3/cmake/z3_add_component.cmake000066400000000000000000000360611470205523200200060ustar00rootroot00000000000000include(CMakeParseArguments) define_property(GLOBAL PROPERTY Z3_LIBZ3_COMPONENTS BRIEF_DOCS "List of Z3 components to use in libz3" FULL_DOCS "List of Z3 components to use in libz3") function(z3_expand_dependencies output_var) if (ARGC LESS 2) message(FATAL_ERROR "Invalid number of arguments") endif() # Remaining args should be component names set(_expanded_deps ${ARGN}) set(_old_number_of_deps 0) list(LENGTH _expanded_deps _number_of_deps) while (_number_of_deps GREATER _old_number_of_deps) set(_old_number_of_deps "${_number_of_deps}") # Loop over the known dependencies and retrieve their dependencies set(_old_expanded_deps ${_expanded_deps}) foreach (dependency ${_old_expanded_deps}) get_property(_depdeps GLOBAL PROPERTY Z3_${dependency}_DEPS) list(APPEND _expanded_deps ${_depdeps}) unset(_depdeps) endforeach() list(REMOVE_DUPLICATES _expanded_deps) list(LENGTH _expanded_deps _number_of_deps) endwhile() set(${output_var} ${_expanded_deps} PARENT_SCOPE) endfunction() function(z3_add_component_dependencies_to_target target_name) if (ARGC LESS 2) message(FATAL_ERROR "Invalid number of arguments") endif() if (NOT (TARGET ${target_name})) message(FATAL_ERROR "Target \"${target_name}\" does not exist") endif() # Remaining args should be component names set(_expanded_deps ${ARGN}) foreach (dependency ${_expanded_deps}) # Ensure this component's dependencies are built before this component. # This is important because we might need the generated header files in # other components. add_dependencies(${target_name} ${dependency}) endforeach() endfunction() # z3_add_component(component_name # [NOT_LIBZ3_COMPONENT] # SOURCES source1 [source2...] # [COMPONENT_DEPENDENCIES component1 [component2...]] # [PYG_FILES pygfile1 [pygfile2...]] # [TACTIC_HEADERS header_file1 [header_file2...]] # [EXTRA_REGISTER_MODULE_HEADERS header_file1 [header_file2...]] # [MEMORY_INIT_FINALIZER_HEADERS header_file1 [header_file2...]] # ) # # Declares a Z3 component (as a CMake "object library") with target name # ``component_name``. # # The option ``NOT_LIBZ3_COMPONENT`` declares that the # component should not be included in libz3. If this is not specified # the component will be included in libz3. # # The mandatory ``SOURCES`` keyword should be followed by the source files # (including any files generated at build or configure time) that are should be # included in the component. It is not necessary to list header files here as # CMake infers header file dependencies unless that header file is generated at # build time. # # The optional ``COMPONENT_DEPENDENCIES`` keyword should be followed by a list of # components that ``component_name`` should depend on. The components listed here # must have already been declared using ``z3_add_component()``. Listing components # here causes them to be built before ``component_name``. It also currently causes # the include directories used by the transistive closure of the dependencies # to be added to the list of include directories used to build ``component_name``. 
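#
# As an illustrative sketch only (the component, source file and dependency
# names below are placeholders, not a definitive Z3 component), a declaration
# using the keywords described here might look like:
#
#   z3_add_component(example_component
#     SOURCES
#       example_a.cpp
#       example_b.cpp
#     COMPONENT_DEPENDENCIES
#       util
#   )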
# # The optional ``PYG_FILES`` keyword should be followed by a list of one or # more ``.pyg`` files that should used to be generate # ``_params.hpp`` header files used by the ``component_name``. # This generated file will automatically be scanned for the register module # declarations (i.e. ``REG_PARAMS()``, ``REG_MODULE_PARAMS()``, and # ``REG_MODULE_DESCRIPTION()``). # # The optional ``TACTIC_HEADERS`` keyword should be followed by a list of one or # more header files that declare a tactic and/or a probe that is part of this # component (see ``ADD_TACTIC()`` and ``ADD_PROBE()``). # # The optional ``EXTRA_REGISTER_MODULE_HEADERS`` keyword should be followed by a list # of one or more header files that contain module registration declarations. # NOTE: The header files generated from ``.pyg`` files don't need to be included. # # The optional ``MEMORY_INIT_FINALIZER_HEADERS`` keyword should be followed by a list # of one or more header files that contain memory initializer/finalizer declarations # (i.e. ``ADD_INITIALIZER()`` or ``ADD_FINALIZER()``). macro(z3_add_component component_name) CMAKE_PARSE_ARGUMENTS("Z3_MOD" "NOT_LIBZ3_COMPONENT" "" "SOURCES;COMPONENT_DEPENDENCIES;PYG_FILES;TACTIC_HEADERS;EXTRA_REGISTER_MODULE_HEADERS;MEMORY_INIT_FINALIZER_HEADERS" ${ARGN}) message(STATUS "Adding component ${component_name}") # Note: We don't check the sources exist here because # they might be generated files that don't exist yet. set(_list_generated_headers "") set_property(GLOBAL PROPERTY Z3_${component_name}_REGISTER_MODULE_HEADERS "") foreach (pyg_file ${Z3_MOD_PYG_FILES}) set(_full_pyg_file_path "${CMAKE_CURRENT_SOURCE_DIR}/${pyg_file}") if (NOT (EXISTS "${_full_pyg_file_path}")) message(FATAL_ERROR "\"${_full_pyg_file_path}\" does not exist") endif() string(REPLACE ".pyg" ".hpp" _output_file "${pyg_file}") if (EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/${_output_file}") message(FATAL_ERROR "\"${CMAKE_CURRENT_SOURCE_DIR}/${_output_file}\" " ${z3_polluted_tree_msg} ) endif() set(_full_output_file_path "${CMAKE_CURRENT_BINARY_DIR}/${_output_file}") message(STATUS "Adding rule to generate \"${_output_file}\"") add_custom_command(OUTPUT "${_output_file}" COMMAND "${Python3_EXECUTABLE}" "${PROJECT_SOURCE_DIR}/scripts/pyg2hpp.py" "${_full_pyg_file_path}" "${CMAKE_CURRENT_BINARY_DIR}" MAIN_DEPENDENCY "${_full_pyg_file_path}" DEPENDS "${PROJECT_SOURCE_DIR}/scripts/pyg2hpp.py" ${Z3_GENERATED_FILE_EXTRA_DEPENDENCIES} COMMENT "Generating \"${_full_output_file_path}\" from \"${pyg_file}\"" WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}" USES_TERMINAL VERBATIM ) list(APPEND _list_generated_headers "${_full_output_file_path}") # FIXME: This implicit dependency of a generated file depending on # generated files was inherited from the old build system. # Typically generated headers contain `REG_PARAMS()`, `REG_MODULE_PARAMS()` # and `REG_MODULE_DESCRIPTION()` declarations so add to the list of # header files to scan. 
set_property(GLOBAL APPEND PROPERTY Z3_${component_name}_REGISTER_MODULE_HEADERS "${_full_output_file_path}" ) endforeach() unset(_full_include_dir_path) unset(_full_output_file_path) unset(_output_file) # Add tactic/probe headers to global property set_property(GLOBAL PROPERTY Z3_${component_name}_TACTIC_HEADERS "") foreach (tactic_header ${Z3_MOD_TACTIC_HEADERS}) set(_full_tactic_header_file_path "${CMAKE_CURRENT_SOURCE_DIR}/${tactic_header}") if (NOT (EXISTS "${_full_tactic_header_file_path}")) message(FATAL_ERROR "\"${_full_tactic_header_file_path}\" does not exist") endif() set_property(GLOBAL APPEND PROPERTY Z3_${component_name}_TACTIC_HEADERS "${_full_tactic_header_file_path}" ) endforeach() unset(_full_tactic_header_file_path) # Add additional register module headers foreach (extra_register_module_header ${Z3_MOD_EXTRA_REGISTER_MODULE_HEADERS}) set(_full_extra_register_module_header_path "${CMAKE_CURRENT_SOURCE_DIR}/${extra_register_module_header}" ) if (NOT (EXISTS "${_full_extra_register_module_header_path}")) message(FATAL_ERROR "\"${_full_extra_register_module_header_path}\" does not exist") endif() set_property(GLOBAL APPEND PROPERTY Z3_${component_name}_REGISTER_MODULE_HEADERS "${_full_extra_register_module_header_path}" ) endforeach() unset(_full_extra_register_module_header) # Add memory initializer/finalizer headers to global property set_property(GLOBAL PROPERTY Z3_${component_name}_MEM_INIT_FINALIZER_HEADERS "") foreach (memory_init_finalizer_header ${Z3_MOD_MEMORY_INIT_FINALIZER_HEADERS}) set(_full_memory_init_finalizer_header_path "${CMAKE_CURRENT_SOURCE_DIR}/${memory_init_finalizer_header}") if (NOT (EXISTS "${_full_memory_init_finalizer_header_path}")) message(FATAL_ERROR "\"${_full_memory_init_finalizer_header_path}\" does not exist") endif() set_property(GLOBAL APPEND PROPERTY Z3_${component_name}_MEM_INIT_FINALIZER_HEADERS "${_full_memory_init_finalizer_header_path}" ) endforeach() unset(_full_memory_init_finalizer_header_path) # Using "object" libraries here means we have a convenient # name to refer to a component in CMake but we don't actually # create a static/library from them. This allows us to easily # build a static or dynamic library from the object libraries # on all platforms. Is this added flexibility worth the linking # overhead it adds? add_library(${component_name} OBJECT ${Z3_MOD_SOURCES} ${_list_generated_headers}) unset(_list_generated_headers) # Add definitions foreach (define ${Z3_COMPONENT_CXX_DEFINES}) target_compile_definitions(${component_name} PRIVATE ${define}) endforeach() # Add compiler flags foreach (flag ${Z3_COMPONENT_CXX_FLAGS}) target_compile_options(${component_name} PRIVATE ${flag}) endforeach() set_target_properties(${component_name} PROPERTIES # Position independent code needed in shared libraries POSITION_INDEPENDENT_CODE ON # Symbol visibility CXX_VISIBILITY_PRESET hidden VISIBILITY_INLINES_HIDDEN ON) # It's unfortunate that we have to manage dependencies ourselves. # # If we weren't building "object" libraries we could use # ``` # target_link_libraries(${component_name} INTERFACE ${Z3_MOD_COMPONENT_DEPENDENCIES}) # ``` # but we can't do that with "object" libraries. 
set_property(GLOBAL PROPERTY Z3_${component_name}_DEPS "") # Record this component's dependencies foreach (dependency ${Z3_MOD_COMPONENT_DEPENDENCIES}) if (NOT (TARGET ${dependency})) message(FATAL_ERROR "Component \"${component_name}\" depends on a non existent component \"${dependency}\"") endif() set_property(GLOBAL APPEND PROPERTY Z3_${component_name}_DEPS "${dependency}") endforeach() # Determine all the components that this component depends on set(_expanded_deps "") if (DEFINED Z3_MOD_COMPONENT_DEPENDENCIES) z3_expand_dependencies(_expanded_deps ${Z3_MOD_COMPONENT_DEPENDENCIES}) z3_add_component_dependencies_to_target(${component_name} ${_expanded_deps}) endif() #message(STATUS "Component \"${component_name}\" has the following dependencies ${_expanded_deps}") # Add any extra include directories foreach (extra_include ${Z3_COMPONENT_EXTRA_INCLUDE_DIRS}) target_include_directories(${component_name} PRIVATE "${extra_include}") endforeach() if (NOT Z3_MOD_NOT_LIBZ3_COMPONENT) # Add this component to the global list of Z3 components for libz3 set_property(GLOBAL APPEND PROPERTY Z3_LIBZ3_COMPONENTS ${component_name}) endif() endmacro() macro(z3_add_install_tactic_rule) # Arguments should be component names to use if (ARGC LESS 1) message(FATAL_ERROR "There should be at least one component") endif() if (EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/install_tactic.cpp") message(FATAL_ERROR "\"${CMAKE_CURRENT_SOURCE_DIR}/install_tactic.cpp\"" ${z3_polluted_tree_msg} ) endif() z3_expand_dependencies(_expanded_components ${ARGN}) # Get header files that declare tactics/probes set(_tactic_header_files "") foreach (dependency ${_expanded_components}) get_property(_component_tactic_header_files GLOBAL PROPERTY Z3_${dependency}_TACTIC_HEADERS ) list(APPEND _tactic_header_files "${_component_tactic_header_files}") endforeach() unset(_component_tactic_header_files) string(REPLACE ";" "\n" _tactic_header_files "${_tactic_header_files}") file(WRITE "${CMAKE_CURRENT_BINARY_DIR}/install_tactic.deps" ${_tactic_header_files}) add_custom_command(OUTPUT "install_tactic.cpp" COMMAND "${Python3_EXECUTABLE}" "${PROJECT_SOURCE_DIR}/scripts/mk_install_tactic_cpp.py" "${CMAKE_CURRENT_BINARY_DIR}" "${CMAKE_CURRENT_BINARY_DIR}/install_tactic.deps" DEPENDS "${PROJECT_SOURCE_DIR}/scripts/mk_install_tactic_cpp.py" ${Z3_GENERATED_FILE_EXTRA_DEPENDENCIES} "${CMAKE_CURRENT_BINARY_DIR}/install_tactic.deps" COMMENT "Generating \"${CMAKE_CURRENT_BINARY_DIR}/install_tactic.cpp\"" USES_TERMINAL VERBATIM ) unset(_expanded_components) unset(_tactic_header_files) endmacro() macro(z3_add_memory_initializer_rule) # Arguments should be component names to use if (ARGC LESS 1) message(FATAL_ERROR "There should be at least one component") endif() if (EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/mem_initializer.cpp") message(FATAL_ERROR "\"${CMAKE_CURRENT_SOURCE_DIR}/mem_initializer.cpp\"" ${z3_polluted_tree_msg} ) endif() z3_expand_dependencies(_expanded_components ${ARGN}) # Get header files that declare initializers and finalizers set(_mem_init_finalize_headers "") foreach (dependency ${_expanded_components}) get_property(_dep_mem_init_finalize_headers GLOBAL PROPERTY Z3_${dependency}_MEM_INIT_FINALIZER_HEADERS ) list(APPEND _mem_init_finalize_headers ${_dep_mem_init_finalize_headers}) endforeach() add_custom_command(OUTPUT "mem_initializer.cpp" COMMAND "${Python3_EXECUTABLE}" "${PROJECT_SOURCE_DIR}/scripts/mk_mem_initializer_cpp.py" "${CMAKE_CURRENT_BINARY_DIR}" ${_mem_init_finalize_headers} DEPENDS 
"${PROJECT_SOURCE_DIR}/scripts/mk_mem_initializer_cpp.py" ${Z3_GENERATED_FILE_EXTRA_DEPENDENCIES} ${_mem_init_finalize_headers} COMMENT "Generating \"${CMAKE_CURRENT_BINARY_DIR}/mem_initializer.cpp\"" USES_TERMINAL VERBATIM ) unset(_mem_init_finalize_headers) unset(_expanded_components) endmacro() macro(z3_add_gparams_register_modules_rule) # Arguments should be component names to use if (ARGC LESS 1) message(FATAL_ERROR "There should be at least one component") endif() if (EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/gparams_register_modules.cpp") message(FATAL_ERROR "\"${CMAKE_CURRENT_SOURCE_DIR}/gparams_register_modules.cpp\"" ${z3_polluted_tree_msg} ) endif() z3_expand_dependencies(_expanded_components ${ARGN}) # Get the list of header files to parse set(_register_module_header_files "") foreach (dependency ${_expanded_components}) get_property(_component_register_module_header_files GLOBAL PROPERTY Z3_${dependency}_REGISTER_MODULE_HEADERS) list(APPEND _register_module_header_files ${_component_register_module_header_files}) endforeach() unset(_component_register_module_header_files) add_custom_command(OUTPUT "gparams_register_modules.cpp" COMMAND "${Python3_EXECUTABLE}" "${PROJECT_SOURCE_DIR}/scripts/mk_gparams_register_modules_cpp.py" "${CMAKE_CURRENT_BINARY_DIR}" ${_register_module_header_files} DEPENDS "${PROJECT_SOURCE_DIR}/scripts/mk_gparams_register_modules_cpp.py" ${Z3_GENERATED_FILE_EXTRA_DEPENDENCIES} ${_register_module_header_files} COMMENT "Generating \"${CMAKE_CURRENT_BINARY_DIR}/gparams_register_modules.cpp\"" USES_TERMINAL VERBATIM ) unset(_expanded_components) unset(_register_module_header_files) endmacro() z3-z3-4.13.3/cmake/z3_add_cxx_flag.cmake000066400000000000000000000024151470205523200175730ustar00rootroot00000000000000include(CheckCXXCompilerFlag) include(CMakeParseArguments) function(z3_add_cxx_flag flag) CMAKE_PARSE_ARGUMENTS(z3_add_flag "REQUIRED;GLOBAL" "" "" ${ARGN}) string(REPLACE "-" "_" SANITIZED_FLAG_NAME "${flag}") string(REPLACE "/" "_" SANITIZED_FLAG_NAME "${SANITIZED_FLAG_NAME}") string(REPLACE "=" "_" SANITIZED_FLAG_NAME "${SANITIZED_FLAG_NAME}") string(REPLACE " " "_" SANITIZED_FLAG_NAME "${SANITIZED_FLAG_NAME}") string(REPLACE ":" "_" SANITIZED_FLAG_NAME "${SANITIZED_FLAG_NAME}") string(REPLACE "+" "_" SANITIZED_FLAG_NAME "${SANITIZED_FLAG_NAME}") unset(HAS_${SANITIZED_FLAG_NAME}) CHECK_CXX_COMPILER_FLAG("${flag}" HAS_${SANITIZED_FLAG_NAME}) if (z3_add_flag_REQUIRED AND NOT HAS_${SANITIZED_FLAG_NAME}) message(FATAL_ERROR "The flag \"${flag}\" is required but your C++ compiler doesn't support it") endif() if (HAS_${SANITIZED_FLAG_NAME}) message(STATUS "C++ compiler supports ${flag}") if (z3_add_flag_GLOBAL) # Set globally set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${flag} " PARENT_SCOPE) else() list(APPEND Z3_COMPONENT_CXX_FLAGS "${flag}") set(Z3_COMPONENT_CXX_FLAGS "${Z3_COMPONENT_CXX_FLAGS}" PARENT_SCOPE) endif() else() message(STATUS "C++ compiler does not support ${flag}") endif() endfunction() z3-z3-4.13.3/cmake/z3_append_linker_flag_list_to_target.cmake000066400000000000000000000014321470205523200240750ustar00rootroot00000000000000# The LINK_FLAGS property of a target in CMake is unfortunately a string and # not a list. This function takes a list of linker flags and iterates through # them to append them as strings to the ``LINK_FLAGS`` property of # the specified target. # E.g. 
# z3_append_linker_flag_list_to_target(mytarget "-static") function(z3_append_linker_flag_list_to_target target) if (NOT (TARGET "${target}")) message(FATAL_ERROR "Specified target \"${target}\" is not a target") endif() foreach(flag ${ARGN}) #message(STATUS "Appending link flag \"${flag}\" to target ${target}") # Note that space inside the quoted string is required so that the flags # are space separated. set_property(TARGET ${target} APPEND_STRING PROPERTY LINK_FLAGS " ${flag}") endforeach() endfunction() z3-z3-4.13.3/configure000077500000000000000000000006421470205523200144010ustar00rootroot00000000000000#!/bin/sh if test -z $PYTHON; then PYTHON=python fi if ! which $PYTHON > /dev/null; then echo "'$PYTHON' not found. Try to set the environment variable PYTHON." exit 1 fi if ! $PYTHON -c "print('testing')" > /dev/null ; then echo "'$PYTHON' failed to execute basic test script. Try to set the environment variable PYTHON with a working Python interpreter." exit 1 fi $PYTHON scripts/mk_make.py "$@" z3-z3-4.13.3/contrib/000077500000000000000000000000001470205523200141305ustar00rootroot00000000000000z3-z3-4.13.3/contrib/qprofdiff/000077500000000000000000000000001470205523200161105ustar00rootroot00000000000000z3-z3-4.13.3/contrib/qprofdiff/Makefile000077500000000000000000000001471470205523200175550ustar00rootroot00000000000000qprofdiff: main.cpp $(CXX) $(CXXFLAGS) main.cpp -o qprofdiff all: qprofdiff clean: rm -f qprofdiff z3-z3-4.13.3/contrib/qprofdiff/main.cpp000066400000000000000000000210771470205523200175470ustar00rootroot00000000000000/*++ Copyright (c) 2017 Microsoft Corporation Module Name: main.cpp Abstract: Main file for qprofdiff. Author: Christoph M. Wintersteiger (cwinter) Revision History: --*/ #include #include #include #include #include #include #include #include #include using namespace std; set options; // Profile format: // [quantifier_instances] qname : num_instances : max_generation : max_cost_s const string prefix = "[quantifier_instances]"; unsigned prefix_len = prefix.length(); typedef struct { unsigned num_instances, max_generation, max_cost; } map_entry; string trim(string str) { size_t linx = str.find_first_not_of(' '); size_t rinx = str.find_last_not_of(' '); return str.substr(linx, rinx-linx+1); } int parse(string const & filename, map & data) { ifstream fs(filename.c_str()); if (!fs.is_open()) { cout << "Can't open file '" << filename << "'" << endl; return ENOENT; } string qid; string tokens[4]; unsigned cur_token = 0; while (!fs.eof()) { string line; getline(fs, line); if (line.substr(0, prefix_len) == prefix) { line = trim(line.substr(prefix_len)); size_t from = 0, ti = 0; for (size_t inx = line.find(" : ", from); inx != string::npos; inx = line.find(" : ", from)) { tokens[ti] = trim(line.substr(from, inx-from)); from = inx+3; //3 is the length of " : " ti++; } if (from != line.length() && ti < 4) tokens[ti] = trim(line.substr(from)); qid = tokens[0]; if (data.find(qid) == data.end()) { map_entry & entry = data[qid]; entry.num_instances = entry.max_generation = entry.max_cost = 0; } // Existing entries represent previous occurrences of quantifiers // that, at some point, were removed (e.g. backtracked). We sum // up instances from all occurrences of the same qid. 
map_entry & entry = data[qid]; entry.num_instances += atoi(tokens[1].c_str()); entry.max_generation = max(entry.max_generation, (unsigned)atoi(tokens[2].c_str())); entry.max_cost = max(entry.max_cost, (unsigned)atoi(tokens[3].c_str())); } } fs.close(); return 0; } void display_data(map & data) { for (map::iterator it = data.begin(); it != data.end(); it++) cout << it->first << ": " << it->second.num_instances << ", " << it->second.max_generation << ", " << it->second.max_cost << endl; } typedef struct { int d_num_instances, d_max_generation, d_max_cost; bool left_only, right_only; } diff_entry; typedef struct { string qid; diff_entry e; } diff_item; #define DIFF_LT(X) bool diff_item_lt_ ## X (diff_item const & l, diff_item const & r) { \ int l_lt_r = l.e.d_ ## X < r.e.d_ ## X; \ int l_eq_r = l.e.d_ ## X == r.e.d_ ## X; \ return \ l.e.left_only ? (r.e.left_only ? ((l_eq_r) ? l.qid < r.qid : l_lt_r) : false) : \ l.e.right_only ? (r.e.right_only ? ((l_eq_r) ? l.qid < r.qid : l_lt_r) : true) : \ r.e.right_only ? false : \ r.e.left_only ? true : \ l_lt_r; \ } DIFF_LT(num_instances) DIFF_LT(max_generation) DIFF_LT(max_cost) int indicate(diff_entry const & e, bool suppress_unchanged) { if (e.left_only) { cout << "< "; return INT_MIN; } else if (e.right_only) { cout << "> "; return INT_MAX; } else { int const & delta = (options.find("-si") != options.end()) ? e.d_num_instances : (options.find("-sg") != options.end()) ? e.d_max_generation : (options.find("-sc") != options.end()) ? e.d_max_cost : e.d_num_instances; if (delta < 0) cout << "+ "; else if (delta > 0) cout << "- "; else if (delta == 0 && !suppress_unchanged) cout << "= "; return delta; } } void diff(map & left, map & right) { map diff_data; for (map::const_iterator lit = left.begin(); lit != left.end(); lit++) { string const & qid = lit->first; map_entry const & lentry = lit->second; map::const_iterator rit = right.find(qid); if (rit != right.end()) { map_entry const & rentry = rit->second; diff_entry & de = diff_data[qid]; de.left_only = de.right_only = false; de.d_num_instances = lentry.num_instances - rentry.num_instances; de.d_max_generation = lentry.max_generation - rentry.max_generation; de.d_max_cost = lentry.max_cost - rentry.max_cost; } else { diff_entry & de = diff_data[qid]; de.left_only = true; de.right_only = false; de.d_num_instances = lentry.num_instances; de.d_max_generation = lentry.max_generation; de.d_max_cost = lentry.max_cost; } } for (map::const_iterator rit = right.begin(); rit != right.end(); rit++) { string const & qid = rit->first; map_entry const & rentry = rit->second; map::const_iterator lit = left.find(qid); if (lit == left.end()) { diff_entry & de = diff_data[qid]; de.left_only = false; de.right_only = true; de.d_num_instances = -(int)rentry.num_instances; de.d_max_generation = -(int)rentry.max_generation; de.d_max_cost = -(int)rentry.max_cost; } } vector flat_data; for (map::const_iterator it = diff_data.begin(); it != diff_data.end(); it++) { flat_data.push_back(diff_item()); flat_data.back().qid = it->first; flat_data.back().e = it->second; } stable_sort(flat_data.begin(), flat_data.end(), options.find("-si") != options.end() ? diff_item_lt_num_instances : options.find("-sg") != options.end() ? diff_item_lt_max_generation : options.find("-sc") != options.end() ? 
diff_item_lt_max_cost : diff_item_lt_num_instances); bool suppress_unchanged = options.find("-n") != options.end(); for (vector::const_iterator it = flat_data.begin(); it != flat_data.end(); it++) { diff_item const & d = *it; string const & qid = d.qid; diff_entry const & e = d.e; int delta = indicate(e, suppress_unchanged); if (!(delta == 0 && suppress_unchanged)) cout << qid << " (" << (e.d_num_instances > 0 ? "" : "+") << -e.d_num_instances << " inst., " << (e.d_max_generation > 0 ? "" : "+") << -e.d_max_generation << " max. gen., " << (e.d_max_cost > 0 ? "" : "+") << -e.d_max_cost << " max. cost)" << endl; } } void display_usage() { cout << "Usage: qprofdiff [options] " << endl; cout << "Options:" << endl; cout << " -n Suppress unchanged items" << endl; cout << " -si Sort by difference in number of instances" << endl; cout << " -sg Sort by difference in max. generation" << endl; cout << " -sc Sort by difference in max. cost" << endl; } int main(int argc, char ** argv) { char * filename1 = 0; char * filename2 = 0; for (int i = 1; i < argc; i++) { int len = string(argv[i]).length(); if (len > 1 && argv[i][0] == '-') { options.insert(string(argv[i])); } else if (filename1 == 0) filename1 = argv[i]; else if (filename2 == 0) filename2 = argv[i]; else { cout << "Invalid argument: " << argv[i] << endl << endl; display_usage(); return EINVAL; } } if (filename1 == 0 || filename2 == 0) { cout << "Two filenames required." << endl << endl; display_usage(); return EINVAL; } cout << "Comparing " << filename1 << " to " << filename2 << endl; map data1, data2; int r = parse(filename1, data1); if (r != 0) return r; r = parse(filename2, data2); if (r != 0) return r; // display_data(data1); // display_data(data2); diff(data1, data2); return 0; } z3-z3-4.13.3/contrib/qprofdiff/maintainers.txt000066400000000000000000000001241470205523200211600ustar00rootroot00000000000000# Maintainers - Christoph M. 
Wintersteiger (@wintersteiger, cwinter@microsoft.com) z3-z3-4.13.3/contrib/qprofdiff/qprofdiff.vcxproj000066400000000000000000000151761470205523200215170ustar00rootroot00000000000000 Debug Win32 Release Win32 Debug x64 Release x64 15.0 {96E7E3EF-4162-474D-BD32-C702632AAF2B} qprofdiff 8.1 Application true v141 NotSet Application false v141 true MultiByte Application true v141 MultiByte Application false v141 true MultiByte $(IncludePath) $(LibraryPath) $(IncludePath) $(LibraryPath) Level3 Disabled true MultiThreadedDebugDLL ..\..\src\util;%(AdditionalIncludeDirectories) ProgramDatabase $(LibraryPath);%(AdditionalLibraryDirectories) Level3 Disabled true ..\..\src\util;%(AdditionalIncludeDirectories) Level3 MaxSpeed true true true ..\..\src\util;%(AdditionalIncludeDirectories) true true Level3 MaxSpeed true true true ..\..\src\util;%(AdditionalIncludeDirectories) true true z3-z3-4.13.3/contrib/qprofdiff/qprofdiff.vcxproj.filters000066400000000000000000000016471470205523200231640ustar00rootroot00000000000000 {4FC737F1-C7A5-4376-A066-2A32D752A2FF} cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx {93995380-89BD-4b04-88EB-625FBE52EBFB} h;hh;hpp;hxx;hm;inl;inc;xsd {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms Source Files z3-z3-4.13.3/contrib/suppressions/000077500000000000000000000000001470205523200167055ustar00rootroot00000000000000z3-z3-4.13.3/contrib/suppressions/README.md000066400000000000000000000002741470205523200201670ustar00rootroot00000000000000# Suppression files This directory contains suppression files used by various program analysis tools. Suppression files tell a program analysis tool to suppress various warnings/errors. z3-z3-4.13.3/contrib/suppressions/maintainers.txt000066400000000000000000000000471470205523200217610ustar00rootroot00000000000000# Maintainers - Dan Liew (@delcypher) z3-z3-4.13.3/contrib/suppressions/sanitizers/000077500000000000000000000000001470205523200211005ustar00rootroot00000000000000z3-z3-4.13.3/contrib/suppressions/sanitizers/README.md000066400000000000000000000001571470205523200223620ustar00rootroot00000000000000# Sanitizer suppression files This directory contains files used to suppress ASan/LSan/UBSan warnings/errors. z3-z3-4.13.3/contrib/suppressions/sanitizers/asan.txt000066400000000000000000000000441470205523200225610ustar00rootroot00000000000000# AddressSanitizer suppression file z3-z3-4.13.3/contrib/suppressions/sanitizers/lsan.txt000066400000000000000000000000411470205523200225710ustar00rootroot00000000000000# LeakSanitizer suppression file z3-z3-4.13.3/contrib/suppressions/sanitizers/ubsan.txt000066400000000000000000000003621470205523200227520ustar00rootroot00000000000000# UndefinedBehavior sanitizer suppression file # FIXME: UBSan doesn't usually have false positives so we need to fix all of these! 
# Occurs when running tptp example # See https://github.com/Z3Prover/z3/issues/964 null:rational.h null:mpq.h z3-z3-4.13.3/doc/000077500000000000000000000000001470205523200132355ustar00rootroot00000000000000z3-z3-4.13.3/doc/CMakeLists.txt000066400000000000000000000057121470205523200160020ustar00rootroot00000000000000find_package(Doxygen REQUIRED) message(STATUS "DOXYGEN_EXECUTABLE: \"${DOXYGEN_EXECUTABLE}\"") message(STATUS "DOXYGEN_VERSION: \"${DOXYGEN_VERSION}\"") set(DOC_DEST_DIR "${CMAKE_CURRENT_BINARY_DIR}/api") set(DOC_TEMP_DIR "${CMAKE_CURRENT_BINARY_DIR}/temp") set(MK_API_DOC_SCRIPT "${CMAKE_CURRENT_SOURCE_DIR}/mk_api_doc.py") set(PYTHON_API_OPTIONS "") set(DOTNET_API_OPTIONS "") set(JAVA_API_OPTIONS "") SET(DOC_EXTRA_DEPENDS "") if (Z3_BUILD_PYTHON_BINDINGS) # FIXME: Don't hard code this path list(APPEND PYTHON_API_OPTIONS "--z3py-package-path" "${PROJECT_BINARY_DIR}/python/z3") list(APPEND DOC_EXTRA_DEPENDS "build_z3_python_bindings") else() list(APPEND PYTHON_API_OPTIONS "--no-z3py") endif() if (BUILD_DOTNET_BINDINGS) # FIXME: Don't hard code these paths list(APPEND DOTNET_API_OPTIONS "--dotnet-search-paths" "${PROJECT_SOURCE_DIR}/src/api/dotnet" "${PROJECT_BINARY_DIR}/src/api/dotnet" ) list(APPEND DOC_EXTRA_DEPENDS "build_z3_dotnet_bindings") else() list(APPEND DOTNET_API_OPTIONS "--no-dotnet") endif() if (BUILD_JAVA_BINDINGS) # FIXME: Don't hard code these paths list(APPEND JAVA_API_OPTIONS "--java-search-paths" "${PROJECT_SOURCE_DIR}/src/api/java" "${PROJECT_BINARY_DIR}/src/api/java" ) list(APPEND DOC_EXTRA_DEPENDS "build_z3_java_bindings") else() list(APPEND JAVA_API_OPTIONS "--no-java") endif() option(Z3_ALWAYS_BUILD_DOCS "Always build documentation for API bindings" ON) if (Z3_ALWAYS_BUILD_DOCS) set(ALWAYS_BUILD_DOCS_ARG "ALL") else() set(ALWAYS_BUILD_DOCS_ARG "") # FIXME: This sucks but there doesn't seem to be a way to make the top level # install target depend on the `api_docs` target. message(WARNING "Building documentation for API bindings is not part of the" " all target. This may result in stale files being installed when running" " the install target. You should run the api_docs target before running" " the install target. Alternatively Set Z3_ALWAYS_BUILD_DOCS to ON to" " automatically build documentation when running the install target." ) endif() add_custom_target(api_docs ${ALWAYS_BUILD_DOCS_ARG} COMMAND "${Python3_EXECUTABLE}" "${MK_API_DOC_SCRIPT}" --build "${PROJECT_BINARY_DIR}" --doxygen-executable "${DOXYGEN_EXECUTABLE}" --output-dir "${DOC_DEST_DIR}" --temp-dir "${DOC_TEMP_DIR}" ${PYTHON_API_OPTIONS} ${DOTNET_API_OPTIONS} ${JAVA_API_OPTIONS} DEPENDS ${DOC_EXTRA_DEPENDS} COMMENT "Generating documentation" USES_TERMINAL ) # Remove generated documentation when running `clean` target. set_property(DIRECTORY APPEND PROPERTY ADDITIONAL_MAKE_CLEAN_FILES "${DOC_DEST_DIR}" ) option(Z3_INSTALL_API_BINDINGS_DOCUMENTATION "Install documentation for API bindings" ON) set(CMAKE_INSTALL_API_BINDINGS_DOC "${CMAKE_INSTALL_DOCDIR}" CACHE PATH "Path to install documentation for API bindings" ) if (Z3_INSTALL_API_BINDINGS_DOCUMENTATION) install( DIRECTORY "${DOC_DEST_DIR}" DESTINATION "${CMAKE_INSTALL_API_BINDINGS_DOC}" ) endif() z3-z3-4.13.3/doc/README000066400000000000000000000011211470205523200141100ustar00rootroot00000000000000API documentation ----------------- To generate the API documentation for the C, C++, .NET, Java and Python APIs, we must execute python mk_api_doc.py We must have doxygen installed in our system. 
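For example, assuming Z3 was built in the default '../build' directory, one
possible invocation is

   python mk_api_doc.py --build ../build

(the '--build' option defaults to '../build' and '--output-dir' defaults to
the './api' subdirectory, so a plain 'python mk_api_doc.py' usually suffices).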
The documentation will be stored in the subdirectory './api/html'. The main file is './api/html/index.html' Code documentation ------------------ To generate documentation for the Z3 code, we must execute doxygen z3code.dox We must also have dot installed in our system. The documentation will be store in the subdirectory './code/html'. The main file is './code/html/index.html' z3-z3-4.13.3/doc/design_recfuns.md000066400000000000000000000076531470205523200165700ustar00rootroot00000000000000# Design for handling recursive functions Main source of inspiration is [Sutter, Köksal & Kuncak 2011], as implemented in Leon, but the main differences is that we should unroll function definitions directly from the inside of Z3, in a backtracking way. Termination and fairness are ensured by iterative-deepening on the maximum number of unrollings in a given branch. ## Unfolding The idea is that every function definition `f(x1…xn) := rhs[x1…xn]` is compiled into: - a list of cases `A_f_i[x1…xn] => f(x1…xn) = rhs_i[x1…xn]`. When `A_f_i[t1…tn]` becomes true in the model, `f(t1…tn)` is said to be *unfolded* and the clause `A_f_i[t1…tn] => f(t1…tn) = rhs_i[t1…tn]` is added as an auxiliary clause. - a list of constraints `Γ_f_i[x1…xn] <=> A_f_i[x1…xn]` that states when `A_f_i[x1…xn]` should be true, depending on inputs `x1…xn`. For every term `f(t1…tn)` present in congruence closure, we immediately add all the `Γ_f_i[t1…tn] <=> A_f_i[t1…tn]` as auxiliary clauses (maybe during internalization of `f(t1…tn)`?). where each `A_f_i[x1…xn]` is a special new predicate representing the given case of `f`, and `rhs_i` does not contain any `ite`. We assume pattern matching has been compiled to `ite` beforehand. For example, `fact(n) := if n<2 then 1 else n * fact(n-1)` is compiled into: - `A_fact_0[n] => fact(n) = 1` - `A_fact_1[n] => fact(n) = n * fact(n-1)` - `A_fact_0[n] <=> n < 2` - `A_fact_1[n] <=> ¬(n < 2)` The 2 first clauses are only added when `A_fact_0[t]` is true (respectively `A_fact_1[t]` is true). The 2 other clauses are added as soon as `fact(t)` is internalized. ## Termination To ensure termination, we define variables: - `unfold_depth: int` - `current_max_unfold_depth: int` - `global_max_unfold_depth: int` and a special literal `[max_depth=$n]` for each `n:int`. Solving is done under the local assumption `[max_depth=$current_max_unfold_depth]` (this should be handled in some outer loop, e.g. in a custom tactic). Whenever `A_f_i[t1…tn]` becomes true (for any `f`), we increment `unfold_depth`. If `unfold_depth > current_max_unfold_depth`, then the conflict clause `[max_depth=$current_max_unfold_depth] => Γ => false` where `Γ` is the conjunction of all `A_f_i[t1…tn]` true in the trail. For non-recursive functions, we don't have to increment `unfold_depth`. Some other functions that are known If the solver answers "SAT", we have a model. Otherwise, if `[max_depth=$current_max_unfold_depth]` is part of the unsat-core, then we increase `current_max_unfold_depth`. If `current_max_unfold_depth == global_max_unfold_depth` then we report "UNKNOWN" (reached global depth limit), otherwise we can try to `solve()` again with the new assumption (higher depth limit). ## Tactic there should be a parametrized tactic `funrec(t, n)` where `t` is the tactic used to solve (under assumption that depth is limited to `current_max_unfold_depth`) and `n` is an integer that is assigned to `global_max_unfold_depth`. 
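As a hedged, illustrative sketch (the constant `x` and the goal value `24` are
made up for this example), the factorial definition from the unfolding section
corresponds roughly to the following SMT-LIB input that such a tactic would
consume:

```smt2
(define-fun-rec fact ((n Int)) Int
  (ite (< n 2) 1 (* n (fact (- n 1)))))
(declare-const x Int)
(assert (= (fact x) 24))
```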
This way, to try and find models for a problem with recursive functions + LIA, one could use something like `(funrec (then simplify dl smt) 100)`. ## Expected benefits This addition to Z3 would bring many benefits compared to current alternatives (Leon, quantifiers, …) - should be very fast and lightweight (compared to Leon or quantifiers). In particular, every function call is very lightweight even compared to Leon (no need for full model building, followed by unsat core extraction) - possibility of answering "SAT" for any `QF_*` fragment + recursive functions - makes `define-funs-rec` a first-class citizen of the language, usable to model user-defined theories or to analyze functional programs directly ## Optimizations - maybe `C_f_i` literals should never be decided on (they can always be propagated). Even stronger: they should not be part of conflicts? (i.e. tune conflict resolution to always resolve these literals away, disregarding their level) z3-z3-4.13.3/doc/mk_api_doc.py000066400000000000000000000342501470205523200157000ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation 2015 """ Z3 API documentation generator script """ import argparse import os import shutil import re import getopt import pydoc import sys import subprocess ML_ENABLED=False MLD_ENABLED=False JS_ENABLED=False BUILD_DIR='../build' DOXYGEN_EXE='doxygen' TEMP_DIR=os.path.join(os.getcwd(), 'tmp') OUTPUT_DIRECTORY=os.path.join(os.getcwd(), 'api') Z3PY_PACKAGE_PATH='../src/api/python/z3' JS_API_PATH='../src/api/js' Z3PY_ENABLED=True DOTNET_ENABLED=True JAVA_ENABLED=True Z3OPTIONS_ENABLED=True DOTNET_API_SEARCH_PATHS=['../src/api/dotnet'] JAVA_API_SEARCH_PATHS=['../src/api/java'] SCRIPT_DIR=os.path.abspath(os.path.dirname(__file__)) def parse_options(): global ML_ENABLED, MLD_ENABLED, BUILD_DIR, DOXYGEN_EXE, TEMP_DIR, OUTPUT_DIRECTORY global Z3PY_PACKAGE_PATH, Z3PY_ENABLED, DOTNET_ENABLED, JAVA_ENABLED, JS_ENABLED global DOTNET_API_SEARCH_PATHS, JAVA_API_SEARCH_PATHS, JS_API_PATH parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('-b', '--build', default=BUILD_DIR, help='Directory where Z3 is built (default: %(default)s)', ) parser.add_argument('--ml', action='store_true', default=False, help='Include ML/OCaml API documentation' ) parser.add_argument('--mld', action='store_true', default=False, help='Include ML/OCaml API documentation' ) parser.add_argument('--js', action='store_true', default=False, help='Include JS/TS API documentation' ) parser.add_argument('--doxygen-executable', dest='doxygen_executable', default=DOXYGEN_EXE, help='Doxygen executable to use (default: %(default)s)', ) parser.add_argument('--temp-dir', dest='temp_dir', default=TEMP_DIR, help='Path to directory to use as temporary directory. ' '(default: %(default)s)', ) parser.add_argument('--output-dir', dest='output_dir', default=OUTPUT_DIRECTORY, help='Path to output directory (default: %(default)s)', ) parser.add_argument('--z3py-package-path', dest='z3py_package_path', default=Z3PY_PACKAGE_PATH, help='Path to directory containing Z3py package (default: %(default)s)', ) # FIXME: I would prefer not to have negative options (i.e. `--z3py` # instead of `--no-z3py`) but historically these bindings have been on by # default so we have options to disable generating documentation for these # bindings rather than enable them. 
parser.add_argument('--no-z3py', dest='no_z3py', action='store_true', default=False, help='Do not generate documentation for Python bindings', ) parser.add_argument('--no-dotnet', dest='no_dotnet', action='store_true', default=False, help='Do not generate documentation for .NET bindings', ) parser.add_argument('--no-java', dest='no_java', action='store_true', default=False, help='Do not generate documentation for Java bindings', ) parser.add_argument('--dotnet-search-paths', dest='dotnet_search_paths', nargs='+', default=DOTNET_API_SEARCH_PATHS, help='Specify one or more path to look for .NET files (default: %(default)s).', ) parser.add_argument('--java-search-paths', dest='java_search_paths', nargs='+', default=JAVA_API_SEARCH_PATHS, help='Specify one or more paths to look for Java files (default: %(default)s).', ) pargs = parser.parse_args() ML_ENABLED = pargs.ml MLD_ENABLED = pargs.mld JS_ENABLED = pargs.js BUILD_DIR = pargs.build DOXYGEN_EXE = pargs.doxygen_executable TEMP_DIR = pargs.temp_dir OUTPUT_DIRECTORY = pargs.output_dir Z3PY_PACKAGE_PATH = pargs.z3py_package_path Z3PY_ENABLED = not pargs.no_z3py DOTNET_ENABLED = not pargs.no_dotnet JAVA_ENABLED = not pargs.no_java DOTNET_API_SEARCH_PATHS = pargs.dotnet_search_paths JAVA_API_SEARCH_PATHS = pargs.java_search_paths if Z3PY_ENABLED: if not os.path.exists(Z3PY_PACKAGE_PATH): raise Exception('"{}" does not exist'.format(Z3PY_PACKAGE_PATH)) if not os.path.basename(Z3PY_PACKAGE_PATH) == 'z3': raise Exception('"{}" does not end with "z3"'.format(Z3PY_PACKAGE_PATH)) return def mk_dir(d): if not os.path.exists(d): os.makedirs(d) # Eliminate def_API, extra_API, and def_Type directives from file 'inf'. # The result is stored in 'outf'. def cleanup_API(inf, outf): pat1 = re.compile(".*def_API.*") pat2 = re.compile(".*extra_API.*") pat3 = re.compile(r".*def_Type\(.*") pat4 = re.compile("Z3_DECLARE_CLOSURE.*") pat5 = re.compile("DEFINE_TYPE.*") _inf = open(inf, 'r') _outf = open(outf, 'w') for line in _inf: if not pat1.match(line) and not pat2.match(line) and not pat3.match(line) and not pat4.match(line) and not pat5.match(line): _outf.write(line) def configure_file(template_file_path, output_file_path, substitutions): """ Read a template file ``template_file_path``, perform substitutions found in the ``substitutions`` dictionary and write the result to the output file ``output_file_path``. The template file should contain zero or more template strings of the form ``@NAME@``. The substitutions dictionary maps old strings (without the ``@`` symbols) to their replacements. 
""" assert isinstance(template_file_path, str) assert isinstance(output_file_path, str) assert isinstance(substitutions, dict) assert len(template_file_path) > 0 assert len(output_file_path) > 0 print("Generating {} from {}".format(output_file_path, template_file_path)) if not os.path.exists(template_file_path): raise Exception('Could not find template file "{}"'.format(template_file_path)) # Read whole template file into string template_string = None with open(template_file_path, 'r') as f: template_string = f.read() # Do replacements for (old_string, replacement) in substitutions.items(): template_string = template_string.replace('@{}@'.format(old_string), replacement) # Write the string to the file with open(output_file_path, 'w') as f: f.write(template_string) try: parse_options() print("Creating temporary directory \"{}\"".format(TEMP_DIR)) mk_dir(TEMP_DIR) # Short-hand for path to temporary file def temp_path(path): return os.path.join(TEMP_DIR, path) # Short-hand for path to file in `doc` directory def doc_path(path): return os.path.join(SCRIPT_DIR, path) # Create configuration file from template doxygen_config_substitutions = { 'OUTPUT_DIRECTORY': OUTPUT_DIRECTORY, 'TEMP_DIR': TEMP_DIR, 'CXX_API_SEARCH_PATHS': doc_path('../src/api/c++'), } if Z3PY_ENABLED: print("Z3Py documentation enabled") doxygen_config_substitutions['PYTHON_API_FILES'] = 'z3*.py' else: print("Z3Py documentation disabled") doxygen_config_substitutions['PYTHON_API_FILES'] = '' if DOTNET_ENABLED: print(".NET documentation enabled") doxygen_config_substitutions['DOTNET_API_FILES'] = '*.cs' dotnet_api_search_path_str = "" for p in DOTNET_API_SEARCH_PATHS: # Quote path so that paths with spaces are handled correctly dotnet_api_search_path_str += "\"{}\" ".format(p) doxygen_config_substitutions['DOTNET_API_SEARCH_PATHS'] = dotnet_api_search_path_str else: print(".NET documentation disabled") doxygen_config_substitutions['DOTNET_API_FILES'] = '' doxygen_config_substitutions['DOTNET_API_SEARCH_PATHS'] = '' if JAVA_ENABLED: print("Java documentation enabled") doxygen_config_substitutions['JAVA_API_FILES'] = '*.java' java_api_search_path_str = "" for p in JAVA_API_SEARCH_PATHS: # Quote path so that paths with spaces are handled correctly java_api_search_path_str += "\"{}\" ".format(p) doxygen_config_substitutions['JAVA_API_SEARCH_PATHS'] = java_api_search_path_str else: print("Java documentation disabled") doxygen_config_substitutions['JAVA_API_FILES'] = '' doxygen_config_substitutions['JAVA_API_SEARCH_PATHS'] = '' if JS_ENABLED: print('Javascript documentation enabled') else: print('Javascript documentation disabled') doxygen_config_file = temp_path('z3api.cfg') configure_file( doc_path('z3api.cfg.in'), doxygen_config_file, doxygen_config_substitutions) website_dox_substitutions = {} bullet_point_prefix='\n - ' website_dox_substitutions['CPP_API'] = ( '{prefix}C++ API ' ).format( prefix=bullet_point_prefix) website_dox_substitutions['C_API'] = ( '{prefix}C API ' ).format( prefix=bullet_point_prefix) if Z3PY_ENABLED: print("Python documentation enabled") website_dox_substitutions['PYTHON_API'] = ( '{prefix}Python API ' '(also available in pydoc format)' ).format( prefix=bullet_point_prefix) else: print("Python documentation disabled") website_dox_substitutions['PYTHON_API'] = '' if DOTNET_ENABLED: website_dox_substitutions['DOTNET_API'] = ( '{prefix}' '' '.NET API').format( prefix=bullet_point_prefix) else: website_dox_substitutions['DOTNET_API'] = '' if JAVA_ENABLED: website_dox_substitutions['JAVA_API'] = ( '{prefix}' 
'Java API').format( prefix=bullet_point_prefix) else: website_dox_substitutions['JAVA_API'] = '' if ML_ENABLED or MLD_ENABLED: website_dox_substitutions['OCAML_API'] = ( '{prefix}ML/OCaml API' ).format( prefix=bullet_point_prefix) else: website_dox_substitutions['OCAML_API'] = '' if JS_ENABLED: website_dox_substitutions['JS_API'] = ( '{prefix}Javascript/Typescript API' ).format( prefix=bullet_point_prefix) else: website_dox_substitutions['JS_API'] = '' configure_file( doc_path('website.dox.in'), temp_path('website.dox'), website_dox_substitutions) mk_dir(os.path.join(OUTPUT_DIRECTORY, 'html')) if Z3PY_ENABLED: shutil.copyfile(doc_path('../src/api/python/z3/z3.py'), temp_path('z3py.py')) cleanup_API(doc_path('../src/api/z3_api.h'), temp_path('z3_api.h')) cleanup_API(doc_path('../src/api/z3_ast_containers.h'), temp_path('z3_ast_containers.h')) cleanup_API(doc_path('../src/api/z3_algebraic.h'), temp_path('z3_algebraic.h')) cleanup_API(doc_path('../src/api/z3_polynomial.h'), temp_path('z3_polynomial.h')) cleanup_API(doc_path('../src/api/z3_rcf.h'), temp_path('z3_rcf.h')) cleanup_API(doc_path('../src/api/z3_fixedpoint.h'), temp_path('z3_fixedpoint.h')) cleanup_API(doc_path('../src/api/z3_optimization.h'), temp_path('z3_optimization.h')) cleanup_API(doc_path('../src/api/z3_fpa.h'), temp_path('z3_fpa.h')) print("Removed annotations from z3_api.h.") try: if subprocess.call([DOXYGEN_EXE, doxygen_config_file]) != 0: print("ERROR: doxygen returned nonzero return code") exit(1) except: print("ERROR: failed to execute 'doxygen', make sure doxygen (http://www.doxygen.org) is available in your system.") exit(1) print("Generated Doxygen based documentation") shutil.rmtree(os.path.realpath(TEMP_DIR)) print("Removed temporary directory \"{}\"".format(TEMP_DIR)) if Z3PY_ENABLED: # Put z3py at the beginning of the search path to try to avoid picking up # an installed copy of Z3py. 
sys.path.insert(0, os.path.dirname(Z3PY_PACKAGE_PATH)) if sys.version < '3': import __builtin__ __builtin__.Z3_LIB_DIRS = [ BUILD_DIR ] else: import builtins builtins.Z3_LIB_DIRS = [ BUILD_DIR ] for modulename in ( 'z3', 'z3.z3', 'z3.z3consts', 'z3.z3core', 'z3.z3num', 'z3.z3poly', 'z3.z3printer', 'z3.z3rcf', 'z3.z3types', 'z3.z3util', ): pydoc.writedoc(modulename) doc = modulename + '.html' shutil.move(doc, os.path.join(OUTPUT_DIRECTORY, 'html', doc)) print("Generated pydoc Z3Py documentation.") if ML_ENABLED: ml_output_dir = os.path.join(OUTPUT_DIRECTORY, 'html', 'ml') mk_dir(ml_output_dir) if subprocess.call(['ocamldoc', '-html', '-d', ml_output_dir, '-sort', '-hide', 'Z3', '-I', '$(ocamlfind query zarith)', '-I', '%s/api/ml' % BUILD_DIR, '%s/api/ml/z3enums.mli' % BUILD_DIR, '%s/api/ml/z3.mli' % BUILD_DIR]) != 0: print("ERROR: ocamldoc failed.") exit(1) print("Generated ML/OCaml documentation.") if JS_ENABLED: try: subprocess.check_output(['npm', 'run', '--prefix=%s' % JS_API_PATH, 'check-engine']) except subprocess.CalledProcessError as e: print("ERROR: node version check failed.") print(e.output) exit(1) if subprocess.call(['npm', 'run', '--prefix=%s' % JS_API_PATH, 'docs']) != 0: print("ERROR: npm run docs failed.") exit(1) print("Generated Javascript documentation.") print("Documentation was successfully generated at subdirectory '{}'.".format(OUTPUT_DIRECTORY)) except Exception: exctype, value = sys.exc_info()[:2] print("ERROR: failed to generate documentation: %s" % value) exit(1) z3-z3-4.13.3/doc/mk_params_doc.py000066400000000000000000000033541470205523200164130ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation 2015 """ Z3 API documentation for parameters """ import argparse import subprocess import sys import re import os BUILD_DIR='../build' OUTPUT_DIRECTORY=os.path.join(os.getcwd(), 'api') def parse_options(): global BUILD_DIR, OUTPUT_DIRECTORY parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('-b', '--build', default=BUILD_DIR, help='Directory where Z3 is built (default: %(default)s)', ) parser.add_argument('--output-dir', dest='output_dir', default=OUTPUT_DIRECTORY, help='Path to output directory (default: %(default)s)', ) pargs = parser.parse_args() BUILD_DIR = pargs.build OUTPUT_DIRECTORY = pargs.output_dir def help(ous): global BUILD_DIR ous.write("Z3 Options\n") z3_exe = BUILD_DIR + "/z3" out = subprocess.Popen([z3_exe, "-pm"],stdout=subprocess.PIPE).communicate()[0] modules = ["global"] if out != None: out = out.decode(sys.getdefaultencoding()) module_re = re.compile(r"\[module\] (.*)\,") lines = out.split("\n") for line in lines: m = module_re.search(line) if m: modules += [m.group(1)] for module in modules: out = subprocess.Popen([z3_exe, "-pmmd:%s" % module],stdout=subprocess.PIPE).communicate()[0] if out == None: continue out = out.decode(sys.getdefaultencoding()) out = out.replace("\r","") ous.write(out) parse_options() def mk_dir(d): if not os.path.exists(d): os.makedirs(d) mk_dir(os.path.join(OUTPUT_DIRECTORY, 'md')) with open(OUTPUT_DIRECTORY + "/md/Parameters.md",'w') as ous: help(ous) z3-z3-4.13.3/doc/mk_tactic_doc.py000066400000000000000000000071241470205523200163760ustar00rootroot00000000000000# Copyright (c) Microsoft Corporation 2015 """ Tactic documentation generator script """ import os import re import sys import subprocess BUILD_DIR='../build' SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__)) OUTPUT_DIRECTORY = os.path.join(os.getcwd(), 'api') def doc_path(path): return os.path.join(SCRIPT_DIR, path) 
is_doc = re.compile("Tactic Documentation") is_doc_end = re.compile("\-\-\*\/") is_tac_name = re.compile("## Tactic (.*)") is_simplifier = re.compile("ADD_SIMPLIFIER\(.*\"([^\"]*)\".*,.*\"([^\"]*)\".*,.*\"([^\"]*)\"\.*\)") def is_ws(s): return all([0 for ch in s if ch != ' ' and ch != '\n']) def extract_params(ous, tac): z3_exe = BUILD_DIR + "/z3" out = subprocess.Popen([z3_exe, f"-tacticsmd:{tac}"], stdout=subprocess.PIPE).communicate()[0] if not out: return out = out.decode(sys.getdefaultencoding()) if is_ws(out): return ous.write("### Parameters\n\n") for line in out: ous.write(line.replace("\r","")) ous.write("\n") def generate_tactic_doc(ous, f, ins): tac_name = None for line in ins: m = is_tac_name.search(line) if m: tac_name = m.group(1) if is_doc_end.search(line): if tac_name: extract_params(ous, tac_name) break ous.write(line) def extract_tactic_doc(ous, f): with open(f) as ins: for line in ins: if is_doc.search(line): generate_tactic_doc(ous, f, ins) def generate_simplifier_doc(ous, name, desc): ous.write("## Simplifier [" + name + "](https://microsoft.github.io/z3guide/docs/strategies/summary/#tactic-" + name + ")\n") ous.write("### Description\n" + desc + "\n") def extract_simplifier_doc(ous, f): with open(f) as ins: for line in ins: m = is_simplifier.search(line) if m: generate_simplifier_doc(ous, m.group(1), m.group(2)) def find_tactic_name(path): with open(path) as ins: for line in ins: m = is_tac_name.search(line) if m: return m.group(1) print(f"no tactic in {path}") return "" def find_simplifier_name(path): with open(path) as ins: for line in ins: m = is_simplifier.search(line) if m: return m.group(1) print(f"no simplifier in {path}") return "" def presort_files(find_fn): tac_files = [] for root, dirs, files in os.walk(doc_path("../src")): for f in files: if f.endswith("~"): continue if f.endswith("tactic.h") or "simplifiers" in root: tac_files += [(f, os.path.join(root, f))] tac_files = sorted(tac_files, key = lambda x: find_fn(x[1])) return tac_files def help(ous): ous.write("---\n") ous.write("title: Tactics Summary\n") ous.write("sidebar_position: 6\n") ous.write("---\n") tac_files = presort_files(find_tactic_name) for file, path in tac_files: extract_tactic_doc(ous, path) def help_simplifier(ous): ous.write("---\n") ous.write("title: Simplifiers Summary\n") ous.write("sidebar_position: 7\n") ous.write("---\n") tac_files = presort_files(find_simplifier_name) for file, path in tac_files: extract_simplifier_doc(ous, path) def mk_dir(d): if not os.path.exists(d): os.makedirs(d) mk_dir(os.path.join(OUTPUT_DIRECTORY, 'md')) with open(OUTPUT_DIRECTORY + "/md/tactics-summary.md",'w') as ous: help(ous) with open(OUTPUT_DIRECTORY + "/md/simplifier-summary.md",'w') as ous: help_simplifier(ous) z3-z3-4.13.3/doc/website.dox.in000066400000000000000000000007171470205523200160250ustar00rootroot00000000000000/** \mainpage An Efficient Theorem Prover Z3 is a high-performance theorem prover being developed at Microsoft Research. The Z3 website is at http://github.com/z3prover. This website hosts the automatically generated documentation for the Z3 APIs. @C_API@ @CPP_API@ @DOTNET_API@ @JAVA_API@ @PYTHON_API@ @OCAML_API@ @JS_API@ */ z3-z3-4.13.3/doc/z3api.cfg.in000066400000000000000000003326151470205523200153630ustar00rootroot00000000000000# Doxyfile 1.8.16 # This file describes the settings to be used by the documentation system # doxygen (www.doxygen.org) for a project. # # All text after a double hash (##) is considered a comment and is placed in # front of the TAG it is preceding. 
# # All text after a single hash (#) is considered a comment and will be ignored. # The format is: # TAG = value [value, ...] # For lists, items can also be appended using: # TAG += value [value, ...] # Values that contain spaces should be placed between quotes (\" \"). #--------------------------------------------------------------------------- # Project related configuration options #--------------------------------------------------------------------------- # This tag specifies the encoding used for all characters in the configuration # file that follow. The default is UTF-8 which is also the encoding used for all # text before the first occurrence of this tag. Doxygen uses libiconv (or the # iconv built into libc) for the transcoding. See # https://www.gnu.org/software/libiconv/ for the list of possible encodings. # The default value is: UTF-8. DOXYFILE_ENCODING = UTF-8 # The PROJECT_NAME tag is a single word (or a sequence of words surrounded by # double-quotes, unless you are using Doxywizard) that should identify the # project for which the documentation is generated. This name is used in the # title of most generated pages and in a few other places. # The default value is: My Project. PROJECT_NAME = Z3 # The PROJECT_NUMBER tag can be used to enter a project or revision number. This # could be handy for archiving the generated documentation or if some version # control system is used. PROJECT_NUMBER = # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a # quick idea about the purpose of the project. Keep the description short. PROJECT_BRIEF = # With the PROJECT_LOGO tag one can specify a logo or an icon that is included # in the documentation. The maximum height of the logo should not exceed 55 # pixels and the maximum width should not exceed 200 pixels. Doxygen will copy # the logo to the output directory. PROJECT_LOGO = # The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path # into which the generated documentation will be written. If a relative path is # entered, it will be relative to the location where doxygen was started. If # left blank the current directory will be used. OUTPUT_DIRECTORY = "@OUTPUT_DIRECTORY@" # If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- # directories (in 2 levels) under the output directory of each output format and # will distribute the generated files over these directories. Enabling this # option can be useful when feeding doxygen a huge amount of source files, where # putting all generated files in the same directory would otherwise causes # performance problems for the file system. # The default value is: NO. CREATE_SUBDIRS = NO # If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII # characters to appear in the names of generated files. If set to NO, non-ASCII # characters will be escaped, for example _xE3_x81_x84 will be used for Unicode # U+3044. # The default value is: NO. ALLOW_UNICODE_NAMES = NO # The OUTPUT_LANGUAGE tag is used to specify the language in which all # documentation generated by doxygen is written. Doxygen will use this # information to generate all constant output in the proper language. 
# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese, # Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States), # Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian, # Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages), # Korean, Korean-en (Korean with English messages), Latvian, Lithuanian, # Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian, # Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish, # Ukrainian and Vietnamese. # The default value is: English. OUTPUT_LANGUAGE = English # The OUTPUT_TEXT_DIRECTION tag is used to specify the direction in which all # documentation generated by doxygen is written. Doxygen will use this # information to generate all generated output in the proper direction. # Possible values are: None, LTR, RTL and Context. # The default value is: None. OUTPUT_TEXT_DIRECTION = None # If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member # descriptions after the members that are listed in the file and class # documentation (similar to Javadoc). Set to NO to disable this. # The default value is: YES. BRIEF_MEMBER_DESC = YES # If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief # description of a member or function before the detailed description # # Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the # brief descriptions will be completely suppressed. # The default value is: YES. REPEAT_BRIEF = YES # This tag implements a quasi-intelligent brief description abbreviator that is # used to form the text in various listings. Each string in this list, if found # as the leading text of the brief description, will be stripped from the text # and the result, after processing the whole list, is used as the annotated # text. Otherwise, the brief description is used as-is. If left blank, the # following values are used ($name is automatically replaced with the name of # the entity):The $name class, The $name widget, The $name file, is, provides, # specifies, contains, represents, a, an and the. ABBREVIATE_BRIEF = "The $name class " \ "The $name widget " \ "The $name file " \ is \ provides \ specifies \ contains \ represents \ a \ an \ the # If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then # doxygen will generate a detailed section even if there is only a brief # description. # The default value is: NO. ALWAYS_DETAILED_SEC = YES # If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all # inherited members of a class in the documentation of that class as if those # members were ordinary class members. Constructors, destructors and assignment # operators of the base classes will not be shown. # The default value is: NO. INLINE_INHERITED_MEMB = NO # If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path # before files name in the file list and in the header files. If set to NO the # shortest path that makes the file name unique will be used # The default value is: YES. FULL_PATH_NAMES = YES # The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. # Stripping is only done if one of the specified strings matches the left-hand # part of the path. The tag can be used to show relative paths in the file list. # If left blank the directory from which doxygen is run is used as the path to # strip. 
# # Note that you can specify absolute paths here, but also relative paths, which # will be relative from the directory where doxygen is started. # This tag requires that the tag FULL_PATH_NAMES is set to YES. STRIP_FROM_PATH = ".." # The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the # path mentioned in the documentation of a class, which tells the reader which # header file to include in order to use a class. If left blank only the name of # the header file containing the class definition is used. Otherwise one should # specify the list of include paths that are normally passed to the compiler # using the -I flag. STRIP_FROM_INC_PATH = # If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but # less readable) file names. This can be useful is your file systems doesn't # support long names like on DOS, Mac, or CD-ROM. # The default value is: NO. SHORT_NAMES = NO # If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the # first line (until the first dot) of a Javadoc-style comment as the brief # description. If set to NO, the Javadoc-style will behave just like regular Qt- # style comments (thus requiring an explicit @brief command for a brief # description.) # The default value is: NO. JAVADOC_AUTOBRIEF = NO # If the JAVADOC_BANNER tag is set to YES then doxygen will interpret a line # such as # /*************** # as being the beginning of a Javadoc-style comment "banner". If set to NO, the # Javadoc-style will behave just like regular comments and it will not be # interpreted by doxygen. # The default value is: NO. JAVADOC_BANNER = NO # If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first # line (until the first dot) of a Qt-style comment as the brief description. If # set to NO, the Qt-style will behave just like regular Qt-style comments (thus # requiring an explicit \brief command for a brief description.) # The default value is: NO. QT_AUTOBRIEF = NO # The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a # multi-line C++ special comment block (i.e. a block of //! or /// comments) as # a brief description. This used to be the default behavior. The new default is # to treat a multi-line C++ comment block as a detailed description. Set this # tag to YES if you prefer the old behavior instead. # # Note that setting this tag to YES also means that rational rose comments are # not recognized any more. # The default value is: NO. MULTILINE_CPP_IS_BRIEF = NO # If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the # documentation from any documented member that it re-implements. # The default value is: YES. INHERIT_DOCS = YES # If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new # page for each member. If set to NO, the documentation of a member will be part # of the file/class/namespace that contains it. # The default value is: NO. SEPARATE_MEMBER_PAGES = NO # The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen # uses this value to replace tabs by spaces in code fragments. # Minimum value: 1, maximum value: 16, default value: 4. TAB_SIZE = 8 # This tag can be used to specify a number of aliases that act as commands in # the documentation. An alias has the form: # name=value # For example adding # "sideeffect=@par Side Effects:\n" # will allow you to put the command \sideeffect (or @sideeffect) in the # documentation, which will result in a user-defined paragraph with heading # "Side Effects:". 
You can put \n's in the value part of an alias to insert # newlines (in the resulting output). You can put ^^ in the value part of an # alias to insert a newline as if a physical newline was in the original file. # When you need a literal { or } or , in the value part of an alias you have to # escape them by means of a backslash (\), this can lead to conflicts with the # commands \{ and \} for these it is advised to use the version @{ and @} or use # a double escape (\\{ and \\}) ALIASES = "beginfaq=
    " \ "faq{2}=
  • \1

    \2
  • " \ "endfaq=
" \ "cmdopt{1}=\arg /\1" \ "ext{1}=.\1" \ "ty{1}=\1" \ "emph{1}=\1" \ "extdoc{2}=\2" \ "nicebox{1}=
\1
" \ "ccode{1}=\1" # This tag can be used to specify a number of word-keyword mappings (TCL only). # A mapping has the form "name=value". For example adding "class=itcl::class" # will allow you to use the command class in the itcl::class meaning. TCL_SUBST = # Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources # only. Doxygen will then generate output that is more tailored for C. For # instance, some of the names that are used will be different. The list of all # members will be omitted, etc. # The default value is: NO. OPTIMIZE_OUTPUT_FOR_C = YES # Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or # Python sources only. Doxygen will then generate output that is more tailored # for that language. For instance, namespaces will be presented as packages, # qualified scopes will look different, etc. # The default value is: NO. OPTIMIZE_OUTPUT_JAVA = NO # Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran # sources. Doxygen will then generate output that is tailored for Fortran. # The default value is: NO. OPTIMIZE_FOR_FORTRAN = NO # Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL # sources. Doxygen will then generate output that is tailored for VHDL. # The default value is: NO. OPTIMIZE_OUTPUT_VHDL = NO # Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice # sources only. Doxygen will then generate output that is more tailored for that # language. For instance, namespaces will be presented as modules, types will be # separated into more groups, etc. # The default value is: NO. OPTIMIZE_OUTPUT_SLICE = NO # Doxygen selects the parser to use depending on the extension of the files it # parses. With this tag you can assign which parser to use for a given # extension. Doxygen has a built-in mapping, but you can override or extend it # using this tag. The format is ext=language, where ext is a file extension, and # language is one of the parsers supported by doxygen: IDL, Java, Javascript, # Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice, # Fortran (fixed format Fortran: FortranFixed, free formatted Fortran: # FortranFree, unknown formatted Fortran: Fortran. In the later case the parser # tries to guess whether the code is fixed or free formatted code, this is the # default for Fortran type files), VHDL, tcl. For instance to make doxygen treat # .inc files as Fortran files (default is PHP), and .f files as C (default is # Fortran), use: inc=Fortran f=C. # # Note: For files without extension you can use no_extension as a placeholder. # # Note that for custom extensions you also need to set FILE_PATTERNS otherwise # the files are not read by doxygen. EXTENSION_MAPPING = # If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments # according to the Markdown format, which allows for more readable # documentation. See https://daringfireball.net/projects/markdown/ for details. # The output of markdown processing is further processed by doxygen, so you can # mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in # case of backward compatibilities issues. # The default value is: YES. MARKDOWN_SUPPORT = YES # When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up # to that level are automatically included in the table of contents, even if # they do not have an id attribute. # Note: This feature currently applies only to Markdown headings. # Minimum value: 0, maximum value: 99, default value: 5. 
# This tag requires that the tag MARKDOWN_SUPPORT is set to YES. TOC_INCLUDE_HEADINGS = 5 # When enabled doxygen tries to link words that correspond to documented # classes, or namespaces to their corresponding documentation. Such a link can # be prevented in individual cases by putting a % sign in front of the word or # globally by setting AUTOLINK_SUPPORT to NO. # The default value is: YES. AUTOLINK_SUPPORT = YES # If you use STL classes (i.e. std::string, std::vector, etc.) but do not want # to include (a tag file for) the STL sources as input, then you should set this # tag to YES in order to let doxygen match functions declarations and # definitions whose arguments contain STL classes (e.g. func(std::string); # versus func(std::string) {}). This also make the inheritance and collaboration # diagrams that involve STL classes more complete and accurate. # The default value is: NO. BUILTIN_STL_SUPPORT = NO # If you use Microsoft's C++/CLI language, you should set this option to YES to # enable parsing support. # The default value is: NO. CPP_CLI_SUPPORT = NO # Set the SIP_SUPPORT tag to YES if your project consists of sip (see: # https://www.riverbankcomputing.com/software/sip/intro) sources only. Doxygen # will parse them like normal C++ but will assume all classes use public instead # of private inheritance when no explicit protection keyword is present. # The default value is: NO. SIP_SUPPORT = NO # For Microsoft's IDL there are propget and propput attributes to indicate # getter and setter methods for a property. Setting this option to YES will make # doxygen to replace the get and set methods by a property in the documentation. # This will only work if the methods are indeed getting or setting a simple # type. If this is not the case, or you want to show the methods anyway, you # should set this option to NO. # The default value is: YES. IDL_PROPERTY_SUPPORT = YES # If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC # tag is set to YES then doxygen will reuse the documentation of the first # member in the group (if any) for the other members of the group. By default # all members of a group must be documented explicitly. # The default value is: NO. DISTRIBUTE_GROUP_DOC = NO # If one adds a struct or class to a group and this option is enabled, then also # any nested class or struct is added to the same group. By default this option # is disabled and one has to add nested compounds explicitly via \ingroup. # The default value is: NO. GROUP_NESTED_COMPOUNDS = NO # Set the SUBGROUPING tag to YES to allow class member groups of the same type # (for instance a group of public functions) to be put as a subgroup of that # type (e.g. under the Public Functions section). Set it to NO to prevent # subgrouping. Alternatively, this can be done per class using the # \nosubgrouping command. # The default value is: YES. SUBGROUPING = YES # When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions # are shown inside the group in which they are included (e.g. using \ingroup) # instead of on a separate page (for HTML and Man pages) or section (for LaTeX # and RTF). # # Note that this feature does not work in combination with # SEPARATE_MEMBER_PAGES. # The default value is: NO. INLINE_GROUPED_CLASSES = NO # When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions # with only public data fields or simple typedef fields will be shown inline in # the documentation of the scope in which they are defined (i.e. 
file, # namespace, or group documentation), provided this scope is documented. If set # to NO, structs, classes, and unions are shown on a separate page (for HTML and # Man pages) or section (for LaTeX and RTF). # The default value is: NO. INLINE_SIMPLE_STRUCTS = NO # When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or # enum is documented as struct, union, or enum with the name of the typedef. So # typedef struct TypeS {} TypeT, will appear in the documentation as a struct # with name TypeT. When disabled the typedef will appear as a member of a file, # namespace, or class. And the struct will be named TypeS. This can typically be # useful for C code in case the coding convention dictates that all compound # types are typedef'ed and only the typedef is referenced, never the tag name. # The default value is: NO. TYPEDEF_HIDES_STRUCT = NO # The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This # cache is used to resolve symbols given their name and scope. Since this can be # an expensive process and often the same symbol appears multiple times in the # code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small # doxygen will become slower. If the cache is too large, memory is wasted. The # cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range # is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 # symbols. At the end of a run doxygen will report the cache usage and suggest # the optimal cache size from a speed point of view. # Minimum value: 0, maximum value: 9, default value: 0. LOOKUP_CACHE_SIZE = 0 #--------------------------------------------------------------------------- # Build related configuration options #--------------------------------------------------------------------------- # If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in # documentation are documented, even if no documentation was available. Private # class members and static file members will be hidden unless the # EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. # Note: This will also disable the warnings about undocumented members that are # normally produced when WARNINGS is set to YES. # The default value is: NO. EXTRACT_ALL = YES # If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will # be included in the documentation. # The default value is: NO. EXTRACT_PRIVATE = NO # If the EXTRACT_PRIV_VIRTUAL tag is set to YES, documented private virtual # methods of a class will be included in the documentation. # The default value is: NO. EXTRACT_PRIV_VIRTUAL = NO # If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal # scope will be included in the documentation. # The default value is: NO. EXTRACT_PACKAGE = NO # If the EXTRACT_STATIC tag is set to YES, all static members of a file will be # included in the documentation. # The default value is: NO. EXTRACT_STATIC = NO # If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined # locally in source files will be included in the documentation. If set to NO, # only classes defined in header files are included. Does not have any effect # for Java sources. # The default value is: YES. EXTRACT_LOCAL_CLASSES = NO # This flag is only useful for Objective-C code. If set to YES, local methods, # which are defined in the implementation section but not in the interface are # included in the documentation. If set to NO, only methods in the interface are # included. 
# The default value is: NO. EXTRACT_LOCAL_METHODS = NO # If this flag is set to YES, the members of anonymous namespaces will be # extracted and appear in the documentation as a namespace called # 'anonymous_namespace{file}', where file will be replaced with the base name of # the file that contains the anonymous namespace. By default anonymous namespace # are hidden. # The default value is: NO. EXTRACT_ANON_NSPACES = NO # If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all # undocumented members inside documented classes or files. If set to NO these # members will be included in the various overviews, but no documentation # section is generated. This option has no effect if EXTRACT_ALL is enabled. # The default value is: NO. HIDE_UNDOC_MEMBERS = NO # If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all # undocumented classes that are normally visible in the class hierarchy. If set # to NO, these classes will be included in the various overviews. This option # has no effect if EXTRACT_ALL is enabled. # The default value is: NO. HIDE_UNDOC_CLASSES = NO # If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend # (class|struct|union) declarations. If set to NO, these declarations will be # included in the documentation. # The default value is: NO. HIDE_FRIEND_COMPOUNDS = NO # If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any # documentation blocks found inside the body of a function. If set to NO, these # blocks will be appended to the function's detailed documentation block. # The default value is: NO. HIDE_IN_BODY_DOCS = NO # The INTERNAL_DOCS tag determines if documentation that is typed after a # \internal command is included. If the tag is set to NO then the documentation # will be excluded. Set it to YES to include the internal documentation. # The default value is: NO. INTERNAL_DOCS = YES # If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file # names in lower-case letters. If set to YES, upper-case letters are also # allowed. This is useful if you have classes or files whose names only differ # in case and if your file system supports case sensitive file names. Windows # (including Cygwin) ands Mac users are advised to set this option to NO. # The default value is: system dependent. CASE_SENSE_NAMES = NO # If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with # their full class and namespace scopes in the documentation. If set to YES, the # scope will be hidden. # The default value is: NO. HIDE_SCOPE_NAMES = YES # If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will # append additional text to a page's title, such as Class Reference. If set to # YES the compound reference will be hidden. # The default value is: NO. HIDE_COMPOUND_REFERENCE= NO # If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of # the files that are included by a file in the documentation of that file. # The default value is: YES. SHOW_INCLUDE_FILES = NO # If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each # grouped member an include statement to the documentation, telling the reader # which file to include in order to use the member. # The default value is: NO. SHOW_GROUPED_MEMB_INC = NO # If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include # files with double quotes in the documentation rather than with sharp brackets. # The default value is: NO. 
FORCE_LOCAL_INCLUDES = NO # If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the # documentation for inline members. # The default value is: YES. INLINE_INFO = YES # If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the # (detailed) documentation of file and class members alphabetically by member # name. If set to NO, the members will appear in declaration order. # The default value is: YES. SORT_MEMBER_DOCS = YES # If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief # descriptions of file, namespace and class members alphabetically by member # name. If set to NO, the members will appear in declaration order. Note that # this will also influence the order of the classes in the class list. # The default value is: NO. SORT_BRIEF_DOCS = NO # If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the # (brief and detailed) documentation of class members so that constructors and # destructors are listed first. If set to NO the constructors will appear in the # respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. # Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief # member documentation. # Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting # detailed member documentation. # The default value is: NO. SORT_MEMBERS_CTORS_1ST = NO # If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy # of group names into alphabetical order. If set to NO the group names will # appear in their defined order. # The default value is: NO. SORT_GROUP_NAMES = NO # If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by # fully-qualified names, including namespaces. If set to NO, the class list will # be sorted only by class name, not including the namespace part. # Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. # Note: This option applies only to the class list, not to the alphabetical # list. # The default value is: NO. SORT_BY_SCOPE_NAME = NO # If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper # type resolution of all parameters of a function it will reject a match between # the prototype and the implementation of a member function even if there is # only one candidate or it is obvious which candidate to choose by doing a # simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still # accept a match between prototype and implementation in such cases. # The default value is: NO. STRICT_PROTO_MATCHING = NO # The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo # list. This list is created by putting \todo commands in the documentation. # The default value is: YES. GENERATE_TODOLIST = NO # The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test # list. This list is created by putting \test commands in the documentation. # The default value is: YES. GENERATE_TESTLIST = NO # The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug # list. This list is created by putting \bug commands in the documentation. # The default value is: YES. GENERATE_BUGLIST = NO # The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO) # the deprecated list. This list is created by putting \deprecated commands in # the documentation. # The default value is: YES. GENERATE_DEPRECATEDLIST= YES # The ENABLED_SECTIONS tag can be used to enable conditional documentation # sections, marked by \if ... 
\endif and \cond # ... \endcond blocks. ENABLED_SECTIONS = # The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the # initial value of a variable or macro / define can have for it to appear in the # documentation. If the initializer consists of more lines than specified here # it will be hidden. Use a value of 0 to hide initializers completely. The # appearance of the value of individual variables and macros / defines can be # controlled using \showinitializer or \hideinitializer command in the # documentation regardless of this setting. # Minimum value: 0, maximum value: 10000, default value: 30. MAX_INITIALIZER_LINES = 30 # Set the SHOW_USED_FILES tag to NO to disable the list of files generated at # the bottom of the documentation of classes and structs. If set to YES, the # list will mention the files that were used to generate the documentation. # The default value is: YES. SHOW_USED_FILES = NO # Set the SHOW_FILES tag to NO to disable the generation of the Files page. This # will remove the Files entry from the Quick Index and from the Folder Tree View # (if specified). # The default value is: YES. SHOW_FILES = YES # Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces # page. This will remove the Namespaces entry from the Quick Index and from the # Folder Tree View (if specified). # The default value is: YES. SHOW_NAMESPACES = YES # The FILE_VERSION_FILTER tag can be used to specify a program or script that # doxygen should invoke to get the current version for each file (typically from # the version control system). Doxygen will invoke the program by executing (via # popen()) the command command input-file, where command is the value of the # FILE_VERSION_FILTER tag, and input-file is the name of an input file provided # by doxygen. Whatever the program writes to standard output is used as the file # version. For an example see the documentation. FILE_VERSION_FILTER = # The LAYOUT_FILE tag can be used to specify a layout file which will be parsed # by doxygen. The layout file controls the global structure of the generated # output files in an output format independent way. To create the layout file # that represents doxygen's defaults, run doxygen with the -l option. You can # optionally specify a file name after the option, if omitted DoxygenLayout.xml # will be used as the name of the layout file. # # Note that if you run doxygen from a directory containing a file called # DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE # tag is left empty. LAYOUT_FILE = # The CITE_BIB_FILES tag can be used to specify one or more bib files containing # the reference definitions. This must be a list of .bib files. The .bib # extension is automatically appended if omitted. This requires the bibtex tool # to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info. # For LaTeX the style of the bibliography can be controlled using # LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the # search path. See also \cite for info how to create references. CITE_BIB_FILES = #--------------------------------------------------------------------------- # Configuration options related to warning and progress messages #--------------------------------------------------------------------------- # The QUIET tag can be used to turn on/off the messages that are generated to # standard output by doxygen. If QUIET is set to YES this implies that the # messages are off. # The default value is: NO. 
QUIET = NO # The WARNINGS tag can be used to turn on/off the warning messages that are # generated to standard error (stderr) by doxygen. If WARNINGS is set to YES # this implies that the warnings are on. # # Tip: Turn warnings on while writing the documentation. # The default value is: YES. WARNINGS = YES # If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate # warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag # will automatically be disabled. # The default value is: YES. WARN_IF_UNDOCUMENTED = NO # If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for # potential errors in the documentation, such as not documenting some parameters # in a documented function, or documenting parameters that don't exist or using # markup commands wrongly. # The default value is: YES. WARN_IF_DOC_ERROR = YES # This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that # are documented, but have no documentation for their parameters or return # value. If set to NO, doxygen will only warn about wrong or incomplete # parameter documentation, but not about the absence of documentation. If # EXTRACT_ALL is set to YES then this flag will automatically be disabled. # The default value is: NO. WARN_NO_PARAMDOC = NO # If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when # a warning is encountered. # The default value is: NO. WARN_AS_ERROR = NO # The WARN_FORMAT tag determines the format of the warning messages that doxygen # can produce. The string should contain the $file, $line, and $text tags, which # will be replaced by the file and line number from which the warning originated # and the warning text. Optionally the format may contain $version, which will # be replaced by the version of the file (if it could be obtained via # FILE_VERSION_FILTER) # The default value is: $file:$line: $text. WARN_FORMAT = "$file:$line: $text " # The WARN_LOGFILE tag can be used to specify a file to which warning and error # messages should be written. If left blank the output is written to standard # error (stderr). WARN_LOGFILE = #--------------------------------------------------------------------------- # Configuration options related to the input files #--------------------------------------------------------------------------- # The INPUT tag is used to specify the files and/or directories that contain # documented source files. You may enter file names like myfile.cpp or # directories like /usr/src/myproject. Separate the files or directories with # spaces. See also FILE_PATTERNS and EXTENSION_MAPPING # Note: If this tag is empty the current directory is searched. INPUT = "@TEMP_DIR@" \ "@CXX_API_SEARCH_PATHS@" \ @DOTNET_API_SEARCH_PATHS@ \ @JAVA_API_SEARCH_PATHS@ # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses # libiconv (or the iconv built into libc) for the transcoding. See the libiconv # documentation (see: https://www.gnu.org/software/libiconv/) for the list of # possible encodings. # The default value is: UTF-8. INPUT_ENCODING = UTF-8 # If the value of the INPUT tag contains directories, you can use the # FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and # *.h) to filter out the source-files in the directories. 
# # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # read by doxygen. # # If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp, # *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, # *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, # *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, # *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, *.qsf and *.ice. FILE_PATTERNS = website.dox \ z3_api.h \ z3_algebraic.h \ z3_ast_containers.h \ z3_fixedpoint.h \ z3_fpa.h \ z3_interp.h \ z3_optimization.h \ z3_polynomial.h \ z3_rcf.h \ z3++.h \ @PYTHON_API_FILES@ \ @DOTNET_API_FILES@ \ @JAVA_API_FILES@ # The RECURSIVE tag can be used to specify whether or not subdirectories should # be searched for input files as well. # The default value is: NO. RECURSIVE = YES # The EXCLUDE tag can be used to specify files and/or directories that should be # excluded from the INPUT source files. This way you can easily exclude a # subdirectory from a directory tree whose root is specified with the INPUT tag. # # Note that relative paths are relative to the directory from which doxygen is # run. EXCLUDE = # The EXCLUDE_SYMLINKS tag can be used to select whether or not files or # directories that are symbolic links (a Unix file system feature) are excluded # from the input. # The default value is: NO. EXCLUDE_SYMLINKS = NO # If the value of the INPUT tag contains directories, you can use the # EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude # certain files from those directories. # # Note that the wildcards are matched against the file with absolute path, so to # exclude all test directories for example use the pattern */test/* EXCLUDE_PATTERNS = # The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names # (namespaces, classes, functions, etc.) that should be excluded from the # output. The symbol name can be a fully qualified name, a word, or if the # wildcard * is used, a substring. Examples: ANamespace, AClass, # AClass::ANamespace, ANamespace::*Test # # Note that the wildcards are matched against the file with absolute path, so to # exclude all test directories use the pattern */test/* EXCLUDE_SYMBOLS = # The EXAMPLE_PATH tag can be used to specify one or more files or directories # that contain example code fragments that are included (see the \include # command). EXAMPLE_PATH = # If the value of the EXAMPLE_PATH tag contains directories, you can use the # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and # *.h) to filter out the source-files in the directories. If left blank all # files are included. EXAMPLE_PATTERNS = * # If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be # searched for input files to be used with the \include or \dontinclude commands # irrespective of the value of the RECURSIVE tag. # The default value is: NO. EXAMPLE_RECURSIVE = NO # The IMAGE_PATH tag can be used to specify one or more files or directories # that contain images that are to be included in the documentation (see the # \image command). IMAGE_PATH = # The INPUT_FILTER tag can be used to specify a program that doxygen should # invoke to filter for each input file. Doxygen will invoke the filter program # by executing (via popen()) the command: # # # # where is the value of the INPUT_FILTER tag, and is the # name of an input file. 
Doxygen will then use the output that the filter # program writes to standard output. If FILTER_PATTERNS is specified, this tag # will be ignored. # # Note that the filter must not add or remove lines; it is applied before the # code is scanned, but not when the output code is generated. If lines are added # or removed, the anchors will not be placed correctly. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # properly processed by doxygen. INPUT_FILTER = # The FILTER_PATTERNS tag can be used to specify filters on a per file pattern # basis. Doxygen will compare the file name with each pattern and apply the # filter if there is a match. The filters are a list of the form: pattern=filter # (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how # filters are used. If the FILTER_PATTERNS tag is empty or if none of the # patterns match the file name, INPUT_FILTER is applied. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # properly processed by doxygen. FILTER_PATTERNS = # If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using # INPUT_FILTER) will also be used to filter the input files that are used for # producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES). # The default value is: NO. FILTER_SOURCE_FILES = NO # The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file # pattern. A pattern will override the setting for FILTER_PATTERN (if any) and # it is also possible to disable source filtering for a specific pattern using # *.ext= (so without naming a filter). # This tag requires that the tag FILTER_SOURCE_FILES is set to YES. FILTER_SOURCE_PATTERNS = # If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that # is part of the input, its contents will be placed on the main page # (index.html). This can be useful if you have a project on for instance GitHub # and want to reuse the introduction page also for the doxygen output. USE_MDFILE_AS_MAINPAGE = #--------------------------------------------------------------------------- # Configuration options related to source browsing #--------------------------------------------------------------------------- # If the SOURCE_BROWSER tag is set to YES then a list of source files will be # generated. Documented entities will be cross-referenced with these sources. # # Note: To get rid of all source code in the generated output, make sure that # also VERBATIM_HEADERS is set to NO. # The default value is: NO. SOURCE_BROWSER = YES # Setting the INLINE_SOURCES tag to YES will include the body of functions, # classes and enums directly into the documentation. # The default value is: NO. INLINE_SOURCES = YES # Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any # special comment blocks from generated source code fragments. Normal C, C++ and # Fortran comments will always remain visible. # The default value is: YES. STRIP_CODE_COMMENTS = YES # If the REFERENCED_BY_RELATION tag is set to YES then for each documented # entity all documented functions referencing it will be listed. # The default value is: NO. REFERENCED_BY_RELATION = YES # If the REFERENCES_RELATION tag is set to YES then for each documented function # all documented entities called/used by that function will be listed. 
# The default value is: NO. REFERENCES_RELATION = NO # If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set # to YES then the hyperlinks from functions in REFERENCES_RELATION and # REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will # link to the documentation. # The default value is: YES. REFERENCES_LINK_SOURCE = YES # If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the # source code will show a tooltip with additional information such as prototype, # brief description and links to the definition and documentation. Since this # will make the HTML file larger and loading of large files a bit slower, you # can opt to disable this feature. # The default value is: YES. # This tag requires that the tag SOURCE_BROWSER is set to YES. SOURCE_TOOLTIPS = YES # If the USE_HTAGS tag is set to YES then the references to source code will # point to the HTML generated by the htags(1) tool instead of doxygen built-in # source browser. The htags tool is part of GNU's global source tagging system # (see https://www.gnu.org/software/global/global.html). You will need version # 4.8.6 or higher. # # To use it do the following: # - Install the latest version of global # - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file # - Make sure the INPUT points to the root of the source tree # - Run doxygen as normal # # Doxygen will invoke htags (and that will in turn invoke gtags), so these # tools must be available from the command line (i.e. in the search path). # # The result: instead of the source browser generated by doxygen, the links to # source code will now point to the output of htags. # The default value is: NO. # This tag requires that the tag SOURCE_BROWSER is set to YES. USE_HTAGS = NO # If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a # verbatim copy of the header file for each class for which an include is # specified. Set to NO to disable this. # See also: Section \class. # The default value is: YES. VERBATIM_HEADERS = NO # If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the # clang parser (see: http://clang.llvm.org/) for more accurate parsing at the # cost of reduced performance. This can be particularly helpful with template # rich C++ code for which doxygen's built-in parser lacks the necessary type # information. # Note: The availability of this option depends on whether or not doxygen was # generated with the -Duse_libclang=ON option for CMake. # The default value is: NO. CLANG_ASSISTED_PARSING = NO # If clang assisted parsing is enabled you can provide the compiler with command # line options that you would normally use when invoking the compiler. Note that # the include paths will already be set by doxygen for the files and directories # specified with INPUT and INCLUDE_PATH. # This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. CLANG_OPTIONS = # If clang assisted parsing is enabled you can provide the clang parser with the # path to the compilation database (see: # http://clang.llvm.org/docs/HowToSetupToolingForLLVM.html) used when the files # were built. This is equivalent to specifying the "-p" option to a clang tool, # such as clang-check. These options will then be passed to the parser. # Note: The availability of this option depends on whether or not doxygen was # generated with the -Duse_libclang=ON option for CMake. 
CLANG_DATABASE_PATH = #--------------------------------------------------------------------------- # Configuration options related to the alphabetical class index #--------------------------------------------------------------------------- # If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all # compounds will be generated. Enable this if the project contains a lot of # classes, structs, unions or interfaces. # The default value is: YES. ALPHABETICAL_INDEX = NO # The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in # which the alphabetical index list will be split. # Minimum value: 1, maximum value: 20, default value: 5. # This tag requires that the tag ALPHABETICAL_INDEX is set to YES. COLS_IN_ALPHA_INDEX = 5 # In case all classes in a project start with a common prefix, all classes will # be put under the same header in the alphabetical index. The IGNORE_PREFIX tag # can be used to specify a prefix (or a list of prefixes) that should be ignored # while generating the index headers. # This tag requires that the tag ALPHABETICAL_INDEX is set to YES. IGNORE_PREFIX = #--------------------------------------------------------------------------- # Configuration options related to the HTML output #--------------------------------------------------------------------------- # If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output # The default value is: YES. GENERATE_HTML = YES # The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a # relative path is entered the value of OUTPUT_DIRECTORY will be put in front of # it. # The default directory is: html. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_OUTPUT = html # The HTML_FILE_EXTENSION tag can be used to specify the file extension for each # generated HTML page (for example: .htm, .php, .asp). # The default value is: .html. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_FILE_EXTENSION = .html # The HTML_HEADER tag can be used to specify a user-defined HTML header file for # each generated HTML page. If the tag is left blank doxygen will generate a # standard header. # # To get valid HTML the header file that includes any scripts and style sheets # that doxygen needs, which is dependent on the configuration options used (e.g. # the setting GENERATE_TREEVIEW). It is highly recommended to start with a # default header using # doxygen -w html new_header.html new_footer.html new_stylesheet.css # YourConfigFile # and then modify the file new_header.html. See also section "Doxygen usage" # for information on how to generate the default header that doxygen normally # uses. # Note: The header is subject to change so you typically have to regenerate the # default header when upgrading to a newer version of doxygen. For a description # of the possible markers and block names see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_HEADER = # The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each # generated HTML page. If the tag is left blank doxygen will generate a standard # footer. See HTML_HEADER for more information on how to generate a default # footer and what special commands can be used inside the footer. See also # section "Doxygen usage" for information on how to generate the default footer # that doxygen normally uses. # This tag requires that the tag GENERATE_HTML is set to YES. 
HTML_FOOTER = # The HTML_STYLESHEET tag can be used to specify a user-defined cascading style # sheet that is used by each HTML page. It can be used to fine-tune the look of # the HTML output. If left blank doxygen will generate a default style sheet. # See also section "Doxygen usage" for information on how to generate the style # sheet that doxygen normally uses. # Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as # it is more robust and this tag (HTML_STYLESHEET) will in the future become # obsolete. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_STYLESHEET = # The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined # cascading style sheets that are included after the standard style sheets # created by doxygen. Using this option one can overrule certain style aspects. # This is preferred over using HTML_STYLESHEET since it does not replace the # standard style sheet and is therefore more robust against future updates. # Doxygen will copy the style sheet files to the output directory. # Note: The order of the extra style sheet files is of importance (e.g. the last # style sheet in the list overrules the setting of the previous ones in the # list). For an example see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_EXTRA_STYLESHEET = # The HTML_EXTRA_FILES tag can be used to specify one or more extra images or # other source files which should be copied to the HTML output directory. Note # that these files will be copied to the base HTML output directory. Use the # $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these # files. In the HTML_STYLESHEET file, use the file name only. Also note that the # files will be copied as-is; there are no commands or markers available. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_EXTRA_FILES = # The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen # will adjust the colors in the style sheet and background images according to # this color. Hue is specified as an angle on a colorwheel, see # https://en.wikipedia.org/wiki/Hue for more information. For instance the value # 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 # purple, and 360 is red again. # Minimum value: 0, maximum value: 359, default value: 220. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_HUE = 220 # The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors # in the HTML output. For a value of 0 the output will use grayscales only. A # value of 255 will produce the most vivid colors. # Minimum value: 0, maximum value: 255, default value: 100. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_SAT = 100 # The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the # luminance component of the colors in the HTML output. Values below 100 # gradually make the output lighter, whereas values above 100 make the output # darker. The value divided by 100 is the actual gamma applied, so 80 represents # a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not # change the gamma. # Minimum value: 40, maximum value: 240, default value: 80. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_GAMMA = 80 # If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML # page will contain the date and time when the page was generated. 
Setting this # to YES can help to show when doxygen was last run and thus if the # documentation is up to date. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_TIMESTAMP = YES # If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML # documentation will contain a main index with vertical navigation menus that # are dynamically created via Javascript. If disabled, the navigation index will # consists of multiple levels of tabs that are statically embedded in every HTML # page. Disable this option to support browsers that do not have Javascript, # like the Qt help browser. # The default value is: YES. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_DYNAMIC_MENUS = YES # If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML # documentation will contain sections that can be hidden and shown after the # page has loaded. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_DYNAMIC_SECTIONS = YES # With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries # shown in the various tree structured indices initially; the user can expand # and collapse entries dynamically later on. Doxygen will expand the tree to # such a level that at most the specified number of entries are visible (unless # a fully collapsed tree already exceeds this amount). So setting the number of # entries 1 will produce a full collapsed tree by default. 0 is a special value # representing an infinite number of entries and will result in a full expanded # tree by default. # Minimum value: 0, maximum value: 9999, default value: 100. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_INDEX_NUM_ENTRIES = 100 # If the GENERATE_DOCSET tag is set to YES, additional index files will be # generated that can be used as input for Apple's Xcode 3 integrated development # environment (see: https://developer.apple.com/xcode/), introduced with OSX # 10.5 (Leopard). To create a documentation set, doxygen will generate a # Makefile in the HTML output directory. Running make will produce the docset in # that directory and running make install will install the docset in # ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at # startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy # genXcode/_index.html for more information. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_DOCSET = NO # This tag determines the name of the docset feed. A documentation feed provides # an umbrella under which multiple documentation sets from a single provider # (such as a company or product suite) can be grouped. # The default value is: Doxygen generated docs. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_FEEDNAME = "Doxygen generated docs" # This tag specifies a string that should uniquely identify the documentation # set bundle. This should be a reverse domain-name style string, e.g. # com.mycompany.MyDocSet. Doxygen will append .docset to the name. # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_BUNDLE_ID = org.doxygen.Project # The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify # the documentation publisher. This should be a reverse domain-name style # string, e.g. com.mycompany.MyDocSet.documentation. # The default value is: org.doxygen.Publisher. 
# This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_PUBLISHER_ID = org.doxygen.Publisher # The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. # The default value is: Publisher. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_PUBLISHER_NAME = Publisher # If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three # additional HTML index files: index.hhp, index.hhc, and index.hhk. The # index.hhp is a project file that can be read by Microsoft's HTML Help Workshop # (see: https://www.microsoft.com/en-us/download/details.aspx?id=21138) on # Windows. # # The HTML Help Workshop contains a compiler that can convert all HTML output # generated by doxygen into a single compiled HTML file (.chm). Compiled HTML # files are now used as the Windows 98 help format, and will replace the old # Windows help format (.hlp) on all Windows platforms in the future. Compressed # HTML files also contain an index, a table of contents, and you can search for # words in the documentation. The HTML workshop also contains a viewer for # compressed HTML files. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_HTMLHELP = NO # The CHM_FILE tag can be used to specify the file name of the resulting .chm # file. You can add a path in front of the file if the result should not be # written to the html output directory. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. CHM_FILE = # The HHC_LOCATION tag can be used to specify the location (absolute path # including file name) of the HTML help compiler (hhc.exe). If non-empty, # doxygen will try to run the HTML help compiler on the generated index.hhp. # The file has to be specified with full path. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. HHC_LOCATION = # The GENERATE_CHI flag controls if a separate .chi index file is generated # (YES) or that it should be included in the master .chm file (NO). # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. GENERATE_CHI = NO # The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc) # and project file content. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. CHM_INDEX_ENCODING = # The BINARY_TOC flag controls whether a binary table of contents is generated # (YES) or a normal table of contents (NO) in the .chm file. Furthermore it # enables the Previous and Next buttons. # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. BINARY_TOC = NO # The TOC_EXPAND flag can be set to YES to add extra items for group members to # the table of contents of the HTML help documentation and to the tree view. # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. TOC_EXPAND = NO # If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and # QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that # can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help # (.qch) of the generated HTML documentation. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_QHP = NO # If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify # the file name of the resulting .qch file. The path specified is relative to # the HTML output folder. # This tag requires that the tag GENERATE_QHP is set to YES. 
QCH_FILE = # The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help # Project output. For more information please see Qt Help Project / Namespace # (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace). # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_QHP is set to YES. QHP_NAMESPACE = org.doxygen.Project # The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt # Help Project output. For more information please see Qt Help Project / Virtual # Folders (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual- # folders). # The default value is: doc. # This tag requires that the tag GENERATE_QHP is set to YES. QHP_VIRTUAL_FOLDER = doc # If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom # filter to add. For more information please see Qt Help Project / Custom # Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- # filters). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_NAME = # The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the # custom filter to add. For more information please see Qt Help Project / Custom # Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- # filters). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_ATTRS = # The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this # project's filter section matches. Qt Help Project / Filter Attributes (see: # https://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_SECT_FILTER_ATTRS = # The QHG_LOCATION tag can be used to specify the location of Qt's # qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the # generated .qhp file. # This tag requires that the tag GENERATE_QHP is set to YES. QHG_LOCATION = # If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be # generated, together with the HTML files, they form an Eclipse help plugin. To # install this plugin and make it available under the help contents menu in # Eclipse, the contents of the directory containing the HTML and XML files needs # to be copied into the plugins directory of eclipse. The name of the directory # within the plugins directory should be the same as the ECLIPSE_DOC_ID value. # After copying Eclipse needs to be restarted before the help appears. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_ECLIPSEHELP = NO # A unique identifier for the Eclipse help plugin. When installing the plugin # the directory name containing the HTML and XML files should also have this # name. Each documentation set should have its own identifier. # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. ECLIPSE_DOC_ID = org.doxygen.Project # If you want full control over the layout of the generated HTML pages it might # be necessary to disable the index and replace it with your own. The # DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top # of each HTML page. A value of NO enables the index and the value YES disables # it. Since the tabs in the index contain the same information as the navigation # tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. 
DISABLE_INDEX = YES # The GENERATE_TREEVIEW tag is used to specify whether a tree-like index # structure should be generated to display hierarchical information. If the tag # value is set to YES, a side panel will be generated containing a tree-like # index structure (just like the one that is generated for HTML Help). For this # to work a browser that supports JavaScript, DHTML, CSS and frames is required # (i.e. any modern browser). Windows users are probably better off using the # HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can # further fine-tune the look of the index. As an example, the default style # sheet generated by doxygen has an example that shows how to put an image at # the root of the tree instead of the PROJECT_NAME. Since the tree basically has # the same information as the tab index, you could consider setting # DISABLE_INDEX to YES when enabling this option. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_TREEVIEW = NO # The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that # doxygen will group on one line in the generated HTML documentation. # # Note that a value of 0 will completely suppress the enum values from appearing # in the overview section. # Minimum value: 0, maximum value: 20, default value: 4. # This tag requires that the tag GENERATE_HTML is set to YES. ENUM_VALUES_PER_LINE = 4 # If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used # to set the initial width (in pixels) of the frame in which the tree is shown. # Minimum value: 0, maximum value: 1500, default value: 250. # This tag requires that the tag GENERATE_HTML is set to YES. TREEVIEW_WIDTH = 250 # If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to # external symbols imported via tag files in a separate window. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. EXT_LINKS_IN_WINDOW = NO # Use this tag to change the font size of LaTeX formulas included as images in # the HTML documentation. When you change the font size after a successful # doxygen run you need to manually remove any form_*.png images from the HTML # output directory to force them to be regenerated. # Minimum value: 8, maximum value: 50, default value: 10. # This tag requires that the tag GENERATE_HTML is set to YES. FORMULA_FONTSIZE = 10 # Use the FORMULA_TRANSPARENT tag to determine whether or not the images # generated for formulas are transparent PNGs. Transparent PNGs are not # supported properly for IE 6.0, but are supported on all modern browsers. # # Note that when changing this option you need to delete any form_*.png files in # the HTML output directory before the changes have effect. # The default value is: YES. # This tag requires that the tag GENERATE_HTML is set to YES. FORMULA_TRANSPARENT = YES # Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see # https://www.mathjax.org) which uses client side Javascript for the rendering # instead of using pre-rendered bitmaps. Use this if you do not have LaTeX # installed or if you want to formulas look prettier in the HTML output. When # enabled you may also need to install MathJax separately and configure the path # to it using the MATHJAX_RELPATH option. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. USE_MATHJAX = NO # When MathJax is enabled you can set the default output format to be used for # the MathJax output. 
See the MathJax site (see: # http://docs.mathjax.org/en/latest/output.html) for more details. # Possible values are: HTML-CSS (which is slower, but has the best # compatibility), NativeMML (i.e. MathML) and SVG. # The default value is: HTML-CSS. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_FORMAT = HTML-CSS # When MathJax is enabled you need to specify the location relative to the HTML # output directory using the MATHJAX_RELPATH option. The destination directory # should contain the MathJax.js script. For instance, if the mathjax directory # is located at the same level as the HTML output directory, then # MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax # Content Delivery Network so you can quickly see the result without installing # MathJax. However, it is strongly recommended to install a local copy of # MathJax from https://www.mathjax.org before deployment. # The default value is: https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest # The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax # extension names that should be enabled during MathJax rendering. For example # MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_EXTENSIONS = # The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces # of code that will be used on startup of the MathJax code. See the MathJax site # (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an # example see the documentation. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_CODEFILE = # When the SEARCHENGINE tag is enabled doxygen will generate a search box for # the HTML output. The underlying search engine uses javascript and DHTML and # should work on any modern browser. Note that when using HTML help # (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET) # there is already a search function so this one should typically be disabled. # For large projects the javascript based search engine can be slow, then # enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to # search using the keyboard; to jump to the search box use + S # (what the is depends on the OS and browser, but it is typically # , /