Import Upstream version 4.8.12

luoyaoming 2024-05-06 21:59:40 +08:00
parent 9f66ac52d6
commit 26726873ba
1894 changed files with 123637 additions and 59345 deletions

5
.gitignore vendored
View File

@@ -1,4 +1,5 @@
*~
rebase.cmd
*.pyc
*.pyo
# Ignore callgrind files
@@ -23,6 +24,7 @@ release/*
build/*
build-dist/*
dist/*
src/out/*
doc/html/*
# GTAGS generated files
src/GPATH
@@ -73,9 +75,12 @@ src/api/ml/z3enums.ml
src/api/ml/z3native.mli
src/api/ml/z3enums.mli
src/api/ml/z3.mllib
out/**
*.bak
doc/api
doc/code
.vs
examples/**/obj
CMakeSettings.json
# Editor temp files
*.swp

View File

@@ -1,92 +0,0 @@
cache:
# This persistent cache is used to cache the building of
# docker base images.
directories:
- $DOCKER_TRAVIS_CI_CACHE_DIR
sudo: required
language: cpp
services:
- docker
env:
global:
# This environment variable tells the `travis_ci_linux_entry_point.sh`
# script to look for a cached Docker image.
- DOCKER_TRAVIS_CI_CACHE_DIR=$HOME/.cache/docker
# Configurations
matrix:
###############################################################################
# Ubuntu 16.04 LTS
###############################################################################
# 64-bit UBSan Debug build
- LINUX_BASE=ubuntu_16.04 C_COMPILER=/usr/bin/clang-3.9 CXX_COMPILER=/usr/bin/clang++-3.9 TARGET_ARCH=x86_64 Z3_BUILD_TYPE=Debug UBSAN_BUILD=1 RUN_UNIT_TESTS=SKIP
# 64-bit ASan Debug build
- LINUX_BASE=ubuntu_16.04 C_COMPILER=/usr/bin/clang-3.9 CXX_COMPILER=/usr/bin/clang++-3.9 TARGET_ARCH=x86_64 Z3_BUILD_TYPE=Debug ASAN_BUILD=1 RUN_UNIT_TESTS=SKIP ASAN_DSO=/usr/lib/clang/3.9/lib/linux/libclang_rt.asan-x86_64.so
# Build for running unit tests under ASan/UBSan
# FIXME: We should really be doing a debug build but the unit tests run too
# slowly when we do that.
- LINUX_BASE=ubuntu_16.04 C_COMPILER=/usr/bin/clang-3.9 CXX_COMPILER=/usr/bin/clang++-3.9 TARGET_ARCH=x86_64 Z3_BUILD_TYPE=RelWithDebInfo ASAN_BUILD=1 RUN_UNIT_TESTS=BUILD_AND_RUN ASAN_DSO=/usr/lib/clang/3.9/lib/linux/libclang_rt.asan-x86_64.so UBSAN_BUILD=1 RUN_API_EXAMPLES=0 RUN_SYSTEM_TESTS=0 DOTNET_BINDINGS=0 JAVA_BINDINGS=0 PYTHON_BINDINGS=0
# 64-bit GCC 5.4 RelWithDebInfo
- LINUX_BASE=ubuntu_16.04 C_COMPILER=/usr/bin/gcc-5 CXX_COMPILER=/usr/bin/g++-5 TARGET_ARCH=x86_64 Z3_BUILD_TYPE=RelWithDebInfo
# 64-bit Clang 3.9 RelWithDebInfo
- LINUX_BASE=ubuntu_16.04 C_COMPILER=/usr/bin/clang-3.9 CXX_COMPILER=/usr/bin/clang++-3.9 TARGET_ARCH=x86_64 Z3_BUILD_TYPE=RelWithDebInfo
# Debug builds
#
# Note the unit tests for the debug builds are compiled but **not**
# executed. This is because the debug build of unit tests takes a large
# amount of time to execute compared to the optimized builds. The hope is
# that just running the optimized unit tests is sufficient.
#
# 64-bit GCC 5.4 Debug
- LINUX_BASE=ubuntu_16.04 C_COMPILER=/usr/bin/gcc-5 CXX_COMPILER=/usr/bin/g++-5 TARGET_ARCH=x86_64 Z3_BUILD_TYPE=Debug RUN_UNIT_TESTS=BUILD_ONLY
# 64-bit Clang Debug
- LINUX_BASE=ubuntu_16.04 C_COMPILER=/usr/bin/clang-3.9 CXX_COMPILER=/usr/bin/clang++-3.9 TARGET_ARCH=x86_64 Z3_BUILD_TYPE=Debug RUN_UNIT_TESTS=BUILD_ONLY
# 32-bit GCC 5.4 RelWithDebInfo
- LINUX_BASE=ubuntu32_16.04 C_COMPILER=/usr/bin/gcc-5 CXX_COMPILER=/usr/bin/g++-5 TARGET_ARCH=i686 Z3_BUILD_TYPE=RelWithDebInfo
# Both of the two configurations below build the docs because the current
# implementation uses python as part of the building process.
# TODO: Teach one of the configurations to upload built docs somewhere.
# Test with Python 3 and API docs
- LINUX_BASE=ubuntu_16.04 PYTHON_EXECUTABLE=/usr/bin/python3 BUILD_DOCS=1
# Test with LibGMP and API docs
- LINUX_BASE=ubuntu_16.04 TARGET_ARCH=x86_64 USE_LIBGMP=1 BUILD_DOCS=1 PYTHON_EXECUTABLE=/usr/bin/python2.7
# Unix Makefile generator build
- LINUX_BASE=ubuntu_16.04 C_COMPILER=/usr/bin/gcc-5 CXX_COMPILER=/usr/bin/g++-5 TARGET_ARCH=x86_64 Z3_CMAKE_GENERATOR="Unix Makefiles"
# LTO build
- LINUX_BASE=ubuntu_16.04 C_COMPILER=/usr/bin/gcc-5 CXX_COMPILER=/usr/bin/g++-5 TARGET_ARCH=x86_64 USE_LTO=1
# Static build. Note we have disable building the bindings because they won't work with a static libz3
- LINUX_BASE=ubuntu_16.04 C_COMPILER=/usr/bin/gcc-5 CXX_COMPILER=/usr/bin/g++-5 TARGET_ARCH=x86_64 Z3_STATIC_BUILD=1 DOTNET_BINDINGS=0 JAVA_BINDINGS=0 PYTHON_BINDINGS=0
###############################################################################
# Ubuntu 14.04 LTS
###############################################################################
# GCC 4.8
# 64-bit GCC 4.8 RelWithDebInfo
- LINUX_BASE=ubuntu_14.04 C_COMPILER=/usr/bin/gcc-4.8 CXX_COMPILER=/usr/bin/g++-4.8 TARGET_ARCH=x86_64 Z3_BUILD_TYPE=RelWithDebInfo
# 64-bit GCC 4.8 Debug
- LINUX_BASE=ubuntu_14.04 C_COMPILER=/usr/bin/gcc-4.8 CXX_COMPILER=/usr/bin/g++-4.8 TARGET_ARCH=x86_64 Z3_BUILD_TYPE=Debug RUN_UNIT_TESTS=BUILD_ONLY
# macOS (a.k.a OSX) support
matrix:
include:
# For now just test a single configuration. macOS builds on TravisCI are
# very slow so we should keep the number of configurations we test on this
# OS to a minimum.
- os: osx
osx_image: xcode8.3
env: Z3_BUILD_TYPE=RelWithDebInfo DOTNET_BINDINGS=0
script:
# Use `travis_wait` when doing LTO builds because this configuration will
# have long link times during which it will not show any output which
# TravisCI might kill due to perceived inactivity.
- if [ "X${USE_LTO}" = "X1" ]; then
travis_wait 55 contrib/ci/scripts/travis_ci_entry_point.sh || exit 1;
else
contrib/ci/scripts/travis_ci_entry_point.sh || exit 1;
fi

View File

@@ -1,17 +1,8 @@
# Enforce some CMake policies
cmake_minimum_required(VERSION 3.4)
if (POLICY CMP0054)
cmake_policy(SET CMP0054 NEW)
endif()
if (POLICY CMP0042)
# Enable `MACOSX_RPATH` by default.
cmake_policy(SET CMP0042 NEW)
endif()
set(CMAKE_USER_MAKE_RULES_OVERRIDE_CXX "${CMAKE_CURRENT_SOURCE_DIR}/cmake/cxx_compiler_flags_overrides.cmake")
project(Z3 VERSION 4.8.7.0 LANGUAGES CXX)
project(Z3 VERSION 4.8.12.0 LANGUAGES CXX)
################################################################################
# Project version
@@ -42,7 +33,7 @@ set(z3_polluted_tree_msg
################################################################################
# Sanity check - Disallow building in source
################################################################################
if ("${PROJECT_SOURCE_DIR}" STREQUAL "${PROJECT_BINARY_DIR}")
if (PROJECT_SOURCE_DIR STREQUAL PROJECT_BINARY_DIR)
message(FATAL_ERROR "In source builds are not allowed. You should invoke "
"CMake from a different directory.")
endif()
@@ -72,7 +63,7 @@ set(GIT_DIR "${PROJECT_SOURCE_DIR}/.git")
if (EXISTS "${GIT_DIR}")
# Try to make CMake configure depend on the current git HEAD so that
# a re-configure is triggered when the HEAD changes.
add_git_dir_dependency("${PROJECT_SOURCE_DIR}" ADD_GIT_DEP_SUCCESS)
add_git_dir_dependency("${GIT_DIR}" ADD_GIT_DEP_SUCCESS)
if (ADD_GIT_DEP_SUCCESS)
if (Z3_INCLUDE_GIT_HASH)
get_git_head_hash("${GIT_DIR}" Z3GITHASH)
@@ -114,6 +105,7 @@ endif()
# Useful CMake functions/Macros
################################################################################
include(CheckCXXSourceCompiles)
include(CMakeDependentOption)
################################################################################
# Compiler flags for Z3 components.
@@ -139,7 +131,7 @@ else()
if(NOT CMAKE_BUILD_TYPE)
message(STATUS "CMAKE_BUILD_TYPE is not set. Setting default")
message(STATUS "The available build types are: ${available_build_types}")
set(CMAKE_BUILD_TYPE RelWithDebInfo CACHE String
set(CMAKE_BUILD_TYPE RelWithDebInfo CACHE STRING
"Options are ${available_build_types}"
FORCE)
# Provide drop down menu options in cmake-gui
@@ -187,31 +179,34 @@ include(${PROJECT_SOURCE_DIR}/cmake/z3_add_cxx_flag.cmake)
################################################################################
# C++ language version
################################################################################
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
################################################################################
# Platform detection
################################################################################
if ("${CMAKE_SYSTEM_NAME}" STREQUAL "Linux")
if (CMAKE_SYSTEM_NAME STREQUAL "Linux")
message(STATUS "Platform: Linux")
list(APPEND Z3_COMPONENT_CXX_DEFINES "-D_LINUX_")
if ("${TARGET_ARCHITECTURE}" STREQUAL "x86_64")
if (TARGET_ARCHITECTURE STREQUAL "x86_64")
list(APPEND Z3_COMPONENT_CXX_DEFINES "-D_USE_THREAD_LOCAL")
endif()
elseif ("${CMAKE_SYSTEM_NAME}" MATCHES "GNU")
elseif (CMAKE_SYSTEM_NAME STREQUAL "Android")
message(STATUS "Platform: Android")
list(APPEND Z3_COMPONENT_CXX_DEFINES "-D_ANDROID_")
elseif (CMAKE_SYSTEM_NAME MATCHES "GNU")
message(STATUS "Platform: GNU/Hurd")
list(APPEND Z3_COMPONENT_CXX_DEFINES "-D_HURD_")
elseif ("${CMAKE_SYSTEM_NAME}" STREQUAL "Darwin")
elseif (CMAKE_SYSTEM_NAME STREQUAL "Darwin")
# Does macOS really not need any special flags?
message(STATUS "Platform: Darwin")
elseif ("${CMAKE_SYSTEM_NAME}" MATCHES "FreeBSD")
elseif (CMAKE_SYSTEM_NAME MATCHES "FreeBSD")
message(STATUS "Platform: FreeBSD")
list(APPEND Z3_COMPONENT_CXX_DEFINES "-D_FREEBSD_")
elseif ("${CMAKE_SYSTEM_NAME}" MATCHES "NetBSD")
elseif (CMAKE_SYSTEM_NAME MATCHES "NetBSD")
message(STATUS "Platform: NetBSD")
list(APPEND Z3_COMPONENT_CXX_DEFINES "-D_NetBSD_")
elseif ("${CMAKE_SYSTEM_NAME}" MATCHES "OpenBSD")
elseif (CMAKE_SYSTEM_NAME MATCHES "OpenBSD")
message(STATUS "Platform: OpenBSD")
list(APPEND Z3_COMPONENT_CXX_DEFINES "-D_OPENBSD_")
elseif (CYGWIN)
@@ -247,8 +242,7 @@ if (Z3_USE_LIB_GMP)
# can't be found
find_package(GMP REQUIRED)
message(STATUS "Using libgmp")
list(APPEND Z3_DEPENDENT_LIBS ${GMP_C_LIBRARIES})
list(APPEND Z3_COMPONENT_EXTRA_INCLUDE_DIRS ${GMP_INCLUDE_DIRS})
list(APPEND Z3_DEPENDENT_LIBS GMP::GMP)
list(APPEND Z3_COMPONENT_CXX_DEFINES "-D_MP_GMP")
else()
list(APPEND Z3_COMPONENT_CXX_DEFINES "-D_MP_INTERNAL")
@@ -273,11 +267,11 @@ endif()
################################################################################
# Thread safe or not?
################################################################################
option(SINGLE_THREADED
option(Z3_SINGLE_THREADED
"Non-thread-safe build"
OFF
)
if (SINGLE_THREADED)
if (Z3_SINGLE_THREADED)
list(APPEND Z3_COMPONENT_CXX_DEFINES "-DSINGLE_THREAD")
message(STATUS "Non-thread-safe build")
else()
@@ -288,18 +282,18 @@ endif()
# FP math
################################################################################
# FIXME: Support ARM "-mfpu=vfp -mfloat-abi=hard"
if (("${TARGET_ARCHITECTURE}" STREQUAL "x86_64") OR ("${TARGET_ARCHITECTURE}" STREQUAL "i686"))
if (("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU") OR ("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang") OR ("${CMAKE_CXX_COMPILER_ID}" MATCHES "Intel"))
if ("${CMAKE_CXX_COMPILER_ID}" MATCHES "Intel")
if ((TARGET_ARCHITECTURE STREQUAL "x86_64") OR (TARGET_ARCHITECTURE STREQUAL "i686"))
if ((CMAKE_CXX_COMPILER_ID MATCHES "GNU") OR (CMAKE_CXX_COMPILER_ID MATCHES "Clang") OR (CMAKE_CXX_COMPILER_ID MATCHES "Intel"))
if (CMAKE_CXX_COMPILER_ID MATCHES "Intel")
# Intel's compiler requires linking with libiomp5
list(APPEND Z3_DEPENDENT_LIBS "iomp5")
endif()
set(SSE_FLAGS "-mfpmath=sse" "-msse" "-msse2")
elseif ("${CMAKE_CXX_COMPILER_ID}" MATCHES "Intel")
elseif (CMAKE_CXX_COMPILER_ID MATCHES "Intel")
set(SSE_FLAGS "-mfpmath=sse" "-msse" "-msse2")
# Intel's compiler requires linking with libiomp5
list(APPEND Z3_DEPENDENT_LIBS "iomp5")
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC")
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
set(SSE_FLAGS "/arch:SSE2")
else()
message(FATAL_ERROR "Unknown compiler ${CMAKE_CXX_COMPILER_ID}")
@@ -316,7 +310,7 @@ endif()
################################################################################
set(THREADS_PREFER_PTHREAD_FLAG TRUE)
find_package(Threads)
list(APPEND Z3_DEPENDENT_LIBS ${CMAKE_THREAD_LIBS_INIT})
list(APPEND Z3_DEPENDENT_LIBS Threads::Threads)
################################################################################
# Compiler warnings
@@ -326,9 +320,9 @@ include(${PROJECT_SOURCE_DIR}/cmake/compiler_warnings.cmake)
################################################################################
# Save Clang optimization records
################################################################################
option(SAVE_CLANG_OPTIMIZATION_RECORDS "Enable saving Clang optimization records." OFF)
option(Z3_SAVE_CLANG_OPTIMIZATION_RECORDS "Enable saving Clang optimization records." OFF)
if (SAVE_CLANG_OPTIMIZATION_RECORDS)
if (Z3_SAVE_CLANG_OPTIMIZATION_RECORDS)
z3_add_cxx_flag("-fsave-optimization-record" REQUIRED)
endif()
@@ -357,6 +351,7 @@ option(Z3_BUILD_LIBZ3_SHARED "Build libz3 as a shared library if true, otherwise
################################################################################
if (NOT MSVC)
z3_add_cxx_flag("-fvisibility=hidden" REQUIRED)
z3_add_cxx_flag("-fvisibility-inlines-hidden" REQUIRED)
endif()
################################################################################
@@ -374,17 +369,16 @@ endif()
# Position independent code
################################################################################
# This is required because code built in the components will end up in a shared
# library. If not building a shared library ``-fPIC`` isn't needed and would add
# unnecessary overhead.
if (Z3_BUILD_LIBZ3_SHARED)
# Avoid adding -fPIC compiler switch if we compile with MSVC (which does not
# support the flag) or if we target Windows, which generally does not use
# position independent code for native code shared libraries (DLLs).
if (NOT (MSVC OR MINGW OR WIN32))
z3_add_cxx_flag("-fPIC" REQUIRED)
endif()
# library.
# Avoid adding -fPIC compiler switch if we compile with MSVC (which does not
# support the flag) or if we target Windows, which generally does not use
# position independent code for native code shared libraries (DLLs).
if (NOT (MSVC OR MINGW OR WIN32))
z3_add_cxx_flag("-fPIC" REQUIRED)
endif()
################################################################################
# Link time optimization
################################################################################
@@ -408,7 +402,7 @@ if (Z3_ENABLE_CFI)
if ("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang")
z3_add_cxx_flag("-fsanitize=cfi" REQUIRED)
z3_add_cxx_flag("-fsanitize-cfi-cross-dso" REQUIRED)
elseif ("x${CMAKE_CXX_COMPILER_ID}" STREQUAL "xMSVC")
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
z3_add_cxx_flag("/guard:cf" REQUIRED)
message(STATUS "Enabling CFI for MSVC")
foreach (_build_type ${build_types_with_cfi})
@@ -425,14 +419,14 @@ endif()
################################################################################
# MSVC specific flags inherited from old build system
################################################################################
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC")
if (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
include(${PROJECT_SOURCE_DIR}/cmake/msvc_legacy_quirks.cmake)
endif()
################################################################################
# Pass /RELEASE to the linker so that checksums in PE files are calculated.
################################################################################
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC")
if (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
string(APPEND CMAKE_EXE_LINKER_FLAGS " /RELEASE")
string(APPEND CMAKE_SHARED_LINKER_FLAGS " /RELEASE")
endif()
@@ -569,6 +563,9 @@ write_basic_package_version_file("${PROJECT_BINARY_DIR}/Z3ConfigVersion.cmake"
COMPATIBILITY SameMajorVersion
)
configure_file("${CMAKE_CURRENT_SOURCE_DIR}/z3.pc.cmake.in"
"${CMAKE_CURRENT_BINARY_DIR}/z3.pc" @ONLY)
################################################################################
# Create `Z3Config.cmake` and related files for install tree so clients can use
# Z3 via CMake.
@@ -611,10 +608,18 @@ install(
DESTINATION "${CMAKE_INSTALL_Z3_CMAKE_PACKAGE_DIR}"
)
# Add install rule to install ${PROJECT_BINARY_DIR}/z3.pc
install(
FILES "${PROJECT_BINARY_DIR}/z3.pc"
DESTINATION "${CMAKE_INSTALL_PKGCONFIGDIR}"
)
################################################################################
# Examples
################################################################################
option(Z3_ENABLE_EXAMPLE_TARGETS "Build Z3 api examples" ON)
cmake_dependent_option(Z3_ENABLE_EXAMPLE_TARGETS
"Build Z3 api examples" ON
"CMAKE_SOURCE_DIR STREQUAL PROJECT_SOURCE_DIR" OFF)
if (Z3_ENABLE_EXAMPLE_TARGETS)
add_subdirectory(examples)
endif()
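
The hunks above rename several cache options to carry a `Z3_` prefix (for example `Z3_SINGLE_THREADED` and `Z3_SAVE_CLANG_OPTIMIZATION_RECORDS`) and add install rules for a generated `z3.pc` pkg-config file. Purely as a hedged sketch, not part of the diff, an out-of-source configure against the new option names might look like this; the directory name, generator and option values are examples only.

```bash
# Minimal out-of-source configure using the renamed Z3_* options; values are examples.
mkdir -p build && cd build
cmake -G "Ninja" \
      -DCMAKE_BUILD_TYPE=RelWithDebInfo \
      -DZ3_BUILD_LIBZ3_SHARED=ON \
      -DZ3_SINGLE_THREADED=OFF \
      -DZ3_USE_LIB_GMP=OFF \
      ../
ninja
```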

View File

@@ -142,8 +142,13 @@ nmake
### Visual Studio
For the Visual Studio generators you need to know which version of Visual Studio you wish
to use and also what architecture you want to build for.
Visual Studio 2019 comes with integrated support for CMake.
It suffices to open the (z3) folder where this file and the Z3 project CMakeLists.txt reside,
and Visual Studio does the rest.
For legacy versions of Visual Studio the process is as follows:
For the Visual Studio generators you need to know which version of
Visual Studio you wish to use and also what architecture you want to build for.
We'll use the ``cmake-gui`` here as it is easier to pick the right generator but this can
be scripted if need be.
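
The added text distinguishes Visual Studio 2019's built-in CMake support from the generator-based flow needed for legacy versions. As an illustration only (the generator string and architecture below are assumptions about the installed toolset, not part of this change), a generator-based configure could look like:

```bash
# Hypothetical configure/build with an explicit Visual Studio generator;
# pick the generator string matching the installed Visual Studio version.
mkdir build && cd build
cmake -G "Visual Studio 15 2017 Win64" ../
cmake --build . --config RelWithDebInfo
```
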
@@ -241,11 +246,11 @@ The following useful options can be passed to CMake whilst configuring.
* ``CMAKE_INSTALL_PYTHON_PKG_DIR`` - STRING. The path to install the z3 python bindings. This can be relative (to ``CMAKE_INSTALL_PREFIX``) or absolute.
* ``CMAKE_INSTALL_Z3_CMAKE_PACKAGE_DIR`` - STRING. The path to install CMake package files (e.g. ``/usr/lib/cmake/z3``).
* ``CMAKE_INSTALL_API_BINDINGS_DOC`` - STRING. The path to install documentation for API bindings.
* ``PYTHON_EXECUTABLE`` - STRING. The python executable to use during the build.
* ``Z3_ENABLE_TRACING_FOR_NON_DEBUG`` - BOOL. If set to ``TRUE`` enable tracing in non-debug builds, if set to ``FALSE`` disable tracing in non-debug builds. Note in debug builds tracing is always enabled.
* ``Z3_BUILD_LIBZ3_SHARED`` - BOOL. If set to ``TRUE`` build libz3 as a shared library otherwise build as a static library.
* ``Z3_ENABLE_EXAMPLE_TARGETS`` - BOOL. If set to ``TRUE`` add the build targets for building the API examples.
* ``Z3_USE_LIB_GMP`` - BOOL. If set to ``TRUE`` use the GNU multiple precision library. If set to ``FALSE`` use an internal implementation.
* ``Z3_PYTHON_EXECUTABLE`` - STRING. The python executable to use during the build.
* ``Z3_BUILD_PYTHON_BINDINGS`` - BOOL. If set to ``TRUE`` then Z3's python bindings will be built.
* ``Z3_INSTALL_PYTHON_BINDINGS`` - BOOL. If set to ``TRUE`` and ``Z3_BUILD_PYTHON_BINDINGS`` is ``TRUE`` then running the ``install`` target will install Z3's Python bindings.
* ``Z3_BUILD_DOTNET_BINDINGS`` - BOOL. If set to ``TRUE`` then Z3's .NET bindings will be built.
@@ -270,6 +275,11 @@ The following useful options can be passed to CMake whilst configuring.
* ``WARNINGS_AS_ERRORS`` - STRING. If set to ``TRUE`` compiler warnings will be treated as errors. If set to ``False`` compiler warnings will not be treated as errors.
If set to ``SERIOUS_ONLY`` a subset of compiler warnings will be treated as errors.
* ``Z3_C_EXAMPLES_FORCE_CXX_LINKER`` - BOOL. If set to ``TRUE`` the C API examples will request that the C++ linker is used rather than the C linker.
* ``Z3_BUILD_EXECUTABLE`` - BOOL. If set to ``TRUE`` build the z3 executable. Defaults to ``TRUE`` unless z3 is being built as a submodule in which case it defaults to ``FALSE``.
* ``Z3_BUILD_TEST_EXECUTABLES`` - BOOL. If set to ``TRUE`` build the z3 test executables. Defaults to ``TRUE`` unless z3 is being built as a submodule in which case it defaults to ``FALSE``.
* ``Z3_SAVE_CLANG_OPTIMIZATION_RECORDS`` - BOOL. If set to ``TRUE`` saves Clang optimization records by setting the compiler flag ``-fsave-optimization-record``.
* ``Z3_SINGLE_THREADED`` - BOOL. If set to ``TRUE`` compiles Z3 for single threaded mode.
On the command line these can be passed to ``cmake`` using the ``-D`` option. In ``ccmake`` and ``cmake-gui`` these can be set in the user interface.
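
As the paragraph above notes, each documented option is passed on the command line with ``-D``. A hedged example combining a few of the options listed above (the particular values are arbitrary):

```bash
# Example -D usage for some of the documented options; values are illustrative only.
cmake -DZ3_BUILD_PYTHON_BINDINGS=TRUE \
      -DZ3_USE_LIB_GMP=TRUE \
      -DWARNINGS_AS_ERRORS=SERIOUS_ONLY \
      -G "Ninja" ../
```
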
@@ -293,7 +303,7 @@ installation of Java. If CMake fails to find your installation of Java set the
correct location. For example
```
JAVA_HOME=/usr/lib/jvm/default cmake -DBUILD_JAVA_BINDINGS=ON ../
JAVA_HOME=/usr/lib/jvm/default cmake -DZ3_BUILD_JAVA_BINDINGS=ON ../
```
Note that the built ``.jar`` file is named ``com.microsoft.z3-VERSION.jar``
where ``VERSION`` is the Z3 version. Under non Windows systems a

View File

@@ -14,9 +14,9 @@ See the [release notes](RELEASE_NOTES) for notes on various stable releases of Z
## Build status
| Azure Pipelines | TravisCI |
| --------------- | -------- |
[![Build Status](https://z3build.visualstudio.com/Z3Build/_apis/build/status/Z3Build-CI?branchName=master)](https://z3build.visualstudio.com/Z3Build/_build/latest?definitionId=10) | [![Build Status](https://travis-ci.org/Z3Prover/z3.svg?branch=master)](https://travis-ci.org/Z3Prover/z3)
| Azure Pipelines |
| --------------- |
[![Build Status](https://dev.azure.com/Z3Public/Z3/_apis/build/status/Z3Prover.z3?branchName=master)](https://dev.azure.com/Z3Public/Z3/_build/latest?definitionId=1&branchName=master)
[1]: #building-z3-on-windows-using-visual-studio-command-prompt
[2]: #building-z3-using-make-and-gccclang
@@ -44,6 +46,8 @@ cd build
nmake
```
Z3 uses C++17. The recommended version of Visual Studio is therefore VS2019.
## Building Z3 using make and GCC/Clang
Execute:
@@ -101,35 +103,22 @@ Z3 has a build system using CMake. Read the [README-CMake.md](README-CMake.md)
file for details. It is recommended for most build tasks,
except for building OCaml bindings.
## Dependencies
Z3 itself has few dependencies. It uses C++ runtime libraries, including pthreads for multi-threading.
It is optionally possible to use GMP for multi-precision integers, but Z3 contains its own self-contained
multi-precision functionality. Python is required to build Z3. Building the Java, .NET, OCaml, and
Julia APIs requires installing the relevant tool chains.
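
Since Python drives the default build described in this README, a minimal sketch of that flow follows; the job count and the install step are only illustrative.

```bash
# Minimal Python-driven build; -j4 and the install step are examples only.
python scripts/mk_make.py
cd build
make -j4
sudo make install
```
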
## Z3 bindings
Z3 has bindings for various programming languages.
### ``.NET``
You can install a nuget package for the latest release Z3 from [nuget.org](https://www.nuget.org/packages/Microsoft.Z3.x64/).
You can install a nuget package for the latest release Z3 from [nuget.org](https://www.nuget.org/packages/Microsoft.Z3/).
Use the ``--dotnet`` command line flag with ``mk_make.py`` to enable building these.
On non-windows platforms [mono](http://www.mono-project.com/) is required. On these
platforms the location of the C# compiler and gac utility need to be known. You
can set these as follows if they aren't detected automatically. For example:
```bash
CSC=/usr/bin/csc GACUTIL=/usr/bin/gacutil python scripts/mk_make.py --dotnet
```
Note for very old versions of Mono (e.g. ``2.10``) you may need to set ``CSC``
to ``/usr/bin/dmcs``.
Note that when ``make install`` is executed on non-windows platforms the GAC
utility is used to install ``Microsoft.Z3.dll`` into the
[GAC](http://www.mono-project.com/docs/advanced/assemblies-and-the-gac/) as the
``Microsoft.Z3.Sharp`` package. During install a
[pkg-config](http://www.freedesktop.org/wiki/Software/pkg-config/) file
(``Microsoft.Z3.Sharp.pc``) is also installed which allows the
[MonoDevelop](http://www.monodevelop.com/) IDE to find the bindings. Running
``make uninstall`` will remove the dll from the GAC and the ``pkg-config`` file.
See [``examples/dotnet``](examples/dotnet) for examples.
@@ -200,6 +189,10 @@ python -c 'import z3; print(z3.get_version_string())'
See [``examples/python``](examples/python) for examples.
### ``Julia``
The Julia package [Z3.jl](https://github.com/ahumenberger/Z3.jl) wraps the C++ API of Z3. Information about updating and building the Julia bindings can be found in [src/api/julia](src/api/julia).
### ``Web Assembly``
[WebAssembly](https://github.com/cpitclaudel/z3.wasm) bindings are provided by Clément Pit-Claudel.
@@ -213,11 +206,12 @@ See [``examples/python``](examples/python) for examples.
* Default input format is [SMTLIB2](http://smtlib.cs.uiowa.edu)
* Other native foreign function interfaces:
* C
* C++
* Python
* Java
* C#
* OCaml
* [C++ API](https://z3prover.github.io/api/html/group__cppapi.html)
* [.NET API](https://z3prover.github.io/api/html/namespace_microsoft_1_1_z3.html)
* [Java API](https://z3prover.github.io/api/html/namespacecom_1_1microsoft_1_1z3.html)
* [Python API](https://z3prover.github.io/api/html/namespacez3py.html) (also available in [pydoc format](https://z3prover.github.io/api/html/z3.html))
* C
* OCaml
* [Julia](https://github.com/ahumenberger/Z3.jl)

View File

@@ -3,12 +3,63 @@ RELEASE NOTES
Version 4.8.next
================
- Planned features
- rewritten NIA (non-linear integer arithmetic) core solver
- self-contained character theory, direct support for UTF8, Unicode character sets
- recursive function representation without hoisting ite expressions. Issue #2601
- specialized solver support for QF_ABV and ABV based on lazy SMT and dual reduction
- model-based interpolation for quantifier-free UF, arithmetic, arrays
- circuit optimization techniques for BV pre-processing
- the smtfd solver and tactic implement this strategy, but it is not yet ready for prime time.
- introduction of simple induction lemmas to handle a limited repertoire of induction proofs.
- circuit optimization techniques for BV in-processing are available as the sat.cut
option, but at this point they do not translate into benefits. The option is currently
turned off by default.
Version 4.8.12
==============
Release provided to fix git tag discrepancy issues with 4.8.11
Version 4.8.11
==============
- self-contained character theory, direct support for UTF8, Unicode character sets.
Characters are by default Unicode with an 18-bit range.
- support for incremental any-time MaxSAT using the option opt.enable_lns. The API
allows registering a callback function that is invoked on each incremental improvement
to objectives.
Version 4.8.10
==============
- rewritten arithmetic solver replacing the legacy arithmetic solver, enabled by default
Version 4.8.9
=============
- New features
- significant improvements to regular expression solving
- expose user theory plugin. It is a leaner version of the user theory plugin that was once available.
It allows for registering callbacks that react when bit-vector and Boolean variables
receive fixed values.
- Bug fixes
- many
- Notes
- the new arithmetic theory is turned on by default. It _does_ introduce regressions in
several scenarios, but has its own advantages. Users can turn on the old solver by setting smt.arith.solver=2.
Depending on feedback, we may toggle this default setting back to smt.arith.solver=2.
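
For readers who want the behaviour described in the note above, a hedged example of selecting the legacy arithmetic solver from the command line (the input file name is a placeholder):

```bash
# Run Z3 with the legacy arithmetic solver; problem.smt2 is a placeholder input file.
z3 smt.arith.solver=2 problem.smt2
```
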
Version 4.8.8
=============
- New features
- rewritten NIA (non-linear integer arithmetic) core solver
It is enabled by default in selected theories.
The legacy arithmetic solver remains the overall default in this release
as the rewritten solver shows regressions (mainly on NRA problems).
- recursive function representation without hoisting ite expressions. Issue #2601
- model-based interpolation for quantifier-free UF, arithmetic
- Julia bindings over the C++ API, thanks to ahumenberger
- Bug fixes
- numerous, many based on extensive fuzz testing.
Thanks to 5hadowblad3, muchang, numairmansur, rainoftime, wintered
- Notes
- recursive functions are unfolded with separate increments based on unsat core
analysis of blocking literals that are separate for different recursive functions.
- the seq (string) solver has been revised in several ways and likely shows some
regressions in this release.
Version 4.8.7
=============

View File

@@ -1,21 +1,35 @@
variables:
cmakeJulia: '-DZ3_BUILD_JULIA_BINDINGS=True'
cmakeJava: '-DZ3_BUILD_JAVA_BINDINGS=True'
cmakeNet: '-DZ3_BUILD_DOTNET_BINDINGS=True'
cmakePy: '-DZ3_BUILD_PYTHON_BINDINGS=True'
cmakeStdArgs: '-DZ3_BUILD_DOTNET_BINDINGS=True -DZ3_BUILD_JAVA_BINDINGS=True -DZ3_BUILD_PYTHON_BINDINGS=True -G "Ninja" ../'
asanEnv: 'CXXFLAGS="${CXXFLAGS} -fsanitize=address -fno-omit-frame-pointer" CFLAGS="${CFLAGS} -fsanitize=address -fno-omit-frame-pointer"'
ubsanEnv: 'CXXFLAGS="${CXXFLAGS} -fsanitize=undefined" CFLAGS="${CFLAGS} -fsanitize=undefined"'
msanEnv: 'CC=clang LDFLAGS="-L../libcxx/libcxx_msan/lib -lc++abi -Wl,-rpath=../libcxx/libcxx_msan/lib" CXX=clang++ CXXFLAGS="${CXXFLAGS} -stdlib=libc++ -fsanitize-memory-track-origins -fsanitize=memory -fPIE -fno-omit-frame-pointer -g -O2" CFLAGS="${CFLAGS} -stdlib=libc -fsanitize=memory -fsanitize-memory-track-origins -fno-omit-frame-pointer -g -O2"'
# TBD:
# test python bindings
# build documentation
# Asan, ubsan, msan
# Disabled pending clang dependencies for std::unordered_map
jobs:
- job: "LinuxPythonDebug"
displayName: "Ubuntu build - python make - debug"
pool:
vmImage: "Ubuntu-16.04"
vmImage: "Ubuntu-latest"
strategy:
matrix:
MT:
cmdLine: 'python scripts/mk_make.py -d --java --dotnet'
runRegressions: 'True'
ST:
cmdLine: './configure --single-threaded'
runRegressions: 'False'
steps:
- script: $(cmdLine)
- script: |
@@ -26,44 +40,46 @@ jobs:
make -j3 test-z3
cd ..
- template: scripts/test-z3.yml
- ${{if eq(variables['runRegressions'], 'True')}}:
- template: scripts/test-regressions.yml
- job: "Ubuntu18Python"
displayName: "Ubuntu 18 with ocaml"
pool:
vmImage: "Ubuntu-18.04"
steps:
- script: sudo apt-get install ocaml opam libgmp-dev
- script: opam init -y
- script: eval `opam config env`; opam install zarith ocamlfind -y
- script: python scripts/mk_make.py --ml --staticlib
- script: |
set -e
cd build
eval `opam config env`
make -j3
make -j3 examples
make -j3 test-z3
./ml_example
cd ..
- template: scripts/test-z3.yml
- template: scripts/test-regressions.yml
- template: scripts/generate-doc.yml
# ./cpp_example
# ./c_example
# TBD:
# test python bindings
# build documentation
# Asan, ubsan, msan
- job: "LinuxCMake"
- job: "LinuxMSan"
displayName: "Ubuntu build - cmake"
condition: eq(0,1)
pool:
vmImage: "Ubuntu-16.04"
vmImage: "Ubuntu-latest"
strategy:
matrix:
debugClang:
cmdLine: 'CC=clang CXX=clang++ cmake $(cmakeStdArgs)'
msanClang:
cmdLine: '$(msanEnv) cmake $(cmakeStdArgs)'
runUnitTest: 'True'
releaseClang:
cmdLine: 'CC=clang CXX=clang++ cmake -DCMAKE_BUILD_TYPE=Release $(cmakeStdArgs)'
runUnitTest: 'True'
debugGcc:
cmdLine: 'CC=gcc CXX=g++ cmake $(cmakeStdArgs)'
runUnitTest: 'True'
releaseSTGcc:
cmdLine: 'CC=gcc CXX=g++ cmake -DCMAKE_BUILD_TYPE=Release -DSINGLE_THREADED=ON $(cmakeStdArgs)'
runUnitTest: 'True'
# gccX86:
# cmdLine: 'CXXFLAGS="${CXXFLAGS} -m32" CFLAGS="${CFLAGS} -m32" CC=gcc-5 CXX=g++-5 cmake -DCMAKE_BUILD_TYPE=RelWithDebInfo $(cmakeStdArgs)'
# runUnitTest: 'True'
# asan:
# cmdLine: '$(asanEnv) cmake $(cmakeStdArgs)'
# runUnitTest: 'False'
runExample: 'False' # Examples don't seem to build with MSAN
steps:
- script: sudo apt-get install ninja-build
- script: sudo apt-get install ninja-build libc++-dev libc++abi-dev
- script: ./scripts/build_libcxx_msan.sh
- script: |
set -e
mkdir build
@@ -74,33 +90,131 @@ jobs:
cd ..
- script: |
cd build
export MSAN_SYMBOLIZER_PATH=/usr/lib/llvm-6.0/bin/llvm-symbolizer
./test-z3 -a
cd ..
condition: eq(variables['runUnitTest'], 'True')
- template: scripts/test-examples-cmake.yml
- ${{if eq(variables['runExample'], 'True')}}:
- template: scripts/test-examples-cmake.yml
# - template: scripts/test-jupyter.yml
# - template: scripts/test-java-cmake.yml
- template: scripts/test-regressions.yml
- job: "Windows2017"
displayName: "Windows 2017 build"
# - template: scripts/test-regressions.yml
- job: "Ubuntu16CMake"
displayName: "Ubuntu build - cmake"
pool:
vmImage: "vs2017-win2016"
vmImage: "Ubuntu-latest"
strategy:
matrix:
releaseClang:
setupCmd1: ''
setupCmd2: ''
buildCmd: 'CC=clang CXX=clang++ cmake -DCMAKE_BUILD_TYPE=Release $(cmakeStdArgs)'
runTests: 'True'
debugClang:
setupCmd1: 'julia -e "using Pkg; Pkg.add(PackageSpec(name=\"libcxxwrap_julia_jll\", version=\"0.7.0\"))"'
setupCmd2: 'JlCxxDir=$(julia -e "using libcxxwrap_julia_jll; print(dirname(libcxxwrap_julia_jll.libcxxwrap_julia_path))")'
buildCmd: 'CC=clang CXX=clang++ cmake -DJlCxx_DIR=$JlCxxDir/cmake/JlCxx $(cmakeJulia) $(cmakeStdArgs)'
runTests: 'True'
debugGcc:
setupCmd1: ''
setupCmd2: ''
buildCmd: 'CC=gcc CXX=g++ cmake $(cmakeStdArgs)'
runTests: 'True'
releaseSTGcc:
setupCmd1: ''
setupCmd2: ''
buildCmd: 'CC=gcc CXX=g++ cmake -DCMAKE_BUILD_TYPE=Release -DZ3_SINGLE_THREADED=ON $(cmakeStdArgs)'
runTests: 'True'
steps:
- script: scripts\vsts-vs2017.cmd x64
- script: sudo apt-get install ninja-build
- script: |
set -e
mkdir build
cd build
$(setupCmd1)
$(setupCmd2)
$(buildCmd)
ninja
ninja test-z3
cd ..
- script: |
cd build
./test-z3 -a
cd ..
condition: eq(variables['runTests'], 'True')
- ${{if eq(variables['runTests'], 'True')}}:
- template: scripts/test-examples-cmake.yml
# - template: scripts/test-jupyter.yml
# - template: scripts/test-java-cmake.yml
- ${{if eq(variables['runTests'], 'True')}}:
- template: scripts/test-regressions.yml
- job: "Windows2017ARM64"
displayName: "Windows 2017 ARM64 build"
pool:
vmImage: "vs2017-win2016"
- job: "WindowsLatest"
displayName: "Windows"
pool:
vmImage: "windows-latest"
strategy:
matrix:
x86:
arch: 'x86'
setupCmd1: ''
setupCmd2: ''
setupCmd3: ''
bindings: '$(cmakePy)'
runTests: 'False'
x64:
arch: 'x64'
setupCmd1: 'julia -e "using Pkg; Pkg.add(PackageSpec(name=\"libcxxwrap_julia_jll\", version=\"0.7.0\"))"'
setupCmd2: 'julia -e "using libcxxwrap_julia_jll; print(dirname(libcxxwrap_julia_jll.libcxxwrap_julia_path))" > tmp.env'
setupCmd3: 'set /P JlCxxDir=<tmp.env'
bindings: '-DJlCxx_DIR=%JlCxxDir%\..\lib\cmake\JlCxx $(cmakeJava) $(cmakeNet) $(cmakePy) -DCMAKE_BUILD_TYPE=RelWithDebInfo'
runTests: 'True'
arm64:
arch: 'amd64_arm64'
setupCmd1: ''
setupCmd2: ''
setupCmd3: ''
bindings: ''
runTests: 'False'
steps:
- script: scripts\vsts-vs2017.cmd amd64_arm64
- script: md build
- script: |
cd build
$(setupCmd1)
$(setupCmd2)
$(setupCmd3)
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" $(arch)
cmake $(bindings) -G "NMake Makefiles" ../
nmake
cd ..
- script: |
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" $(arch)
pushd build\python
python z3test.py z3
python z3test.py z3num
popd
pushd build
nmake cpp_example
examples\cpp_example_build_dir\cpp_example.exe
nmake c_example
examples\c_example_build_dir\c_example.exe
nmake java_example
nmake dotnet_example
nmake test-z3
test-z3.exe -a
popd
condition: eq(variables['runTests'], 'True')
- script: |
git clone https://github.com/z3prover/z3test z3test
python z3test\scripts\test_benchmarks.py build\z3.exe z3test\regressions\smt2
condition: eq(variables['runTests'], 'True')
- job: "MacOS"
- job: "MacOSPython"
displayName: "MacOS build"
pool:
vmImage: "macOS-10.14"
vmImage: "macOS-latest"
steps:
- script: python scripts/mk_make.py -d --java --dotnet
- script: |
@@ -112,25 +226,29 @@ jobs:
./cpp_example
./c_example
cd ..
- template: scripts/test-z3.yml
# Skip as dead-slow in debug mode:
# - template: scripts/test-z3.yml
- template: scripts/test-regressions.yml
- job: "MacOSCMake"
displayName: "MacOS build with CMake"
pool:
vmImage: "macOS-10.14"
vmImage: "macOS-latest"
steps:
- script: brew install ninja
# - script: brew install --cask julia
- script: |
julia -e "using Pkg; Pkg.add(PackageSpec(name=\"libcxxwrap_julia_jll\", version=\"0.7.0\"))"
JlCxxDir=$(julia -e "using libcxxwrap_julia_jll; println(joinpath(dirname(libcxxwrap_julia_jll.libcxxwrap_julia_path), \"cmake\", \"JlCxx\"))")
set -e
mkdir build
cd build
CC=clang CXX=clang++ cmake -DZ3_BUILD_JAVA_BINDINGS=True -DZ3_BUILD_PYTHON_BINDINGS=True -DZ3_BUILD_DOTNET_BINDINGS=False -G "Ninja" ../
cmake -DJlCxx_DIR=$JlCxxDir $(cmakeJulia) $(cmakeJava) $(cmakePy) -DZ3_BUILD_DOTNET_BINDINGS=False -G "Ninja" ../
ninja
ninja test-z3
cd ..
- template: scripts/test-z3.yml
- template: scripts/test-examples-cmake.yml
# - template: scripts/test-examples-cmake.yml
- template: scripts/test-regressions.yml
# - template: scripts/test-java-cmake.yml
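
The CMake-based CI jobs above reduce to a configure/build/test sequence that can be approximated locally. A sketch under the assumption that clang, ninja and the Python bindings prerequisites are installed:

```bash
# Local approximation of the CMake CI jobs; compiler and options are examples.
set -e
mkdir build && cd build
CC=clang CXX=clang++ cmake -DZ3_BUILD_PYTHON_BINDINGS=True -G "Ninja" ../
ninja
ninja test-z3
./test-z3 -a
```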

View File

@@ -18,11 +18,11 @@ if (Z3_LINK_TIME_OPTIMIZATION)
set(_lto_compiler_flag "")
set(_lto_linker_flag "")
if (("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang") OR
("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU"))
if ((CMAKE_CXX_COMPILER_ID MATCHES "Clang") OR
(CMAKE_CXX_COMPILER_ID MATCHES "GNU"))
set(_lto_compiler_flag "-flto")
set(_lto_linker_flag "-flto")
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC")
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
set(_lto_compiler_flag "/GL")
set(_lto_linker_flag "/LTCG")
else()

View File

@@ -46,22 +46,22 @@ set(CLANG_WARNINGS_AS_ERRORS
################################################################################
set(WARNING_FLAGS_TO_CHECK "")
set(WARNING_AS_ERROR_FLAGS_TO_CHECK "")
if ("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU")
if (CMAKE_CXX_COMPILER_ID MATCHES "GNU")
list(APPEND WARNING_FLAGS_TO_CHECK ${GCC_AND_CLANG_WARNINGS})
list(APPEND WARNING_FLAGS_TO_CHECK ${GCC_ONLY_WARNINGS})
list(APPEND WARNING_AS_ERROR_FLAGS_TO_CHECK ${GCC_AND_CLANG_WARNINGS_AS_ERRORS})
list(APPEND WARNING_AS_ERROR_FLAGS_TO_CHECK ${GCC_WARNINGS_AS_ERRORS})
elseif ("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang")
elseif (CMAKE_CXX_COMPILER_ID MATCHES "Clang")
list(APPEND WARNING_FLAGS_TO_CHECK ${GCC_AND_CLANG_WARNINGS})
list(APPEND WARNING_FLAGS_TO_CHECK ${CLANG_ONLY_WARNINGS})
list(APPEND WARNING_AS_ERROR_FLAGS_TO_CHECK ${GCC_AND_CLANG_WARNINGS_AS_ERRORS})
list(APPEND WARNING_AS_ERROR_FLAGS_TO_CHECK ${CLANG_WARNINGS_AS_ERRORS})
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC")
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
list(APPEND WARNING_FLAGS_TO_CHECK ${MSVC_WARNINGS})
# CMake's default flags include /W3 already so remove them if
# they already exist.
if ("${CMAKE_CXX_FLAGS}" MATCHES "/W3")
if (CMAKE_CXX_FLAGS MATCHES "/W3")
string(REPLACE "/W3" "" _cmake_cxx_flags_remove_w3 "${CMAKE_CXX_FLAGS}")
set(CMAKE_CXX_FLAGS "${_cmake_cxx_flags_remove_w3}" CACHE STRING "" FORCE)
endif()
@@ -84,7 +84,7 @@ get_property(
PROPERTY
TYPE
)
if ("${WARNINGS_AS_ERRORS_CACHE_VAR_TYPE}" STREQUAL "BOOL")
if (WARNINGS_AS_ERRORS_CACHE_VAR_TYPE STREQUAL "BOOL")
message(WARNING "Detected legacy WARNINGS_AS_ERRORS option. Upgrading")
set(WARNINGS_AS_ERRORS_DEFAULT "${WARNINGS_AS_ERRORS}")
# Delete old entry
@@ -106,25 +106,25 @@ set_property(
"ON;OFF;SERIOUS_ONLY"
)
if ("${WARNINGS_AS_ERRORS}" STREQUAL "ON")
if (WARNINGS_AS_ERRORS STREQUAL "ON")
message(STATUS "Treating compiler warnings as errors")
if (("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang") OR ("${CMAKE_CXX_COMPILER_ID}" MATCHES "GNU"))
if ((CMAKE_CXX_COMPILER_ID MATCHES "Clang") OR (CMAKE_CXX_COMPILER_ID MATCHES "GNU"))
list(APPEND Z3_COMPONENT_CXX_FLAGS "-Werror")
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC")
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
list(APPEND Z3_COMPONENT_CXX_FLAGS "/WX")
else()
message(AUTHOR_WARNING "Unknown compiler")
endif()
elseif ("${WARNINGS_AS_ERRORS}" STREQUAL "SERIOUS_ONLY")
elseif (WARNINGS_AS_ERRORS STREQUAL "SERIOUS_ONLY")
message(STATUS "Treating only serious compiler warnings as errors")
# Loop through the flags
foreach (flag ${WARNING_AS_ERROR_FLAGS_TO_CHECK})
# Add globally because some flags need to be passed at link time.
z3_add_cxx_flag("${flag}" GLOBAL)
endforeach()
elseif ("${WARNINGS_AS_ERRORS}" STREQUAL "OFF")
elseif (WARNINGS_AS_ERRORS STREQUAL "OFF")
message(STATUS "Not treating compiler warnings as errors")
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC")
if (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
# Warnings as errors is off by default for MSVC so setting this
# is not necessary but this duplicates the behaviour of the old
# build system.

View File

@@ -26,7 +26,7 @@ function(add_git_dir_dependency GIT_DOT_FILE SUCCESS_VAR)
# git directory
file(READ "${GIT_DOT_FILE}" GIT_DOT_FILE_DATA LIMIT 512)
string(STRIP "${GIT_DOT_FILE_DATA}" GIT_DOT_FILE_DATA_STRIPPED)
if ("${GIT_DOT_FILE_DATA_STRIPPED}" MATCHES "^gitdir:[ ]*(.+)$")
if (GIT_DOT_FILE_DATA_STRIPPED MATCHES "^gitdir:[ ]*(.+)$")
# Git worktree
message(STATUS "Found git worktree")
set(GIT_WORKTREE_DIR "${CMAKE_MATCH_1}")
@@ -34,11 +34,20 @@ function(add_git_dir_dependency GIT_DOT_FILE SUCCESS_VAR)
# Figure out where real git directory lives
set(GIT_COMMON_DIR_FILE "${GIT_WORKTREE_DIR}/commondir")
if (NOT EXISTS "${GIT_COMMON_DIR_FILE}")
message(FATAL_ERROR "Found git worktree dir but could not find \"${GIT_COMMON_DIR_FILE}\"")
get_filename_component(GIT_WORKTREE_PARENT "${GIT_WORKTREE_DIR}" DIRECTORY)
get_filename_component(GIT_WORKTREE_PARENT "${GIT_WORKTREE_PARENT}" NAME)
if (EXISTS "${Z3_SOURCE_DIR}/${GIT_HEAD_FILE}" AND EXISTS "${Z3_SOURCE_DIR}/${GIT_WORKTREE_DIR}")
# Z3 is a git submodule
set(GIT_HEAD_FILE "${Z3_SOURCE_DIR}/${GIT_HEAD_FILE}")
set(GIT_DIR "${Z3_SOURCE_DIR}/${GIT_WORKTREE_DIR}")
else()
message(FATAL_ERROR "Found git worktree dir but could not find \"${GIT_COMMON_DIR_FILE}\"")
endif()
else()
file(READ "${GIT_COMMON_DIR_FILE}" GIT_COMMON_DIR_FILE_DATA LIMIT 512)
string(STRIP "${GIT_COMMON_DIR_FILE_DATA}" GIT_COMMON_DIR_FILE_DATA_STRIPPED)
get_filename_component(GIT_DIR "${GIT_WORKTREE_DIR}/${GIT_COMMON_DIR_FILE_DATA_STRIPPED}" ABSOLUTE)
endif()
file(READ "${GIT_COMMON_DIR_FILE}" GIT_COMMON_DIR_FILE_DATA LIMIT 512)
string(STRIP "${GIT_COMMON_DIR_FILE_DATA}" GIT_COMMON_DIR_FILE_DATA_STRIPPED)
get_filename_component(GIT_DIR "${GIT_WORKTREE_DIR}/${GIT_COMMON_DIR_FILE_DATA_STRIPPED}" ABSOLUTE)
if (NOT IS_DIRECTORY "${GIT_DIR}")
message(FATAL_ERROR "Failed to compute path to git directory from git worktree")
endif()
@@ -66,7 +75,7 @@ function(add_git_dir_dependency GIT_DOT_FILE SUCCESS_VAR)
file(READ "${GIT_HEAD_FILE}" GIT_HEAD_DATA LIMIT 128)
string(STRIP "${GIT_HEAD_DATA}" GIT_HEAD_DATA_STRIPPED)
if ("${GIT_HEAD_DATA_STRIPPED}" MATCHES "^ref:[ ]*(.+)$")
if (GIT_HEAD_DATA_STRIPPED MATCHES "^ref:[ ]*(.+)$")
# HEAD points at a reference.
set(GIT_REF "${CMAKE_MATCH_1}")
if (EXISTS "${GIT_DIR}/${GIT_REF}")

View File

@@ -13,52 +13,37 @@ find_library(GMP_C_LIBRARIES
NAMES gmp
DOC "GMP C libraries"
)
if (GMP_C_LIBRARIES)
message(STATUS "Found GMP C library: \"${GMP_C_LIBRARIES}\"")
else()
message(STATUS "Could not find GMP C library")
endif()
find_library(GMP_CXX_LIBRARIES
NAMES gmpxx
DOC "GMP C++ libraries"
)
if (GMP_CXX_LIBRARIES)
message(STATUS "Found GMP C++ library: \"${GMP_CXX_LIBRARIES}\"")
else()
message(STATUS "Could not find GMP C++ library")
endif()
# Try to find headers
find_path(GMP_C_INCLUDES
NAMES gmp.h
DOC "GMP C header"
)
if (GMP_C_INCLUDES)
message(STATUS "Found GMP C include path: \"${GMP_C_INCLUDES}\"")
else()
message(STATUS "Could not find GMP C include path")
endif()
find_path(GMP_CXX_INCLUDES
NAMES gmpxx.h
DOC "GMP C++ header"
)
if (GMP_CXX_INCLUDES)
message(STATUS "Found GMP C++ include path: \"${GMP_CXX_INCLUDES}\"")
else()
message(STATUS "Could not find GMP C++ include path")
endif()
if (GMP_C_LIBRARIES AND GMP_CXX_LIBRARIES AND GMP_C_INCLUDES AND GMP_CXX_INCLUDES)
set(GMP_INCLUDE_DIRS "${GMP_C_INCLUDES}" "${GMP_CXX_INCLUDES}")
list(REMOVE_DUPLICATES GMP_INCLUDE_DIRS)
message(STATUS "Found GMP")
else()
message(STATUS "Could not find GMP")
endif()
# TODO: We should check we can link some simple code against libgmp and libgmpxx
# Handle QUIET and REQUIRED and check the necessary variables were set and if so
# set ``GMP_FOUND``
find_package_handle_standard_args(GMP DEFAULT_MSG GMP_INCLUDE_DIRS GMP_C_LIBRARIES GMP_CXX_LIBRARIES)
find_package_handle_standard_args(GMP
REQUIRED_VARS GMP_C_LIBRARIES GMP_C_INCLUDES GMP_CXX_LIBRARIES GMP_CXX_INCLUDES)
if (GMP_FOUND)
set(GMP_INCLUDE_DIRS "${GMP_C_INCLUDES}" "${GMP_CXX_INCLUDES}")
list(REMOVE_DUPLICATES GMP_INCLUDE_DIRS)
if (NOT TARGET GMP::GMP)
add_library(GMP::GMP UNKNOWN IMPORTED)
set_target_properties(GMP::GMP PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES "${GMP_C_INCLUDES}"
IMPORTED_LOCATION "${GMP_C_LIBRARIES}")
endif()
endif()

View File

@@ -22,7 +22,7 @@ set(Z3_MSVC_LEGACY_DEFINES
_UNICODE
)
if ("${TARGET_ARCHITECTURE}" STREQUAL "x86_64")
if (TARGET_ARCHITECTURE STREQUAL "x86_64")
list(APPEND Z3_MSVC_LEGACY_DEFINES ""
# Don't set `_LIB`. The old build system sets this for x86_64 release
# build. This flag doesn't seem to be documented but a stackoverflow
@@ -57,10 +57,10 @@ endif()
# Note we don't set WIN32 or _WINDOWS because
# CMake provides that for us. As a sanity check make sure the option
# is present.
if (NOT "${CMAKE_CXX_FLAGS}" MATCHES "/D[ ]*WIN32")
if (NOT CMAKE_CXX_FLAGS MATCHES "[-/]D[ ]*WIN32")
message(FATAL_ERROR "\"/D WIN32\" is missing")
endif()
if (NOT "${CMAKE_CXX_FLAGS}" MATCHES "/D[ ]*_WINDOWS")
if (NOT CMAKE_CXX_FLAGS MATCHES "[-/]D[ ]*_WINDOWS")
message(FATAL_ERROR "\"/D _WINDOWS\" is missing")
endif()
@@ -82,7 +82,7 @@ endif()
# build system kept the frame pointer for all configurations
# except x86_64 release (I don't know why the frame pointer
# is kept for i686 release).
if ("${TARGET_ARCHITECTURE}" STREQUAL "x86_64")
if (TARGET_ARCHITECTURE STREQUAL "x86_64")
list(APPEND Z3_COMPONENT_CXX_FLAGS
$<$<CONFIG:Debug>:${NO_OMIT_FRAME_POINTER_MSVC_FLAG}>
$<$<CONFIG:MinSizeRel>:${NO_OMIT_FRAME_POINTER_MSVC_FLAG}>
@@ -91,7 +91,7 @@ else()
list(APPEND Z3_COMPONENT_CXX_FLAGS ${NO_OMIT_FRAME_POINTER_MSVC_FLAG})
endif()
if (("${TARGET_ARCHITECTURE}" STREQUAL "x86_64") OR ("${TARGET_ARCHITECTURE}" STREQUAL "i686"))
if ((TARGET_ARCHITECTURE STREQUAL "x86_64") OR (TARGET_ARCHITECTURE STREQUAL "i686"))
# Use __cdecl calling convention. Apparently this is MSVC's default
# but the old build system set it so for completeness set it too.
# See https://msdn.microsoft.com/en-us/library/46t77ak2.aspx
@@ -153,9 +153,9 @@ string(APPEND CMAKE_SHARED_LINKER_FLAGS " /SUBSYSTEM:WINDOWS")
# in the old build system except release x86_64. We try to emulate this here but
# this is likely the wrong thing to do.
foreach (_build_type ${_build_types_as_upper})
if ("${TARGET_ARCHITECTURE}" STREQUAL "x86_64" AND
("${_build_type}" STREQUAL "RELEASE" OR
"${_build_type}" STREQUAL "RELWITHDEBINFO")
if (TARGET_ARCHITECTURE STREQUAL "x86_64" AND
(_build_type STREQUAL "RELEASE" OR
_build_type STREQUAL "RELWITHDEBINFO")
)
message(AUTHOR_WARNING "Skipping legacy linker MSVC options for x86_64 ${_build_type}")
else()

53
contrib/.travis.yml Normal file
View File

@@ -0,0 +1,53 @@
cache:
# This persistent cache is used to cache the building of
# docker base images.
directories:
- $DOCKER_TRAVIS_CI_CACHE_DIR
sudo: required
language: cpp
services:
- docker
env:
global:
# This environment variable tells the `travis_ci_linux_entry_point.sh`
# script to look for a cached Docker image.
- DOCKER_TRAVIS_CI_CACHE_DIR=$HOME/.cache/docker
# Configurations
matrix:
###############################################################################
# Ubuntu 20.04 LTS
###############################################################################
# Note the unit tests for the debug builds are compiled but **not**
# executed. This is because the debug build of unit tests takes a large
# amount of time to execute compared to the optimized builds.
# clang
- LINUX_BASE=ubuntu_20.04 C_COMPILER=clang CXX_COMPILER=clang++ TARGET_ARCH=x86_64 Z3_BUILD_TYPE=Debug
- LINUX_BASE=ubuntu_20.04 C_COMPILER=clang CXX_COMPILER=clang++ TARGET_ARCH=x86_64 Z3_BUILD_TYPE=Release UBSAN_BUILD=1
- LINUX_BASE=ubuntu_20.04 C_COMPILER=clang CXX_COMPILER=clang++ TARGET_ARCH=x86_64 Z3_BUILD_TYPE=Release ASAN_BUILD=1 DOTNET_BINDINGS=0 JAVA_BINDINGS=0 PYTHON_BINDINGS=0
# gcc
# ubsan/msan builds too slow
- LINUX_BASE=ubuntu_20.04 C_COMPILER=gcc CXX_COMPILER=g++ TARGET_ARCH=x86_64 Z3_BUILD_TYPE=Release BUILD_DOCS=1
- LINUX_BASE=ubuntu_20.04 C_COMPILER=gcc CXX_COMPILER=g++ TARGET_ARCH=x86_64 Z3_BUILD_TYPE=Debug
# GMP library
- LINUX_BASE=ubuntu_20.04 TARGET_ARCH=x86_64 Z3_BUILD_TYPE=Release USE_LIBGMP=1
- LINUX_BASE=ubuntu_20.04 TARGET_ARCH=x86_64 Z3_BUILD_TYPE=Debug USE_LIBGMP=1
# Unix Makefile generator build
- LINUX_BASE=ubuntu_20.04 TARGET_ARCH=x86_64 Z3_CMAKE_GENERATOR="Unix Makefiles"
# LTO build
# too slow
#- LINUX_BASE=ubuntu_20.04 C_COMPILER=gcc CXX_COMPILER=g++ TARGET_ARCH=x86_64 USE_LTO=1 RUN_UNIT_TESTS=BUILD_ONLY RUN_SYSTEM_TESTS=0
#- LINUX_BASE=ubuntu_20.04 C_COMPILER=clang CXX_COMPILER=clang++ TARGET_ARCH=x86_64 USE_LTO=1 RUN_UNIT_TESTS=BUILD_ONLY RUN_SYSTEM_TESTS=0
# Static build. Note we have disabled building the bindings because they won't work with a static libz3
- LINUX_BASE=ubuntu_20.04 TARGET_ARCH=x86_64 Z3_STATIC_BUILD=1 DOTNET_BINDINGS=0 JAVA_BINDINGS=0 PYTHON_BINDINGS=0
script:
# Use `travis_wait` when building because some configurations don't produce an
# output for a long time (linking & testing)
- travis_wait 55 contrib/ci/scripts/travis_ci_entry_point.sh || exit 1;

View File

@@ -1,52 +0,0 @@
# This base image is not officially supported by Docker it
# is generated by running
# ```
# ./update.sh xenial
# ```
# from git@github.com:daald/docker-brew-ubuntu-core-32bit.git
# at commit 34ea593b40b423755b7d46b6c8c89fc8162ea74b
#
# We could actually store the image generated by this Dockerfile
# rather than just the bare image. However given we have a TravisCI
# cache I'm not sure if it faster to use the TravisCI cache or to
# download from DockerHub everytime.
FROM z3prover/ubuntu32:16.04
RUN apt-get update && \
apt-get -y --no-install-recommends install \
binutils \
clang \
clang-3.9 \
cmake \
doxygen \
default-jdk \
gcc \
gcc-5 \
git \
graphviz \
g++ \
g++ \
libgmp-dev \
libgomp1 \
libomp5 \
libomp-dev \
llvm-3.9 \
make \
ninja-build \
python3 \
python3-setuptools \
python2.7 \
python-setuptools \
sudo
# Create `user` user for container with password `user`. and give it
# password-less sudo access
RUN useradd -m user && \
echo user:user | chpasswd && \
cp /etc/sudoers /etc/sudoers.bak && \
echo 'user ALL=(root) NOPASSWD: ALL' >> /etc/sudoers
USER user
WORKDIR /home/user
# TODO .NET core does not support Linux x86 yet, disable it for now.
# see: https://github.com/dotnet/coreclr/issues/9265
ENV ASAN_SYMBOLIZER_PATH=/usr/lib/llvm-3.9/bin/llvm-symbolizer DOTNET_BINDINGS=0

View File

@@ -1,44 +0,0 @@
FROM ubuntu:14.04
RUN apt-get update && \
apt-get -y --no-install-recommends install \
apt-transport-https \
binutils \
clang-3.9 \
curl \
doxygen \
default-jdk \
gcc-multilib \
gcc-4.8-multilib \
git \
graphviz \
g++-multilib \
g++-4.8-multilib \
libgmp-dev \
libgomp1 \
lib32gomp1 \
llvm-3.9 \
make \
ninja-build \
python3 \
python3-setuptools \
python2.7 \
python-setuptools
RUN curl -SL https://packages.microsoft.com/config/ubuntu/14.04/packages-microsoft-prod.deb --output packages-microsoft-prod.deb && \
dpkg -i packages-microsoft-prod.deb && \
apt-get update && \
apt-get -y --no-install-recommends install dotnet-sdk-2.1
RUN curl -SL https://cmake.org/files/v3.12/cmake-3.12.0-Linux-x86_64.sh --output cmake-3.12.0-Linux-x86_64.sh && \
sh cmake-3.12.0-Linux-x86_64.sh --prefix=/usr/local --exclude-subdir
# Create `user` user for container with password `user`. and give it
# password-less sudo access
RUN useradd -m user && \
echo user:user | chpasswd && \
cp /etc/sudoers /etc/sudoers.bak && \
echo 'user ALL=(root) NOPASSWD: ALL' >> /etc/sudoers
USER user
WORKDIR /home/user
ENV ASAN_SYMBOLIZER_PATH=/usr/lib/llvm-3.9/bin/llvm-symbolizer

View File

@@ -1,35 +1,24 @@
FROM ubuntu:16.04
FROM ubuntu:20.04
ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get update && \
apt-get -y --no-install-recommends install \
apt-transport-https \
binutils \
clang \
clang-3.9 \
cmake \
make \
ninja-build \
clang \
g++ \
curl \
doxygen \
default-jdk \
gcc-multilib \
gcc-5-multilib \
git \
graphviz \
g++-multilib \
g++-5-multilib \
libgmp-dev \
libgomp1 \
libomp5 \
libomp-dev \
llvm-3.9 \
make \
ninja-build \
python3 \
python3-setuptools \
python2.7 \
python-setuptools \
python-is-python3 \
sudo
RUN curl -SL https://packages.microsoft.com/config/ubuntu/16.04/packages-microsoft-prod.deb --output packages-microsoft-prod.deb && \
RUN curl -SL https://packages.microsoft.com/config/ubuntu/20.04/packages-microsoft-prod.deb --output packages-microsoft-prod.deb && \
dpkg -i packages-microsoft-prod.deb && \
apt-get update && \
apt-get -y --no-install-recommends install dotnet-sdk-2.1
@@ -42,4 +31,4 @@ RUN useradd -m user && \
echo 'user ALL=(root) NOPASSWD: ALL' >> /etc/sudoers
USER user
WORKDIR /home/user
ENV ASAN_SYMBOLIZER_PATH=/usr/lib/llvm-3.9/bin/llvm-symbolizer
#ENV ASAN_SYMBOLIZER_PATH=/usr/lib/llvm-7/bin/llvm-symbolizer

View File

@@ -5,7 +5,6 @@ FROM ${DOCKER_IMAGE_BASE}
# Build arguments. This can be changed when invoking
# `docker build`.
ARG ASAN_BUILD
ARG ASAN_DSO
ARG BUILD_DOCS
ARG CC
ARG CXX
@@ -13,7 +12,7 @@ ARG DOTNET_BINDINGS
ARG JAVA_BINDINGS
ARG NO_SUPPRESS_OUTPUT
ARG PYTHON_BINDINGS
ARG PYTHON_EXECUTABLE=/usr/bin/python2.7
ARG PYTHON_EXECUTABLE=/usr/bin/python
ARG RUN_API_EXAMPLES
ARG RUN_SYSTEM_TESTS
ARG RUN_UNIT_TESTS
@@ -34,7 +33,6 @@ ARG Z3_VERBOSE_BUILD_OUTPUT
ENV \
ASAN_BUILD=${ASAN_BUILD} \
ASAN_DSO=${ASAN_DSO} \
BUILD_DOCS=${BUILD_DOCS} \
CC=${CC} \
CXX=${CXX} \
@@ -75,7 +73,7 @@ ADD /doc ${Z3_SRC_DIR}/doc/
ADD /examples ${Z3_SRC_DIR}/examples/
ADD /scripts ${Z3_SRC_DIR}/scripts/
ADD /src ${Z3_SRC_DIR}/src/
ADD *.txt *.md RELEASE_NOTES ${Z3_SRC_DIR}/
ADD *.txt *.md *.cmake.in RELEASE_NOTES ${Z3_SRC_DIR}/
ADD \
/contrib/ci/scripts/build_z3_cmake.sh \

View File

@@ -42,7 +42,7 @@ the future.
* `Z3_BUILD_TYPE` - CMake build type (`RelWithDebInfo`, `Release`, `Debug`, or `MinSizeRel`)
* `Z3_CMAKE_GENERATOR` - CMake generator (`Ninja` or `Unix Makefiles`)
* `Z3_VERBOSE_BUILD_OUTPUT` - Show compile commands in CMake builds (`0` or `1`)
* `Z3_STATIC_BUILD` - Build Z3 binaries and libraries statically (`0` or `1`)
* `Z3_BUILD_LIBZ3_SHARED` - Build Z3 binaries and libraries dynamically/statically (`0` or `1`)
* `Z3_SYSTEM_TEST_GIT_REVISION` - Git revision of [z3test](https://github.com/Z3Prover/z3test). If empty, the latest revision will be used.
* `Z3_WARNINGS_AS_ERRORS` - Set the `WARNINGS_AS_ERRORS` CMake option passed to Z3 (`OFF`, `ON`, or `SERIOUS_ONLY`)

View File

@@ -11,7 +11,8 @@ if [ "X${ASAN_BUILD}" = "X1" ]; then
# NOTE: If you get bad stacktraces try using `fast_unwind_on_malloc=0`
# NOTE: `malloc_context_size` controls size of recorded stacktrace for allocations.
# If the reported stacktraces appear incomplete try increasing the value.
export ASAN_OPTIONS="malloc_context_size=100,suppressions=${Z3_SRC_DIR}/contrib/suppressions/sanitizers/asan.txt"
# leak checking disabled because it doesn't work on unpriviledged docker
export ASAN_OPTIONS="malloc_context_size=100,detect_leaks=0,suppressions=${Z3_SRC_DIR}/contrib/suppressions/sanitizers/asan.txt"
: ${SANITIZER_PRINT_SUPPRESSIONS?"SANITIZER_PRINT_SUPPRESSIONS must be specified"}
if [ "X${SANITIZER_PRINT_SUPPRESSIONS}" = "X1" ]; then
@@ -22,40 +23,15 @@ if [ "X${ASAN_BUILD}" = "X1" ]; then
export ASAN_OPTIONS="${ASAN_OPTIONS},print_suppressions=0"
fi
: ${ASAN_SYMBOLIZER_PATH?"ASAN_SYMBOLIZER_PATH must be specified"}
#: ${ASAN_SYMBOLIZER_PATH?"ASAN_SYMBOLIZER_PATH must be specified"}
# Run command without checking for leaks
function run_no_lsan() {
ASAN_OPTIONS="${ASAN_OPTIONS},detect_leaks=0" "${@}"
}
# Check path to ASan DSO
: ${ASAN_DSO?"ASAN_DSO must be specified"}
if [ ! -e "${ASAN_DSO}" ]; then
echo "ASAN_DSO (${ASAN_DSO}) does not exist"
exit 1
fi
# FIXME: We'll need to refactor this when we can do UBSan builds
# against a UBSan DSO.
function run_non_native_binding() {
# We need to preload the ASan DSO that libz3
# will have undefined references to.
# Don't run leak checking because we get lots reported leaks
# in the language runtime (e.g. python).
PLATFORM="$(uname -s)"
case "${PLATFORM}" in
Linux*)
LD_PRELOAD="${ASAN_DSO}" run_no_lsan "${@}"
;;
Darwin*)
DYLD_INSERT_LIBRARIES="${ASAN_DSO}" run_no_lsan "${@}"
;;
*)
echo "Unknown platform \"${PLATFORM}\""
exit 1
;;
esac
unset PLATFORM
"${@}"
}
else
# In non-ASan build just run directly

View File

@@ -141,17 +141,9 @@ if [ -n "${Z3_WARNINGS_AS_ERRORS}" ]; then
fi
case ${LINUX_BASE} in
ubuntu_14.04)
BASE_DOCKER_FILE="${DOCKER_FILE_DIR}/z3_base_ubuntu_14.04.Dockerfile"
BASE_DOCKER_IMAGE_NAME="z3_base_ubuntu:14.04"
;;
ubuntu_16.04)
BASE_DOCKER_FILE="${DOCKER_FILE_DIR}/z3_base_ubuntu_16.04.Dockerfile"
BASE_DOCKER_IMAGE_NAME="z3_base_ubuntu:16.04"
;;
ubuntu32_16.04)
BASE_DOCKER_FILE="${DOCKER_FILE_DIR}/z3_base_ubuntu32_16.04.Dockerfile"
BASE_DOCKER_IMAGE_NAME="z3_base_ubuntu32:16.04"
ubuntu_20.04)
BASE_DOCKER_FILE="${DOCKER_FILE_DIR}/z3_base_ubuntu_20.04.Dockerfile"
BASE_DOCKER_IMAGE_NAME="z3_base_ubuntu:20.04"
;;
*)
echo "Unknown Linux base ${LINUX_BASE}"

0
contrib/qprofdiff/Makefile Normal file → Executable file
View File

View File

@@ -61,9 +61,9 @@ int parse(string const & filename, map<string, map_entry> & data) {
if (line.substr(0, prefix_len) == prefix) {
line = trim(line.substr(prefix_len));
size_t from = 0, ti = 0;
for (size_t inx = line.find(':', from);
for (size_t inx = line.find(" : ", from);
inx != string::npos;
inx = line.find(':', from)) {
inx = line.find(" : ", from)) {
tokens[ti] = trim(line.substr(from, inx-from));
from = inx+1;
ti++;

View File

@@ -1,5 +1 @@
# LeakSanitizer suppression file
# Ignore Clang OpenMP leaks.
# See https://github.com/Z3Prover/z3/issues/1308
leak:___kmp_allocate

12
debian/changelog vendored
View File

@ -1,12 +0,0 @@
z3 (4.8.7-ok2) yangtze; urgency=medium
* If arch == rv64g, export DEB_LDFLAGS_MAINT_APPEND = -Wl,--no-as-needed
-Wl,-latomic -Wl,--as-needed. Add schema settings to the control file.
-- root <zhouningyi@smart-core.cn> Thu, 14 Mar 2024 07:36:38 +0000
z3 (4.8.7-ok1) yangtze; urgency=medium
* Build for openKylin.
-- openKylinBot <openKylinBot@openkylin.com> Mon, 25 Apr 2022 22:03:04 +0800

96
debian/control vendored
View File

@ -1,96 +0,0 @@
Source: z3
Section: science
Priority: optional
Maintainer: Openkylin Developers <packaging@lists.openkylin.top>
Uploaders: Fabian Wolff <fabi.wolff@arcor.de>
Build-Depends: debhelper-compat (= 12),
dh-python, python3, cmake,
javahelper [!hppa !hurd-i386 !m68k !sh4],
default-jdk [!hppa !hurd-i386 !m68k !sh4]
Standards-Version: 4.4.1
Homepage: https://github.com/Z3Prover/z3
Vcs-Git: https://salsa.debian.org/pkg-llvm-team/z3.git
Vcs-Browser: https://salsa.debian.org/pkg-llvm-team/z3
Package: z3
Architecture: any
Depends: ${misc:Depends}, ${shlibs:Depends}
Description: theorem prover from Microsoft Research
Z3 is a state-of-the-art theorem prover from Microsoft Research. It can be
used to check the satisfiability of logical formulas over one or more
theories. Z3 offers a compelling match for software analysis and verification
tools, since several common software constructs map directly into supported
theories.
.
The Z3 input format is an extension of the one defined by the SMT-LIB 2.0
standard.
Package: libz3-4
Architecture: any
Multi-Arch: same
Section: libs
Depends: ${shlibs:Depends},
${misc:Depends}
Breaks: libz3-dev (<< 4.4.1)
Replaces: libz3-dev (<< 4.4.1)
Description: theorem prover from Microsoft Research - runtime libraries
Z3 is a state-of-the-art theorem prover from Microsoft Research. It can be
used to check the satisfiability of logical formulas over one or more
theories. Z3 offers a compelling match for software analysis and verification
tools, since several common software constructs map directly into supported
theories.
.
This package contains runtime libraries. You shouldn't have to install it
manually.
Package: libz3-dev
Section: libdevel
Architecture: any
Multi-Arch: same
Depends: libz3-4 (= ${binary:Version}), ${misc:Depends}
Description: theorem prover from Microsoft Research - development files
Z3 is a state-of-the-art theorem prover from Microsoft Research. It can be
used to check the satisfiability of logical formulas over one or more
theories. Z3 offers a compelling match for software analysis and verification
tools, since several common software constructs map directly into supported
theories.
.
This package can be used to invoke Z3 via its C++ API.
Package: python3-z3
Section: python
Architecture: any
Pre-Depends: ${misc:Pre-Depends}
Depends: libz3-dev (= ${binary:Version}),
python3-pkg-resources,
${misc:Depends},
${python3:Depends},
${shlibs:Depends}
Description: theorem prover from Microsoft Research - Python 3 bindings
Z3 is a state-of-the-art theorem prover from Microsoft Research. See the z3
package for a detailed description.
.
This package can be used to invoke Z3 via its Python 3 API.
Package: libz3-java
Section: java
Architecture: amd64 arm64 armel armhf i386 mips mips64el mipsel powerpc ppc64el s390x alpha kfreebsd-amd64 kfreebsd-i386 powerpcspe riscv64 rv64g sparc64 x32
Multi-Arch: foreign
Depends: libz3-jni (>= ${binary:Version}), libz3-jni (<< ${source:Version}.1~), libz3-dev, ${misc:Depends}, ${java:Depends}
Description: theorem prover from Microsoft Research - java bindings
Z3 is a state-of-the-art theorem prover from Microsoft Research. See the z3
package for a detailed description.
.
This package can be used to invoke Z3 via its Java API.
Package: libz3-jni
Section: java
Architecture: amd64 arm64 armel armhf i386 mips mips64el mipsel powerpc ppc64el s390x alpha kfreebsd-amd64 kfreebsd-i386 powerpcspe riscv64 rv64g sparc64 x32
Multi-Arch: same
Pre-Depends: ${misc:Pre-Depends}
Depends: libz3-dev (= ${binary:Version}), ${misc:Depends}, ${shlibs:Depends}
Description: theorem prover from Microsoft Research - JNI library
Z3 is a state-of-the-art theorem prover from Microsoft Research. See the z3
package for a detailed description.
.
This package provides the JNI library to invoke Z3 via its Java API.

47
debian/copyright vendored
View File

@ -1,47 +0,0 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: z3
Source: https://github.com/Z3Prover/z3
Files: *
Copyright: 2006-2019 Microsoft Corporation
2006, 2010, 2017-2019 Arie Gurfinkel
2017-2018 Saint-Petersburg State University
2017 Matteo Marescotti
License: Expat
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the ""Software""), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
.
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Files: debian/*
Copyright: 2016-2019 Fabian Wolff <fabi.wolff@arcor.de>
2011 Michael Tautschnig <mt@debian.org>
License: GPL-2+
This package is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
.
This package is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
.
On Debian systems, the complete text of the GNU General
Public License version 2 can be found in "/usr/share/common-licenses/GPL-2".

1
debian/docs vendored
View File

@ -1 +0,0 @@
README.md

View File

@ -1 +0,0 @@
usr/lib/*/libz3.so.*

View File

@ -1,2 +0,0 @@
usr/include/*
usr/lib/*/libz3.so

View File

@ -1 +0,0 @@
usr/share/java/

View File

@ -1,21 +0,0 @@
#!/bin/sh
set -e
case "$1" in
install|upgrade)
# dpkg does not replace directories with symlinks.
if dpkg --compare-versions "$2" lt "4.4.0-3" ; then
rm -rf /usr/share/doc/libz3-java
fi
;;
abort-upgrade)
;;
*)
echo "preinst called with unknown argument \`$1'" >&2
exit 1
;;
esac
#DEBHELPER#
exit 0

View File

@ -1 +0,0 @@
usr/lib/*/jni/libz3java.so

View File

@ -1,21 +0,0 @@
#!/bin/sh
set -e
case "$1" in
install|upgrade)
# dpkg does not replace directories with symlinks.
if dpkg --compare-versions "$2" lt "4.4.0-3" ; then
rm -rf /usr/share/doc/libz3-jni
fi
;;
abort-upgrade)
;;
*)
echo "preinst called with unknown argument \`$1'" >&2
exit 1
;;
esac
#DEBHELPER#
exit 0

1
debian/manpages vendored
View File

@ -1 +0,0 @@
debian/z3.1

View File

@ -1 +0,0 @@
usr/lib/python3/dist-packages/

39
debian/rules vendored
View File

@ -1,39 +0,0 @@
#!/usr/bin/make -f
# Uncomment this to turn on verbose mode.
# export DH_VERBOSE=1
export DEB_BUILD_MAINT_OPTIONS = hardening=+all
export DEB_CXXFLAGS_MAINT_APPEND = -fPIC
ifeq ($(DEB_HOST_ARCH),rv64g)
export DEB_LDFLAGS_MAINT_APPEND = -Wl,--no-as-needed -Wl,-latomic -Wl,--as-needed
endif
DEB_HOST_MULTIARCH ?= $(shell dpkg-architecture -qDEB_HOST_MULTIARCH)
ifneq (,$(shell dh_listpackages -a | grep libz3-jni))
WITH_JAVA ?= ON
WITH_JAVAHELPER ?= ,javahelper
else
WITH_JAVA ?= OFF
WITH_JAVAHELPER ?=
endif
%:
dh $@ --with python3$(WITH_JAVAHELPER)
override_dh_auto_configure:
dh_auto_configure --buildsystem=cmake+makefile -- \
-DCMAKE_INSTALL_PYTHON_PKG_DIR=lib/python3/dist-packages \
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
-DZ3_BUILD_PYTHON_BINDINGS=ON \
-DZ3_BUILD_DOTNET_BINDINGS=OFF \
-DZ3_BUILD_JAVA_BINDINGS=$(WITH_JAVA)
override_dh_installchangelogs:
dh_installchangelogs RELEASE_NOTES
for p in python3-z3 libz3-java libz3-jni ; do \
$(RM) -rf debian/$$p/usr/share/doc/$$p/ ; \
ln -s libz3-dev debian/$$p/usr/share/doc/$$p || true ; \
done

View File

@ -1 +0,0 @@
3.0 (native)

View File

@ -1,6 +0,0 @@
Tests: import-z3-test include-z3-test run-z3-test
Depends: @, build-essential, python3
Restrictions: superficial
Tests: z3-int-logic-test python3-z3-int-logic-test
Depends: @, build-essential, python3

View File

@ -1,6 +0,0 @@
#!/usr/bin/env python3
# This is a superficial test that checks that the Python 3 module can
# actually be imported.
import z3

View File

@ -1,54 +0,0 @@
#!/bin/sh
# This is a relatively superficial test that checks that one can
# include and compile with the z3.h (C) and z3++.h (C++) header files,
# and link with -lz3, to produce working executables.
set -e
TMPDIR=$(mktemp -d)
trap "rm -rf $TMPDIR" EXIT
cd $TMPDIR
cat <<EOF > test-include-z3-h.c
#include <z3.h>
int main ()
{
Z3_config cfg;
Z3_context ctx;
cfg = Z3_mk_config ();
ctx = Z3_mk_context (cfg);
Z3_del_config (cfg);
return 0;
}
EOF
cat <<EOF > test-include-z3pp-h.cc
#include <z3++.h>
using namespace z3;
int main ()
{
context c;
expr x = c.bool_const("x");
expr y = c.bool_const("y");
expr conj = (!(x && y)) == (!x || !y);
solver s(c);
s.add(!conj);
return 0;
}
EOF
cc -o test-include-z3-h test-include-z3-h.c -lz3
[ -x test-include-z3-h ]
./test-include-z3-h
c++ -o test-include-z3pp-h test-include-z3pp-h.cc -lz3
[ -x test-include-z3pp-h ]
./test-include-z3pp-h

View File

@ -1,18 +0,0 @@
#!/usr/bin/env python3
# This is a test that checks that the Python 3 module is functional by
# trying out a small example involving integer logic.
from z3 import *
x, y = Ints("x y")
s = Solver()
s.add(x > 0, y > 0, x <= 2, y <= 2, x + 1 <= y)
assert s.check()
m = s.model()
assert m[x] == 1
assert m[y] == 2

View File

@ -1,23 +0,0 @@
#!/bin/sh
# This is a superficial test that checks that the z3 executable is
# installed properly, and that one can execute a simple SMT-LIBv2
# script with it.
set -e
TMPDIR=$(mktemp -d)
trap "rm -rf $TMPDIR" EXIT
cd $TMPDIR
# Check that z3 runs at all
z3 --version > /dev/null
cat <<EOF > test.smt2
(echo "Hello, world!")
(exit)
EOF
z3 test.smt2 > out.txt
echo "Hello, world!" > expected.txt
diff out.txt expected.txt

View File

@ -1,40 +0,0 @@
#!/bin/sh
# This is a test that checks that the z3 executable is installed
# properly, and that one can execute a simple SMT-LIBv2 script
# involving some integer logic with the expected output.
set -e
TMPDIR=$(mktemp -d)
trap "rm -rf $TMPDIR" EXIT
cd $TMPDIR
cat <<EOF > test-sat.smt2
(set-logic QF_LIA)
(declare-const x Int)
(declare-const y Int)
(assert (> x 0))
(assert (> y x))
(assert (> y 0))
(check-sat)
EOF
cat <<EOF > test-unsat.smt2
(set-logic QF_LIA)
(declare-const x Int)
(declare-const y Int)
(assert (> x 0))
(assert (> y x))
(assert (< y 0))
(check-sat)
EOF
echo "sat" > sat.txt
echo "unsat" > unsat.txt
z3 test-sat.smt2 > out-sat.txt
diff out-sat.txt sat.txt
z3 test-unsat.smt2 > out-unsat.txt
diff out-unsat.txt unsat.txt

3
debian/watch vendored
View File

@ -1,3 +0,0 @@
version=4
opts=filenamemangle=s/.+\/(z3|Z3)-(\d\S*)\.tar\.gz/z3-$2\.tar\.gz/ \
https://github.com/Z3Prover/z3/tags .*/(?:z3|Z3)-(\d\S*)\.tar\.gz

114
debian/z3.1 vendored
View File

@ -1,114 +0,0 @@
.\" Hey, EMACS: -*- nroff -*-
.\" First parameter, NAME, should be all caps
.\" Second parameter, SECTION, should be 1-8, maybe w/ subsection
.\" other parameters are allowed: see man(7), man(1)
.TH Z3 1 "May 25, 2015"
.\" Please adjust this date whenever revising the manpage.
.\"
.\" Some roff macros, for reference:
.\" .nh disable hyphenation
.\" .hy enable hyphenation
.\" .ad l left justify
.\" .ad b justify to both left and right margins
.\" .nf disable filling
.\" .fi enable filling
.\" .br insert line break
.\" .sp <n> insert n+1 empty lines
.\" for manpage-specific macros, see man(7)
.SH NAME
z3 \- a state-of-the-art theorem prover from Microsoft Research
.SH SYNOPSIS
.B z3
.RI [ options ]
.RI [\-file:]file
.SH DESCRIPTION
This manual page documents briefly the
.B z3
command.
.PP
.\" TeX users may be more comfortable with the \fB<whatever>\fP and
.\" \fI<whatever>\fP escape sequences to invode bold face and italics,
.\" respectively.
\fBz3\fP is a state-of-the-art theorem prover from Microsoft Research. It can
be used to check the satisfiability of logical formulas over one or more
theories. Z3 offers a compelling match for software analysis and verification
tools, since several common software constructs map directly into supported
theories.
.SH Input format
.TP
.B \-smt
Use parser for SMT input format.
.TP
.B \-smt2
Use parser for SMT 2 input format.
.TP
.B \-dl
Use parser for Datalog input format.
.TP
.B \-dimacs
Use parser for DIMACS input format.
.TP
.B \-log
Use parser for Z3 log input format.
.TP
.B \-in
Read formula from standard input.
.SH Miscellaneous
.TP
.B \-h | -?
Prints the usage information.
.TP
.B \-version
Prints version number of Z3.
.TP
.B \-v:level
Be verbose, where <level> is the verbosity level.
.TP
.B \-nw
Disable warning messages.
.TP
.B \-p
Display Z3 global (and module) parameters.
.TP
.B \-pd
Display Z3 global (and module) parameter descriptions.
.TP
.B \-pm:name
Display Z3 module <name> parameters.
.TP
.B \-pp:name
Display Z3 parameter description; if <name> is not provided, then all module
names are listed.
.TP
.B \-\-
All remaining arguments are assumed to be part of the input file name. This
option allows Z3 to read files with strange names such as: \-foo.smt2.
.SH Resources
.TP
.B \-T:timeout
Set the timeout (in seconds).
.TP
.B \-t:timeout
Set the soft timeout (in milliseconds). It only kills the current query.
.TP
.B \-memory:Megabytes
Set a limit for virtual memory consumption.
.SH Output
.TP
.B \-st
Display statistics.
.SH Parameter setting
Global and module parameters can be set in the command line.
Use 'z3 \-p' for the complete list of global and module parameters.
.TP
.B param_name=value
For setting global parameters.
.TP
.B module_name.param_name=value
For setting module parameters.
.SH AUTHOR
Z3 Copyright 2006-2014 Microsoft Corp.
.PP
This manual page was written by Michael Tautschnig <mt@debian.org>,
for the Debian project (and may be used by others).
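Taken together, the flags documented above compose in the usual way. Purely as an illustration (the file name and the module parameter shown are placeholders; use `z3 -p` for the authoritative parameter list):

```
# Illustrative invocation only: parse problem.smt2 as SMT-LIB 2, stop after
# 10 seconds, print statistics, and set one module parameter on the command
# line. "problem.smt2" and "smt.random_seed" are example choices.
z3 -smt2 -T:10 -st smt.random_seed=42 problem.smt2
```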

1
debian/z3.install vendored
View File

@ -1 +0,0 @@
debian/tmp/usr/bin/z3

View File

@ -11,7 +11,7 @@ set(DOTNET_API_OPTIONS "")
set(JAVA_API_OPTIONS "")
SET(DOC_EXTRA_DEPENDS "")
if (BUILD_PYTHON_BINDINGS)
if (Z3_BUILD_PYTHON_BINDINGS)
# FIXME: Don't hard code this path
list(APPEND PYTHON_API_OPTIONS "--z3py-package-path" "${PROJECT_BINARY_DIR}/python/z3")
list(APPEND DOC_EXTRA_DEPENDS "build_z3_python_bindings")

View File

@ -14,6 +14,7 @@ import subprocess
import shutil
ML_ENABLED=False
MLD_ENABLED=False
BUILD_DIR='../build'
DOXYGEN_EXE='doxygen'
TEMP_DIR=os.path.join(os.getcwd(), 'tmp')
@ -27,7 +28,7 @@ JAVA_API_SEARCH_PATHS=['../src/api/java']
SCRIPT_DIR=os.path.abspath(os.path.dirname(__file__))
def parse_options():
global ML_ENABLED, BUILD_DIR, DOXYGEN_EXE, TEMP_DIR, OUTPUT_DIRECTORY
global ML_ENABLED, MLD_ENABLED, BUILD_DIR, DOXYGEN_EXE, TEMP_DIR, OUTPUT_DIRECTORY
global Z3PY_PACKAGE_PATH, Z3PY_ENABLED, DOTNET_ENABLED, JAVA_ENABLED
global DOTNET_API_SEARCH_PATHS, JAVA_API_SEARCH_PATHS
parser = argparse.ArgumentParser(description=__doc__)
@ -41,6 +42,11 @@ def parse_options():
default=False,
help='Include ML/OCaml API documentation'
)
parser.add_argument('--mld',
action='store_true',
default=False,
help='Include ML/OCaml API documentation'
)
parser.add_argument('--doxygen-executable',
dest='doxygen_executable',
default=DOXYGEN_EXE,
@ -98,6 +104,7 @@ def parse_options():
)
pargs = parser.parse_args()
ML_ENABLED = pargs.ml
MLD_ENABLED = pargs.mld
BUILD_DIR = pargs.build
DOXYGEN_EXE = pargs.doxygen_executable
TEMP_DIR = pargs.temp_dir
@ -225,6 +232,15 @@ try:
website_dox_substitutions = {}
bullet_point_prefix='\n - '
website_dox_substitutions['CPP_API'] = (
'{prefix}<a class="el" href="namespacez3.html">C++ API</a> '
).format(
prefix=bullet_point_prefix)
website_dox_substitutions['C_API'] = (
'{prefix}<a class="el" href="z3__api_8h.html">C API</a> '
).format(
prefix=bullet_point_prefix)
if Z3PY_ENABLED:
print("Python documentation enabled")
website_dox_substitutions['PYTHON_API'] = (
@ -250,7 +266,7 @@ try:
prefix=bullet_point_prefix)
else:
website_dox_substitutions['JAVA_API'] = ''
if ML_ENABLED:
if ML_ENABLED or MLD_ENABLED:
website_dox_substitutions['OCAML_API'] = (
'{prefix}<a class="el" href="ml/index.html">ML/OCaml API</a>'
).format(
@ -317,7 +333,7 @@ try:
if ML_ENABLED:
ml_output_dir = os.path.join(OUTPUT_DIRECTORY, 'html', 'ml')
mk_dir(ml_output_dir)
if subprocess.call(['ocamldoc', '-html', '-d', ml_output_dir, '-sort', '-hide', 'Z3', '-I', '%s/api/ml' % BUILD_DIR, '%s/api/ml/z3enums.mli' % BUILD_DIR, '%s/api/ml/z3.mli' % BUILD_DIR]) != 0:
if subprocess.call(['ocamldoc', '-html', '-d', ml_output_dir, '-sort', '-hide', 'Z3', '-I', '$(ocamlfind query zarith)', '-I', '%s/api/ml' % BUILD_DIR, '%s/api/ml/z3enums.mli' % BUILD_DIR, '%s/api/ml/z3.mli' % BUILD_DIR]) != 0:
print("ERROR: ocamldoc failed.")
exit(1)
print("Generated ML/OCaml documentation.")

View File

@ -8,7 +8,7 @@
This website hosts the automatically generated documentation for the Z3 APIs.
- \ref capi
- \ref cppapi @DOTNET_API@ @JAVA_API@ @PYTHON_API@ @OCAML_API@
- \ref @C_API@
- \ref @CPP_API@ @DOTNET_API@ @JAVA_API@ @PYTHON_API@ @OCAML_API@
- Try Z3 online at <a href="http://rise4fun.com/z3">RiSE4Fun</a>.
*/

File diff suppressed because it is too large

View File

@ -28,7 +28,7 @@ add_executable(cpp_example example.cpp)
target_include_directories(cpp_example PRIVATE ${Z3_CXX_INCLUDE_DIRS})
target_link_libraries(cpp_example PRIVATE ${Z3_LIBRARIES})
if ("${CMAKE_SYSTEM_NAME}" MATCHES "[Ww]indows")
if (CMAKE_SYSTEM_NAME MATCHES "[Ww]indows")
# On Windows we need to copy the Z3 libraries
# into the same directory as the executable
# so that they can be found.

View File

@ -1044,6 +1044,29 @@ void opt_example() {
}
}
/**
* translate from one optimization context to another.
*/
void opt_translate_example() {
context c1, c2;
optimize o1(c1);
expr x = c1.int_const("x");
expr y = c1.int_const("y");
o1.add(10 >= x && x >= 0);
o1.add(10 >= y && y >= 0);
o1.add(x + y <= 11);
optimize::handle h1 = o1.maximize(x);
optimize::handle h2 = o1.maximize(y);
(void)h1;
(void)h2;
optimize o2(c2, o1);
expr z = c2.int_const("z");
expr x2 = c2.int_const("x");
o2.add(x2 + z == 2);
o2.minimize(z);
std::cout << o2 << "\n";
}
void extract_example() {
std::cout << "extract example\n";
context c;
@ -1305,6 +1328,7 @@ int main() {
exists_expr_vector_example(); std::cout << "\n";
substitute_example(); std::cout << "\n";
opt_example(); std::cout << "\n";
opt_translate_example(); std::cout << "\n";
extract_example(); std::cout << "\n";
param_descrs_example(); std::cout << "\n";
sudoku_example(); std::cout << "\n";
@ -1320,5 +1344,6 @@ int main() {
catch (exception & ex) {
std::cout << "unexpected error: " << ex << "\n";
}
Z3_finalize_memory();
return 0;
}

View File

@ -40,7 +40,7 @@ endif()
target_include_directories(c_example PRIVATE ${Z3_C_INCLUDE_DIRS})
target_link_libraries(c_example PRIVATE ${Z3_LIBRARIES})
if ("${CMAKE_SYSTEM_NAME}" MATCHES "[Ww]indows")
if (CMAKE_SYSTEM_NAME MATCHES "[Ww]indows")
# On Windows we need to copy the Z3 libraries
# into the same directory as the executable
# so that they can be found.

View File

@ -1539,6 +1539,7 @@ void two_contexts_example1()
x1 = Z3_mk_const(ctx1, Z3_mk_int_symbol(ctx1,0), Z3_mk_bool_sort(ctx1));
x2 = Z3_mk_const(ctx2, Z3_mk_int_symbol(ctx2,0), Z3_mk_bool_sort(ctx2));
(void)x1;
Z3_del_context(ctx1);
/* ctx2 can still be used. */
@ -2104,6 +2105,7 @@ void forest_example() {
(void)f3;
(void)f2;
(void)t4;
(void)t3;
(void)t2;
/* nil != cons(nil,nil) */

View File

@ -1,6 +1,6 @@
find_package(Dotnet REQUIRED)
if("${TARGET_ARCHITECTURE}" STREQUAL "i686")
if(TARGET_ARCHITECTURE STREQUAL "i686")
set(Z3_DOTNET_PLATFORM "x86")
else()
set(Z3_DOTNET_PLATFORM "AnyCPU")

View File

@ -599,7 +599,7 @@ class JavaExample
System.out.println("QuantifierExample");
Log.append("QuantifierExample");
Sort[] types = new Sort[3];
IntSort[] types = new IntSort[3];
IntExpr[] xs = new IntExpr[3];
Symbol[] names = new Symbol[3];
IntExpr[] vars = new IntExpr[3];
@ -1398,7 +1398,7 @@ class JavaExample
System.out.println("BitvectorExample1");
Log.append("BitvectorExample1");
Sort bv_type = ctx.mkBitVecSort(32);
BitVecSort bv_type = ctx.mkBitVecSort(32);
BitVecExpr x = (BitVecExpr) ctx.mkConst("x", bv_type);
BitVecNum zero = (BitVecNum) ctx.mkNumeral("0", bv_type);
BitVecNum ten = ctx.mkBV(10, 32);
@ -1420,7 +1420,7 @@ class JavaExample
Log.append("BitvectorExample2");
/* construct x ^ y - 103 == x * y */
Sort bv_type = ctx.mkBitVecSort(32);
BitVecSort bv_type = ctx.mkBitVecSort(32);
BitVecExpr x = ctx.mkBVConst("x", 32);
BitVecExpr y = ctx.mkBVConst("y", 32);
BitVecExpr x_xor_y = ctx.mkBVXOR(x, y);

File diff suppressed because it is too large

View File

@ -35,7 +35,7 @@ if (FORCE_CXX_LINKER)
)
endif()
if ("${CMAKE_SYSTEM_NAME}" MATCHES "[Ww]indows")
if (CMAKE_SYSTEM_NAME MATCHES "[Ww]indows")
# On Windows we need to copy the Z3 libraries
# into the same directory as the executable
# so that they can be found.

View File

@ -13,17 +13,18 @@ set(z3py_bindings_build_dest "${PROJECT_BINARY_DIR}/python")
set(build_z3_python_examples_target_depends "")
foreach (example_file ${python_example_files})
add_custom_command(OUTPUT "${z3py_bindings_build_dest}/${example_file}"
get_filename_component(example_file_name "${example_file}" NAME)
add_custom_command(OUTPUT "${z3py_bindings_build_dest}/${example_file_name}"
COMMAND "${CMAKE_COMMAND}" "-E" "copy"
"${CMAKE_CURRENT_SOURCE_DIR}/${example_file}"
# We flatten the hierarchy so that all python files have
# the `z3` directory in their directory so that their import
# statements "just work".
"${z3py_bindings_build_dest}/"
"${z3py_bindings_build_dest}/${example_file_name}"
DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/${example_file}"
COMMENT "Copying \"${example_file}\" to ${z3py_bindings_build_dest}/${example_file}"
COMMENT "Copying \"${example_file}\" to ${z3py_bindings_build_dest}/${example_file_name}"
)
list(APPEND build_z3_python_examples_target_depends "${z3py_bindings_build_dest}/${example_file}")
list(APPEND build_z3_python_examples_target_depends "${z3py_bindings_build_dest}/${example_file_name}")
endforeach()
add_custom_target(build_z3_python_examples

View File

@ -65,13 +65,13 @@ def process_model(s, xij, n):
def all_models(n):
count = 0
s, xij = ais(n)
start = time.clock()
start = time.time()
while sat == s.check():
block = process_model(s, xij, n)
s.add(Not(And(block)))
count += 1
print(s.statistics())
print(time.clock() - start)
print(time.time() - start)
print(count)
set_option(verbose=1)

View File

@ -0,0 +1,69 @@
# BUBBLESORT - Copyright (c) June, 2020 - Matteo Nicoli
from z3 import Solver, Int, Array, IntSort, And, Not, If, Select, Store, sat
def init(i,j) :
return And(i == 0, j == 0)
def invert(A0, A1, tmp, i0, i1) :
return If(Select(A0, i0) > Select(A0, i0 + 1), \
And(tmp == Select(A0, i0), \
A1 == Store(A0, i0, Select(A0, i0 + 1)), \
A1 == Store(A0, i0 + 1, tmp)), \
A1 == A0)
def bsort_step(A0, A1, tmp, i0, j0, i1, j1, dim) :
return If( j0 < dim - 1, \
And( \
If( i0 < dim - 1, \
And(invert(A0, A1, tmp, i0, i1),i1 == i0 + 1), \
i1 == i0 + 1), \
j1 == j0 + 1), \
And(j1 == j0 + 1, A1 == A0))
def mk_tran_condition(A, i, j, tmp, dim) :
condition = []
for _ in range(dim) :
condition.append(bsort_step(A[0],A[1],tmp[0],i[0],j[0],i[1],j[1],dim))
A = A[1:]
i = i[1:]
j = j[1:]
tmp = tmp[1:]
return condition
def check(variables, Ar, dim) :
for e in range(dim) :
yield variables[e] == Select(Ar,e)
def mk_post_condition(values) :
condition = []
for v1,v2 in zip(values,values[1:]) :
condition.append(v1 <= v2)
return And(*condition)
dim = int(input("size of the array: "))
i = [Int(f"i_{x}") for x in range(dim + 1)]
j = [Int(f"j_{x}") for x in range(dim + 1)]
A = [Array(f"A_{x}",IntSort(),IntSort()) for x in range(dim + 1)]
tmp = [Int(f"tmp_{x}") for x in range(dim)]
s = Solver()
init_condition = init(i[0],j[0])
s.add(init_condition)
tran_condition= mk_tran_condition(A, i, j, tmp, dim)
s.add(And(*tran_condition))
values = [Int(f"n_{x}") for x in range(dim)]
init_check_condition = check(values,A[-1],dim)
s.add(And(*init_check_condition))
post_condition = mk_post_condition(values)
s.add(Not(post_condition))
if s.check() == sat :
print(f"counterexample:\n{s.model()}")
else :
print("valid")

493
examples/python/hs.py Normal file
View File

@ -0,0 +1,493 @@
#
# Unweighted hitting set maxsat solver.
# interleaved with local hill-climbing improvements
# and also maxres relaxation steps to reduce number
# of soft constraints.
#
from z3 import *
import random
counter = 0
def add_def(s, fml):
global counter
name = Bool(f"def-{counter}")
counter += 1
s.add(name == fml)
return name
def relax_core(s, core, Fs):
core = list(core)
if len(core) == 0:
return
prefix = BoolVal(True)
Fs -= { f for f in core }
for i in range(len(core)-1):
prefix = add_def(s, And(core[i], prefix))
Fs |= { add_def(s, Or(prefix, core[i+1])) }
def restrict_cs(s, cs, Fs):
cs = list(cs)
if len(cs) == 0:
return
prefix = BoolVal(False)
Fs -= { f for f in cs }
for i in range(len(cs)-1):
prefix = add_def(s, Or(cs[i], prefix))
Fs |= { add_def(s, And(prefix, cs[i+1])) }
def count_sets_by_size(sets):
sizes = {}
for core in sets:
sz = len(core)
if sz not in sizes:
sizes[sz] = 0
sizes[sz] += 1
sizes = list(sizes.items())
sizes = sorted(sizes, key = lambda p : p[0])
print(sizes)
#set_param("sat.euf", True)
#set_param("tactic.default_tactic","sat")
#set_param("sat.cardinality.solver",False)
#set_param("sat.cardinality.encoding", "circuit")
#set_param(verbose=1)
class Soft:
def __init__(self, soft):
self.formulas = set(soft)
self.original_soft = soft.copy()
self.offset = 0
self.init_names()
def init_names(self):
self.name2formula = { Bool(f"s{s}") : s for s in self.formulas }
self.formula2name = { s : v for (v, s) in self.name2formula.items() }
#
# TODO: try to replace this by a recursive invocation of HsMaxSAT
# such that the invocation is incremental with respect to adding constraints
# and has resource bounded invocation.
#
class HsPicker:
def __init__(self, soft):
self.soft = soft
self.opt_backoff_limit = 0
self.opt_backoff_count = 0
self.timeout_value = 6000
def pick_hs_(self, Ks, lo):
hs = set()
for ks in Ks:
if not any(k in ks for k in hs):
h = random.choice([h for h in ks])
hs = hs | { h }
print("approximate hitting set", len(hs), "smallest possible size", lo)
return hs, lo
#
# This can improve lower bound, but is expensive.
# Note that Z3 does not work well for hitting set optimization.
# MIP solvers contain better
# tuned approaches thanks to LP lower bounds and likely other properties.
# Would be nice to have a good hitting set
# heuristic built into Z3....
#
def pick_hs(self, Ks, lo):
if len(Ks) == 0:
return set(), lo
if self.opt_backoff_count < self.opt_backoff_limit:
self.opt_backoff_count += 1
return self.pick_hs_(Ks, lo)
opt = Optimize()
for k in Ks:
opt.add(Or([self.soft.formula2name[f] for f in k]))
for n in self.soft.formula2name.values():
obj = opt.add_soft(Not(n))
opt.set("timeout", self.timeout_value)
is_sat = opt.check()
lo = max(lo, opt.lower(obj).as_long())
self.opt_backoff_count = 0
if is_sat == sat:
if self.opt_backoff_limit > 1:
self.opt_backoff_limit -= 1
self.timeout_value += 500
mdl = opt.model()
hs = [self.soft.name2formula[n] for n in self.soft.formula2name.values() if is_true(mdl.eval(n))]
return set(hs), lo
else:
print("Timeout", self.timeout_value, "lo", lo, "limit", self.opt_backoff_limit)
self.opt_backoff_limit += 1
self.timeout_value += 500
return self.pick_hs_(Ks, lo)
class HsMaxSAT:
def __init__(self, soft, s):
self.s = s # solver object
self.soft = Soft(soft) # Soft constraints
self.hs = HsPicker(self.soft) # Pick a hitting set
self.model = None # Current best model
self.lo = 0 # Current lower bound
self.hi = len(soft) # Current upper bound
self.Ks = [] # Set of Cores
self.Cs = [] # Set of correction sets
self.small_set_size = 6
self.small_set_threshold = 1
self.num_max_res_failures = 0
self.corr_set_enabled = True
self.patterns = []
def has_many_small_sets(self, sets):
small_count = len([c for c in sets if len(c) <= self.small_set_size])
return self.small_set_threshold <= small_count
def get_small_disjoint_sets(self, sets):
hs = set()
result = []
min_size = min(len(s) for s in sets)
def insert(bound, sets, hs, result):
for s in sets:
if len(s) == bound and not any(c in hs for c in s):
result += [s]
hs = hs | set(s)
return hs, result
for sz in range(min_size, min_size + 3):
hs, result = insert(sz, sets, hs, result)
return result
def reinit_soft(self, num_cores_relaxed):
self.soft.init_names()
self.soft.offset += num_cores_relaxed
self.Ks = []
self.Cs = []
self.lo -= num_cores_relaxed
print("New offset", self.soft.offset)
def maxres(self):
#
# If there are sufficiently many small cores, then
# we reduce the soft constraints by maxres.
#
if self.has_many_small_sets(self.Ks) or (not self.corr_set_enabled and not self.has_many_small_sets(self.Cs) and self.num_max_res_failures > 0):
self.num_max_res_failures = 0
cores = self.get_small_disjoint_sets(self.Ks)
for core in cores:
self.small_set_size = max(4, min(self.small_set_size, len(core) - 2))
relax_core(self.s, core, self.soft.formulas)
self.reinit_soft(len(cores))
self.corr_set_enabled = True
return
#
# If there are sufficiently many small correction sets, then
# we reduce the soft constraints by dual maxres (IJCAI 2015)
#
# TODO: the heuristic for when to invoke correction set restriction
# needs fine-tuning. For example, if min(Ks)*optimality_gap < min(Cs)*(max(SS))
# we might want to prioritize core relaxation to make progress with less overhead.
# here: max(SS) = |Soft|-min(Cs) is the size of the maximal satisfying subset
# the optimality gap is self.hi - self.offset
# which is a bound on how many cores have to be relaxed before determining optimality.
#
if self.corr_set_enabled and self.has_many_small_sets(self.Cs):
self.num_max_res_failures = 0
cs = self.get_small_disjoint_sets(self.Cs)
for corr_set in cs:
print("restrict cs", len(corr_set))
# self.small_set_size = max(4, min(self.small_set_size, len(corr_set) - 2))
restrict_cs(self.s, corr_set, self.soft.formulas)
self.s.add(Or(corr_set))
self.reinit_soft(0)
self.corr_set_enabled = False
return
#
# Increment the failure count. If the failure count reaches a threshold
# then increment the lower bounds for performing maxres or dual maxres
#
self.num_max_res_failures += 1
print("Small set size", self.small_set_size, "num skips", self.num_max_res_failures)
if self.num_max_res_failures > 3:
self.num_max_res_failures = 0
self.small_set_size += 100
def pick_hs(self):
hs, self.lo = self.hs.pick_hs(self.Ks, self.lo)
return hs
def save_model(self):
#
# You can save a model here.
# For example, add the string: self.model.sexpr()
# to a file, or print bounds in custom format.
#
# print(f"Bound: {self.lo}")
# for f in self.soft.original_soft:
# print(f"{f} := {self.model.eval(f)}")
pass
def add_pattern(self, orig_cs):
named = { f"{f}" : f for f in self.soft.original_soft }
sorted_names = sorted(named.keys())
sorted_soft = [named[f] for f in sorted_names]
bits = [1 if f not in orig_cs else 0 for f in sorted_soft]
def eq_bits(b1, b2):
return all(b1[i] == b2[i] for i in range(len(b1)))
def num_overlaps(b1, b2):
return sum(b1[i] == b2[i] for i in range(len(b1)))
if not any(eq_bits(b, bits) for b in self.patterns):
if len(self.patterns) > 0:
print(num_overlaps(bits, self.patterns[-1]), len(bits), bits)
self.patterns += [bits]
counts = [sum(b[i] for b in self.patterns) for i in range(len(bits))]
print(counts)
#
# Crude, quick core reduction attempt
#
def reduce_core(self, core):
s = self.s
if len(core) <= 4:
return core
s.set("timeout", 200)
i = 0
num_undef = 0
orig_len = len(core)
core = list(core)
while i < len(core):
is_sat = s.check([core[j] for j in range(len(core)) if j != i])
if is_sat == unsat:
core = s.unsat_core()
elif is_sat == sat:
self.improve(s.model())
bound = self.hi - self.soft.offset - 1
else:
num_undef += 1
if num_undef > 3:
break
i += 1
print("Reduce", orig_len, "->", len(core), "iterations", i, "unknown", num_undef)
s.set("timeout", 100000000)
return core
def improve(self, new_model):
mss = { f for f in self.soft.formulas if is_true(new_model.eval(f)) }
cs = self.soft.formulas - mss
self.Cs += [cs]
orig_cs = { f for f in self.soft.original_soft if not is_true(new_model.eval(f)) }
cost = len(orig_cs)
if self.model is None:
self.model = new_model
if cost <= self.hi:
self.add_pattern(orig_cs)
print("improve", self.hi, cost)
self.model = new_model
self.save_model()
assert self.model
if cost < self.hi:
self.hi = cost
return True
return False
def try_rotate(self, mss):
backbones = set()
backbone2core = {}
ps = self.soft.formulas - mss
num_sat = 0
num_unsat = 0
improved = False
while len(ps) > 0:
p = random.choice([p for p in ps])
ps = ps - { p }
is_sat = self.s.check(mss | backbones | { p })
if is_sat == sat:
mdl = self.s.model()
mss = mss | {p}
ps = ps - {p}
if self.improve(mdl):
improved = True
num_sat += 1
elif is_sat == unsat:
backbones = backbones | { Not(p) }
core = set()
for c in self.s.unsat_core():
if c in backbone2core:
core = core | backbone2core[c]
else:
core = core | { c }
if len(core) < 20:
self.Ks += [core]
backbone2core[Not(p)] = set(core) - { p }
num_unsat += 1
else:
print("unknown")
print("rotate-1 done, sat", num_sat, "unsat", num_unsat)
if improved:
self.mss_rotate(mss, backbone2core)
return improved
def mss_rotate(self, mss, backbone2core):
counts = { c : 0 for c in mss }
max_count = 0
max_val = None
for core in backbone2core.values():
for c in core:
assert c in mss
counts[c] += 1
if max_count < counts[c]:
max_count = counts[c]
max_val = c
print("rotate max-count", max_count, "num occurrences", len({c for c in counts if counts[c] == max_count}))
print("Number of plateaus", len({ c for c in counts if counts[c] <= 1 }))
for c in counts:
if counts[c] > 1:
print("try-rotate", counts[c])
if self.try_rotate(mss - { c }):
break
def local_mss(self, new_model):
mss = { f for f in self.soft.formulas if is_true(new_model.eval(f)) }
########################################
# test effect of random sub-sampling
#
#mss = list(mss)
#ms = set()
#for i in range(len(mss)//2):
# ms = ms | { random.choice([p for p in mss]) }
#mss = ms
####
ps = self.soft.formulas - mss
backbones = set()
qs = set()
backbone2core = {}
while len(ps) > 0:
p = random.choice([p for p in ps])
ps = ps - { p }
is_sat = self.s.check(mss | backbones | { p })
print(len(ps), is_sat)
sys.stdout.flush()
if is_sat == sat:
mdl = self.s.model()
rs = { p }
#
# by commenting this out, we use a more stubborn exploration
# by using the random seed as opposed to current model as a guide
# to what gets satisfied.
#
# Not sure if it really has an effect.
# rs = rs | { q for q in ps if is_true(mdl.eval(q)) }
#
rs = rs | { q for q in qs if is_true(mdl.eval(q)) }
mss = mss | rs
ps = ps - rs
qs = qs - rs
if self.improve(mdl):
self.mss_rotate(mss, backbone2core)
elif is_sat == unsat:
core = set()
for c in self.s.unsat_core():
if c in backbone2core:
core = core | backbone2core[c]
else:
core = core | { c }
core = self.reduce_core(core)
self.Ks += [core]
backbone2core[Not(p)] = set(core) - { p }
backbones = backbones | { Not(p) }
else:
qs = qs | { p }
if len(qs) > 0:
print("Number undetermined", len(qs))
def unsat_core(self):
core = self.s.unsat_core()
return self.reduce_core(core)
def get_cores(self, hs):
core = self.unsat_core()
remaining = self.soft.formulas - hs
num_cores = 0
cores = [core]
if len(core) == 0:
self.lo = self.hi - self.soft.offset
return
while True:
is_sat = self.s.check(remaining)
if unsat == is_sat:
core = self.unsat_core()
if len(core) == 0:
self.lo = self.hi - self.soft.offset
return
cores += [core]
h = random.choice([c for c in core])
remaining = remaining - { h }
elif sat == is_sat and num_cores == len(cores):
self.local_mss(self.s.model())
break
elif sat == is_sat:
self.improve(self.s.model())
#
# Extend the size of the hitting set using the new cores
# and update remaining using these cores.
# The new hitting set contains at least one new element
# from the original cores
#
hs = hs | { random.choice([c for c in cores[i]]) for i in range(num_cores, len(cores)) }
remaining = self.soft.formulas - hs
num_cores = len(cores)
else:
print(is_sat)
break
self.Ks += [set(core) for core in cores]
print("total number of cores", len(self.Ks))
print("total number of correction sets", len(self.Cs))
def step(self):
soft = self.soft
hs = self.pick_hs()
is_sat = self.s.check(soft.formulas - set(hs))
if is_sat == sat:
self.improve(self.s.model())
elif is_sat == unsat:
self.get_cores(hs)
else:
print("unknown")
print("maxsat [", self.lo + soft.offset, ", ", self.hi, "]","offset", soft.offset)
count_sets_by_size(self.Ks)
count_sets_by_size(self.Cs)
self.maxres()
def run(self):
while self.lo + self.soft.offset < self.hi:
self.step()
#set_option(verbose=1)
def main(file):
s = Solver()
opt = Optimize()
opt.from_file(file)
s.add(opt.assertions())
#
# We just assume this is an unweighted MaxSAT optimization problem.
# Weights are ignored.
#
soft = [f.arg(0) for f in opt.objectives()[0].children()]
hs = HsMaxSAT(soft, s)
hs.run()
if __name__ == '__main__':
main(sys.argv[1])
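As a usage sketch only: the script reads an SMT-LIB 2 file through `Optimize.from_file` and treats the children of the first objective as unweighted soft constraints. The instance below and its file name are hypothetical, and the exact objective shape accepted is an assumption based on the comment in `main`.

```
# Hypothetical driver: build a small unweighted MaxSAT instance with
# assert-soft constraints and hand it to hs.py. "instance.smt2" is a
# placeholder file name; weights are ignored by the script.
cat > instance.smt2 <<EOF
(declare-const a Bool)
(declare-const b Bool)
(declare-const c Bool)
(assert (or a b))
(assert (or (not a) c))
(assert-soft (not a))
(assert-soft (not b))
(assert-soft (not c))
EOF
python3 hs.py instance.smt2
```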

View File

@ -177,6 +177,7 @@ def enumerate_sets(csolver, map):
yield ("MSS", csolver.to_c_lits(MSS))
map.block_down(MSS)
else:
seed = csolver.seed_from_core()
MUS = csolver.shrink(seed)
yield ("MUS", csolver.to_c_lits(MUS))
map.block_up(MUS)

View File

@ -8,8 +8,6 @@
from z3 import *
from z3 import *
def tt(s, f):
return is_true(s.model().eval(f))

View File

@ -1,5 +1,5 @@
# Jupyter version of the Z3 Python Tutorial.
Thanks to Peter Gragert, the Z3 tutorial guide is now available in the Jupyter notebook format. You can try it directly online from:
Thanks to Peter Gragert, the Z3 tutorial guide is now available in the Jupyter notebook format.
https://z3examples-nbjorner.notebooks.azure.com/j/notebooks/guide.ipynb
An online tutorial is available from https://github.com/philzook58/z3_tutorial

View File

@ -13,6 +13,13 @@ find_package(Z3
# use this option.
NO_DEFAULT_PATH
)
################################################################################
# Z3 C++ API bindings require C++11
################################################################################
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
message(STATUS "Z3_FOUND: ${Z3_FOUND}")
message(STATUS "Found Z3 ${Z3_VERSION_STRING}")
message(STATUS "Z3_DIR: ${Z3_DIR}")
@ -21,7 +28,7 @@ add_executable(z3_tptp5 tptp5.cpp tptp5.lex.cpp)
target_include_directories(z3_tptp5 PRIVATE ${Z3_CXX_INCLUDE_DIRS})
target_link_libraries(z3_tptp5 PRIVATE ${Z3_LIBRARIES})
if ("${CMAKE_SYSTEM_NAME}" MATCHES "[Ww]indows")
if (CMAKE_SYSTEM_NAME MATCHES "[Ww]indows")
# On Windows we need to copy the Z3 libraries
# into the same directory as the executable
# so that they can be found.

View File

@ -1338,9 +1338,10 @@ public:
}
else if (e.is_quantifier()) {
bool is_forall = Z3_is_quantifier_forall(ctx, e);
bool is_lambda = Z3_is_lambda(ctx, e);
unsigned nb = Z3_get_quantifier_num_bound(ctx, e);
out << (is_forall?"!":"?") << "[";
out << (is_lambda?"^":(is_forall?"!":"?")) << "[";
for (unsigned i = 0; i < nb; ++i) {
Z3_symbol n = Z3_get_quantifier_bound_name(ctx, e, i);
names.push_back(upper_case_var(z3::symbol(ctx, n)));
@ -1680,6 +1681,9 @@ public:
case Z3_OP_PR_HYPER_RESOLVE:
display_inference(out, "hyper_resolve", "thm", p);
break;
case Z3_OP_PR_BIND:
display_inference(out, "bind", "th", p);
break;
default:
out << "TBD: " << m_node_number << "\n" << p << "\n";
throw failure_ex("rule not handled");
@ -2192,13 +2196,6 @@ static bool is_smt2_file(char const* filename) {
return (len > 4 && !strcmp(filename + len - 5,".smt2"));
}
static void check_error(z3::context& ctx) {
Z3_error_code e = Z3_get_error_code(ctx);
if (e != Z3_OK) {
std::cout << Z3_get_error_msg(ctx, e) << "\n";
exit(1);
}
}
static void display_tptp(std::ostream& out) {
// run SMT2 parser, pretty print TFA format.

View File

@ -27,7 +27,7 @@ public:
m_names.push_back(symbol(name));
}
unsigned size() const { return m_fmls.size(); }
expr*const* c_ptr() const { return m_fmls.c_ptr(); }
expr*const* data() const { return m_fmls.data(); }
expr* operator[](unsigned i) { return m_fmls[i].get(); }
symbol const& name(unsigned i) { return m_names[i]; }
void set_has_conjecture() {

View File

@ -905,9 +905,9 @@ extern int yylex (void);
*/
YY_DECL
{
register yy_state_type yy_current_state;
register char *yy_cp, *yy_bp;
register int yy_act;
yy_state_type yy_current_state;
char *yy_cp, *yy_bp;
int yy_act;
#line 110 "tptp5.l"
@ -966,7 +966,7 @@ YY_DECL
yy_match:
do
{
register YY_CHAR yy_c = yy_ec[YY_SC_TO_UI(*yy_cp)];
YY_CHAR yy_c = yy_ec[YY_SC_TO_UI(*yy_cp)];
while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state )
{
yy_current_state = (int) yy_def[yy_current_state];
@ -1827,9 +1827,9 @@ ECHO;
*/
static int yy_get_next_buffer (void)
{
register char *dest = YY_CURRENT_BUFFER_LVALUE->yy_ch_buf;
register char *source = (yytext_ptr);
register int number_to_move, i;
char *dest = YY_CURRENT_BUFFER_LVALUE->yy_ch_buf;
char *source = (yytext_ptr);
int number_to_move, i;
int ret_val;
if ( (yy_c_buf_p) > &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars) + 1] )
@ -1932,8 +1932,8 @@ static int yy_get_next_buffer (void)
static yy_state_type yy_get_previous_state (void)
{
register yy_state_type yy_current_state;
register char *yy_cp;
yy_state_type yy_current_state;
char *yy_cp;
yy_current_state = (yy_start);
@ -1942,7 +1942,7 @@ static int yy_get_next_buffer (void)
for ( yy_cp = (yytext_ptr) + YY_MORE_ADJ; yy_cp < (yy_c_buf_p); ++yy_cp )
{
register YY_CHAR yy_c = (*yy_cp ? yy_ec[YY_SC_TO_UI(*yy_cp)] : 1);
YY_CHAR yy_c = (*yy_cp ? yy_ec[YY_SC_TO_UI(*yy_cp)] : 1);
while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state )
{
yy_current_state = (int) yy_def[yy_current_state];
@ -1963,9 +1963,9 @@ static int yy_get_next_buffer (void)
*/
static yy_state_type yy_try_NUL_trans (yy_state_type yy_current_state )
{
register int yy_is_jam;
int yy_is_jam;
register YY_CHAR yy_c = 1;
YY_CHAR yy_c = 1;
while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state )
{
yy_current_state = (int) yy_def[yy_current_state];
@ -1980,9 +1980,9 @@ static int yy_get_next_buffer (void)
return yy_is_jam ? 0 : yy_current_state;
}
static void yyunput (int c, register char * yy_bp )
static void yyunput (int c, char * yy_bp )
{
register char *yy_cp;
char *yy_cp;
yy_cp = (yy_c_buf_p);
@ -1992,10 +1992,10 @@ static int yy_get_next_buffer (void)
if ( yy_cp < YY_CURRENT_BUFFER_LVALUE->yy_ch_buf + 2 )
{ /* need to shift things up to make room */
/* +2 for EOB chars. */
register int number_to_move = (yy_n_chars) + 2;
register char *dest = &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[
int number_to_move = (yy_n_chars) + 2;
char *dest = &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[
YY_CURRENT_BUFFER_LVALUE->yy_buf_size + 2];
register char *source =
char *source =
&YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[number_to_move];
while ( source > YY_CURRENT_BUFFER_LVALUE->yy_ch_buf )
@ -2632,7 +2632,7 @@ int yylex_destroy (void)
#ifndef yytext_ptr
static void yy_flex_strncpy (char* s1, yyconst char * s2, int n )
{
register int i;
int i;
for ( i = 0; i < n; ++i )
s1[i] = s2[i];
}
@ -2641,7 +2641,7 @@ static void yy_flex_strncpy (char* s1, yyconst char * s2, int n )
#ifdef YY_NEED_STRLEN
static int yy_flex_strlen (yyconst char * s )
{
register int n;
int n;
for ( n = 0; s[n]; ++n )
;

View File

@ -1,22 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<package xmlns="http://schemas.microsoft.com/packaging/2010/07/nuspec.xsd">
<metadata>
<id>Microsoft.Z3.x64</id>
<version>$(releaseVersion)</version>
<copyright>© Microsoft Corporation. All rights reserved.</copyright>
<authors>Microsoft</authors>
<iconUrl>https://raw.githubusercontent.com/Z3Prover/z3/$(releaseCommitHash)/package/icon.jpg</iconUrl>
<projectUrl>https://github.com/Z3Prover/z3</projectUrl>
<licenseUrl>https://raw.githubusercontent.com/Z3Prover/z3/$(releaseCommitHash)/LICENSE.txt</licenseUrl>
<repository
type="git"
url="https://github.com/Z3Prover/z3.git"
branch="master"
commit="$(releaseCommitHash)"
/>
<requireLicenseAcceptance>true</requireLicenseAcceptance>
<description>Z3 is a satisfiability modulo theories solver from Microsoft Research.</description>
<tags>smt constraint solver theorem prover</tags>
<language>en</language>
</metadata>
</package>

View File

@ -1,10 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<None Include="$(MSBuildThisFileDirectory)libz3.dll">
<Visible>false</Visible>
<Link>libz3.dll</Link>
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>

View File

@ -1,34 +0,0 @@
# Z3 NuGet packaging
## Creation
1. After tagging a commit for release, sign Microsoft.Z3.dll and libz3.dll (both x86 and x64 versions) with Microsoft's Authenticode certificate
2. Test the signed DLLs with the `Get-AuthenticodeSignature` PowerShell commandlet
3. Create the following directory structure for the x64 package (for x86, substitute the "x64" strings for "x86" and use x86 DLLs):
```
+-- Microsoft.Z3.x64
| +-- Microsoft.Z3.x64.nuspec
| +-- lib
| +-- net40
| +-- Microsoft.Z3.dll
| +-- build
| +-- Microsoft.Z3.x64.targets
| +-- libz3.dll
```
4. Open the nuspec file and fill in the appropriate macro values:
* $(releaseVersion) - the Z3 version being released in this package
* $(releaseCommitHash) - hash of the release commit (there are several of these)
5. Run `nuget pack Microsoft.Z3.x64\Microsoft.Z3.x64.nuspec`
6. Test the resulting nupkg file (described below) then submit the package for signing before uploading to NuGet.org
## Testing
1. Create a directory on your machine at C:\nuget-test-source
2. Put the Microsoft.Z3.x64.nupkg file in the directory
3. Open Visual Studio 2017, create a new C# project, then right click the project and click "Manage NuGet packages"
4. Add a new package source - your C:\nuget-test-source directory
5. Find the Microsoft.Z3.x64 package, ensuring in the preview window that the icon is present and all fields are correct
6. Install the Microsoft.Z3.x64 package, ensuring you are asked to accept the license
7. Build your project. Check the output directory to ensure both Microsoft.Z3.dll and libz3.dll are present
8. Import Microsoft.Z3 into your project, then add a simple line of code like `using (var ctx = new Context()) { }`; build and run your project to ensure the assemblies load properly
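A condensed sketch of steps 3-5 as shell commands, assuming `nuget` is on PATH and the signed DLLs, the nuspec, and the `Microsoft.Z3.x64.targets` file sit in the current directory; this mirrors the layout shown above rather than prescribing it:

```
# Lay out the package directory described in step 3, then pack it (step 5).
mkdir -p Microsoft.Z3.x64/lib/net40 Microsoft.Z3.x64/build
cp Microsoft.Z3.x64.nuspec Microsoft.Z3.x64/
cp Microsoft.Z3.dll Microsoft.Z3.x64/lib/net40/
cp Microsoft.Z3.x64.targets libz3.dll Microsoft.Z3.x64/build/
# Step 4: edit Microsoft.Z3.x64/Microsoft.Z3.x64.nuspec and substitute
# $(releaseVersion) and $(releaseCommitHash) before packing.
nuget pack Microsoft.Z3.x64/Microsoft.Z3.x64.nuspec
```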

View File

@ -1,15 +0,0 @@
{
"Version": "1.0.0",
"AuthenticationType": "AAD_CERT",
"ClientId": "1c614a83-2dbe-4d3c-853b-effaefd4fb20",
"AuthCert": {
"SubjectName": "1c614a83-2dbe-4d3c-853b-effaefd4fb20.microsoft.com",
"StoreLocation": "LocalMachine",
"StoreName": "My"
},
"RequestSigningCert": {
"SubjectName": "1c614a83-2dbe-4d3c-853b-effaefd4fb20",
"StoreLocation": "LocalMachine",
"StoreName": "My"
}
}

View File

@ -0,0 +1,81 @@
parameters:
ReleaseVersion: ''
BuildArchitecture: ''
jobs:
- job: WindowsBuild${{parameters.BuildArchitecture}}
displayName: "Windows build (${{parameters.BuildArchitecture}})"
pool:
vmImage: "windows-latest"
steps:
- powershell: write-host $(System.TeamProjectId)
displayName: 'System.TeamProjectId'
- powershell: write-host $(System.DefinitionId)
displayName: 'System.DefinitionId'
- powershell: write-host $(Build.BuildId)
displayName: 'Build.BuildId'
- task: CmdLine@2
displayName: Build
inputs:
script:
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" ${{parameters.BuildArchitecture}} &&
python scripts\mk_win_dist.py
--${{parameters.BuildArchitecture}}-only
--dotnet-key=$(Build.SourcesDirectory)/resources/z3.snk
- task: EsrpCodeSigning@1
displayName: Sign
inputs:
ConnectedServiceName: 'z3-esrp-signing-2'
FolderPath: 'dist/z3-${{parameters.ReleaseVersion}}-${{parameters.BuildArchitecture}}-win/bin'
Pattern: 'Microsoft.Z3.dll,libz3.dll,libz3java.dll,z3.exe'
signConfigType: 'inlineSignParams'
inlineOperation: |
[
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "Microsoft"
},
{
"parameterName": "OpusInfo",
"parameterValue": "http://www.microsoft.com"
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd sha256"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "signtool.exe",
"toolVersion": "6.2.9304.0"
}
]
SessionTimeout: '60'
MaxConcurrency: '50'
MaxRetryAttempts: '5'
- task: DeleteFiles@1
displayName: Cleanup
inputs:
SourceFolder: 'dist/z3-${{parameters.ReleaseVersion}}-${{parameters.BuildArchitecture}}-win/bin'
Contents: 'CodeSignSummary*'
- task: ArchiveFiles@2
displayName: Zip
inputs:
rootFolderOrFile: 'dist/z3-${{parameters.ReleaseVersion}}-${{parameters.BuildArchitecture}}-win'
includeRootFolder: true
archiveType: 'zip'
archiveFile: '$(Build.ArtifactStagingDirectory)/z3-${{parameters.ReleaseVersion}}-${{parameters.BuildArchitecture}}-win.zip'
- task: PublishPipelineArtifact@1
inputs:
targetPath: '$(Build.ArtifactStagingDirectory)/z3-${{parameters.ReleaseVersion}}-${{parameters.BuildArchitecture}}-win.zip'
artifactName: 'WindowsBuild-${{parameters.BuildArchitecture}}'

14
scripts/build_libcxx_msan.sh Executable file
View File

@ -0,0 +1,14 @@
#!/bin/sh
mkdir libcxx
cd libcxx
# Checkout LLVM, libc++ and libc++abi
svn co http://llvm.org/svn/llvm-project/llvm/trunk llvm
(cd llvm/projects && svn co http://llvm.org/svn/llvm-project/libcxx/trunk libcxx)
(cd llvm/projects && svn co http://llvm.org/svn/llvm-project/libcxxabi/trunk libcxxabi)
# Build libc++ with MSan:
mkdir libcxx_msan && cd libcxx_msan
cmake ../llvm -DCMAKE_BUILD_TYPE=Release -DLLVM_USE_SANITIZER=Memory -DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++
make cxx -j4
cd ..

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
# -- /usr/bin/env python
"""
Reads a list of Z3 API header files and
generate the constant declarations need

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
# - /usr/bin/env python
"""
Reads a list of Z3 API header files and
generate a ``.def`` file to define the

View File

@ -644,7 +644,7 @@ def mk_gparams_register_modules_internal(h_files_full_path, path):
for code in cmds:
fout.write('{ param_descrs d; %s(d); gparams::register_global(d); }\n' % code)
for (mod, code) in mod_cmds:
fout.write('{ param_descrs * d = alloc(param_descrs); %s(*d); gparams::register_module("%s", d); }\n' % (code, mod))
fout.write('{ std::function<param_descrs *(void)> f = []() { auto* d = alloc(param_descrs); %s(*d); return d; }; gparams::register_module("%s", f); }\n' % (code, mod))
for (mod, descr) in mod_descrs:
fout.write('gparams::register_module_descr("%s", "%s");\n' % (mod, descr))
fout.write('}\n')

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
# -- /usr/bin/env python
"""
Determines the available global parameters from a list of header files and
generates a ``gparams_register_modules.cpp`` file in the destination directory

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
# -- /usr/bin/env python
"""
Determines the available tactics from a list of header files and generates a
``install_tactic.cpp`` file in the destination directory that defines a

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
# -- /usr/bin/env python
"""
Scans the listed header files for
memory initializers and finalizers and

View File

@ -1,187 +0,0 @@
#
# Copyright (c) 2018 Microsoft Corporation
#
# 1. download releases from github
# 2. copy over libz3.dll for the different architectures
# 3. copy over Microsoft.Z3.dll from suitable distribution
# 4. copy nuspec file from packages
# 5. call nuget pack
# 6. sign package
import json
import os
import urllib.request
import zipfile
import sys
import os.path
import shutil
import subprocess
import mk_util
import mk_project
release_data = json.loads(urllib.request.urlopen("https://api.github.com/repos/Z3Prover/z3/releases/latest").read().decode())
release_tag_name = release_data['tag_name']
release_tag_ref_data = json.loads(urllib.request.urlopen("https://api.github.com/repos/Z3Prover/z3/git/refs/tags/%s" % release_tag_name).read().decode())
release_tag_sha = release_tag_ref_data['object']['sha']
#release_tag_data = json.loads(urllib.request.urlopen("https://api.github.com/repos/Z3Prover/z3/commits/%s" % release_tag_sha).read().decode())
release_version = release_tag_name[3:]
release_commit = release_tag_sha # release_tag_data['object']['sha']
print(release_version)
def mk_dir(d):
if not os.path.exists(d):
os.makedirs(d)
def download_installs():
for asset in release_data['assets']:
url = asset['browser_download_url']
name = asset['name']
print("Downloading ", url)
sys.stdout.flush()
urllib.request.urlretrieve(url, "packages/%s" % name)
os_info = {"z64-ubuntu-14" : ('so', 'ubuntu.14.04-x64'),
'ubuntu-16' : ('so', 'ubuntu-x64'),
'x64-win' : ('dll', 'win-x64'),
# Skip x86 as I can't get dotnet build to produce AnyCPU TargetPlatform
# 'x86-win' : ('dll', 'win-x86'),
'osx' : ('dylib', 'macos'),
'debian' : ('so', 'debian.8-x64') }
def classify_package(f):
for os_name in os_info:
if os_name in f:
ext, dst = os_info[os_name]
return os_name, f[:-4], ext, dst
return None
def unpack():
shutil.rmtree("out", ignore_errors=True)
# unzip files in packages
# out
# +- runtimes
# +- win-x64
# +- win-x86
# +- ubuntu.16.04-x64
# +- ubuntu.14.04-x64
# +- debian.8-x64
# +- macos
# +
for f in os.listdir("packages"):
print(f)
if f.endswith(".zip") and classify_package(f):
os_name, package_dir, ext, dst = classify_package(f)
path = os.path.abspath(os.path.join("packages", f))
zip_ref = zipfile.ZipFile(path, 'r')
zip_ref.extract("%s/bin/libz3.%s" % (package_dir, ext), "tmp")
mk_dir("out/runtimes/%s/native" % dst)
shutil.move("tmp/%s/bin/libz3.%s" % (package_dir, ext), "out/runtimes/%s/native/." % dst, "/y")
if "x64-win" in f:
mk_dir("out/lib/netstandard1.4/")
for b in ["Microsoft.Z3.dll"]:
zip_ref.extract("%s/bin/%s" % (package_dir, b), "tmp")
shutil.move("tmp/%s/bin/%s" % (package_dir, b), "out/lib/netstandard1.4/%s" % b)
def mk_targets():
mk_dir("out/build")
shutil.copy("../src/api/dotnet/Microsoft.Z3.targets.in", "out/build/Microsoft.Z3.targets")
def create_nuget_spec():
contents = """<?xml version="1.0" encoding="utf-8"?>
<package xmlns="http://schemas.microsoft.com/packaging/2010/07/nuspec.xsd">
<metadata>
<id>Microsoft.Z3.x64</id>
<version>{0}</version>
<authors>Microsoft</authors>
<description>
Z3 is a satisfiability modulo theories solver from Microsoft Research.
Linux Dependencies:
libgomp.so.1 installed
</description>
<copyright>&#169; Microsoft Corporation. All rights reserved.</copyright>
<tags>smt constraint solver theorem prover</tags>
<iconUrl>https://raw.githubusercontent.com/Z3Prover/z3/{1}/resources/icon.jpg</iconUrl>
<projectUrl>https://github.com/Z3Prover/z3</projectUrl>
<licenseUrl>https://raw.githubusercontent.com/Z3Prover/z3/{1}/LICENSE.txt</licenseUrl>
<repository
type="git"
url="https://github.com/Z3Prover/z3.git"
branch="master"
commit="{1}"
/>
<requireLicenseAcceptance>true</requireLicenseAcceptance>
<language>en</language>
</metadata>
</package>""".format(release_version, release_commit)
with open("out/Microsoft.Z3.x64.nuspec", 'w') as f:
f.write(contents)
def create_nuget_package():
subprocess.call(["nuget", "pack"], cwd="out")
nuget_sign_input = """
{
"Version": "1.0.0",
"SignBatches"
:
[
{
"SourceLocationType": "UNC",
"SourceRootDirectory": "%s",
"DestinationLocationType": "UNC",
"DestinationRootDirectory": "%s",
"SignRequestFiles": [
{
"CustomerCorrelationId": "42fc9577-af9e-4ac9-995d-1788d8721d17",
"SourceLocation": "Microsoft.Z3.x64.%s.nupkg",
"DestinationLocation": "Microsoft.Z3.x64.%s.nupkg"
}
],
"SigningInfo": {
"Operations": [
{
"KeyCode" : "CP-401405",
"OperationCode" : "NuGetSign",
"Parameters" : {},
"ToolName" : "sign",
"ToolVersion" : "1.0"
},
{
"KeyCode" : "CP-401405",
"OperationCode" : "NuGetVerify",
"Parameters" : {},
"ToolName" : "sign",
"ToolVersion" : "1.0"
}
]
}
}
]
}"""
def sign_nuget_package():
package_name = "Microsoft.Z3.x64.%s.nupkg" % release_version
input_file = "out/nuget_sign_input.json"
output_path = os.path.abspath("out").replace("\\","\\\\")
with open(input_file, 'w') as f:
f.write(nuget_sign_input % (output_path, output_path, release_version, release_version))
r = subprocess.call(["EsrpClient.exe", "sign", "-a", "authorization.json", "-p", "policy.json", "-i", input_file, "-o", "out\\diagnostics.json"], shell=True, stderr=subprocess.STDOUT)
print(r)
def main():
mk_dir("packages")
download_installs()
unpack()
mk_targets()
create_nuget_spec()
create_nuget_package()
sign_nuget_package()
main()
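
For reference, here is a minimal, self-contained sketch of how the classify_package/unpack pair above routes a downloaded release asset into the NuGet runtimes layout; the file name and the trimmed-down os_info table are illustrative only, not taken from an actual release listing.

os_info = {'x64-win': ('dll', 'win-x64'),
           'osx':     ('dylib', 'macos')}

def classify_package(f):
    # Same lookup as above: match a known platform token in the asset name,
    # strip the ".zip" suffix, and report the native extension plus runtime id.
    for os_name, (ext, dst) in os_info.items():
        if os_name in f:
            return os_name, f[:-4], ext, dst
    return None

print(classify_package("z3-4.8.12-x64-win.zip"))
# -> ('x64-win', 'z3-4.8.12-x64-win', 'dll', 'win-x64'),
#    i.e. libz3.dll ends up under out/runtimes/win-x64/native/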

View File

@ -22,57 +22,74 @@ def mk_dir(d):
os.makedirs(d)
os_info = {"z64-ubuntu-14" : ('so', 'ubuntu.14.04-x64'),
'ubuntu-16' : ('so', 'ubuntu-x64'),
os_info = {"z64-ubuntu-14" : ('so', 'linux-x64'),
'ubuntu-18' : ('so', 'linux-x64'),
'ubuntu-20' : ('so', 'linux-x64'),
'glibc-2.31' : ('so', 'linux-x64'),
'x64-win' : ('dll', 'win-x64'),
# Skip x86 as I can't get dotnet build to produce AnyCPU TargetPlatform
# 'x86-win' : ('dll', 'win-x86'),
'osx' : ('dylib', 'macos'),
'debian' : ('so', 'debian.8-x64') }
'x86-win' : ('dll', 'win-x86'),
'osx' : ('dylib', 'osx-x64'),
'debian' : ('so', 'linux-x64') }
def classify_package(f):
for os_name in os_info:
if os_name in f:
ext, dst = os_info[os_name]
return os_name, f[:-4], ext, dst
print("Could not classify", f)
return None
def unpack(packages):
def replace(src, dst):
try:
os.remove(dst)
except:
pass
shutil.move(src, dst)
def unpack(packages, symbols):
# unzip files in packages
# out
# +- runtimes
# +- win-x64
# +- win-x86
# +- ubuntu.16.04-x64
# +- ubuntu.14.04-x64
# +- debian.8-x64
# +- macos
# +- linux-x64
# +- osx-x64
# +
tmp = "tmp" if not symbols else "tmpsym"
for f in os.listdir(packages):
print(f)
if f.endswith(".zip") and classify_package(f):
os_name, package_dir, ext, dst = classify_package(f)
path = os.path.abspath(os.path.join(packages, f))
zip_ref = zipfile.ZipFile(path, 'r')
zip_ref.extract("%s/bin/libz3.%s" % (package_dir, ext), "tmp")
mk_dir("out/runtimes/%s/native" % dst)
shutil.move("tmp/%s/bin/libz3.%s" % (package_dir, ext), "out/runtimes/%s/native/." % dst)
zip_ref.extract(f"{package_dir}/bin/libz3.{ext}", f"{tmp}")
mk_dir(f"out/runtimes/{dst}/native")
replace(f"{tmp}/{package_dir}/bin/libz3.{ext}", f"out/runtimes/{dst}/native/libz3.{ext}")
if "x64-win" in f:
mk_dir("out/lib/netstandard1.4/")
for b in ["Microsoft.Z3.dll"]:
zip_ref.extract("%s/bin/%s" % (package_dir, b), "tmp")
shutil.move("tmp/%s/bin/%s" % (package_dir, b), "out/lib/netstandard1.4/%s" % b)
if symbols:
zip_ref.extract(f"{package_dir}/bin/libz3.pdb", f"{tmp}")
replace(f"{tmp}/{package_dir}/bin/libz3.pdb", f"out/runtimes/{dst}/native/libz3.pdb")
files = ["Microsoft.Z3.dll"]
if symbols:
files += ["Microsoft.Z3.pdb", "Microsoft.Z3.xml"]
for b in files:
zip_ref.extract(f"{package_dir}/bin/{b}", f"{tmp}")
replace(f"{tmp}/{package_dir}/bin/{b}", f"out/lib/netstandard1.4/{b}")
def mk_targets():
def mk_targets(source_root):
mk_dir("out/build")
shutil.copy("../src/api/dotnet/Microsoft.Z3.targets.in", "out/build/Microsoft.Z3.targets")
shutil.copy(f"{source_root}/src/api/dotnet/Microsoft.Z3.targets.in", "out/build/Microsoft.Z3.targets")
def mk_icon(source_root):
mk_dir("out/content")
shutil.copy(f"{source_root}/resources/icon.jpg", "out/content/icon.jpg")
def create_nuget_spec(release_version, release_commit):
def create_nuget_spec(version, repo, branch, commit, symbols):
contents = """<?xml version="1.0" encoding="utf-8"?>
<package xmlns="http://schemas.microsoft.com/packaging/2010/07/nuspec.xsd">
<metadata>
<id>Microsoft.Z3.x64</id>
<id>Microsoft.Z3</id>
<version>{0}</version>
<authors>Microsoft</authors>
<description>
@ -83,78 +100,41 @@ Linux Dependencies:
</description>
<copyright>&#169; Microsoft Corporation. All rights reserved.</copyright>
<tags>smt constraint solver theorem prover</tags>
<iconUrl>https://raw.githubusercontent.com/Z3Prover/z3/{1}/resources/icon.jpg</iconUrl>
<icon>content/icon.jpg</icon>
<projectUrl>https://github.com/Z3Prover/z3</projectUrl>
<licenseUrl>https://raw.githubusercontent.com/Z3Prover/z3/{1}/LICENSE.txt</licenseUrl>
<repository type="git" url="https://github.com/Z3Prover/z3.git" branch="master" commit="{1}" />
<license type="expression">MIT</license>
<repository type="git" url="{1}" branch="{2}" commit="{3}" />
<requireLicenseAcceptance>true</requireLicenseAcceptance>
<language>en</language>
<dependencies>
<group targetFramework=".NETStandard1.4" />
</dependencies>
</metadata>
</package>""".format(release_version, release_commit)
</package>""".format(version, repo, branch, commit)
print(contents)
with open("out/Microsoft.Z3.x64.nuspec", 'w') as f:
sym = "sym." if symbols else ""
file = f"out/Microsoft.Z3.{sym}nuspec"
print(file)
with open(file, 'w') as f:
f.write(contents)
nuget_sign_input = """
{
"Version": "1.0.0",
"SignBatches"
:
[
{
"SourceLocationType": "UNC",
"SourceRootDirectory": "%s",
"DestinationLocationType": "UNC",
"DestinationRootDirectory": "%s",
"SignRequestFiles": [
{
"CustomerCorrelationId": "42fc9577-af9e-4ac9-995d-1788d8721d17",
"SourceLocation": "Microsoft.Z3.x64.%s.nupkg",
"DestinationLocation": "Microsoft.Z3.x64.%s.nupkg"
}
],
"SigningInfo": {
"Operations": [
{
"KeyCode" : "CP-401405",
"OperationCode" : "NuGetSign",
"Parameters" : {},
"ToolName" : "sign",
"ToolVersion" : "1.0"
},
{
"KeyCode" : "CP-401405",
"OperationCode" : "NuGetVerify",
"Parameters" : {},
"ToolName" : "sign",
"ToolVersion" : "1.0"
}
]
}
}
]
}"""
def create_sign_input(release_version):
package_name = "Microsoft.Z3.x64.%s.nupkg" % release_version
input_file = "out/nuget_sign_input.json"
output_path = os.path.abspath("out").replace("\\","\\\\")
with open(input_file, 'w') as f:
f.write(nuget_sign_input % (output_path, output_path, release_version, release_version))
def main():
packages = sys.argv[1]
release_version = sys.argv[2]
release_commit = sys.argv[3]
version = sys.argv[2]
repo = sys.argv[3]
branch = sys.argv[4]
commit = sys.argv[5]
source_root = sys.argv[6]
symbols = False
if len(sys.argv) > 7:
print(sys.argv[7])
if len(sys.argv) > 7 and "symbols" == sys.argv[7]:
symbols = True
print(packages)
mk_dir(packages)
unpack(packages)
mk_targets()
create_nuget_spec(release_version, release_commit)
create_sign_input(release_version)
# create_nuget_package()
# sign_nuget_package(release_version)
mk_dir(packages)
unpack(packages, symbols)
mk_targets(source_root)
mk_icon(source_root)
create_nuget_spec(version, repo, branch, commit, symbols)
main()
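
The rewritten script is driven purely by positional arguments, so a call mirrors the order parsed in main() above. A hypothetical invocation sketch (all values are placeholders, not taken from a real pipeline run):

import subprocess

subprocess.check_call([
    "python", "mk_nuget_task.py",
    "packages",                            # directory holding the downloaded *.zip assets
    "4.8.12",                              # package version written into the nuspec
    "https://github.com/Z3Prover/z3.git",  # repository URL written into the nuspec
    "master",                              # branch written into the nuspec
    "<commit-sha>",                        # commit hash written into the nuspec (placeholder)
    "..",                                  # source root containing src/api/dotnet and resources/icon.jpg
    "symbols",                             # optional: also stage .pdb/.xml files for a symbols package
])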

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
# -- /usr/bin/env python
"""
Reads a pattern database and generates the corresponding
header file.

View File

@ -8,55 +8,61 @@
from mk_util import *
def init_version():
set_version(4, 8, 7, 0)
set_version(4, 8, 12, 0)
# Z3 Project definition
def init_project_def():
init_version()
add_lib('util', [], includes2install = ['z3_version.h'])
add_lib('polynomial', ['util'], 'math/polynomial')
add_lib('sat', ['util'])
add_lib('nlsat', ['polynomial', 'sat'])
add_lib('lp', ['util','nlsat'], 'util/lp')
add_lib('hilbert', ['util'], 'math/hilbert')
add_lib('simplex', ['util'], 'math/simplex')
add_lib('automata', ['util'], 'math/automata')
add_lib('interval', ['util'], 'math/interval')
add_lib('dd', ['util', 'interval'], 'math/dd')
add_lib('simplex', ['util'], 'math/simplex')
add_lib('hilbert', ['util'], 'math/hilbert')
add_lib('automata', ['util'], 'math/automata')
add_lib('realclosure', ['interval'], 'math/realclosure')
add_lib('subpaving', ['interval'], 'math/subpaving')
add_lib('ast', ['util', 'polynomial'])
add_lib('rewriter', ['ast', 'polynomial', 'automata'], 'ast/rewriter')
add_lib('euf', ['ast', 'util'], 'ast/euf')
add_lib('params', ['util'])
add_lib('smt_params', ['params'], 'smt/params')
add_lib('grobner', ['ast', 'dd', 'simplex'], 'math/grobner')
add_lib('sat', ['util', 'dd', 'grobner'])
add_lib('nlsat', ['polynomial', 'sat'])
add_lib('lp', ['util', 'nlsat', 'grobner', 'interval', 'smt_params'], 'math/lp')
add_lib('rewriter', ['ast', 'polynomial', 'automata', 'params'], 'ast/rewriter')
add_lib('macros', ['rewriter'], 'ast/macros')
add_lib('normal_forms', ['rewriter'], 'ast/normal_forms')
add_lib('model', ['rewriter'])
add_lib('model', ['rewriter', 'macros'])
add_lib('tactic', ['ast', 'model'])
add_lib('substitution', ['ast', 'rewriter'], 'ast/substitution')
add_lib('parser_util', ['ast'], 'parsers/util')
add_lib('grobner', ['ast'], 'math/grobner')
add_lib('euclid', ['util'], 'math/euclid')
add_lib('proofs', ['rewriter', 'util'], 'ast/proofs')
add_lib('solver', ['model', 'tactic', 'proofs'])
add_lib('cmd_context', ['solver', 'rewriter'])
add_lib('sat_tactic', ['tactic', 'sat', 'solver'], 'sat/tactic')
add_lib('cmd_context', ['solver', 'rewriter', 'params'])
add_lib('smt2parser', ['cmd_context', 'parser_util'], 'parsers/smt2')
add_lib('pattern', ['normal_forms', 'smt2parser', 'rewriter'], 'ast/pattern')
add_lib('core_tactics', ['tactic', 'macros', 'normal_forms', 'rewriter', 'pattern'], 'tactic/core')
add_lib('arith_tactics', ['core_tactics', 'sat'], 'tactic/arith')
add_lib('nlsat_tactic', ['nlsat', 'sat_tactic', 'arith_tactics'], 'nlsat/tactic')
add_lib('subpaving_tactic', ['core_tactics', 'subpaving'], 'math/subpaving/tactic')
add_lib('solver_assertions', ['pattern','smt_params','cmd_context'], 'solver/assertions')
add_lib('aig_tactic', ['tactic'], 'tactic/aig')
add_lib('ackermannization', ['model', 'rewriter', 'ast', 'solver', 'tactic'], 'ackermannization')
add_lib('fpa', ['ast', 'util', 'rewriter', 'model'], 'ast/fpa')
add_lib('bit_blaster', ['rewriter', 'rewriter'], 'ast/rewriter/bit_blaster')
add_lib('smt_params', ['ast', 'rewriter', 'pattern', 'bit_blaster'], 'smt/params')
add_lib('bit_blaster', ['rewriter', 'params'], 'ast/rewriter/bit_blaster')
add_lib('core_tactics', ['tactic', 'macros', 'normal_forms', 'rewriter', 'pattern'], 'tactic/core')
add_lib('arith_tactics', ['core_tactics', 'sat'], 'tactic/arith')
add_lib('mbp', ['model', 'simplex'], 'qe/mbp')
add_lib('sat_smt', ['sat', 'euf', 'tactic', 'solver', 'smt_params', 'bit_blaster', 'fpa', 'mbp', 'normal_forms', 'lp', 'pattern'], 'sat/smt')
add_lib('sat_tactic', ['tactic', 'sat', 'solver', 'sat_smt'], 'sat/tactic')
add_lib('nlsat_tactic', ['nlsat', 'sat_tactic', 'arith_tactics'], 'nlsat/tactic')
add_lib('subpaving_tactic', ['core_tactics', 'subpaving'], 'math/subpaving/tactic')
add_lib('proto_model', ['model', 'rewriter', 'smt_params'], 'smt/proto_model')
add_lib('smt', ['bit_blaster', 'macros', 'normal_forms', 'cmd_context', 'proto_model',
'substitution', 'grobner', 'euclid', 'simplex', 'proofs', 'pattern', 'parser_util', 'fpa', 'lp'])
add_lib('smt', ['bit_blaster', 'macros', 'normal_forms', 'cmd_context', 'proto_model', 'solver_assertions',
'substitution', 'grobner', 'simplex', 'proofs', 'pattern', 'parser_util', 'fpa', 'lp'])
add_lib('bv_tactics', ['tactic', 'bit_blaster', 'core_tactics'], 'tactic/bv')
add_lib('fuzzing', ['ast'], 'test/fuzzing')
add_lib('smt_tactic', ['smt'], 'smt/tactic')
add_lib('sls_tactic', ['tactic', 'normal_forms', 'core_tactics', 'bv_tactics'], 'tactic/sls')
add_lib('qe', ['smt','sat','nlsat','tactic','nlsat_tactic'], 'qe')
add_lib('qe', ['smt', 'mbp', 'nlsat', 'tactic', 'nlsat_tactic'], 'qe')
add_lib('sat_solver', ['solver', 'core_tactics', 'aig_tactic', 'bv_tactics', 'arith_tactics', 'sat_tactic'], 'sat/sat_solver')
add_lib('fd_solver', ['core_tactics', 'arith_tactics', 'sat_solver', 'smt'], 'tactic/fd_solver')
add_lib('muz', ['smt', 'sat', 'smt2parser', 'aig_tactic', 'qe'], 'muz/base')
@ -69,17 +75,17 @@ def init_project_def():
add_lib('ddnf', ['muz', 'transforms', 'rel'], 'muz/ddnf')
add_lib('bmc', ['muz', 'transforms', 'fd_solver'], 'muz/bmc')
add_lib('fp', ['muz', 'clp', 'tab', 'rel', 'bmc', 'ddnf', 'spacer'], 'muz/fp')
add_lib('ufbv_tactic', ['normal_forms', 'core_tactics', 'macros', 'smt_tactic', 'rewriter'], 'tactic/ufbv')
add_lib('smtlogic_tactics', ['ackermannization', 'sat_solver', 'arith_tactics', 'bv_tactics', 'nlsat_tactic', 'smt_tactic', 'aig_tactic', 'fp', 'muz','qe'], 'tactic/smtlogics')
add_lib('smtlogic_tactics', ['ackermannization', 'sat_solver', 'arith_tactics', 'bv_tactics', 'nlsat_tactic', 'smt_tactic', 'aig_tactic', 'fp', 'muz', 'qe'], 'tactic/smtlogics')
add_lib('ufbv_tactic', ['normal_forms', 'core_tactics', 'macros', 'smt_tactic', 'rewriter', 'smtlogic_tactics'], 'tactic/ufbv')
add_lib('fpa_tactics', ['fpa', 'core_tactics', 'bv_tactics', 'sat_tactic', 'smt_tactic', 'arith_tactics', 'smtlogic_tactics'], 'tactic/fpa')
add_lib('portfolio', ['smtlogic_tactics', 'sat_solver', 'ufbv_tactic', 'fpa_tactics', 'aig_tactic', 'fp', 'fd_solver', 'qe','sls_tactic', 'subpaving_tactic'], 'tactic/portfolio')
add_lib('portfolio', ['smtlogic_tactics', 'sat_solver', 'ufbv_tactic', 'fpa_tactics', 'aig_tactic', 'fp', 'fd_solver', 'qe', 'sls_tactic', 'subpaving_tactic'], 'tactic/portfolio')
add_lib('opt', ['smt', 'smtlogic_tactics', 'sls_tactic', 'sat_solver'], 'opt')
API_files = ['z3_api.h', 'z3_ast_containers.h', 'z3_algebraic.h', 'z3_polynomial.h', 'z3_rcf.h', 'z3_fixedpoint.h', 'z3_optimization.h', 'z3_fpa.h', 'z3_spacer.h']
add_lib('api', ['portfolio', 'realclosure', 'opt'],
includes2install=['z3.h', 'z3_v1.h', 'z3_macros.h'] + API_files)
add_lib('extra_cmds', ['cmd_context', 'subpaving_tactic', 'qe', 'arith_tactics'], 'cmd_context/extra_cmds')
add_exe('shell', ['api', 'sat', 'extra_cmds','opt'], exe_name='z3')
add_exe('test', ['api', 'fuzzing', 'simplex'], exe_name='test-z3', install=False)
add_exe('shell', ['api', 'sat', 'extra_cmds', 'opt'], exe_name='z3')
add_exe('test', ['api', 'fuzzing', 'simplex', 'sat_smt'], exe_name='test-z3', install=False)
_libz3Component = add_dll('api_dll', ['api', 'sat', 'extra_cmds'], 'api/dll',
reexports=['api'],
dll_name='libz3',

View File

@ -148,8 +148,8 @@ def get_os_name():
import platform
basic = os.uname()[0].lower()
if basic == 'linux':
dist = platform.linux_distribution()
if len(dist) == 3 and len(dist[0]) > 0 and len(dist[1]) > 0:
dist = platform.libc_ver()
if len(dist) == 2 and len(dist[0]) > 0 and len(dist[1]) > 0:
return '%s-%s' % (dist[0].lower(), dist[1].lower())
else:
return basic
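
This switch matters because platform.linux_distribution() was removed in Python 3.8. A small illustration of what the replacement reports (the values naturally depend on the host):

import platform

lib, ver = platform.libc_ver()      # e.g. ('glibc', '2.31') on a glibc-based system
if lib and ver:
    print('%s-%s' % (lib.lower(), ver.lower()))   # -> 'glibc-2.31'
else:
    print('linux')                                # fallback, as in get_os_name() above

This is also why the packaging scripts earlier in this commit add a 'glibc-2.31' key that maps to the 'linux-x64' runtime.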

View File

@ -37,9 +37,7 @@ OCAMLC=getenv("OCAMLC", "ocamlc")
OCAMLOPT=getenv("OCAMLOPT", "ocamlopt")
OCAML_LIB=getenv("OCAML_LIB", None)
OCAMLFIND=getenv("OCAMLFIND", "ocamlfind")
CSC=getenv("CSC", None)
DOTNET="dotnet"
GACUTIL=getenv("GACUTIL", 'gacutil')
# Standard install directories relative to PREFIX
INSTALL_BIN_DIR=getenv("Z3_INSTALL_BIN_DIR", "bin")
INSTALL_LIB_DIR=getenv("Z3_INSTALL_LIB_DIR", "lib")
@ -48,7 +46,6 @@ INSTALL_PKGCONFIG_DIR=getenv("Z3_INSTALL_PKGCONFIG_DIR", os.path.join(INSTALL_LI
CXX_COMPILERS=['g++', 'clang++']
C_COMPILERS=['gcc', 'clang']
CSC_COMPILERS=['csc', 'mcs']
JAVAC=None
JAR=None
PYTHON_PACKAGE_DIR=distutils.sysconfig.get_python_lib(prefix=getenv("PREFIX", None))
@ -75,6 +72,7 @@ IS_OSX=False
IS_FREEBSD=False
IS_NETBSD=False
IS_OPENBSD=False
IS_SUNOS=False
IS_CYGWIN=False
IS_CYGWIN_MINGW=False
IS_MSYS2=False
@ -91,7 +89,6 @@ VS_PROJ = False
TRACE = False
PYTHON_ENABLED=False
DOTNET_CORE_ENABLED=False
ESRP_SIGN=False
DOTNET_KEY_FILE=getenv("Z3_DOTNET_KEY_FILE", None)
JAVA_ENABLED=False
ML_ENABLED=False
@ -158,6 +155,9 @@ def is_netbsd():
def is_openbsd():
return IS_OPENBSD
def is_sunos():
return IS_SUNOS
def is_osx():
return IS_OSX
@ -363,7 +363,7 @@ def check_java():
oo = TempFile('output')
eo = TempFile('errout')
try:
subprocess.call([JAVAC, 'Hello.java', '-verbose'], stdout=oo.fname, stderr=eo.fname)
subprocess.call([JAVAC, 'Hello.java', '-verbose', '-source', '1.8', '-target', '1.8' ], stdout=oo.fname, stderr=eo.fname)
oo.commit()
eo.commit()
except:
@ -492,7 +492,10 @@ def find_ml_lib():
def is64():
global LINUX_X64
return LINUX_X64 and sys.maxsize >= 2**32
if is_sunos() and sys.version_info.major < 3:
return LINUX_X64
else:
return LINUX_X64 and sys.maxsize >= 2**32
def check_ar():
if is_verbose():
@ -592,7 +595,6 @@ if os.name == 'nt':
elif os.name == 'posix':
if os.uname()[0] == 'Darwin':
IS_OSX=True
PREFIX="/usr/local"
elif os.uname()[0] == 'Linux':
IS_LINUX=True
elif os.uname()[0] == 'GNU':
@ -603,6 +605,8 @@ elif os.name == 'posix':
IS_NETBSD=True
elif os.uname()[0] == 'OpenBSD':
IS_OPENBSD=True
elif os.uname()[0] == 'SunOS':
IS_SUNOS=True
elif os.uname()[0][:6] == 'CYGWIN':
IS_CYGWIN=True
if (CC != None and "mingw" in CC):
@ -668,8 +672,6 @@ def display_help(exit_code):
print(" OCAMLFIND Ocaml find tool (only relevant with --ml)")
print(" OCAMLOPT Ocaml native compiler (only relevant with --ml)")
print(" OCAML_LIB Ocaml library directory (only relevant with --ml)")
print(" CSC C# Compiler (only relevant if .NET bindings are enabled)")
print(" GACUTIL GAC Utility (only relevant if .NET bindings are enabled)")
print(" Z3_INSTALL_BIN_DIR Install directory for binaries relative to install prefix")
print(" Z3_INSTALL_LIB_DIR Install directory for libraries relative to install prefix")
print(" Z3_INSTALL_INCLUDE_DIR Install directory for header files relative to install prefix")
@ -679,14 +681,14 @@ def display_help(exit_code):
# Parse configuration option for mk_make script
def parse_options():
global VERBOSE, DEBUG_MODE, IS_WINDOWS, VS_X64, ONLY_MAKEFILES, SHOW_CPPS, VS_PROJ, TRACE, VS_PAR, VS_PAR_NUM
global DOTNET_CORE_ENABLED, DOTNET_KEY_FILE, JAVA_ENABLED, ML_ENABLED, JS_ENABLED, STATIC_LIB, STATIC_BIN, PREFIX, GMP, PYTHON_PACKAGE_DIR, GPROF, GIT_HASH, GIT_DESCRIBE, PYTHON_INSTALL_ENABLED, PYTHON_ENABLED, ESRP_SIGN
global DOTNET_CORE_ENABLED, DOTNET_KEY_FILE, JAVA_ENABLED, ML_ENABLED, JS_ENABLED, STATIC_LIB, STATIC_BIN, PREFIX, GMP, PYTHON_PACKAGE_DIR, GPROF, GIT_HASH, GIT_DESCRIBE, PYTHON_INSTALL_ENABLED, PYTHON_ENABLED
global LINUX_X64, SLOW_OPTIMIZE, LOG_SYNC, SINGLE_THREADED
global GUARD_CF, ALWAYS_DYNAMIC_BASE
try:
options, remainder = getopt.gnu_getopt(sys.argv[1:],
'b:df:sxhmcvtnp:gj',
['build=', 'debug', 'silent', 'x64', 'help', 'makefiles', 'showcpp', 'vsproj', 'guardcf',
'trace', 'dotnet', 'dotnet-key=', 'esrp', 'staticlib', 'prefix=', 'gmp', 'java', 'parallel=', 'gprof', 'js',
'trace', 'dotnet', 'dotnet-key=', 'staticlib', 'prefix=', 'gmp', 'java', 'parallel=', 'gprof', 'js',
'githash=', 'git-describe', 'x86', 'ml', 'optimize', 'pypkgdir=', 'python', 'staticbin', 'log-sync', 'single-threaded'])
except:
print("ERROR: Invalid command line option")
@ -722,8 +724,6 @@ def parse_options():
DOTNET_CORE_ENABLED = True
elif opt in ('--dotnet-key'):
DOTNET_KEY_FILE = arg
elif opt in ('--esrp'):
ESRP_SIGN = True
elif opt in ('--staticlib'):
STATIC_LIB = True
elif opt in ('--staticbin'):
@ -769,7 +769,7 @@ def parse_options():
# Return a list containing a file names included using '#include' in
# the given C/C++ file named fname.
def extract_c_includes(fname):
result = []
result = {}
# We look for well behaved #include directives
std_inc_pat = re.compile("[ \t]*#include[ \t]*\"(.*)\"[ \t]*")
system_inc_pat = re.compile("[ \t]*#include[ \t]*\<.*\>[ \t]*")
@ -786,7 +786,7 @@ def extract_c_includes(fname):
if slash_pos >= 0 and root_file_name.find("..") < 0 : #it is a hack for lp include files that behave as continued from "src"
# print(root_file_name)
root_file_name = root_file_name[slash_pos+1:]
result.append(root_file_name)
result[root_file_name] = m1.group(1)
elif not system_inc_pat.match(line) and non_std_inc_pat.match(line):
raise MKException("Invalid #include directive at '%s':%s" % (fname, line))
linenum = linenum + 1
@ -921,20 +921,23 @@ def is_CXX_clangpp():
return is_clang_in_gpp_form(CXX)
return is_compiler(CXX, 'clang++')
def get_files_with_ext(path, ext):
return filter(lambda f: f.endswith(ext), os.listdir(path))
def get_cpp_files(path):
return filter(lambda f: f.endswith('.cpp'), os.listdir(path))
return get_files_with_ext(path,'.cpp')
def get_c_files(path):
return filter(lambda f: f.endswith('.c'), os.listdir(path))
return get_files_with_ext(path,'.c')
def get_cs_files(path):
return filter(lambda f: f.endswith('.cs'), os.listdir(path))
return get_files_with_ext(path,'.cs')
def get_java_files(path):
return filter(lambda f: f.endswith('.java'), os.listdir(path))
return get_files_with_ext(path,'.java')
def get_ml_files(path):
return filter(lambda f: f.endswith('.ml'), os.listdir(path))
return get_files_with_ext(path,'.ml')
def find_all_deps(name, deps):
new_deps = []
@ -971,15 +974,52 @@ class Component:
# Find fname in the include paths for the given component.
# ownerfile is only used for creating error messages.
# That is, we were looking for fname when processing ownerfile
def find_file(self, fname, ownerfile):
def find_file(self, fname, ownerfile, orig_include=None):
full_fname = os.path.join(self.src_dir, fname)
# Store all our possible locations
possibilities = set()
# If our file exists in the current directory, then we store it
if os.path.exists(full_fname):
return self
# We cannot return here, as we might have files with the same
# basename, but different include paths
possibilities.add(self)
for dep in self.deps:
c_dep = get_component(dep)
full_fname = os.path.join(c_dep.src_dir, fname)
if os.path.exists(full_fname):
return c_dep
possibilities.add(c_dep)
if possibilities:
# We have ambiguity
if len(possibilities) > 1:
# We expect orig_include to be non-None here, so we can disambiguate
assert orig_include is not None
# Get the original directory name
orig_dir = os.path.dirname(orig_include)
# Iterate through all of the possibilities
for possibility in possibilities:
path = possibility.path.replace("\\","/")
# If we match the suffix of the path ...
if path.endswith(orig_dir):
# ... use our new match
return possibility
# This means we didn't make an exact match ...
#
# We return any one possibility, just to ensure we don't break Z3's
# builds
return possibilities.pop()
raise MKException("Failed to find include file '%s' for '%s' when processing '%s'." % (fname, ownerfile, self.name))
# Display all dependencies of file basename located in the given component directory.
@ -987,16 +1027,16 @@ class Component:
def add_cpp_h_deps(self, out, basename):
includes = extract_c_includes(os.path.join(self.src_dir, basename))
out.write(os.path.join(self.to_src_dir, basename))
for include in includes:
owner = self.find_file(include, basename)
for include, orig_include in includes.items():
owner = self.find_file(include, basename, orig_include)
out.write(' %s.node' % os.path.join(owner.build_dir, include))
# Add a rule for each #include directive in the file basename located at the current component.
def add_rule_for_each_include(self, out, basename):
fullname = os.path.join(self.src_dir, basename)
includes = extract_c_includes(fullname)
for include in includes:
owner = self.find_file(include, fullname)
for include, orig_include in includes.items():
owner = self.find_file(include, fullname, orig_include)
owner.add_h_rule(out, include)
# Display a Makefile rule for an include file located in the given component directory.
@ -1042,8 +1082,8 @@ class Component:
dependencies.add(os.path.join(self.to_src_dir, cppfile))
self.add_rule_for_each_include(out, cppfile)
includes = extract_c_includes(os.path.join(self.src_dir, cppfile))
for include in includes:
owner = self.find_file(include, cppfile)
for include, orig_include in includes.items():
owner = self.find_file(include, cppfile, orig_include)
dependencies.add('%s.node' % os.path.join(owner.build_dir, include))
for cppfile in cppfiles:
out.write('%s$(OBJ_EXT) ' % os.path.join(self.build_dir, os.path.splitext(cppfile)[0]))
@ -1662,6 +1702,7 @@ class DotNetDLLComponent(Component):
<AssemblyName>Microsoft.Z3</AssemblyName>
<OutputType>Library</OutputType>
<PackageId>Microsoft.Z3</PackageId>
<GenerateDocumentationFile>true</GenerateDocumentationFile>
<RuntimeFrameworkVersion>1.0.4</RuntimeFrameworkVersion>
<Version>%s</Version>
<GeneratePackageOnBuild>true</GeneratePackageOnBuild>
@ -1697,79 +1738,9 @@ class DotNetDLLComponent(Component):
dotnetCmdLine.extend(['-o', path])
MakeRuleCmd.write_cmd(out, ' '.join(dotnetCmdLine))
self.sign_esrp(out)
out.write('\n')
out.write('%s: %s\n\n' % (self.name, dllfile))
def sign_esrp(self, out):
global ESRP_SIGN
print("esrp-sign", ESRP_SIGN)
if not ESRP_SIGN:
return
import uuid
guid = str(uuid.uuid4())
path = os.path.abspath(BUILD_DIR).replace("\\","\\\\")
assemblySignStr = """
{
"Version": "1.0.0",
"SignBatches"
:
[
{
"SourceLocationType": "UNC",
"SourceRootDirectory": "%s",
"DestinationLocationType": "UNC",
"DestinationRootDirectory": "c:\\\\ESRP\\\\output",
"SignRequestFiles": [
{
"CustomerCorrelationId": "%s",
"SourceLocation": "libz3.dll",
"DestinationLocation": "libz3.dll"
},
{
"CustomerCorrelationId": "%s",
"SourceLocation": "Microsoft.Z3.dll",
"DestinationLocation": "Microsoft.Z3.dll"
}
],
"SigningInfo": {
"Operations": [
{
"KeyCode" : "CP-230012",
"OperationCode" : "SigntoolSign",
"Parameters" : {
"OpusName": "Microsoft",
"OpusInfo": "http://www.microsoft.com",
"FileDigest": "/fd \\"SHA256\\"",
"PageHash": "/NPH",
"TimeStamp": "/tr \\"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\\" /td sha256"
},
"ToolName" : "sign",
"ToolVersion" : "1.0"
},
{
"KeyCode" : "CP-230012",
"OperationCode" : "SigntoolVerify",
"Parameters" : {},
"ToolName" : "sign",
"ToolVersion" : "1.0"
}
]
}
}
]
} """ % (path, guid, guid)
assemblySign = os.path.join(os.path.abspath(BUILD_DIR), 'dotnet', 'assembly-sign-input.json')
with open(assemblySign, 'w') as ous:
ous.write(assemblySignStr)
outputFile = os.path.join(os.path.abspath(BUILD_DIR), 'dotnet', "esrp-out.json")
esrpCmdLine = ["esrpclient.exe", "sign", "-a", "C:\\esrp\\config\\authorization.json", "-p", "C:\\esrp\\config\\policy.json", "-i", assemblySign, "-o", outputFile]
MakeRuleCmd.write_cmd(out, ' '.join(esrpCmdLine))
MakeRuleCmd.write_cmd(out, "move /Y C:\\esrp\\output\\libz3.dll .")
MakeRuleCmd.write_cmd(out, "move /Y C:\\esrp\\output\\Microsoft.Z3.dll .")
def main_component(self):
return is_dotnet_core_enabled()
@ -1785,6 +1756,8 @@ class DotNetDLLComponent(Component):
'%s.dll' % os.path.join(dist_path, INSTALL_BIN_DIR, self.dll_name))
shutil.copy('%s.pdb' % os.path.join(build_path, self.dll_name),
'%s.pdb' % os.path.join(dist_path, INSTALL_BIN_DIR, self.dll_name))
shutil.copy('%s.xml' % os.path.join(build_path, self.dll_name),
'%s.xml' % os.path.join(dist_path, INSTALL_BIN_DIR, self.dll_name))
shutil.copy('%s.deps.json' % os.path.join(build_path, self.dll_name),
'%s.deps.json' % os.path.join(dist_path, INSTALL_BIN_DIR, self.dll_name))
if DEBUG_MODE:
@ -1796,6 +1769,8 @@ class DotNetDLLComponent(Component):
mk_dir(os.path.join(dist_path, INSTALL_BIN_DIR))
shutil.copy('%s.dll' % os.path.join(build_path, self.dll_name),
'%s.dll' % os.path.join(dist_path, INSTALL_BIN_DIR, self.dll_name))
shutil.copy('%s.xml' % os.path.join(build_path, self.dll_name),
'%s.xml' % os.path.join(dist_path, INSTALL_BIN_DIR, self.dll_name))
shutil.copy('%s.deps.json' % os.path.join(build_path, self.dll_name),
'%s.deps.json' % os.path.join(dist_path, INSTALL_BIN_DIR, self.dll_name))
@ -1839,6 +1814,8 @@ class JavaDLLComponent(Component):
t = t.replace('PLATFORM', 'netbsd')
elif IS_OPENBSD:
t = t.replace('PLATFORM', 'openbsd')
elif IS_SUNOS:
t = t.replace('PLATFORM', 'SunOS')
elif IS_CYGWIN:
t = t.replace('PLATFORM', 'cygwin')
elif IS_MSYS2:
@ -1863,9 +1840,9 @@ class JavaDLLComponent(Component):
#if IS_WINDOWS:
JAVAC = '"%s"' % JAVAC
JAR = '"%s"' % JAR
t = ('\t%s %s.java -d %s\n' % (JAVAC, os.path.join(self.to_src_dir, 'enumerations', '*'), os.path.join('api', 'java', 'classes')))
t = ('\t%s -source 1.8 -target 1.8 %s.java -d %s\n' % (JAVAC, os.path.join(self.to_src_dir, 'enumerations', '*'), os.path.join('api', 'java', 'classes')))
out.write(t)
t = ('\t%s -cp %s %s.java -d %s\n' % (JAVAC,
t = ('\t%s -source 1.8 -target 1.8 -cp %s %s.java -d %s\n' % (JAVAC,
os.path.join('api', 'java', 'classes'),
os.path.join(self.to_src_dir, '*'),
os.path.join('api', 'java', 'classes')))
@ -1977,11 +1954,11 @@ class MLComponent(Component):
src_dir = self.to_src_dir
mk_dir(os.path.join(BUILD_DIR, self.sub_dir))
api_src = get_component(API_COMPONENT).to_src_dir
# remove /GL and -std=c++11; the ocaml tools don't like them.
if IS_WINDOWS:
# remove /GL and -std=c++17; the ocaml tools don't like them.
if IS_WINDOWS:
out.write('CXXFLAGS_OCAML=$(CXXFLAGS:/GL=)\n')
else:
out.write('CXXFLAGS_OCAML=$(subst -std=c++11,,$(CXXFLAGS))\n')
out.write('CXXFLAGS_OCAML=$(subst -std=c++17,,$(CXXFLAGS))\n')
substitutions = { 'VERSION': "{}.{}.{}.{}".format(VER_MAJOR, VER_MINOR, VER_BUILD, VER_TWEAK) }
@ -2031,7 +2008,7 @@ class MLComponent(Component):
OCAMLMKLIB = 'ocamlmklib'
LIBZ3 = '-cclib -l' + z3link
LIBZ3 = '-l' + z3link + ' -lstdc++'
if is_cygwin() and not(is_cygwin_mingw()):
LIBZ3 = z3linkdep
@ -2044,9 +2021,9 @@ class MLComponent(Component):
z3mls = os.path.join(self.sub_dir, 'z3ml')
out.write('%s.cma: %s %s %s\n' % (z3mls, cmos, stubso, z3linkdep))
out.write('\t%s -o %s -I %s %s %s %s\n' % (OCAMLMKLIB, z3mls, self.sub_dir, stubso, cmos, LIBZ3))
out.write('\t%s -o %s -I %s -L. %s %s %s\n' % (OCAMLMKLIB, z3mls, self.sub_dir, stubso, cmos, LIBZ3))
out.write('%s.cmxa: %s %s %s %s.cma\n' % (z3mls, cmxs, stubso, z3linkdep, z3mls))
out.write('\t%s -o %s -I %s %s %s %s\n' % (OCAMLMKLIB, z3mls, self.sub_dir, stubso, cmxs, LIBZ3))
out.write('\t%s -o %s -I %s -L. %s %s %s\n' % (OCAMLMKLIB, z3mls, self.sub_dir, stubso, cmxs, LIBZ3))
out.write('%s.cmxs: %s.cmxa\n' % (z3mls, z3mls))
out.write('\t%s -linkall -shared -o %s.cmxs -I . -I %s %s.cmxa\n' % (OCAMLOPTF, z3mls, self.sub_dir, z3mls))
@ -2289,10 +2266,10 @@ class MLExampleComponent(ExampleComponent):
for mlfile in get_ml_files(self.ex_dir):
out.write(' %s' % os.path.join(self.to_ex_dir, mlfile))
out.write('\n')
out.write('\t%s ' % OCAMLC)
out.write('\tocamlfind %s ' % OCAMLC)
if DEBUG_MODE:
out.write('-g ')
out.write('-custom -o ml_example.byte -I -zarith -I api/ml -cclib "-L. -lz3" zarith.cma z3ml.cma')
out.write('-custom -o ml_example.byte -package zarith -I api/ml -cclib "-L. -lpthread -lstdc++ -lz3" -linkpkg z3ml.cma')
for mlfile in get_ml_files(self.ex_dir):
out.write(' %s/%s' % (self.to_ex_dir, mlfile))
out.write('\n')
@ -2300,10 +2277,10 @@ class MLExampleComponent(ExampleComponent):
for mlfile in get_ml_files(self.ex_dir):
out.write(' %s' % os.path.join(self.to_ex_dir, mlfile))
out.write('\n')
out.write('\t%s ' % OCAMLOPT)
out.write('\tocamlfind %s ' % OCAMLOPT)
if DEBUG_MODE:
out.write('-g ')
out.write('-o ml_example$(EXE_EXT) -I -zarith -I api/ml -cclib "-L. -lz3" zarith.cmxa z3ml.cmxa')
out.write('-o ml_example$(EXE_EXT) -package zarith -I api/ml -cclib "-L. -lpthread -lstdc++ -lz3" -linkpkg z3ml.cmxa')
for mlfile in get_ml_files(self.ex_dir):
out.write(' %s/%s' % (self.to_ex_dir, mlfile))
out.write('\n')
@ -2347,6 +2324,10 @@ def add_lib(name, deps=[], path=None, includes2install=[]):
c = LibComponent(name, path, deps, includes2install)
reg_component(name, c)
def add_clib(name, deps=[], path=None, includes2install=[]):
c = CLibComponent(name, path, deps, includes2install)
reg_component(name, c)
def add_hlib(name, path=None, includes2install=[]):
c = HLibComponent(name, path, includes2install)
reg_component(name, c)
@ -2415,8 +2396,9 @@ def mk_config():
if ONLY_MAKEFILES:
return
config = open(os.path.join(BUILD_DIR, 'config.mk'), 'w')
global CXX, CC, GMP, CPPFLAGS, CXXFLAGS, LDFLAGS, EXAMP_DEBUG_FLAG, FPMATH_FLAGS, LOG_SYNC, SINGLE_THREADED
global CXX, CC, GMP, GUARD_CF, STATIC_BIN, GIT_HASH, CPPFLAGS, CXXFLAGS, LDFLAGS, EXAMP_DEBUG_FLAG, FPMATH_FLAGS, LOG_SYNC, SINGLE_THREADED
if IS_WINDOWS:
CXXFLAGS = '/nologo /Zi /D WIN32 /D _WINDOWS /EHsc /GS /Gd /std:c++17'
config.write(
'CC=cl\n'
'CXX=cl\n'
@ -2457,7 +2439,7 @@ def mk_config():
'SLINK_FLAGS=/nologo /LDd\n' % static_opt)
if VS_X64:
config.write(
'CXXFLAGS=/c /Zi /nologo /W3 /WX- /Od /Oy- /D WIN32 /D _DEBUG /D Z3DEBUG /D _CONSOLE /D _TRACE /D _WINDOWS /Gm- /EHsc /RTC1 /GS /Gd %s %s\n' % (extra_opt, static_opt))
'CXXFLAGS=/c %s /W3 /WX- /Od /Oy- /D _DEBUG /D Z3DEBUG /D _CONSOLE /D _TRACE /Gm- /RTC1 %s %s\n' % (CXXFLAGS, extra_opt, static_opt))
config.write(
'LINK_EXTRA_FLAGS=/link /DEBUG /MACHINE:X64 /SUBSYSTEM:CONSOLE /INCREMENTAL:NO /STACK:8388608 /OPT:REF /OPT:ICF /TLBID:1 /DYNAMICBASE /NXCOMPAT %s\n'
'SLINK_EXTRA_FLAGS=/link /DEBUG /MACHINE:X64 /SUBSYSTEM:WINDOWS /INCREMENTAL:NO /STACK:8388608 /OPT:REF /OPT:ICF /TLBID:1 %s %s\n' % (link_extra_opt, maybe_disable_dynamic_base, link_extra_opt))
@ -2466,7 +2448,7 @@ def mk_config():
exit(1)
else:
config.write(
'CXXFLAGS=/c /Zi /nologo /W3 /WX- /Od /Oy- /D WIN32 /D _DEBUG /D Z3DEBUG /D _CONSOLE /D _TRACE /D _WINDOWS /Gm- /EHsc /RTC1 /GS /Gd /arch:SSE2 %s %s\n' % (extra_opt, static_opt))
'CXXFLAGS=/c %s /W3 /WX- /Od /Oy- /D _DEBUG /D Z3DEBUG /D _CONSOLE /D _TRACE /Gm- /RTC1 /arch:SSE2 %s %s\n' % (CXXFLAGS, extra_opt, static_opt))
config.write(
'LINK_EXTRA_FLAGS=/link /DEBUG /MACHINE:X86 /SUBSYSTEM:CONSOLE /INCREMENTAL:NO /STACK:8388608 /OPT:REF /OPT:ICF /TLBID:1 /DYNAMICBASE /NXCOMPAT %s\n'
'SLINK_EXTRA_FLAGS=/link /DEBUG /MACHINE:X86 /SUBSYSTEM:WINDOWS /INCREMENTAL:NO /STACK:8388608 /OPT:REF /OPT:ICF /TLBID:1 %s %s\n' % (link_extra_opt, maybe_disable_dynamic_base, link_extra_opt))
@ -2482,7 +2464,7 @@ def mk_config():
extra_opt = '%s /D _TRACE ' % extra_opt
if VS_X64:
config.write(
'CXXFLAGS=/c%s /Zi /nologo /W3 /WX- /O2 /D _EXTERNAL_RELEASE /D WIN32 /D NDEBUG /D _LIB /D _WINDOWS /D _UNICODE /D UNICODE /Gm- /EHsc /GS /Gd /GF /Gy /TP %s %s\n' % (GL, extra_opt, static_opt))
'CXXFLAGS=/c%s %s /W3 /WX- /O2 /D _EXTERNAL_RELEASE /D NDEBUG /D _LIB /D UNICODE /Gm- /GF /Gy /TP %s %s\n' % (GL, CXXFLAGS, extra_opt, static_opt))
config.write(
'LINK_EXTRA_FLAGS=/link%s /profile /MACHINE:X64 /SUBSYSTEM:CONSOLE /STACK:8388608 %s\n'
'SLINK_EXTRA_FLAGS=/link%s /profile /MACHINE:X64 /SUBSYSTEM:WINDOWS /STACK:8388608 %s\n' % (LTCG, link_extra_opt, LTCG, link_extra_opt))
@ -2491,7 +2473,7 @@ def mk_config():
exit(1)
else:
config.write(
'CXXFLAGS=/nologo /c%s /Zi /W3 /WX- /O2 /Oy- /D _EXTERNAL_RELEASE /D WIN32 /D NDEBUG /D _CONSOLE /D _WINDOWS /D ASYNC_COMMANDS /Gm- /EHsc /GS /Gd /arch:SSE2 %s %s\n' % (GL, extra_opt, static_opt))
'CXXFLAGS=/c%s %s /WX- /O2 /Oy- /D _EXTERNAL_RELEASE /D NDEBUG /D _CONSOLE /D ASYNC_COMMANDS /Gm- /arch:SSE2 %s %s\n' % (GL, CXXFLAGS, extra_opt, static_opt))
config.write(
'LINK_EXTRA_FLAGS=/link%s /DEBUG /MACHINE:X86 /SUBSYSTEM:CONSOLE /INCREMENTAL:NO /STACK:8388608 /OPT:REF /OPT:ICF /TLBID:1 /DYNAMICBASE /NXCOMPAT %s\n'
'SLINK_EXTRA_FLAGS=/link%s /DEBUG /MACHINE:X86 /SUBSYSTEM:WINDOWS /INCREMENTAL:NO /STACK:8388608 /OPT:REF /OPT:ICF /TLBID:1 %s %s\n' % (LTCG, link_extra_opt, LTCG, maybe_disable_dynamic_base, link_extra_opt))
@ -2532,8 +2514,8 @@ def mk_config():
CPPFLAGS = '%s -D_MP_INTERNAL' % CPPFLAGS
if GIT_HASH:
CPPFLAGS = '%s -DZ3GITHASH=%s' % (CPPFLAGS, GIT_HASH)
CXXFLAGS = '%s -std=c++11' % CXXFLAGS
CXXFLAGS = '%s -fvisibility=hidden -c' % CXXFLAGS
CXXFLAGS = '%s -std=c++17' % CXXFLAGS
CXXFLAGS = '%s -fvisibility=hidden -fvisibility-inlines-hidden -c' % CXXFLAGS
FPMATH = test_fpmath(CXX)
CXXFLAGS = '%s %s' % (CXXFLAGS, FPMATH_FLAGS)
if LOG_SYNC:
@ -2581,6 +2563,12 @@ def mk_config():
OS_DEFINES = '-D_OPENBSD_'
SO_EXT = '.so'
SLIBFLAGS = '-shared'
elif sysname == 'SunOS':
CXXFLAGS = '%s -D_SUNOS_' % CXXFLAGS
OS_DEFINES = '-D_SUNOS_'
SO_EXT = '.so'
SLIBFLAGS = '-shared'
SLIBEXTRAFLAGS = '%s -mimpure-text' % SLIBEXTRAFLAGS
elif sysname.startswith('CYGWIN'):
CXXFLAGS = '%s -D_CYGWIN' % CXXFLAGS
OS_DEFINES = '-D_CYGWIN'
@ -2618,7 +2606,7 @@ def mk_config():
config.write('CC=%s\n' % CC)
config.write('CXX=%s\n' % CXX)
config.write('CXXFLAGS=%s %s\n' % (CPPFLAGS, CXXFLAGS))
config.write('CFLAGS=%s %s\n' % (CPPFLAGS, CXXFLAGS.replace('-std=c++11', '')))
config.write('CFLAGS=%s %s\n' % (CPPFLAGS, CXXFLAGS.replace('-std=c++17', '')))
config.write('EXAMP_DEBUG_FLAG=%s\n' % EXAMP_DEBUG_FLAG)
config.write('CXX_OUT_FLAG=-o \n')
config.write('C_OUT_FLAG=-o \n')
@ -2632,7 +2620,7 @@ def mk_config():
config.write('LINK_FLAGS=\n')
config.write('LINK_OUT_FLAG=-o \n')
if is_linux() and (build_static_lib() or build_static_bin()):
config.write('LINK_EXTRA_FLAGS=-Wl,--whole-archive -lpthread -Wl,--no-whole-archive %s\n' % LDFLAGS)
config.write('LINK_EXTRA_FLAGS=-Wl,--whole-archive -lrt -lpthread -Wl,--no-whole-archive %s\n' % LDFLAGS)
else:
config.write('LINK_EXTRA_FLAGS=-lpthread %s\n' % LDFLAGS)
config.write('SO_EXT=%s\n' % SO_EXT)

View File

@ -1,9 +0,0 @@
call "C:\Program Files (x86)\Microsoft Visual Studio\2017\Enterprise\VC\Auxiliary\Build\vcvars64.bat"
python scripts\mk_win_dist.py --x64-only --dotnet-key=$(Agent.TempDirectory)\z3.snk
call "C:\Program Files (x86)\Microsoft Visual Studio\2017\Enterprise\VC\Auxiliary\Build\vcvars32.bat"
python scripts\mk_win_dist.py --x86-only --dotnet-key=$(Agent.TempDirectory)\z3.snk

View File

@ -25,9 +25,9 @@ VERBOSE=True
DIST_DIR='dist'
FORCE_MK=False
DOTNET_CORE_ENABLED=True
ESRP_SIGN=False
DOTNET_KEY_FILE=None
JAVA_ENABLED=True
ZIP_BUILD_OUTPUTS=False
GIT_HASH=False
PYTHON_ENABLED=True
X86ONLY=False
@ -63,10 +63,10 @@ def display_help():
print(" -b <sudir>, --build=<subdir> subdirectory where x86 and x64 Z3 versions will be built (default: build-dist).")
print(" -f, --force force script to regenerate Makefiles.")
print(" --nodotnet do not include .NET bindings in the binary distribution files.")
print(" --dotnet-key=<file> sign the .NET assembly with the private key in <file>.")
print(" --esrp sign with esrp.")
print(" --dotnet-key=<file> strongname sign the .NET assembly with the private key in <file>.")
print(" --nojava do not include Java bindings in the binary distribution files.")
print(" --nopython do not include Python bindings in the binary distribution files.")
print(" --zip package build outputs in zip file.")
print(" --githash include git hash in the Zip file.")
print(" --x86-only x86 dist only.")
print(" --x64-only x64 dist only.")
@ -74,7 +74,7 @@ def display_help():
# Parse configuration option for mk_make script
def parse_options():
global FORCE_MK, JAVA_ENABLED, GIT_HASH, DOTNET_CORE_ENABLED, DOTNET_KEY_FILE, PYTHON_ENABLED, X86ONLY, X64ONLY, ESRP_SIGN
global FORCE_MK, JAVA_ENABLED, ZIP_BUILD_OUTPUTS, GIT_HASH, DOTNET_CORE_ENABLED, DOTNET_KEY_FILE, PYTHON_ENABLED, X86ONLY, X64ONLY
path = BUILD_DIR
options, remainder = getopt.gnu_getopt(sys.argv[1:], 'b:hsf', ['build=',
'help',
@ -83,13 +83,12 @@ def parse_options():
'nojava',
'nodotnet',
'dotnet-key=',
'esrp',
'zip',
'githash',
'nopython',
'x86-only',
'x64-only'
])
print(options)
for opt, arg in options:
if opt in ('-b', '--build'):
if arg == 'src':
@ -107,10 +106,10 @@ def parse_options():
PYTHON_ENABLED = False
elif opt == '--dotnet-key':
DOTNET_KEY_FILE = arg
elif opt == '--esrp':
ESRP_SIGN = True
elif opt == '--nojava':
JAVA_ENABLED = False
elif opt == '--zip':
ZIP_BUILD_OUTPUTS = True
elif opt == '--githash':
GIT_HASH = True
elif opt == '--x86-only' and not X64ONLY:
@ -138,13 +137,12 @@ def mk_build_dir(path, x64):
opts.append('--java')
if x64:
opts.append('-x')
if ESRP_SIGN:
opts.append('--esrp')
if GIT_HASH:
opts.append('--githash=%s' % mk_util.git_hash())
opts.append('--git-describe')
if PYTHON_ENABLED:
opts.append('--python')
opts.append('--guardcf')
if subprocess.call(opts) != 0:
raise MKException("Failed to generate build directory at '%s'" % path)
@ -183,10 +181,10 @@ def exec_cmds(cmds):
def mk_z3(x64):
cmds = []
if x64:
cmds.append('call "%VCINSTALLDIR%vcvarsall.bat" amd64')
cmds.append('call "%VCINSTALLDIR%Auxiliary\\build\\vcvarsall.bat" amd64')
cmds.append('cd %s' % BUILD_X64_DIR)
else:
cmds.append('call "%VCINSTALLDIR%vcvarsall.bat" x86')
cmds.append('call "%VCINSTALLDIR%Auxiliary\\build\\vcvarsall.bat" x86')
cmds.append('cd %s' % BUILD_X86_DIR)
cmds.append('nmake')
if exec_cmds(cmds) != 0:
@ -208,7 +206,6 @@ def get_z3_name(x64):
return 'z3-%s.%s.%s-%s-win' % (major, minor, build, platform)
def mk_dist_dir(x64):
global ESRP_SIGN
if x64:
platform = "x64"
build_path = BUILD_X64_DIR
@ -217,15 +214,10 @@ def mk_dist_dir(x64):
build_path = BUILD_X86_DIR
dist_path = os.path.join(DIST_DIR, get_z3_name(x64))
mk_dir(dist_path)
mk_util.ESRP_SIGN = ESRP_SIGN
mk_util.DOTNET_CORE_ENABLED = True
mk_util.DOTNET_KEY_FILE = DOTNET_KEY_FILE
mk_util.JAVA_ENABLED = JAVA_ENABLED
mk_util.PYTHON_ENABLED = PYTHON_ENABLED
mk_win_dist(build_path, dist_path)
if is_verbose():
print("Generated %s distribution folder at '%s'" % (platform, dist_path))
print(f"Generated {platform} distribution folder at '{dist_path}'")
def mk_dist_dirs():
mk_dist_dir(False)
mk_dist_dir(True)
@ -257,7 +249,8 @@ def mk_zips():
VS_RUNTIME_PATS = [re.compile('vcomp.*\.dll'),
re.compile('msvcp.*\.dll'),
re.compile('msvcr.*\.dll')]
re.compile('msvcr.*\.dll'),
re.compile('vcrun.*\.dll')]
# Copy Visual Studio Runtime libraries
def cp_vs_runtime(x64):
@ -305,6 +298,15 @@ def cp_licenses():
cp_license(True)
cp_license(False)
def init_flags():
global DOTNET_KEY_FILE, JAVA_ENABLED, PYTHON_ENABLED
mk_util.DOTNET_CORE_ENABLED = True
mk_util.DOTNET_KEY_FILE = DOTNET_KEY_FILE
mk_util.JAVA_ENABLED = JAVA_ENABLED
mk_util.PYTHON_ENABLED = PYTHON_ENABLED
mk_util.ALWAYS_DYNAMIC_BASE = True
# Entry point
def main():
if os.name != 'nt':
@ -312,6 +314,7 @@ def main():
parse_options()
check_vc_cmd_prompt()
init_flags()
if X86ONLY:
mk_build_dir(BUILD_X86_DIR, False)
@ -320,7 +323,8 @@ def main():
mk_dist_dir(False)
cp_license(False)
cp_vs_runtime(False)
mk_zip(False)
if ZIP_BUILD_OUTPUTS:
mk_zip(False)
elif X64ONLY:
mk_build_dir(BUILD_X64_DIR, True)
mk_z3(True)
@ -328,7 +332,8 @@ def main():
mk_dist_dir(True)
cp_license(True)
cp_vs_runtime(True)
mk_zip(True)
if ZIP_BUILD_OUTPUTS:
mk_zip(True)
else:
mk_build_dirs()
mk_z3s()
@ -336,7 +341,8 @@ def main():
mk_dist_dirs()
cp_licenses()
cp_vs_runtimes()
mk_zips()
if ZIP_BUILD_OUTPUTS:
mk_zips()
main()
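
Putting the new options together, a hypothetical local invocation of the distribution script (the key path is a placeholder; the flag names come from the option parsing above):

import subprocess

subprocess.check_call([
    "python", r"scripts\mk_win_dist.py",
    "--x64-only",                       # build only the 64-bit distribution
    r"--dotnet-key=C:\keys\z3.snk",     # strong-name key for the .NET assembly (placeholder path)
    "--zip",                            # with --zip, build outputs are additionally packaged as a zip
])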

View File

@ -1,194 +1,352 @@
variables:
z3Version: '4.8.7'
ReleaseVersion: '4.8.11'
jobs:
stages:
- stage: Build
jobs:
- job: Mac
displayName: "Mac Build"
pool:
vmImage: "macOS-10.14"
steps:
- task: DownloadSecureFile@1
inputs:
secureFile: 'z3.snk'
- script: python scripts/mk_unix_dist.py --dotnet-key=$(Agent.TempDirectory)/z3.snk
- script: git clone https://github.com/z3prover/z3test z3test
- script: python z3test/scripts/test_benchmarks.py build-dist/z3 z3test/regressions/smt2
- script: cp dist/*.zip $(Build.ArtifactStagingDirectory)/.
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'Mac'
targetPath: $(Build.ArtifactStagingDirectory)
- job: Mac
displayName: "Mac Build"
pool:
vmImage: "macOS-latest"
steps:
- script: python scripts/mk_unix_dist.py --dotnet-key=$(Build.SourcesDirectory)/resources/z3.snk
- script: git clone https://github.com/z3prover/z3test z3test
- script: python z3test/scripts/test_benchmarks.py build-dist/z3 z3test/regressions/smt2
- script: cp dist/*.zip $(Build.ArtifactStagingDirectory)/.
- task: PublishPipelineArtifact@1
inputs:
artifactName: 'Mac'
targetPath: $(Build.ArtifactStagingDirectory)
- job: Ubuntu
displayName: "Ubuntu build"
pool:
vmImage: "ubuntu-16.04"
steps:
- task: DownloadSecureFile@1
inputs:
secureFile: 'z3.snk'
- script: python scripts/mk_unix_dist.py --dotnet-key=$(Agent.TempDirectory)/z3.snk
- script: git clone https://github.com/z3prover/z3test z3test
- script: python z3test/scripts/test_benchmarks.py build-dist/z3 z3test/regressions/smt2
- script: cp dist/*.zip $(Build.ArtifactStagingDirectory)/.
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'Ubuntu'
targetPath: $(Build.ArtifactStagingDirectory)
- job: Ubuntu
displayName: "Ubuntu build"
pool:
vmImage: "ubuntu-latest"
steps:
- script: python scripts/mk_unix_dist.py --dotnet-key=$(Build.SourcesDirectory)/resources/z3.snk
- script: git clone https://github.com/z3prover/z3test z3test
- script: python z3test/scripts/test_benchmarks.py build-dist/z3 z3test/regressions/smt2
- script: cp dist/*.zip $(Build.ArtifactStagingDirectory)/.
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'Ubuntu'
targetPath: $(Build.ArtifactStagingDirectory)
- job: Manylinux
displayName: "Manylinux build"
pool:
vmImage: "ubuntu-16.04"
container: "rhelmot/manylinux1_x86_64:latest"
variables:
python: "/opt/python/cp35-cp35m/bin/python"
steps:
- script: $(python) scripts/mk_unix_dist.py --nodotnet --nojava
- script: git clone https://github.com/z3prover/z3test z3test
- script: $(python) z3test/scripts/test_benchmarks.py build-dist/z3 z3test/regressions/smt2
- script: cp dist/*.zip $(Build.ArtifactStagingDirectory)/
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'Manylinux'
targetPath: $(Build.ArtifactStagingDirectory)
- job: UbuntuDoc
displayName: "Ubuntu Doc build"
pool:
vmImage: "Ubuntu-18.04"
steps:
- script: sudo apt-get install ocaml opam libgmp-dev
- script: sudo apt-get install doxygen
- script: sudo apt-get install graphviz
- script: opam init -y
- script: eval `opam config env`; opam install zarith ocamlfind -y
- script: python scripts/mk_make.py --ml --staticlib
- script: |
set -e
cd build
eval `opam config env`
make -j3
make -j3 examples
make -j3 test-z3
./ml_example
cd ..
- script: |
set -e
eval `opam config env`
cd doc
python mk_api_doc.py --mld --z3py-package-path=../build/python/z3
mkdir api/html/ml
ocamldoc -html -d api/html/ml -sort -hide Z3 -I $( ocamlfind query zarith ) -I ../build/api/ml ../build/api/ml/z3enums.mli ../build/api/ml/z3.mli
cd ..
- script: zip -r z3doc.zip doc/api
- script: cp z3doc.zip $(Build.ArtifactStagingDirectory)/.
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'UbuntuDoc'
targetPath: $(Build.ArtifactStagingDirectory)
- job: Windows
displayName: "Windows build"
pool:
vmImage: "vs2017-win2016"
steps:
- task: DownloadSecureFile@1
inputs:
secureFile: 'z3.snk'
- script: scripts\mk_win_dist.cmd
# - script: git clone https://github.com/z3prover/z3test z3test
# - script: python z3test/scripts/test_benchmarks.py build-dist\z3.exe z3test/regressions/smt2
- script: xcopy dist\*.zip $(Build.ArtifactStagingDirectory)\* /y
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'Windows'
targetPath: $(Build.ArtifactStagingDirectory)
- job: Manylinux
displayName: "Manylinux build"
pool:
vmImage: "Ubuntu-18.04"
container: "quay.io/pypa/manylinux2010_x86_64:latest"
variables:
python: "/opt/python/cp37-cp37m/bin/python"
steps:
- script: $(python) scripts/mk_unix_dist.py --nodotnet --nojava
- script: git clone https://github.com/z3prover/z3test z3test
- script: $(python) z3test/scripts/test_benchmarks.py build-dist/z3 z3test/regressions/smt2
- script: cp dist/*.zip $(Build.ArtifactStagingDirectory)/
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'Manylinux'
targetPath: $(Build.ArtifactStagingDirectory)
- job: Windows32
displayName: "Windows 32-bit build"
pool:
vmImage: "windows-latest"
steps:
- task: CmdLine@2
inputs:
script:
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" x86 &
python scripts\mk_win_dist.py
--x86-only
--dotnet-key=$(Build.SourcesDirectory)/resources/z3.snk
--zip
- task: CopyFiles@2
inputs:
sourceFolder: dist
contents: '*.zip'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishPipelineArtifact@1
inputs:
targetPath: $(Build.ArtifactStagingDirectory)
artifactName: 'Windows32'
- job: Windows64
displayName: "Windows 64-bit build"
pool:
vmImage: "windows-latest"
steps:
- task: CmdLine@2
inputs:
script:
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" x64 &
python scripts\mk_win_dist.py
--x64-only
--dotnet-key=$(Build.SourcesDirectory)/resources/z3.snk
--zip
- task: CopyFiles@2
inputs:
sourceFolder: dist
contents: '*.zip'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishPipelineArtifact@1
inputs:
targetPath: $(Build.ArtifactStagingDirectory)
artifactName: 'Windows64'
- stage: Package
jobs:
- job: NuGet
displayName: "NuGet packaging"
pool:
vmImage: "windows-latest"
steps:
- powershell: write-host $(System.DefinitionId)
displayName: 'System.DefinitionId'
- powershell: write-host $(Build.BuildId)
displayName: 'Build.BuildId'
- powershell: write-host $(System.TeamProjectId)
displayName: 'System.TeamProjectId'
- task: DownloadPipelineArtifact@2
displayName: 'Download Win64 Build'
inputs:
artifact: 'Windows64'
path: $(Agent.TempDirectory)\package
- task: DownloadPipelineArtifact@2
displayName: 'Download Win32 Build'
inputs:
artifact: 'Windows32'
path: $(Agent.TempDirectory)\package
- task: DownloadPipelineArtifact@2
displayName: 'Download Ubuntu Build'
inputs:
artifact: 'Ubuntu'
path: $(Agent.TempDirectory)\package
- task: DownloadPipelineArtifact@2
displayName: 'Download macOS Build'
inputs:
artifact: 'Mac'
path: $(Agent.TempDirectory)\package
- task: NuGetToolInstaller@0
inputs:
versionSpec: 5.x
checkLatest: false
- task: PythonScript@0
displayName: 'Python: assemble files'
inputs:
scriptSource: 'filepath'
scriptPath: scripts\mk_nuget_task.py
workingDirectory: $(Agent.TempDirectory)\package
arguments:
$(Agent.TempDirectory)\package
$(ReleaseVersion)
$(Build.Repository.Uri)
$(Build.SourceBranchName)
$(Build.SourceVersion)
$(Build.SourcesDirectory)
symbols
- task: NugetCommand@2
displayName: 'NuGet Pack Symbols'
inputs:
command: custom
arguments: 'pack $(Agent.TempDirectory)\package\out\Microsoft.Z3.sym.nuspec -OutputDirectory $(Build.ArtifactStagingDirectory) -Verbosity detailed -Symbols -SymbolPackageFormat snupkg -BasePath $(Agent.TempDirectory)\package\out'
- task: EsrpCodeSigning@1
displayName: 'Sign Package'
inputs:
ConnectedServiceName: 'z3-esrp-signing-2'
FolderPath: $(Build.ArtifactStagingDirectory)
Pattern: Microsoft.Z3.$(ReleaseVersion).nupkg
signConfigType: 'inlineSignParams'
inlineOperation: |
[
{
"KeyCode" : "CP-401405",
"OperationCode" : "NuGetSign",
"Parameters" : {},
"ToolName" : "sign",
"ToolVersion" : "1.0"
},
{
"KeyCode" : "CP-401405",
"OperationCode" : "NuGetVerify",
"Parameters" : {},
"ToolName" : "sign",
"ToolVersion" : "1.0"
}
]
SessionTimeout: '60'
MaxConcurrency: '50'
MaxRetryAttempts: '5'
- task: EsrpCodeSigning@1
displayName: 'Sign Symbol Package'
inputs:
ConnectedServiceName: 'z3-esrp-signing-2'
FolderPath: $(Build.ArtifactStagingDirectory)
Pattern: Microsoft.Z3.$(ReleaseVersion).snupkg
signConfigType: 'inlineSignParams'
inlineOperation: |
[
{
"KeyCode" : "CP-401405",
"OperationCode" : "NuGetSign",
"Parameters" : {},
"ToolName" : "sign",
"ToolVersion" : "1.0"
},
{
"KeyCode" : "CP-401405",
"OperationCode" : "NuGetVerify",
"Parameters" : {},
"ToolName" : "sign",
"ToolVersion" : "1.0"
}
]
SessionTimeout: '60'
MaxConcurrency: '50'
MaxRetryAttempts: '5'
- task: PublishPipelineArtifact@1
inputs:
targetPath: $(Build.ArtifactStagingDirectory)
artifactName: 'NuGet'
- job: NuGet
displayName: "Create Nuget Package"
dependsOn:
- Mac
- Ubuntu
- Windows
steps:
- task: DownloadPipelineArtifact@0
inputs:
artifactName: 'Windows'
targetPath: tmp
- task: DownloadPipelineArtifact@0
inputs:
artifactName: 'Mac'
targetPath: tmp
- task: DownloadPipelineArtifact@0
inputs:
artifactName: 'Ubuntu'
targetPath: tmp
- script: |
cd scripts
python mk_nuget_task.py ../tmp $(z3Version) $(Build.SourceVersion)
cd ..
# - task: NuGetCommand@2
# inputs:
# command: pack
# packagesToPack: scripts/out/*.nuspec
# packDestination: $(Build.ArtifactStagingDirectory)
# - task: NuGetCommand@2
# inputs:
# command: 'pack'
# packagesToPack: scripts/out/*.nuspec
# includesymbols: true
# packDestination: $(Build.ArtifactStagingDirectory)
- job: Python
displayName: "Python packaging"
pool:
vmImage: "ubuntu-latest"
steps:
- task: DownloadPipelineArtifact@2
inputs:
artifactName: 'Windows32'
targetPath: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
inputs:
artifactName: 'Windows64'
targetPath: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
inputs:
artifactName: 'Manylinux'
targetPath: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
inputs:
artifactName: 'Mac'
targetPath: $(Agent.TempDirectory)
- script: cd $(Agent.TempDirectory); mkdir osx-bin; cd osx-bin; unzip ../*osx*.zip
- script: cd $(Agent.TempDirectory); mkdir linux-bin; cd linux-bin; unzip ../*glibc*.zip
- script: cd $(Agent.TempDirectory); mkdir win32-bin; cd win32-bin; unzip ../*x86-win*.zip
- script: cd $(Agent.TempDirectory); mkdir win64-bin; cd win64-bin; unzip ../*x64-win*.zip
- script: python3 -m pip install --user -U setuptools wheel
- script: cd src/api/python; python3 setup.py sdist
# take a look at this PREMIUM HACK I came up with to get around the fact that the azure variable syntax overloads the bash syntax for subshells
- script: cd src/api/python; echo $(Agent.TempDirectory)/linux-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/win32-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/win64-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/osx-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'Python packages'
targetPath: src/api/python/dist
# Not available as a task?
# - script: |
# cd scripts
# EsprClient.exe sign -a authorization.json -p policy.json -i out/nuget_sign_input.json -o out/diagnostics.json
# cd ..
- job: Python
displayName: "Python packaging"
dependsOn:
- Manylinux
- Windows
pool:
vmImage: "ubuntu-16.04"
steps:
- task: DownloadPipelineArtifact@0
inputs:
artifactName: 'Windows'
targetPath: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@0
inputs:
artifactName: 'Manylinux'
targetPath: $(Agent.TempDirectory)
- script: cd $(Agent.TempDirectory); mkdir linux-bin; cd linux-bin; unzip ../*centos*.zip
- script: cd $(Agent.TempDirectory); mkdir win32-bin; cd win32-bin; unzip ../*x86-win*.zip
- script: cd $(Agent.TempDirectory); mkdir win64-bin; cd win64-bin; unzip ../*x64-win*.zip
- script: python -m pip install --user -U setuptools wheel
- script: cd src/api/python; python setup.py sdist
# take a look at this PREMIUM HACK I came up with to get around the fact that the azure variable syntax overloads the bash syntax for subshells
- script: cd src/api/python; echo $(Agent.TempDirectory)/linux-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/win32-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/win64-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python setup.py bdist_wheel
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'Python packages'
targetPath: src/api/python/dist
- job: Deploy
displayName: "Deploy into GitHub"
dependsOn:
- Mac
- Ubuntu
- Windows
steps:
- task: DownloadPipelineArtifact@0
inputs:
artifactName: 'Windows'
targetPath: tmp
- task: DownloadPipelineArtifact@0
inputs:
artifactName: 'Mac'
targetPath: tmp
- task: DownloadPipelineArtifact@0
inputs:
artifactName: 'Ubuntu'
targetPath: tmp
# - task: DownloadPipelineArtifact@0
# inputs:
# artifactName: 'NuGet'
# targetPath: tmp
- task: GitHubRelease@0
inputs:
gitHubConnection: Z3GitHub
repositoryName: 'Z3Prover/z3'
action: 'delete'
- stage: Deployment
jobs:
- job: Deploy
displayName: "Deploy into GitHub"
steps:
- task: DownloadPipelineArtifact@2
displayName: "Download windows32"
inputs:
artifactName: 'Windows32'
targetPath: tmp
- task: DownloadPipelineArtifact@2
displayName: "Download windows64"
inputs:
artifactName: 'Windows64'
targetPath: tmp
- task: DownloadPipelineArtifact@2
displayName: "Download Mac"
inputs:
artifactName: 'Mac'
targetPath: tmp
- task: DownloadPipelineArtifact@2
displayName: "Download Ubuntu"
inputs:
artifactName: 'Ubuntu'
targetPath: tmp
- task: DownloadPipelineArtifact@2
displayName: "Download Doc"
inputs:
artifactName: 'UbuntuDoc'
targetPath: tmp
- task: DownloadPipelineArtifact@2
displayName: "Download Python"
inputs:
artifactName: 'Python packages'
targetPath: tmp
- task: DownloadPipelineArtifact@2
displayName: "Download NuGet"
inputs:
artifactName: 'NuGet'
targetPath: tmp
- task: GitHubRelease@0
inputs:
gitHubConnection: Z3GitHub
repositoryName: 'Z3Prover/z3'
action: 'delete'
# target: '$(Build.SourceVersion)'
tagSource: 'manual'
tag: 'Nightly'
- task: GitHubRelease@0
inputs:
gitHubConnection: Z3GitHub
repositoryName: 'Z3Prover/z3'
action: 'create'
tagSource: 'manual'
tag: 'Nightly'
- task: GitHubRelease@0
inputs:
gitHubConnection: Z3GitHub
repositoryName: 'Z3Prover/z3'
action: 'create'
# target: '$(Build.SourceVersion)'
tagSource: 'manual'
tag: 'Nightly'
title: 'Nightly'
releaseNotesSource: 'input'
releaseNotes: 'nightly build'
assets: 'tmp/*'
assetUploadMode: 'replace'
isDraft: false
isPreRelease: true
# TBD: run regression tests on generated binaries.

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
# - /usr/bin/env python
"""
Reads a pyg file and emits the corresponding
C++ header file into the specified destination

View File

@ -1,150 +1,405 @@
jobs:
# Release pipeline (must be triggered manually)
# * Builds for all platforms (with code signing)
# * Creates packages for Python and NuGet
# * Uploads build products to stores (GitHub, NuGet, PyPI)
- job: Mac
displayName: "Mac Build"
pool:
vmImage: "macOS-10.14"
steps:
- task: DownloadSecureFile@1
inputs:
secureFile: 'z3.snk'
- script: python scripts/mk_unix_dist.py --dotnet-key=$(Agent.TempDirectory)/z3.snk
- script: git clone https://github.com/z3prover/z3test z3test
- script: python z3test/scripts/test_benchmarks.py build-dist/z3 z3test/regressions/smt2
- script: cp dist/*.zip $(Build.ArtifactStagingDirectory)/.
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'Mac'
targetPath: $(Build.ArtifactStagingDirectory)
trigger: none
- job: Ubuntu
displayName: "Ubuntu build"
pool:
vmImage: "ubuntu-16.04"
steps:
- task: DownloadSecureFile@1
inputs:
secureFile: 'z3.snk'
- script: python scripts/mk_unix_dist.py --dotnet-key=$(Agent.TempDirectory)/z3.snk
- script: git clone https://github.com/z3prover/z3test z3test
- script: python z3test/scripts/test_benchmarks.py build-dist/z3 z3test/regressions/smt2
- script: cp dist/*.zip $(Build.ArtifactStagingDirectory)/.
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'Ubuntu'
targetPath: $(Build.ArtifactStagingDirectory)
variables:
ReleaseVersion: '4.8.12'
- job: Manylinux
displayName: "Manylinux build"
pool:
vmImage: "ubuntu-16.04"
container: "rhelmot/manylinux1_x86_64:latest"
variables:
python: "/opt/python/cp35-cp35m/bin/python"
steps:
- script: $(python) scripts/mk_unix_dist.py --nodotnet --nojava
- script: git clone https://github.com/z3prover/z3test z3test
- script: $(python) z3test/scripts/test_benchmarks.py build-dist/z3 z3test/regressions/smt2
- script: cp dist/*.zip $(Build.ArtifactStagingDirectory)/
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'Manylinux'
targetPath: $(Build.ArtifactStagingDirectory)
stages:
- job: Windows
displayName: "Windows build"
pool:
vmImage: "vs2017-win2016"
steps:
- task: DownloadSecureFile@1
inputs:
secureFile: 'z3.snk'
- script: scripts\mk_win_dist.cmd
- script: xcopy dist\*.zip $(Build.ArtifactStagingDirectory)\* /y
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'Windows'
targetPath: $(Build.ArtifactStagingDirectory)
# Builds Z3 on various platforms
- stage: Build
jobs:
- job: MacBuild
displayName: "macOS Build"
pool:
vmImage: "macOS-latest"
steps:
- task: PythonScript@0
displayName: Build
inputs:
scriptSource: 'filepath'
scriptPath: scripts/mk_unix_dist.py
arguments: --dotnet-key=$(Build.SourcesDirectory)/resources/z3.snk
- script: git clone https://github.com/z3prover/z3test z3test
displayName: 'Clone z3test'
- task: PythonScript@0
displayName: Test
inputs:
scriptSource: 'filepath'
scriptPath: z3test/scripts/test_benchmarks.py
arguments: build-dist/z3 z3test/regressions/smt2
- task: CopyFiles@2
inputs:
sourceFolder: dist
contents: '*.zip'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'macOSBuild'
targetPath: $(Build.ArtifactStagingDirectory)
- job: UbuntuBuild
displayName: "Ubuntu build"
pool:
vmImage: "ubuntu-latest"
steps:
- task: PythonScript@0
displayName: Build
inputs:
scriptSource: 'filepath'
scriptPath: scripts/mk_unix_dist.py
arguments: --dotnet-key=$(Build.SourcesDirectory)/resources/z3.snk
- script: git clone https://github.com/z3prover/z3test z3test
displayName: 'Clone z3test'
- task: PythonScript@0
displayName: Test
inputs:
scriptSource: 'filepath'
scriptPath: z3test/scripts/test_benchmarks.py
arguments: build-dist/z3 z3test/regressions/smt2
- task: CopyFiles@2
inputs:
sourceFolder: dist
contents: '*.zip'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'UbuntuBuild'
targetPath: $(Build.ArtifactStagingDirectory)
- job: Python
displayName: "Python packaging"
dependsOn:
- Manylinux
- Windows
pool:
vmImage: "ubuntu-16.04"
steps:
- task: DownloadPipelineArtifact@0
inputs:
artifactName: 'Windows'
targetPath: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@0
inputs:
artifactName: 'Manylinux'
targetPath: $(Agent.TempDirectory)
- script: cd $(Agent.TempDirectory); mkdir linux-bin; cd linux-bin; unzip ../*centos*.zip
- script: cd $(Agent.TempDirectory); mkdir win32-bin; cd win32-bin; unzip ../*x86-win*.zip
- script: cd $(Agent.TempDirectory); mkdir win64-bin; cd win64-bin; unzip ../*x64-win*.zip
- script: python -m pip install --user -U setuptools wheel
- script: cd src/api/python; python setup.py sdist
# Workaround: Azure Pipelines' $(...) macro syntax collides with bash's $(...) command substitution, so the unpacked release directory is handed to setup.py as PACKAGE_FROM_RELEASE via echo | xargs | env instead of an inline assignment.
- script: cd src/api/python; echo $(Agent.TempDirectory)/linux-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/win32-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/win64-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python setup.py bdist_wheel
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'Python packages'
targetPath: src/api/python/dist
- job: UbuntuDoc
displayName: "Ubuntu Doc build"
pool:
vmImage: "Ubuntu-18.04"
steps:
- script: sudo apt-get install ocaml opam libgmp-dev
- script: sudo apt-get install doxygen
- script: sudo apt-get install graphviz
- script: opam init -y
- script: eval `opam config env`; opam install zarith ocamlfind -y
- script: python scripts/mk_make.py --ml --staticlib
- script: |
set -e
cd build
eval `opam config env`
make -j3
make -j3 examples
make -j3 test-z3
./ml_example
cd ..
- script: |
set -e
eval `opam config env`
cd doc
python mk_api_doc.py --mld --z3py-package-path=../build/python/z3
mkdir api/html/ml
ocamldoc -html -d api/html/ml -sort -hide Z3 -I $( ocamlfind query zarith ) -I ../build/api/ml ../build/api/ml/z3enums.mli ../build/api/ml/z3.mli
cd ..
- script: zip -r z3doc.zip doc/api
- script: cp z3doc.zip $(Build.ArtifactStagingDirectory)/.
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'UbuntuDoc'
targetPath: $(Build.ArtifactStagingDirectory)
- job: Deploy
displayName: "Deploy into GitHub and PyPI"
dependsOn:
- Mac
- Ubuntu
- Windows
- Python
steps:
- task: DownloadPipelineArtifact@0
inputs:
artifactName: 'Windows'
targetPath: tmp
- task: DownloadPipelineArtifact@0
inputs:
artifactName: 'Mac'
targetPath: tmp
- task: DownloadPipelineArtifact@0
inputs:
artifactName: 'Ubuntu'
targetPath: tmp
# TBD: build NuGet package
# TBD: this script should build a specific pre-specified tag
- task: GitHubRelease@0
inputs:
gitHubConnection: Z3GitHub
repositoryName: 'Z3Prover/z3'
action: 'create'
target: '$(Build.SourceVersion)'
tagSource: 'manual'
tag: 'z3-4.8.7'
title: 'z3-4.8.7'
releaseNotesSource: 'input'
releaseNotes: '4.8.7 release'
assets: 'tmp/*'
isDraft: true
isPreRelease: true
- task: DownloadPipelineArtifact@0
inputs:
artifactName: 'Python packages'
targetPath: dist
- task: DownloadSecureFile@1
name: pypirc
inputs:
secureFile: 'pypirc'
- script: pip install --upgrade pip
- script: python -m pip install --user -U setuptools importlib_metadata wheel twine
# Uncomment on release:
- script: python -m twine upload --config-file $(pypirc.secureFilePath) -r $(pypiReleaseServer) dist/*
- job: ManyLinuxBuild
displayName: "ManyLinux build"
pool:
vmImage: "ubuntu-latest"
container: "quay.io/pypa/manylinux2010_x86_64:latest"
variables:
python: "/opt/python/cp37-cp37m/bin/python"
steps:
- task: PythonScript@0
displayName: Build
inputs:
scriptSource: 'filepath'
scriptPath: scripts/mk_unix_dist.py
arguments: --nodotnet --nojava
pythonInterpreter: $(python)
- script: git clone https://github.com/z3prover/z3test z3test
displayName: 'Clone z3test'
- task: PythonScript@0
displayName: Test
inputs:
scriptSource: 'filepath'
scriptPath: z3test/scripts/test_benchmarks.py
arguments: build-dist/z3 z3test/regressions/smt2
pythonInterpreter: $(python)
- task: CopyFiles@2
inputs:
sourceFolder: dist
contents: '*.zip'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'ManyLinuxBuild'
targetPath: $(Build.ArtifactStagingDirectory)
- template: build-win-signed.yml
parameters:
ReleaseVersion: $(ReleaseVersion)
BuildArchitecture: 'x64'
- template: build-win-signed.yml
parameters:
ReleaseVersion: $(ReleaseVersion)
BuildArchitecture: 'x86'
# TBD: run regression tests on generated binaries.
# Creates Z3 packages in various formats
- stage: Package
jobs:
- job: NuGetPackage
displayName: "NuGet packaging"
pool:
vmImage: "windows-latest"
steps:
- powershell: write-host $(System.DefinitionId)
displayName: 'System.DefinitionId'
- powershell: write-host $(Build.BuildId)
displayName: 'Build.BuildId'
- powershell: write-host $(System.TeamProjectId)
displayName: 'System.TeamProjectId'
- task: DownloadPipelineArtifact@2
displayName: 'Download Win64 Build'
inputs:
artifact: 'WindowsBuild-x64'
path: $(Agent.TempDirectory)\package
- task: DownloadPipelineArtifact@2
displayName: 'Download Ubuntu Build'
inputs:
artifact: 'UbuntuBuild'
path: $(Agent.TempDirectory)\package
- task: DownloadPipelineArtifact@2
displayName: 'Download macOS Build'
inputs:
artifact: 'macOSBuild'
path: $(Agent.TempDirectory)\package
- task: PythonScript@0
displayName: 'Python: assemble files'
inputs:
scriptSource: 'filepath'
scriptPath: scripts\mk_nuget_task.py
workingDirectory: $(Agent.TempDirectory)\package
arguments:
$(Agent.TempDirectory)\package
$(ReleaseVersion)
$(Build.Repository.Uri)
$(Build.SourceBranchName)
$(Build.SourceVersion)
$(Build.SourcesDirectory)
symbols
- task: NuGetToolInstaller@0
inputs:
versionSpec: 5.x
checkLatest: false
- task: NugetCommand@2
displayName: 'NuGet Pack Symbols'
inputs:
command: custom
arguments: 'pack $(Agent.TempDirectory)\package\out\Microsoft.Z3.sym.nuspec -OutputDirectory $(Build.ArtifactStagingDirectory) -Verbosity detailed -Symbols -SymbolPackageFormat snupkg -BasePath $(Agent.TempDirectory)\package\out'
- task: EsrpCodeSigning@1
displayName: 'Sign Package'
inputs:
ConnectedServiceName: 'z3-esrp-signing-2'
FolderPath: $(Build.ArtifactStagingDirectory)
Pattern: Microsoft.Z3.$(ReleaseVersion).nupkg
signConfigType: 'inlineSignParams'
inlineOperation: |
[
{
"KeyCode" : "CP-401405",
"OperationCode" : "NuGetSign",
"Parameters" : {},
"ToolName" : "sign",
"ToolVersion" : "1.0"
},
{
"KeyCode" : "CP-401405",
"OperationCode" : "NuGetVerify",
"Parameters" : {},
"ToolName" : "sign",
"ToolVersion" : "1.0"
}
]
SessionTimeout: '60'
MaxConcurrency: '50'
MaxRetryAttempts: '5'
- task: EsrpCodeSigning@1
displayName: 'Sign Symbol Package'
inputs:
ConnectedServiceName: 'z3-esrp-signing-2'
FolderPath: $(Build.ArtifactStagingDirectory)
Pattern: Microsoft.Z3.$(ReleaseVersion).snupkg
signConfigType: 'inlineSignParams'
inlineOperation: |
[
{
"KeyCode" : "CP-401405",
"OperationCode" : "NuGetSign",
"Parameters" : {},
"ToolName" : "sign",
"ToolVersion" : "1.0"
},
{
"KeyCode" : "CP-401405",
"OperationCode" : "NuGetVerify",
"Parameters" : {},
"ToolName" : "sign",
"ToolVersion" : "1.0"
}
]
SessionTimeout: '60'
MaxConcurrency: '50'
MaxRetryAttempts: '5'
- task: PublishPipelineArtifact@1
inputs:
targetPath: $(Build.ArtifactStagingDirectory)
artifactName: 'NuGetPackage'
- job: PythonPackage
displayName: "Python packaging"
pool:
vmImage: "ubuntu-latest"
steps:
- task: DownloadPipelineArtifact@2
displayName: 'Download macOS Build'
inputs:
artifact: 'macOSBuild'
path: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download ManyLinux Build'
inputs:
artifact: 'ManyLinuxBuild'
path: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download Win32 Build'
inputs:
artifact: 'WindowsBuild-x86'
path: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download Win64 Build'
inputs:
artifact: 'WindowsBuild-x64'
path: $(Agent.TempDirectory)
- script: cd $(Agent.TempDirectory); mkdir osx-bin; cd osx-bin; unzip ../*osx*.zip
- script: cd $(Agent.TempDirectory); mkdir linux-bin; cd linux-bin; unzip ../*glibc*.zip
- script: cd $(Agent.TempDirectory); mkdir win32-bin; cd win32-bin; unzip ../*x86-win*.zip
- script: cd $(Agent.TempDirectory); mkdir win64-bin; cd win64-bin; unzip ../*x64-win*.zip
- script: python3 -m pip install --user -U setuptools wheel
- script: cd src/api/python; python3 setup.py sdist
# Workaround: Azure Pipelines' $(...) macro syntax collides with bash's $(...) command substitution, so the unpacked release directory is handed to setup.py as PACKAGE_FROM_RELEASE via echo | xargs | env instead of an inline assignment; a plain-shell sketch follows these steps.
- script: cd src/api/python; echo $(Agent.TempDirectory)/osx-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/linux-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/win32-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
- script: cd src/api/python; echo $(Agent.TempDirectory)/win64-bin/* | xargs printf 'PACKAGE_FROM_RELEASE=%s\n' | xargs -I '{}' env '{}' python3 setup.py bdist_wheel
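# Hedged sketch of what each step above amounts to in plain shell once the glob has been
# expanded (the unpacked directory name is a placeholder, not taken from this diff):
#   PACKAGE_FROM_RELEASE="$AGENT_TEMPDIRECTORY/osx-bin/<unpacked-release-dir>" python3 setup.py bdist_wheel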
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'PythonPackage'
targetPath: src/api/python/dist
# Uploads Z3 packages to various package stores
- stage: Publish
jobs:
- job: GitHubPublish
displayName: "Publish to GitHub"
pool:
vmImage: "windows-latest"
steps:
- task: DownloadPipelineArtifact@2
displayName: 'Download Ubuntu Build'
inputs:
artifact: 'UbuntuBuild'
path: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: "Download Doc"
inputs:
artifact: 'UbuntuDoc'
path: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download macOS Build'
inputs:
artifact: 'macOSBuild'
path: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download Win32 Build'
inputs:
artifact: 'WindowsBuild-x86'
path: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: "Download Python"
inputs:
artifactName: 'PythonPackage'
path: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download Win64 Build'
inputs:
artifact: 'WindowsBuild-x64'
path: $(Agent.TempDirectory)
- task: DownloadPipelineArtifact@2
displayName: 'Download NuGet Package'
inputs:
artifact: 'NuGetPackage'
path: $(Agent.TempDirectory)
- task: GitHubRelease@0
inputs:
gitHubConnection: Z3GitHub
repositoryName: $(Build.Repository.Name)
action: 'create'
target: '$(Build.SourceVersion)'
tagSource: 'manual'
tag: 'z3-$(ReleaseVersion)'
title: 'z3-$(ReleaseVersion)'
releaseNotesSource: 'input'
releaseNotes: '$(ReleaseVersion) release'
assets: '$(Agent.TempDirectory)/*.*'
isDraft: true
isPreRelease: false
# Enable on release (after fixing NuGet key)
- job: NuGetPublish
condition: eq(1,0)
displayName: "Publish to NuGet.org"
steps:
- task: DownloadPipelineArtifact@2
displayName: 'Download NuGet Package'
inputs:
artifact: 'NuGetPackage'
path: $(Agent.TempDirectory)
- task: NuGetToolInstaller@0
inputs:
versionSpec: 5.x
checkLatest: false
- task: NuGetCommand@2
inputs:
command: push
nuGetFeedType: External
publishFeedCredentials: Z3Nuget
packagesToPush: $(Agent.TempDirectory)/*.nupkg
# Enable on release:
- job: PyPIPublish
condition: eq(1,1)
displayName: "Publish to PyPI"
pool:
vmImage: "ubuntu-latest"
steps:
- task: DownloadPipelineArtifact@2
inputs:
artifact: 'PythonPackage'
path: dist
- task: DownloadSecureFile@1
name: pypirc
inputs:
secureFile: 'pypirc'
- script: python3 -m pip install --upgrade pip
- script: python3 -m pip install --user -U setuptools importlib_metadata wheel twine
- script: python3 -m twine upload --config-file $(pypirc.secureFilePath) -r $(pypiReleaseServer) dist/*
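# For reference, a minimal .pypirc sketch matching the --config-file/-r usage above.
# Illustration only: the real contents come from the 'pypirc' secure file, and the
# repository section name from the $(pypiReleaseServer) pipeline variable.
#   [distutils]
#   index-servers = <pypiReleaseServer value>
#   [<pypiReleaseServer value>]
#   repository = https://upload.pypi.org/legacy/
#   username = __token__
#   password = pypi-<api token>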

View File

@ -11,3 +11,5 @@ steps:
examples/tptp_build_dir/z3_tptp5 -help
examples/c_maxsat_example_build_dir/c_maxsat_example ../examples/maxsat/ex.smt
cd ..

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python
# - !/usr/bin/env python
############################################
# Copyright (c) 2012 Microsoft Corporation
#
@ -70,13 +70,13 @@ Type2Str = { VOID : 'void', VOID_PTR : 'void*', INT : 'int', UINT : 'unsigned',
Type2PyStr = { VOID_PTR : 'ctypes.c_void_p', INT : 'ctypes.c_int', UINT : 'ctypes.c_uint', INT64 : 'ctypes.c_longlong',
UINT64 : 'ctypes.c_ulonglong', DOUBLE : 'ctypes.c_double', FLOAT : 'ctypes.c_float',
STRING : 'ctypes.c_char_p', STRING_PTR : 'ctypes.POINTER(ctypes.c_char_p)', BOOL : 'ctypes.c_bool', SYMBOL : 'Symbol',
PRINT_MODE : 'ctypes.c_uint', ERROR_CODE : 'ctypes.c_uint', CHAR : 'ctypes.c_char', CHAR_PTR: 'ctypes.c_char_p'
PRINT_MODE : 'ctypes.c_uint', ERROR_CODE : 'ctypes.c_uint', CHAR : 'ctypes.c_char', CHAR_PTR: 'ctypes.POINTER(ctypes.c_char)'
}
# Mapping to .NET types
Type2Dotnet = { VOID : 'void', VOID_PTR : 'IntPtr', INT : 'int', UINT : 'uint', INT64 : 'Int64', UINT64 : 'UInt64', DOUBLE : 'double',
FLOAT : 'float', STRING : 'string', STRING_PTR : 'byte**', BOOL : 'byte', SYMBOL : 'IntPtr',
PRINT_MODE : 'uint', ERROR_CODE : 'uint', CHAR : 'char', CHAR_PTR : 'char*' }
PRINT_MODE : 'uint', ERROR_CODE : 'uint', CHAR : 'char', CHAR_PTR : 'IntPtr' }
# Mapping to Java types
Type2Java = { VOID : 'void', VOID_PTR : 'long', INT : 'int', UINT : 'int', INT64 : 'long', UINT64 : 'long', DOUBLE : 'double',
@ -90,7 +90,7 @@ Type2JavaW = { VOID : 'void', VOID_PTR : 'jlong', INT : 'jint', UINT : 'jint', I
# Mapping to ML types
Type2ML = { VOID : 'unit', VOID_PTR : 'VOIDP', INT : 'int', UINT : 'int', INT64 : 'int', UINT64 : 'int', DOUBLE : 'float',
FLOAT : 'float', STRING : 'string', STRING_PTR : 'char**',
BOOL : 'bool', SYMBOL : 'z3_symbol', PRINT_MODE : 'int', ERROR_CODE : 'int', CHAR : 'char', CHAR_PTR : 'char const*' }
BOOL : 'bool', SYMBOL : 'z3_symbol', PRINT_MODE : 'int', ERROR_CODE : 'int', CHAR : 'char', CHAR_PTR : 'string' }
next_type_id = FIRST_OBJ_ID
@ -308,7 +308,7 @@ def display_args_to_z3(params):
if i > 0:
core_py.write(", ")
if param_type(p) == STRING:
core_py.write("_to_ascii(a%s)" % i)
core_py.write("_str_to_bytes(a%s)" % i)
else:
core_py.write("a%s" % i)
i = i + 1
@ -337,6 +337,30 @@ def Z3_set_error_handler(ctx, hndlr, _elems=Elementaries(_lib.Z3_set_error_handl
_elems.Check(ctx)
return ceh
def Z3_solver_propagate_init(ctx, s, user_ctx, push_eh, pop_eh, fresh_eh, _elems = Elementaries(_lib.Z3_solver_propagate_init)):
_elems.f(ctx, s, user_ctx, push_eh, pop_eh, fresh_eh)
_elems.Check(ctx)
def Z3_solver_propagate_final(ctx, s, final_eh, _elems = Elementaries(_lib.Z3_solver_propagate_final)):
_elems.f(ctx, s, final_eh)
_elems.Check(ctx)
def Z3_solver_propagate_fixed(ctx, s, fixed_eh, _elems = Elementaries(_lib.Z3_solver_propagate_fixed)):
_elems.f(ctx, s, fixed_eh)
_elems.Check(ctx)
def Z3_solver_propagate_eq(ctx, s, eq_eh, _elems = Elementaries(_lib.Z3_solver_propagate_eq)):
_elems.f(ctx, s, eq_eh)
_elems.Check(ctx)
def Z3_solver_propagate_diseq(ctx, s, diseq_eh, _elems = Elementaries(_lib.Z3_solver_propagate_diseq)):
_elems.f(ctx, s, diseq_eh)
_elems.Check(ctx)
def Z3_optimize_register_model_eh(ctx, o, m, user_ctx, on_model_eh, _elems = Elementaries(_lib.Z3_optimize_register_model_eh)):
_elems.f(ctx, o, m, user_ctx, on_model_eh)
_elems.Check(ctx)
""")
for sig in _API2PY:
@ -521,8 +545,13 @@ def mk_java(java_dir, package_name):
java_native.write(' public static native void setInternalErrorHandler(long ctx);\n\n')
java_native.write(' static {\n')
java_native.write(' try { System.loadLibrary("z3java"); }\n')
java_native.write(' catch (UnsatisfiedLinkError ex) { System.loadLibrary("libz3java"); }\n')
java_native.write(' if (null == System.getProperty("z3.skipLibraryLoad")) {\n')
java_native.write(' try {\n')
java_native.write(' System.loadLibrary("z3java");\n')
java_native.write(' } catch (UnsatisfiedLinkError ex) {\n')
java_native.write(' System.loadLibrary("libz3java");\n')
java_native.write(' }\n')
java_native.write(' }\n')
java_native.write(' }\n')
java_native.write('\n')
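# Hedged usage sketch of the generated loader above: setting the z3.skipLibraryLoad
# property skips the automatic System.loadLibrary call so an application can load the
# native library itself ("MyApp" and the jar path are placeholders, not part of this script):
#   java -Dz3.skipLibraryLoad=true -cp com.microsoft.z3.jar:. MyApp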
@ -780,7 +809,7 @@ def mk_js(js_output_dir):
ous.write(" {\n")
ous.write(" \"name\": \"%s\",\n" % name)
ous.write(" \"c_type\": \"%s\",\n" % Type2Str[result])
ous.write(" \"napi_type\": \"%s\",\n" % type2napi(result))
ous.write(" \"napi_type\": \"%s\",\n" % type2napi(result))
ous.write(" \"arg_list\": [")
first = True
for p in params:
@ -793,7 +822,7 @@ def mk_js(js_output_dir):
k = t
ous.write(" \"name\": \"%s\",\n" % "") # TBD
ous.write(" \"c_type\": \"%s\",\n" % type2str(t))
ous.write(" \"napi_type\": \"%s\",\n" % type2napi(t))
ous.write(" \"napi_type\": \"%s\",\n" % type2napi(t))
ous.write(" \"napi_builder\": \"%s\"\n" % type2napibuilder(t))
ous.write( " }")
ous.write("],\n")
@ -967,6 +996,9 @@ def def_API(name, result, params):
elif ty == BOOL:
log_c.write(" I(a%s);\n" % i)
exe_c.write("in.get_bool(%s)" % i)
elif ty == VOID_PTR:
log_c.write(" P(0);\n")
exe_c.write("in.get_obj_addr(%s)" % i)
elif ty == PRINT_MODE or ty == ERROR_CODE:
log_c.write(" U(static_cast<unsigned>(a%s));\n" % i)
exe_c.write("static_cast<%s>(in.get_uint(%s))" % (type2str(ty), i))
@ -1691,7 +1723,7 @@ def write_log_h_preamble(log_h):
log_h.write('#include<atomic>\n')
log_h.write('extern std::ostream * g_z3_log;\n')
log_h.write('extern std::atomic<bool> g_z3_log_enabled;\n')
log_h.write('class z3_log_ctx { bool m_prev; public: z3_log_ctx() { m_prev = g_z3_log_enabled.exchange(false); } ~z3_log_ctx() { g_z3_log_enabled = m_prev; } bool enabled() const { return m_prev; } };\n')
log_h.write('class z3_log_ctx { bool m_prev; public: z3_log_ctx() { m_prev = g_z3_log && g_z3_log_enabled.exchange(false); } ~z3_log_ctx() { if (g_z3_log) g_z3_log_enabled = m_prev; } bool enabled() const { return m_prev; } };\n')
log_h.write('inline void SetR(void * obj) { *g_z3_log << "= " << obj << "\\n"; }\ninline void SetO(void * obj, unsigned pos) { *g_z3_log << "* " << obj << " " << pos << "\\n"; } \ninline void SetAO(void * obj, unsigned pos, unsigned idx) { *g_z3_log << "@ " << obj << " " << pos << " " << idx << "\\n"; }\n')
log_h.write('#define RETURN_Z3(Z3RES) if (_LOG_CTX.enabled()) { SetR(Z3RES); } return Z3RES\n')
log_h.write('void _Z3_append_log(char const * msg);\n')
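# For readability, the single-line RAII guard emitted above expands to the following
# C++ sketch (same behavior as the generated code):
#   class z3_log_ctx {
#       bool m_prev;
#   public:
#       z3_log_ctx() { m_prev = g_z3_log && g_z3_log_enabled.exchange(false); }
#       ~z3_log_ctx() { if (g_z3_log) g_z3_log_enabled = m_prev; }
#       bool enabled() const { return m_prev; }
#   };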
@ -1719,7 +1751,7 @@ del _default_dirs
del _all_dirs
del _ext
""")
def write_core_py_preamble(core_py):
core_py.write(
"""
@ -1738,6 +1770,8 @@ _default_dirs = ['.',
os.path.join(sys.prefix, 'lib'),
None]
_all_dirs = []
# search the default dirs first
_all_dirs.extend(_default_dirs)
if sys.version < '3':
import __builtin__
@ -1754,8 +1788,6 @@ for v in ('Z3_LIBRARY_PATH', 'PATH', 'PYTHONPATH'):
lds = lp.split(';') if sys.platform in ('win32') else lp.split(':')
_all_dirs.extend(lds)
_all_dirs.extend(_default_dirs)
_failures = []
for d in _all_dirs:
try:
@ -1781,7 +1813,7 @@ if _lib is None:
print("Could not find libz3.%s; consider adding the directory containing it to" % _ext)
print(" - your system's PATH environment variable,")
print(" - the Z3_LIBRARY_PATH environment variable, or ")
print(" - to the custom Z3_LIBRARY_DIRS Python-builtin before importing the z3 module, e.g. via")
print(" - to the custom Z3_LIB_DIRS Python-builtin before importing the z3 module, e.g. via")
if sys.version < '3':
print(" import __builtin__")
print(" __builtin__.Z3_LIB_DIRS = [ '/path/to/libz3.%s' ] " % _ext)
@ -1790,25 +1822,24 @@ if _lib is None:
print(" builtins.Z3_LIB_DIRS = [ '/path/to/libz3.%s' ] " % _ext)
raise Z3Exception("libz3.%s not found." % _ext)
def _to_ascii(s):
if isinstance(s, str):
try:
return s.encode('ascii')
except:
# kick the bucket down the road. :-J
return s
else:
return s
if sys.version < '3':
def _str_to_bytes(s):
return s
def _to_pystr(s):
return s
else:
def _str_to_bytes(s):
if isinstance(s, str):
enc = sys.stdout.encoding
return s.encode(enc if enc != None else 'latin-1')
else:
return s
def _to_pystr(s):
if s != None:
enc = sys.stdout.encoding
if enc != None: return s.decode(enc)
else: return s.decode('ascii')
return s.decode(enc if enc != None else 'latin-1')
else:
return ""
@ -1817,6 +1848,33 @@ _error_handler_type = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_uint)
_lib.Z3_set_error_handler.restype = None
_lib.Z3_set_error_handler.argtypes = [ContextObj, _error_handler_type]
push_eh_type = ctypes.CFUNCTYPE(None, ctypes.c_void_p)
pop_eh_type = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_uint)
fresh_eh_type = ctypes.CFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p)
fixed_eh_type = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_uint, ctypes.c_void_p)
final_eh_type = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p)
eq_eh_type = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_uint, ctypes.c_uint)
_lib.Z3_solver_propagate_init.restype = None
_lib.Z3_solver_propagate_init.argtypes = [ContextObj, SolverObj, ctypes.c_void_p, push_eh_type, pop_eh_type, fresh_eh_type]
_lib.Z3_solver_propagate_final.restype = None
_lib.Z3_solver_propagate_final.argtypes = [ContextObj, SolverObj, final_eh_type]
_lib.Z3_solver_propagate_fixed.restype = None
_lib.Z3_solver_propagate_fixed.argtypes = [ContextObj, SolverObj, fixed_eh_type]
_lib.Z3_solver_propagate_eq.restype = None
_lib.Z3_solver_propagate_eq.argtypes = [ContextObj, SolverObj, eq_eh_type]
_lib.Z3_solver_propagate_diseq.restype = None
_lib.Z3_solver_propagate_diseq.argtypes = [ContextObj, SolverObj, eq_eh_type]
on_model_eh_type = ctypes.CFUNCTYPE(None, ctypes.c_void_p)
_lib.Z3_optimize_register_model_eh.restype = None
_lib.Z3_optimize_register_model_eh.argtypes = [ContextObj, OptimizeObj, ModelObj, ctypes.c_void_p, on_model_eh_type]
"""
)
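# Hedged illustration (not emitted into z3core.py): client code would wrap a Python
# function in one of the CFUNCTYPE types declared in the preamble above and keep a
# reference alive while it is registered, e.g.
#   def _on_push(user_ctx):            # matches push_eh_type's void(void*) signature
#       pass
#   _push_cb = push_eh_type(_on_push)  # keep _push_cb referenced until unregistered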
@ -1906,7 +1964,7 @@ def generate_files(api_files,
mk_dotnet_wrappers(dotnet_file)
if mk_util.is_verbose():
print("Generated '{}'".format(dotnet_file.name))
if java_output_dir:
mk_java(java_output_dir, java_package_name)

2
scripts/vsts-mac.sh Normal file → Executable file
View File

@ -2,7 +2,7 @@
cd ..
mkdir build
CSC=/usr/bin/csc GACUTIL=/usr/bin/gacutil CXX=clang++ CC=clang python scripts/mk_make.py --java --python
CXX=clang++ CC=clang python scripts/mk_make.py --java --python
cd build
make
make test-z3

View File

@ -36,9 +36,7 @@ endforeach()
# that has not yet been declared.
add_subdirectory(util)
add_subdirectory(math/polynomial)
add_subdirectory(sat)
add_subdirectory(nlsat)
add_subdirectory(util/lp)
add_subdirectory(math/dd)
add_subdirectory(math/hilbert)
add_subdirectory(math/simplex)
add_subdirectory(math/automata)
@ -46,31 +44,38 @@ add_subdirectory(math/interval)
add_subdirectory(math/realclosure)
add_subdirectory(math/subpaving)
add_subdirectory(ast)
add_subdirectory(params)
add_subdirectory(ast/rewriter)
add_subdirectory(ast/normal_forms)
add_subdirectory(ast/macros)
add_subdirectory(model)
add_subdirectory(tactic)
add_subdirectory(ast/substitution)
add_subdirectory(ast/euf)
add_subdirectory(smt/params)
add_subdirectory(parsers/util)
add_subdirectory(math/grobner)
add_subdirectory(math/euclid)
add_subdirectory(sat)
add_subdirectory(nlsat)
add_subdirectory(tactic/core)
add_subdirectory(math/subpaving/tactic)
add_subdirectory(tactic/aig)
add_subdirectory(solver)
add_subdirectory(sat/tactic)
add_subdirectory(tactic/arith)
add_subdirectory(nlsat/tactic)
add_subdirectory(ackermannization)
add_subdirectory(solver)
add_subdirectory(cmd_context)
add_subdirectory(cmd_context/extra_cmds)
add_subdirectory(parsers/smt2)
add_subdirectory(ast/proofs)
add_subdirectory(ast/fpa)
add_subdirectory(ast/macros)
add_subdirectory(solver/assertions)
add_subdirectory(ast/pattern)
add_subdirectory(ast/rewriter/bit_blaster)
add_subdirectory(smt/params)
add_subdirectory(math/lp)
add_subdirectory(qe/mbp)
add_subdirectory(sat/smt)
add_subdirectory(sat/tactic)
add_subdirectory(nlsat/tactic)
add_subdirectory(ackermannization)
add_subdirectory(ast/proofs)
add_subdirectory(ast/fpa)
add_subdirectory(smt/proto_model)
add_subdirectory(smt)
add_subdirectory(tactic/bv)
@ -96,7 +101,6 @@ add_subdirectory(tactic/portfolio)
add_subdirectory(opt)
add_subdirectory(api)
add_subdirectory(api/dll)
################################################################################
# libz3
################################################################################
@ -111,6 +115,9 @@ else()
set(lib_type "STATIC")
endif()
add_library(libz3 ${lib_type} ${object_files})
target_include_directories(libz3 INTERFACE
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/api>
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>)
set_target_properties(libz3 PROPERTIES
# VERSION determines the version in the filename of the shared library.
# SOVERSION determines the value of the DT_SONAME field on ELF platforms.
@ -119,8 +126,8 @@ set_target_properties(libz3 PROPERTIES
# libz3.so.W.X.
# This indicates that no breaking API changes will be made within a single
# minor version.
VERSION 4
SOVERSION 4)
VERSION ${Z3_VERSION}
SOVERSION ${Z3_VERSION_MAJOR}.${Z3_VERSION_MINOR})
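# Hedged example: with Z3_VERSION=4.8.12 this produces libz3.so.4.8.12 on Linux with
# DT_SONAME libz3.so.4.8 (plus the usual libz3.so.4.8 and libz3.so symlinks), so
# consumers linked against the 4.8 soname keep working across 4.8.x patch releases.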
if (NOT MSVC)
# On UNIX like platforms if we don't change the OUTPUT_NAME
@ -209,12 +216,26 @@ endif()
################################################################################
# Z3 executable
################################################################################
add_subdirectory(shell)
cmake_dependent_option(Z3_BUILD_EXECUTABLE
"Build the z3 executable" ON
"CMAKE_SOURCE_DIR STREQUAL PROJECT_SOURCE_DIR" OFF)
if (Z3_BUILD_EXECUTABLE)
add_subdirectory(shell)
endif()
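# Hedged usage sketch: when Z3 is the top-level project the shell target defaults to ON
# and can be turned off with `cmake -DZ3_BUILD_EXECUTABLE=OFF ..`; when Z3 is consumed
# via add_subdirectory() the dependent option is forced OFF automatically.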
################################################################################
# z3-test
################################################################################
add_subdirectory(test)
cmake_dependent_option(Z3_BUILD_TEST_EXECUTABLES
"Build test executables" ON
"CMAKE_SOURCE_DIR STREQUAL PROJECT_SOURCE_DIR" OFF)
if (Z3_BUILD_TEST_EXECUTABLES)
add_subdirectory(test)
endif()
################################################################################
@ -253,4 +274,16 @@ if (Z3_BUILD_JAVA_BINDINGS)
add_subdirectory(api/java)
endif()
################################################################################
# Julia bindings
################################################################################
option(Z3_BUILD_JULIA_BINDINGS "Build Julia bindings for Z3" OFF)
if (Z3_BUILD_JULIA_BINDINGS)
if (NOT Z3_BUILD_LIBZ3_SHARED)
message(FATAL_ERROR "The Julia bindings will not work with a static libz3."
"You either need to disable Z3_BUILD_JULIA_BINDINGS or enable Z3_BUILD_LIBZ3_SHARED")
endif()
add_subdirectory(api/julia)
endif()
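# Hedged configure sketch using only the options shown above:
#   cmake -DZ3_BUILD_LIBZ3_SHARED=ON -DZ3_BUILD_JULIA_BINDINGS=ON ..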
# TODO: Implement support for other bindings

View File

@ -14,12 +14,10 @@
Revision History:
--*/
#ifndef ACKERMANNIZE_BV_MODEL_CONVERTER_H_
#define ACKERMANNIZE_BV_MODEL_CONVERTER_H_
#pragma once
#include "tactic/model_converter.h"
#include "ackermannization/ackr_info.h"
model_converter * mk_ackermannize_bv_model_converter(ast_manager & m, const ackr_info_ref& info);
#endif /* ACKERMANNIZE_BV_MODEL_CONVERTER_H_ */

View File

@ -32,10 +32,10 @@ public:
~ackermannize_bv_tactic() override { }
void operator()(goal_ref const & g, goal_ref_buffer & result) override {
tactic_report report("ackermannize", *g);
tactic_report report("ackermannize_bv", *g);
fail_if_unsat_core_generation("ackermannize", g);
fail_if_proof_generation("ackermannize", g);
TRACE("ackermannize", g->display(tout << "in\n"););
TRACE("goal", g->display(tout << "in\n"););
ptr_vector<expr> flas;
const unsigned sz = g->size();
@ -58,8 +58,7 @@ public:
}
resg->inc_depth();
TRACE("ackermannize", resg->display(tout << "out\n"););
SASSERT(resg->is_well_sorted());
TRACE("goal", resg->display(tout << "out\n"););
}

View File

@ -14,8 +14,7 @@ Mikolas Janota
Revision History:
--*/
#ifndef _ACKERMANNIZE_TACTIC_H_
#define _ACKERMANNIZE_TACTIC_H_
#pragma once
#include "tactic/tactical.h"
tactic * mk_ackermannize_bv_tactic(ast_manager & m, params_ref const & p);
@ -24,5 +23,4 @@ tactic * mk_ackermannize_bv_tactic(ast_manager & m, params_ref const & p);
ADD_TACTIC("ackermannize_bv", "A tactic for performing full Ackermannization on bv instances.", "mk_ackermannize_bv_tactic(m, p)")
*/
#endif

View File

@ -15,8 +15,7 @@
Revision History:
--*/
#ifndef ACKR_BOUND_PROBE_H_
#define ACKR_BOUND_PROBE_H_
#pragma once
#include "tactic/probe.h"
@ -26,4 +25,3 @@ probe * mk_ackr_bound_probe();
ADD_PROBE("ackr-bound-probe", "A probe to give an upper bound of Ackermann congruence lemmas that a formula might generate.", "mk_ackr_bound_probe()")
*/
#endif /* ACKR_BOUND_PROBE_H_ */

Some files were not shown because too many files have changed in this diff.