diff --git a/.github/workflows/build_linux_arm64_wheels-gh-2.yml b/.github/workflows/build_linux_arm64_wheels-gh-2.yml new file mode 100644 index 00000000000..441e7987857 --- /dev/null +++ b/.github/workflows/build_linux_arm64_wheels-gh-2.yml @@ -0,0 +1,250 @@ +name: Build & Test Linux arm64-2 + +on: + workflow_dispatch: + inputs: + TAG_NAME: + description: 'Release Version Tag' + required: true + release: + types: [created] + push: + branches: + - main + paths-ignore: + - '**/*.md' + pull_request: + types: [opened, synchronize, reopened, ready_for_review] + branches: + - main + paths-ignore: + - '**/*.md' + + +jobs: + build_universal_wheel: + name: Build & Test Universal Wheel (Linux arm64)-2 + runs-on: [self-hosted, linux, arm64, ubuntu-latest] + if: ${{ !github.event.pull_request.draft }} + steps: + - name: Check CPU capabilities and requirements + run: | + echo "=== CPU Information ===" + cat /proc/cpuinfo + echo "" + echo "=== Checking CPU requirements ===" + + if grep -P "^(?=.*atomic)(?=.*ssbs)" /proc/cpuinfo > /dev/null; then + echo "CPU meets minimum requirements (atomic and ssbs flags found)" + else + echo "CPU does not meet minimum requirements" + echo "Missing required flags: atomic and/or ssbs" + echo "This may cause build failures. Consider using -DNO_ARMV81_OR_HIGHER=1" + fi + - name: Install Python build dependencies + run: | + sudo apt-get update + sudo apt-get install -y make build-essential libssl-dev zlib1g-dev \ + libbz2-dev libreadline-dev wget curl llvm \ + libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev \ + libffi-dev liblzma-dev libsqlite3-dev golang-go + - name: Setup pyenv + run: | + # Remove existing pyenv installation if present + rm -rf $HOME/.pyenv + curl https://pyenv.run | bash + export PATH="$HOME/.pyenv/bin:$PATH" + eval "$(pyenv init -)" + pyenv install 3.8:latest + pyenv install 3.13:latest + pyenv global 3.8 3.13 + + # Verify installations + echo "Installed versions:" + pyenv versions + - name: Install dependencies for all Python versions + run: | + export PATH="$HOME/.pyenv/bin:$PATH" + eval "$(pyenv init -)" + for version in 3.8 3.13; do + echo "Installing dependencies for Python $version" + pyenv shell $version + python -m pip install --upgrade pip + python -m pip install setuptools tox pandas pyarrow twine psutil deltalake wheel jupyter nbconvert + pyenv shell --unset + done + - name: Upgrade Rust toolchain + run: | + rustup toolchain install nightly-2025-07-07 + rustup default nightly-2025-07-07 + rustup component add rust-src + rustc --version + cargo --version + - name: Install clang++ for Ubuntu + run: | + pwd + uname -a + wget https://apt.llvm.org/llvm.sh + chmod +x llvm.sh + sudo ./llvm.sh 19 + which clang++-19 + clang++-19 --version + sudo apt-get install -y make cmake ccache ninja-build yasm gawk wget + # Install WebAssembly linker (wasm-ld) + sudo apt-get install -y lld-19 + # Create symlink for wasm-ld + if ! 
command -v wasm-ld &> /dev/null; then + sudo ln -sf /usr/bin/wasm-ld-19 /usr/bin/wasm-ld || true + fi + which wasm-ld || echo "wasm-ld not found in PATH" + ccache -s + - name: Update git + run: | + sudo add-apt-repository ppa:git-core/ppa -y + sudo apt-get update + sudo apt-get install -y git + git --version + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Update submodules + run: | + git submodule update --init --recursive --jobs 4 + - name: Update version for release + if: startsWith(github.ref, 'refs/tags/v') + run: | + export PATH="$HOME/.pyenv/bin:$PATH" + eval "$(pyenv init -)" + pyenv shell 3.13 + + # Install bump-my-version + python -m pip install bump-my-version + TAG_NAME=${GITHUB_REF#refs/tags/v} + bump-my-version replace --new-version $TAG_NAME + echo "Version files updated to $TAG_NAME" + pyenv shell --unset + - name: ccache + uses: hendrikmuhs/ccache-action@v1.2 + with: + key: ubuntu-24.04-aarch64 + max-size: 5G + append-timestamp: true + - name: remove old clang and link clang-19 to clang + run: | + sudo rm -f /usr/bin/clang || true + sudo ln -s /usr/bin/clang-19 /usr/bin/clang + sudo rm -f /usr/bin/clang++ || true + sudo ln -s /usr/bin/clang++-19 /usr/bin/clang++ + which clang++ + clang++ --version + - name: Run chdb/build.sh + timeout-minutes: 600 + run: | + export PATH="$HOME/.pyenv/bin:$PATH" + eval "$(pyenv init -)" + source ~/.cargo/env + pyenv shell 3.8 + export CC=/usr/bin/clang + export CXX=/usr/bin/clang++ + bash ./chdb/build.sh + pyenv shell 3.13 + bash -x ./chdb/test_smoke.sh + continue-on-error: false + - name: Check ccache statistics + run: | + ccache -s + ls -lh chdb + df -h + - name: Build wheels + run: | + export PATH="$HOME/.pyenv/bin:$PATH" + eval "$(pyenv init -)" + export CC=/usr/bin/clang + export CXX=/usr/bin/clang++ + pyenv shell 3.13 + make wheel + - name: Install patchelf from github + run: | + wget https://github.com/NixOS/patchelf/releases/download/0.18.0/patchelf-0.18.0-aarch64.tar.gz -O patchelf.tar.gz + tar -xvf patchelf.tar.gz + sudo cp bin/patchelf /usr/bin/ + sudo chmod +x /usr/bin/patchelf + patchelf --version + - name: Audit wheels + run: | + export PATH="$HOME/.pyenv/bin:$PATH" + eval "$(pyenv init -)" + pyenv shell 3.13 + python -m pip install auditwheel + auditwheel -v repair -w dist/ --plat manylinux_2_17_aarch64 dist/*.whl + continue-on-error: false + - name: Show files + run: | + # e.g: remove chdb-0.11.4-cp310-cp310-linux_aarch64.whl, keep chdb-0.11.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl + sudo rm -f dist/*linux_aarch64.whl + ls -lh dist + shell: bash + - name: Setup core dump collection + run: | + mkdir -p tmp/core + echo "tmp/core/core.%p" | sudo tee /proc/sys/kernel/core_pattern + ulimit -c unlimited + - name: Install GDB + run: | + sudo apt-get update + sudo apt-get install -y gdb + - name: Test wheel with GDB exception catching (loop until failure) + run: | + export PATH="$HOME/.pyenv/bin:$PATH" + eval "$(pyenv init -)" + pyenv shell 3.13 + python -m pip install dist/*.whl --force-reinstall + + echo "Starting loop test with GDB exception catching..." + PYTHON_PATH=$(pyenv which python) + + for i in $(seq 1 5000); do + if ! make test 2>&1; then + echo "FAILED at iteration $i on make test" + exit 1 + fi + + echo "Iteration $i passed" + done + + echo "All 5000 iterations passed!" 
+ pyenv shell --unset + continue-on-error: false + - name: Check and upload core files if present + if: always() + run: | + if ls tmp/core/core.* >/dev/null 2>&1; then + echo "CORE_FILES_FOUND=true" >> $GITHUB_ENV + tar -czvf core-files-linux-aarch64.tar.gz tmp/core/core.* + echo "Core files tar created: core-files-linux-aarch64.tar.gz" + ls -lh core-files-linux-aarch64.tar.gz + else + echo "CORE_FILES_FOUND=false" >> $GITHUB_ENV + echo "No core files found in tmp/core" + fi + continue-on-error: true + - name: Keep killall ccache and wait for ccache to finish + if: always() + run: | + sleep 60 + while ps -ef | grep ccache | grep -v grep; do \ + killall ccache; \ + sleep 10; \ + done + - name: Upload core files if present + if: always() && env.CORE_FILES_FOUND == 'true' + uses: actions/upload-artifact@v4 + with: + name: core-files-linux-aarch64 + path: core-files-linux-aarch64.tar.gz + - uses: actions/upload-artifact@v4 + with: + name: chdb-artifacts-linux-aarch64 + path: | + ./dist/*.whl + overwrite: true diff --git a/.github/workflows/build_linux_arm64_wheels-gh.yml b/.github/workflows/build_linux_arm64_wheels-gh.yml index b2ed42e6897..76769bbc0b6 100644 --- a/.github/workflows/build_linux_arm64_wheels-gh.yml +++ b/.github/workflows/build_linux_arm64_wheels-gh.yml @@ -48,50 +48,6 @@ jobs: libbz2-dev libreadline-dev wget curl llvm \ libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev \ libffi-dev liblzma-dev libsqlite3-dev golang-go - - name: Install GitHub CLI - run: | - wget https://github.com/cli/cli/releases/download/v2.82.1/gh_2.82.1_linux_arm64.tar.gz -O gh.tar.gz - tar -xf gh.tar.gz - sudo cp gh_*/bin/gh /usr/local/bin/ - sudo chmod +x /usr/local/bin/gh - if ! gh --version; then - echo "ERROR: GitHub CLI installation failed!" - exit 1 - fi - echo "GitHub CLI installed successfully" - - name: Scan SQLite vulnerabilities with grype - run: | - # Install grype and required tools - mkdir -p $HOME/.local/bin - curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b $HOME/.local/bin - echo "$HOME/.local/bin" >> $GITHUB_PATH - sudo apt-get update && sudo apt-get install -y jq lsb-release - - # Detect OS distribution info - DISTRO_ID=$(lsb_release -si | tr '[:upper:]' '[:lower:]') - DISTRO_VERSION=$(lsb_release -sr) - echo "Detected OS: $DISTRO_ID:$DISTRO_VERSION" - - # Update grype vulnerability database - $HOME/.local/bin/grype db update - - # Check SQLite vulnerabilities in installed packages - echo "Scanning SQLite packages for vulnerabilities..." - GRYPE_RAW_OUTPUT=$($HOME/.local/bin/grype dir:/var/lib/dpkg --distro "$DISTRO_ID:$DISTRO_VERSION" --scope all-layers 2>/dev/null || true) - echo "Raw grype output:" - echo "$GRYPE_RAW_OUTPUT" - - SQLITE_SCAN_OUTPUT=$(echo "$GRYPE_RAW_OUTPUT" | grep -i sqlite || true) - - if [ -n "$SQLITE_SCAN_OUTPUT" ]; then - echo "❌ SQLite vulnerabilities found in packages! Build should be reviewed." 
- echo "SQLite vulnerability details:" - echo "$SQLITE_SCAN_OUTPUT" - exit 1 - else - echo "✅ No SQLite vulnerabilities found" - fi - continue-on-error: true - name: Setup pyenv run: | # Remove existing pyenv installation if present @@ -100,38 +56,17 @@ jobs: export PATH="$HOME/.pyenv/bin:$PATH" eval "$(pyenv init -)" pyenv install 3.8:latest - pyenv install 3.9:latest - pyenv install 3.10:latest - pyenv install 3.11:latest - pyenv install 3.12:latest pyenv install 3.13:latest - pyenv install 3.14:latest - pyenv global 3.8 3.9 3.10 3.11 3.12 3.13 3.14 + pyenv global 3.8 3.13 # Verify installations echo "Installed versions:" pyenv versions - - name: Verify pyenv installations - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - echo "Installed Python versions:" - pyenv versions - echo "" - echo "Verifying all required Python versions are available:" - for version in 3.8 3.9 3.10 3.11 3.12 3.13 3.14; do - if ! pyenv versions --bare | grep -q "^$version"; then - echo "ERROR: Python $version is not installed!" - exit 1 - fi - echo "✓ Python $version is installed" - done - echo "All Python versions verified successfully!" - name: Install dependencies for all Python versions run: | export PATH="$HOME/.pyenv/bin:$PATH" eval "$(pyenv init -)" - for version in 3.8 3.9 3.10 3.11 3.12 3.13 3.14; do + for version in 3.8 3.13; do echo "Installing dependencies for Python $version" pyenv shell $version python -m pip install --upgrade pip @@ -180,7 +115,7 @@ jobs: run: | export PATH="$HOME/.pyenv/bin:$PATH" eval "$(pyenv init -)" - pyenv shell 3.9 + pyenv shell 3.13 # Install bump-my-version python -m pip install bump-my-version @@ -212,57 +147,9 @@ jobs: export CC=/usr/bin/clang export CXX=/usr/bin/clang++ bash ./chdb/build.sh - pyenv shell 3.8 + pyenv shell 3.13 bash -x ./chdb/test_smoke.sh continue-on-error: false - - name: Run chdb/build/build_static_lib.sh - timeout-minutes: 600 - run: | - export CC=/usr/bin/clang - export CXX=/usr/bin/clang++ - export PATH="$HOME/.pyenv/bin:$PATH" - source ~/.cargo/env - eval "$(pyenv init -)" - pyenv shell 3.8 - bash ./chdb/build/build_static_lib.sh - pyenv shell --unset - continue-on-error: false - - name: Scan chdb libraries with grype - run: | - echo "Scanning chdb libraries for vulnerabilities..." - - # Files to scan - FILES_TO_SCAN="" - [ -f libchdb.so ] && FILES_TO_SCAN="$FILES_TO_SCAN libchdb.so" - [ -f libchdb.a ] && FILES_TO_SCAN="$FILES_TO_SCAN libchdb.a" - FILES_TO_SCAN="$FILES_TO_SCAN $(find chdb/ \( -name "*.so" -o -name "*.dylib" \) 2>/dev/null || true)" - - SQLITE_VULNERABILITIES_FOUND=false - - for file in $FILES_TO_SCAN; do - if [ -f "$file" ]; then - echo "=== Scanning $file ===" - SCAN_OUTPUT=$($HOME/.local/bin/grype "$file" 2>/dev/null || true) - echo "$SCAN_OUTPUT" - - if echo "$SCAN_OUTPUT" | grep -qi sqlite; then - echo "❌ SQLite vulnerability found in $file" - SQLITE_VULNERABILITIES_FOUND=true - fi - fi - done - - if [ "$SQLITE_VULNERABILITIES_FOUND" = true ]; then - echo "❌ SQLite vulnerabilities detected in chdb libraries!" 
- exit 1 - else - echo "✅ No SQLite vulnerabilities found in chdb libraries" - fi - continue-on-error: false - - name: Run libchdb stub in examples dir - run: | - bash -x ./examples/runStub.sh - bash -x ./examples/runArrowTest.sh - name: Check ccache statistics run: | ccache -s @@ -274,7 +161,7 @@ jobs: eval "$(pyenv init -)" export CC=/usr/bin/clang export CXX=/usr/bin/clang++ - pyenv shell 3.8 + pyenv shell 3.13 make wheel - name: Install patchelf from github run: | @@ -287,7 +174,7 @@ jobs: run: | export PATH="$HOME/.pyenv/bin:$PATH" eval "$(pyenv init -)" - pyenv shell 3.9 + pyenv shell 3.13 python -m pip install auditwheel auditwheel -v repair -w dist/ --plat manylinux_2_17_aarch64 dist/*.whl continue-on-error: false @@ -302,26 +189,47 @@ jobs: mkdir -p tmp/core echo "tmp/core/core.%p" | sudo tee /proc/sys/kernel/core_pattern ulimit -c unlimited - - name: Test wheel on all Python versions + - name: Install GDB run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - for version in 3.8 3.9 3.10 3.11 3.12 3.13 3.14; do - echo "Testing chdb on Python $version" - pyenv shell $version - python -m pip install dist/*.whl --force-reinstall - python -c "import chdb; res = chdb.query('select 1112222222,555', 'CSV'); print(f'Python $version: {res}')" - make test - pyenv shell --unset - done - continue-on-error: false - - name: Run notebook tests + sudo apt-get update + sudo apt-get install -y gdb + - name: Test wheel with GDB exception catching (loop until failure) run: | export PATH="$HOME/.pyenv/bin:$PATH" eval "$(pyenv init -)" - pyenv shell 3.9 + pyenv shell 3.13 python -m pip install dist/*.whl --force-reinstall - jupyter nbconvert --to notebook --execute tests/test_data_insertion.ipynb --output test_data_insertion_output.ipynb + + echo "Starting loop test with GDB exception catching..." + PYTHON_PATH=$(pyenv which python) + + for i in $(seq 1 5000); do + # echo "Running test_parallel_dataframe_query.py..." + # if ! gdb -batch -x tests/gdb_catch_throw.gdb --args "$PYTHON_PATH" tests/test_parallel_dataframe_query.py 2>&1; then + # echo "FAILED at iteration $i on test_parallel_dataframe_query.py" + # exit 1 + # fi + + # echo "Running test_issue60.py..." + # if ! gdb -batch -x tests/gdb_catch_throw.gdb --args "$PYTHON_PATH" tests/test_issue60.py 2>&1; then + # echo "FAILED at iteration $i on test_issue60.py" + # exit 1 + # fi + + if ! "$PYTHON_PATH" tests/test_parallel_dataframe_query.py 2>&1; then + echo "FAILED at iteration $i on test_parallel_dataframe_query.py" + exit 1 + fi + + if ! "$PYTHON_PATH" tests/test_issue60.py 2>&1; then + echo "FAILED at iteration $i on test_issue60.py" + exit 1 + fi + + if [ $((i % 100)) -eq 0 ]; then echo "Iteration $i passed"; fi + done + + echo "All 5000 iterations passed!" 
pyenv shell --unset continue-on-error: false - name: Check and upload core files if present @@ -351,49 +259,9 @@ jobs: with: name: core-files-linux-aarch64 path: core-files-linux-aarch64.tar.gz - - name: Upload wheels to release - if: startsWith(github.ref, 'refs/tags/v') - run: | - gh release upload ${{ github.ref_name }} dist/*.whl --clobber - env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} - - name: Packege libchdb.so - run: | - cp programs/local/chdb.h chdb.h - cp programs/local/chdb.hpp chdb.hpp - tar -czvf linux-aarch64-libchdb.tar.gz libchdb.so chdb.h chdb.hpp - - name: Package libchdb.a - run: | - cp programs/local/chdb.h chdb.h - cp programs/local/chdb.hpp chdb.hpp - tar -czvf linux-aarch64-libchdb-static.tar.gz libchdb.a chdb.h chdb.hpp - - name: Upload libchdb.so to release - if: startsWith(github.ref, 'refs/tags/v') - run: | - gh release upload ${{ github.ref_name }} linux-aarch64-libchdb.tar.gz --clobber - env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} - - name: Upload libchdb.a to release - if: startsWith(github.ref, 'refs/tags/v') - run: | - gh release upload ${{ github.ref_name }} linux-aarch64-libchdb-static.tar.gz --clobber - env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} - uses: actions/upload-artifact@v4 with: name: chdb-artifacts-linux-aarch64 path: | ./dist/*.whl - ./linux-aarch64-libchdb.tar.gz - ./linux-aarch64-libchdb-static.tar.gz overwrite: true - - name: Upload pypi - if: startsWith(github.ref, 'refs/tags/v') - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - pyenv shell 3.8 - python -m twine upload dist/*.whl - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} diff --git a/.github/workflows/build_linux_x86_wheels-2.yml b/.github/workflows/build_linux_x86_wheels-2.yml new file mode 100644 index 00000000000..168963a3a5f --- /dev/null +++ b/.github/workflows/build_linux_x86_wheels-2.yml @@ -0,0 +1,235 @@ +name: Build & Test Linux x86_64-2 + +on: + workflow_dispatch: + inputs: + TAG_NAME: + description: 'Release Version Tag' + required: true + release: + types: [created] + push: + branches: + - main + paths-ignore: + - '**/*.md' + pull_request: + types: [opened, synchronize, reopened, ready_for_review] + branches: + - main + paths-ignore: + - '**/*.md' + + +jobs: + build_universal_wheel: + name: Build & Test Universal Wheel (Linux x86_64)-2 + runs-on: [self-hosted, linux, x64, ubuntu-latest] + if: ${{ !github.event.pull_request.draft }} + steps: + - name: Install Python build dependencies + run: | + sudo apt-get update + sudo apt-get install -y make build-essential libssl-dev zlib1g-dev \ + libbz2-dev libreadline-dev wget curl llvm \ + libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev \ + libffi-dev liblzma-dev libsqlite3-dev golang-go + - name: Setup pyenv + run: | + # Remove existing pyenv installation if present + rm -rf $HOME/.pyenv + curl https://pyenv.run | bash + export PATH="$HOME/.pyenv/bin:$PATH" + eval "$(pyenv init -)" + pyenv install 3.8:latest + pyenv install 3.13:latest + pyenv global 3.8 3.13 + + # Verify installations + echo "Installed versions:" + pyenv versions + - name: Install dependencies for all Python versions + run: | + export PATH="$HOME/.pyenv/bin:$PATH" + eval "$(pyenv init -)" + for version in 3.8 3.13; do + echo "Installing dependencies for Python $version" + pyenv shell $version + python -m pip install --upgrade pip + python -m pip install setuptools tox pandas pyarrow twine psutil deltalake wheel jupyter nbconvert + pyenv shell --unset + done + - name: Upgrade Rust toolchain + 
run: | + rustup toolchain install nightly-2025-07-07 + rustup default nightly-2025-07-07 + rustup component add rust-src + rustc --version + cargo --version + - name: Install clang++ for Ubuntu + run: | + pwd + uname -a + wget https://apt.llvm.org/llvm.sh + chmod +x llvm.sh + sudo ./llvm.sh 19 + which clang++-19 + clang++-19 --version + sudo apt-get install -y make cmake ccache ninja-build yasm gawk wget + # Install WebAssembly linker (wasm-ld) + sudo apt-get install -y lld-19 + # Create symlink for wasm-ld + if ! command -v wasm-ld &> /dev/null; then + sudo ln -sf /usr/bin/wasm-ld-19 /usr/bin/wasm-ld || true + fi + which wasm-ld || echo "wasm-ld not found in PATH" + ccache -s + - name: Update git + run: | + sudo add-apt-repository ppa:git-core/ppa -y + sudo apt-get update + sudo apt-get install -y git + git --version + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Update submodules + run: | + git submodule update --init --recursive --jobs 4 + - name: Update version for release + if: startsWith(github.ref, 'refs/tags/v') + run: | + export PATH="$HOME/.pyenv/bin:$PATH" + eval "$(pyenv init -)" + pyenv shell 3.13 + + # Install bump-my-version + python -m pip install bump-my-version + TAG_NAME=${GITHUB_REF#refs/tags/v} + bump-my-version replace --new-version $TAG_NAME + echo "Version files updated to $TAG_NAME" + pyenv shell --unset + - name: ccache + uses: hendrikmuhs/ccache-action@v1.2 + with: + key: ubuntu-22.04-x86_64 + max-size: 5G + append-timestamp: true + - name: remove old clang and link clang-19 to clang + run: | + sudo rm -f /usr/bin/clang || true + sudo ln -s /usr/bin/clang-19 /usr/bin/clang + sudo rm -f /usr/bin/clang++ || true + sudo ln -s /usr/bin/clang++-19 /usr/bin/clang++ + which clang++ + clang++ --version + - name: Run chdb/build.sh + timeout-minutes: 600 + run: | + export PATH="$HOME/.pyenv/bin:$PATH" + eval "$(pyenv init -)" + source ~/.cargo/env + pyenv shell 3.8 + export CC=/usr/bin/clang + export CXX=/usr/bin/clang++ + bash ./chdb/build.sh + pyenv shell 3.13 + bash -x ./chdb/test_smoke.sh + continue-on-error: false + - name: Check ccache statistics + run: | + ccache -s + ls -lh chdb + df -h + - name: Build wheels + run: | + export PATH="$HOME/.pyenv/bin:$PATH" + eval "$(pyenv init -)" + export CC=/usr/bin/clang + export CXX=/usr/bin/clang++ + pyenv shell 3.13 + make wheel + - name: Install patchelf from github + run: | + wget https://github.com/NixOS/patchelf/releases/download/0.18.0/patchelf-0.18.0-x86_64.tar.gz -O patchelf.tar.gz + tar -xvf patchelf.tar.gz + sudo cp bin/patchelf /usr/bin/ + sudo chmod +x /usr/bin/patchelf + patchelf --version + - name: Audit wheels + run: | + export PATH="$HOME/.pyenv/bin:$PATH" + eval "$(pyenv init -)" + pyenv shell 3.13 + python -m pip install auditwheel + auditwheel -v repair -w dist/ --plat manylinux2014_x86_64 dist/*.whl + continue-on-error: false + - name: Show files + run: | + sudo rm -f dist/*-linux_x86_64.whl + ls -lh dist + shell: bash + - name: Setup core dump collection + run: | + mkdir -p tmp/core + echo "tmp/core/core.%p" | sudo tee /proc/sys/kernel/core_pattern + ulimit -c unlimited + - name: Install GDB + run: | + sudo apt-get update + sudo apt-get install -y gdb + - name: Test wheel with GDB exception catching (loop until failure) + run: | + export PATH="$HOME/.pyenv/bin:$PATH" + eval "$(pyenv init -)" + pyenv shell 3.13 + python -m pip install dist/*.whl --force-reinstall + + echo "Starting loop test with GDB exception catching..." 
+ PYTHON_PATH=$(pyenv which python) + + for i in $(seq 1 5000); do + if ! make test 2>&1; then + echo "FAILED at iteration $i on make test" + exit 1 + fi + + echo "Iteration $i passed" + done + + echo "All 5000 iterations passed!" + pyenv shell --unset + continue-on-error: false + - name: Check and upload core files if present + if: always() + run: | + if ls tmp/core/core.* >/dev/null 2>&1; then + echo "CORE_FILES_FOUND=true" >> $GITHUB_ENV + tar -czvf core-files-linux-x86_64.tar.gz tmp/core/core.* + echo "Core files tar created: core-files-linux-x86_64.tar.gz" + ls -lh core-files-linux-x86_64.tar.gz + else + echo "CORE_FILES_FOUND=false" >> $GITHUB_ENV + echo "No core files found in tmp/core" + fi + continue-on-error: true + - name: Keep killall ccache and wait for ccache to finish + if: always() + run: | + sleep 60 + while ps -ef | grep ccache | grep -v grep; do \ + killall ccache; \ + sleep 10; \ + done + - name: Upload core files artifact + if: always() && env.CORE_FILES_FOUND == 'true' + uses: actions/upload-artifact@v4 + with: + name: core-files-linux-x86_64 + path: core-files-linux-x86_64.tar.gz + - uses: actions/upload-artifact@v4 + with: + name: chdb-artifacts-linux-x86_64 + path: | + ./dist/*.whl + overwrite: true diff --git a/.github/workflows/build_linux_x86_wheels.yml b/.github/workflows/build_linux_x86_wheels.yml index 6d06b93b753..62bfb8d3a6b 100644 --- a/.github/workflows/build_linux_x86_wheels.yml +++ b/.github/workflows/build_linux_x86_wheels.yml @@ -34,50 +34,6 @@ jobs: libbz2-dev libreadline-dev wget curl llvm \ libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev \ libffi-dev liblzma-dev libsqlite3-dev golang-go - - name: Install GitHub CLI - run: | - wget https://github.com/cli/cli/releases/download/v2.82.1/gh_2.82.1_linux_amd64.tar.gz -O gh.tar.gz - tar -xf gh.tar.gz - sudo cp gh_*/bin/gh /usr/local/bin/ - sudo chmod +x /usr/local/bin/gh - if ! gh --version; then - echo "ERROR: GitHub CLI installation failed!" - exit 1 - fi - echo "GitHub CLI installed successfully" - - name: Scan SQLite vulnerabilities with grype - run: | - # Install grype and required tools - mkdir -p $HOME/.local/bin - curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b $HOME/.local/bin - echo "$HOME/.local/bin" >> $GITHUB_PATH - sudo apt-get update && sudo apt-get install -y jq lsb-release - - # Detect OS distribution info - DISTRO_ID=$(lsb_release -si | tr '[:upper:]' '[:lower:]') - DISTRO_VERSION=$(lsb_release -sr) - echo "Detected OS: $DISTRO_ID:$DISTRO_VERSION" - - # Update grype vulnerability database - $HOME/.local/bin/grype db update - - # Check SQLite vulnerabilities in installed packages - echo "Scanning SQLite packages for vulnerabilities..." - GRYPE_RAW_OUTPUT=$($HOME/.local/bin/grype dir:/var/lib/dpkg --distro "$DISTRO_ID:$DISTRO_VERSION" --scope all-layers 2>/dev/null || true) - echo "Raw grype output:" - echo "$GRYPE_RAW_OUTPUT" - - SQLITE_SCAN_OUTPUT=$(echo "$GRYPE_RAW_OUTPUT" | grep -i sqlite || true) - - if [ -n "$SQLITE_SCAN_OUTPUT" ]; then - echo "❌ SQLite vulnerabilities found in packages! Build should be reviewed." 
- echo "SQLite vulnerability details:" - echo "$SQLITE_SCAN_OUTPUT" - exit 1 - else - echo "✅ No SQLite vulnerabilities found" - fi - continue-on-error: true - name: Setup pyenv run: | # Remove existing pyenv installation if present @@ -86,38 +42,17 @@ jobs: export PATH="$HOME/.pyenv/bin:$PATH" eval "$(pyenv init -)" pyenv install 3.8:latest - pyenv install 3.9:latest - pyenv install 3.10:latest - pyenv install 3.11:latest - pyenv install 3.12:latest pyenv install 3.13:latest - pyenv install 3.14:latest - pyenv global 3.8 3.9 3.10 3.11 3.12 3.13 3.14 + pyenv global 3.8 3.13 # Verify installations echo "Installed versions:" pyenv versions - - name: Verify pyenv installations - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - echo "Installed Python versions:" - pyenv versions - echo "" - echo "Verifying all required Python versions are available:" - for version in 3.8 3.9 3.10 3.11 3.12 3.13 3.14; do - if ! pyenv versions --bare | grep -q "^$version"; then - echo "ERROR: Python $version is not installed!" - exit 1 - fi - echo "✓ Python $version is installed" - done - echo "All Python versions verified successfully!" - name: Install dependencies for all Python versions run: | export PATH="$HOME/.pyenv/bin:$PATH" eval "$(pyenv init -)" - for version in 3.8 3.9 3.10 3.11 3.12 3.13 3.14; do + for version in 3.8 3.13; do echo "Installing dependencies for Python $version" pyenv shell $version python -m pip install --upgrade pip @@ -166,7 +101,7 @@ jobs: run: | export PATH="$HOME/.pyenv/bin:$PATH" eval "$(pyenv init -)" - pyenv shell 3.9 + pyenv shell 3.13 # Install bump-my-version python -m pip install bump-my-version @@ -198,57 +133,9 @@ jobs: export CC=/usr/bin/clang export CXX=/usr/bin/clang++ bash ./chdb/build.sh - pyenv shell 3.8 + pyenv shell 3.13 bash -x ./chdb/test_smoke.sh continue-on-error: false - - name: Run chdb/build/build_static_lib.sh - timeout-minutes: 600 - run: | - export CC=/usr/bin/clang - export CXX=/usr/bin/clang++ - export PATH="$HOME/.pyenv/bin:$PATH" - source ~/.cargo/env - eval "$(pyenv init -)" - pyenv shell 3.8 - bash ./chdb/build/build_static_lib.sh - pyenv shell --unset - continue-on-error: false - - name: Scan chdb libraries with grype - run: | - echo "Scanning chdb libraries for vulnerabilities..." - - # Files to scan - FILES_TO_SCAN="" - [ -f libchdb.so ] && FILES_TO_SCAN="$FILES_TO_SCAN libchdb.so" - [ -f libchdb.a ] && FILES_TO_SCAN="$FILES_TO_SCAN libchdb.a" - FILES_TO_SCAN="$FILES_TO_SCAN $(find chdb/ \( -name "*.so" -o -name "*.dylib" \) 2>/dev/null || true)" - - SQLITE_VULNERABILITIES_FOUND=false - - for file in $FILES_TO_SCAN; do - if [ -f "$file" ]; then - echo "=== Scanning $file ===" - SCAN_OUTPUT=$($HOME/.local/bin/grype "$file" 2>/dev/null || true) - echo "$SCAN_OUTPUT" - - if echo "$SCAN_OUTPUT" | grep -qi sqlite; then - echo "❌ SQLite vulnerability found in $file" - SQLITE_VULNERABILITIES_FOUND=true - fi - fi - done - - if [ "$SQLITE_VULNERABILITIES_FOUND" = true ]; then - echo "❌ SQLite vulnerabilities detected in chdb libraries!" 
- exit 1 - else - echo "✅ No SQLite vulnerabilities found in chdb libraries" - fi - continue-on-error: false - - name: Run libchdb stub in examples dir - run: | - bash -x ./examples/runStub.sh - bash -x ./examples/runArrowTest.sh - name: Check ccache statistics run: | ccache -s @@ -260,7 +147,7 @@ jobs: eval "$(pyenv init -)" export CC=/usr/bin/clang export CXX=/usr/bin/clang++ - pyenv shell 3.8 + pyenv shell 3.13 make wheel - name: Install patchelf from github run: | @@ -273,7 +160,7 @@ jobs: run: | export PATH="$HOME/.pyenv/bin:$PATH" eval "$(pyenv init -)" - pyenv shell 3.9 + pyenv shell 3.13 python -m pip install auditwheel auditwheel -v repair -w dist/ --plat manylinux2014_x86_64 dist/*.whl continue-on-error: false @@ -287,26 +174,46 @@ jobs: mkdir -p tmp/core echo "tmp/core/core.%p" | sudo tee /proc/sys/kernel/core_pattern ulimit -c unlimited - - name: Test wheel on all Python versions + - name: Install GDB run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - for version in 3.8 3.9 3.10 3.11 3.12 3.13 3.14; do - echo "Testing chdb on Python $version" - pyenv shell $version - python -m pip install dist/*.whl --force-reinstall - python -c "import chdb; res = chdb.query('select 1112222222,555', 'CSV'); print(f'Python $version: {res}')" - make test - pyenv shell --unset - done - continue-on-error: false - - name: Run notebook tests + sudo apt-get update + sudo apt-get install -y gdb + - name: Test wheel with GDB exception catching (loop until failure) run: | export PATH="$HOME/.pyenv/bin:$PATH" eval "$(pyenv init -)" - pyenv shell 3.9 + pyenv shell 3.13 python -m pip install dist/*.whl --force-reinstall - jupyter nbconvert --to notebook --execute tests/test_data_insertion.ipynb --output test_data_insertion_output.ipynb + + echo "Starting loop test with GDB exception catching..." + PYTHON_PATH=$(pyenv which python) + + for i in $(seq 1 5000); do + # echo "Running test_parallel_dataframe_query.py..." + # if ! gdb -batch -x tests/gdb_catch_throw.gdb --args "$PYTHON_PATH" tests/test_parallel_dataframe_query.py 2>&1; then + # echo "FAILED at iteration $i on test_parallel_dataframe_query.py" + # exit 1 + # fi + + # echo "Running test_issue60.py..." + # if ! gdb -batch -x tests/gdb_catch_throw.gdb --args "$PYTHON_PATH" tests/test_issue60.py 2>&1; then + # echo "FAILED at iteration $i on test_issue60.py" + # exit 1 + # fi + if ! "$PYTHON_PATH" tests/test_parallel_dataframe_query.py 2>&1; then + echo "FAILED at iteration $i on test_parallel_dataframe_query.py" + exit 1 + fi + + if ! "$PYTHON_PATH" tests/test_issue60.py 2>&1; then + echo "FAILED at iteration $i on test_issue60.py" + exit 1 + fi + + if [ $((i % 100)) -eq 0 ]; then echo "Iteration $i passed"; fi + done + + echo "All 5000 iterations passed!" 
pyenv shell --unset continue-on-error: false - name: Check and upload core files if present @@ -336,49 +243,9 @@ jobs: with: name: core-files-linux-x86_64 path: core-files-linux-x86_64.tar.gz - - name: Upload wheels to release - if: startsWith(github.ref, 'refs/tags/v') - run: | - gh release upload ${{ github.ref_name }} dist/*.whl --clobber - env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} - - name: Packege libchdb.so - run: | - cp programs/local/chdb.h chdb.h - cp programs/local/chdb.hpp chdb.hpp - tar -czvf linux-x86_64-libchdb.tar.gz libchdb.so chdb.h chdb.hpp - - name: Package libchdb.a - run: | - cp programs/local/chdb.h chdb.h - cp programs/local/chdb.hpp chdb.hpp - tar -czvf linux-x86_64-libchdb-static.tar.gz libchdb.a chdb.h chdb.hpp - - name: Upload libchdb.so to release - if: startsWith(github.ref, 'refs/tags/v') - run: | - gh release upload ${{ github.ref_name }} linux-x86_64-libchdb.tar.gz --clobber - env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} - - name: Upload libchdb.a to release - if: startsWith(github.ref, 'refs/tags/v') - run: | - gh release upload ${{ github.ref_name }} linux-x86_64-libchdb-static.tar.gz --clobber - env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} - uses: actions/upload-artifact@v4 with: name: chdb-artifacts-linux-x86_64 path: | ./dist/*.whl - ./linux-x86_64-libchdb.tar.gz - ./linux-x86_64-libchdb-static.tar.gz overwrite: true - - name: Upload pypi - if: startsWith(github.ref, 'refs/tags/v') - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - pyenv shell 3.8 - python -m twine upload dist/*.whl - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} diff --git a/.github/workflows/build_macos_arm64_wheels.yml b/.github/workflows/build_macos_arm64_wheels.yml deleted file mode 100644 index 45f4b8419f1..00000000000 --- a/.github/workflows/build_macos_arm64_wheels.yml +++ /dev/null @@ -1,378 +0,0 @@ -name: Build & Test macOS arm64 - -on: - workflow_dispatch: - inputs: - TAG_NAME: - description: 'Release Version Tag' - required: true - release: - types: [created] - push: - branches: - - main - paths-ignore: - - '**/*.md' - pull_request: - types: [opened, synchronize, reopened, ready_for_review] - branches: - - main - paths-ignore: - - '**/*.md' - -jobs: - build_universal_wheel_on_linux: - name: Build on Linux (cross-compile for macOS arm64) - runs-on: [self-hosted, linux, arm64, ubuntu-latest] - if: ${{ !github.event.pull_request.draft }} - timeout-minutes: 600 - steps: - - name: Install Python build dependencies - run: | - sudo apt-get update - sudo apt-get install -y make build-essential libssl-dev zlib1g-dev \ - libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm \ - libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev \ - libffi-dev liblzma-dev p7zip-full - - name: Upgrade Rust toolchain - run: | - rustup toolchain install nightly-2025-07-07 - rustup default nightly-2025-07-07 - rustup component add rust-src - rustc --version - cargo --version - - name: Install clang++ for Ubuntu - run: | - pwd - uname -a - wget https://apt.llvm.org/llvm.sh - chmod +x llvm.sh - sudo ./llvm.sh 19 - which clang++-19 - clang++-19 --version - sudo apt-get install -y make cmake ccache ninja-build yasm gawk wget - # Install WebAssembly linker (wasm-ld) - sudo apt-get install -y lld-19 - # Create symlink for wasm-ld - if ! 
command -v wasm-ld &> /dev/null; then - sudo ln -sf /usr/bin/wasm-ld-19 /usr/bin/wasm-ld || true - fi - which wasm-ld || echo "wasm-ld not found in PATH" - ccache -s - - name: Update git - run: | - sudo add-apt-repository ppa:git-core/ppa -y - sudo apt-get update - sudo apt-get install -y git - git --version - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: Update submodules - run: | - git submodule update --init --recursive --jobs 4 - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ubuntu-24.04-aarch64-cross-compile - max-size: 5G - append-timestamp: true - - name: remove old clang and link clang-19 to clang - run: | - sudo rm -f /usr/bin/clang || true - sudo ln -s /usr/bin/clang-19 /usr/bin/clang - sudo rm -f /usr/bin/clang++ || true - sudo ln -s /usr/bin/clang++-19 /usr/bin/clang++ - which clang++ - clang++ --version - - name: Run chdb/build_mac_on_linux.sh - timeout-minutes: 600 - run: | - source ~/.cargo/env - bash ./chdb/build_mac_on_linux.sh arm64 - continue-on-error: false - - name: Run chdb/build/build_static_lib_mac_on_linux.sh - timeout-minutes: 600 - run: | - source ~/.cargo/env - bash ./chdb/build/build_static_lib_mac_on_linux.sh arm64 - continue-on-error: false - - name: Check ccache statistics - run: | - ccache -s - ls -lh chdb - df -h - - name: Keep killall ccache and wait for ccache to finish - if: always() - run: | - sleep 60 - while ps -ef | grep ccache | grep -v grep; do \ - killall ccache; \ - sleep 10; \ - done - - name: Upload build artifacts - uses: actions/upload-artifact@v4 - with: - name: macos-arm64-build-artifacts - path: | - ./libchdb.so - ./libchdb.a - ./chdb/_chdb.abi3.so - ./chdb/libpybind11nonlimitedapi_stubs.dylib - ./chdb/libpybind11nonlimitedapi_chdb_3.8.dylib - ./chdb/libpybind11nonlimitedapi_chdb_3.9.dylib - ./chdb/libpybind11nonlimitedapi_chdb_3.10.dylib - ./chdb/libpybind11nonlimitedapi_chdb_3.11.dylib - ./chdb/libpybind11nonlimitedapi_chdb_3.12.dylib - ./chdb/libpybind11nonlimitedapi_chdb_3.13.dylib - ./chdb/libpybind11nonlimitedapi_chdb_3.14.dylib - retention-days: 1 - - test_on_macos: - name: Test on macOS arm64 - runs-on: macos-14-xlarge - needs: build_universal_wheel_on_linux - if: ${{ !github.event.pull_request.draft }} - timeout-minutes: 600 - steps: - - name: Check machine architecture - run: | - echo "=== Machine Architecture Information ===" - echo "Machine type: $(uname -m)" - echo "Architecture: $(arch)" - echo "System info: $(uname -a)" - echo "Hardware info:" - system_profiler SPHardwareDataType | grep "Chip\|Processor" - if sysctl -n hw.optional.arm64 2>/dev/null | grep -q "1"; then - echo "This is an ARM64 (Apple Silicon) machine" - else - echo "This is an x86_64 (Intel) machine" - fi - - name: Setup pyenv - run: | - curl https://pyenv.run | bash - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - - pyenv install 3.8:latest - pyenv install 3.9:latest - pyenv install 3.10:latest - pyenv install 3.11:latest - pyenv install 3.12:latest - pyenv install 3.13:latest - pyenv install 3.14:latest - pyenv global 3.8 3.9 3.10 3.11 3.12 3.13 3.14 - - echo "Installed versions:" - pyenv versions - - name: Verify pyenv installations - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - echo "Installed Python versions:" - pyenv versions - echo "" - echo "Verifying all required Python versions are available:" - for version in 3.8 3.9 3.10 3.11 3.12 3.13 3.14; do - if ! pyenv versions --bare | grep -q "^$version"; then - echo "ERROR: Python $version is not installed!" 
- exit 1 - fi - echo "✓ Python $version is installed" - done - echo "All Python versions verified successfully!" - - name: Install dependencies for all Python versions - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - for version in 3.8 3.9 3.10 3.11 3.12 3.13 3.14; do - echo "Installing dependencies for Python $version" - pyenv shell $version - python -m pip install --upgrade pip - python -m pip install setuptools wheel tox pandas pyarrow twine psutil deltalake wheel>=0.40.0 jupyter nbconvert - pyenv shell --unset - done - - name: Remove /usr/local/bin/python3 - run: | - sudo rm -f /usr/local/bin/python3 - - name: Install go for macOS - run: | - brew update - brew install go - go version - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: Update version for release - if: startsWith(github.ref, 'refs/tags/v') - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - pyenv shell 3.9 - - # Install bump-my-version - python -m pip install bump-my-version - TAG_NAME=${GITHUB_REF#refs/tags/v} - bump-my-version replace --new-version $TAG_NAME - echo "Version files updated to $TAG_NAME" - pyenv shell --unset - - name: Download build artifacts - uses: actions/download-artifact@v4 - with: - name: macos-arm64-build-artifacts - path: ./artifacts - - name: Restore artifacts to original paths - run: | - mv ./artifacts/libchdb.so ./ - mv ./artifacts/libchdb.a ./ - mv ./artifacts/chdb/_chdb.abi3.so ./chdb/ - mv ./artifacts/chdb/libpybind11nonlimitedapi_stubs.dylib ./chdb/ - for v in 8 9 10 11 12 13 14; do - mv ./artifacts/chdb/libpybind11nonlimitedapi_chdb_3.${v}.dylib ./chdb/ - done - ls -lh ./libchdb.so ./libchdb.a - ls -lh ./chdb/*.so ./chdb/*.dylib - - name: Sign macOS binaries - run: | - echo "Signing cross-compiled binaries for macOS..." - codesign -f -s - ./libchdb.so - codesign -f -s - ./chdb/_chdb.abi3.so - codesign -f -s - ./chdb/libpybind11nonlimitedapi_stubs.dylib - for f in ./chdb/libpybind11nonlimitedapi_chdb_3.*.dylib; do - codesign -f -s - "$f" - done - echo "Verifying signatures..." 
- codesign -dvv ./libchdb.so - codesign -dvv ./chdb/_chdb.abi3.so - - name: Run chdb/test_smoke.sh - timeout-minutes: 600 - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - pyenv shell 3.8 - bash gen_manifest.sh - bash -x ./chdb/test_smoke.sh cross-compile - - name: Run chdb/build/test_go_example.sh - timeout-minutes: 600 - run: | - bash ./chdb/build/test_go_example.sh ${{ github.workspace }}/libchdb.a - continue-on-error: false - - name: Run libchdb stub in examples dir - run: | - bash -x ./examples/runStub.sh - bash -x ./examples/runArrowTest.sh - - name: Build wheels - run: | - rm -rf chdb/build/ - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - pyenv shell 3.8 - make wheel - - name: Fix wheel platform tag - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - pyenv shell 3.8 - python -m wheel tags --platform-tag=macosx_11_0_arm64 --remove dist/*.whl - - name: Verify wheel sizes - run: | - echo "=== Wheel sizes ===" - du -sh dist/* - - name: Setup core dump collection - run: | - mkdir -p tmp/core - sudo sysctl kern.corefile=$PWD/tmp/core/core.%P - sudo sysctl kern.coredump=1 - ulimit -c unlimited - - name: Test wheel on all Python versions - run: | - ulimit -c unlimited - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - for version in 3.8 3.9 3.10 3.11 3.12 3.13 3.14; do - echo "Testing chdb on Python $version" - pyenv shell $version - python -m pip install dist/*.whl --force-reinstall --no-cache-dir - python -c "import chdb; res = chdb.query('select 1112222222,555', 'CSV'); print(f'Python $version: {res}')" - make test - python -m pip uninstall -y chdb - pyenv shell --unset - done - continue-on-error: false - - name: Run notebook tests - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - pyenv shell 3.9 - python -m pip install dist/*.whl --force-reinstall - jupyter nbconvert --to notebook --execute tests/test_data_insertion.ipynb --output test_data_insertion_output.ipynb - pyenv shell --unset - continue-on-error: false - - name: Check and upload core files if present - if: always() - run: | - if ls tmp/core/core.* >/dev/null 2>&1; then - echo "CORE_FILES_FOUND=true" >> $GITHUB_ENV - tar -czvf core-files-macos-arm64.tar.gz tmp/core/core.* - echo "Core files tar created: core-files-macos-arm64.tar.gz" - ls -lh core-files-macos-arm64.tar.gz - else - echo "CORE_FILES_FOUND=false" >> $GITHUB_ENV - echo "No core files found in tmp/core" - fi - continue-on-error: true - - name: Upload core files artifact - if: always() && env.CORE_FILES_FOUND == 'true' - uses: actions/upload-artifact@v4 - with: - name: core-files-macos-arm64 - path: core-files-macos-arm64.tar.gz - - name: Show files - run: ls -lh dist - shell: bash - - name: Upload wheels to release - if: startsWith(github.ref, 'refs/tags/v') - run: | - gh release upload ${{ github.ref_name }} dist/*.whl --clobber - env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} - - name: Packege libchdb.so - run: | - cp programs/local/chdb.h chdb.h - cp programs/local/chdb.hpp chdb.hpp - tar -czvf macos-arm64-libchdb.tar.gz libchdb.so chdb.h chdb.hpp - - name: Package libchdb.a - run: | - cp programs/local/chdb.h chdb.h - cp programs/local/chdb.hpp chdb.hpp - tar -czvf macos-arm64-libchdb-static.tar.gz libchdb.a chdb.h chdb.hpp - - name: Upload libchdb.so to release - if: startsWith(github.ref, 'refs/tags/v') - run: | - gh release upload ${{ github.ref_name }} macos-arm64-libchdb.tar.gz --clobber - env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} - - name: Upload 
libchdb.a to release - if: startsWith(github.ref, 'refs/tags/v') - run: | - gh release upload ${{ github.ref_name }} macos-arm64-libchdb-static.tar.gz --clobber - env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} - - uses: actions/upload-artifact@v4 - with: - name: chdb-artifacts-macos-arm64 - path: | - ./dist/*.whl - ./macos-arm64-libchdb.tar.gz - ./macos-arm64-libchdb-static.tar.gz - overwrite: true - - name: Upload pypi - if: startsWith(github.ref, 'refs/tags/v') - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - pyenv shell 3.8 - python -m twine upload dist/*.whl - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} diff --git a/.github/workflows/build_macos_x86_wheels.yml b/.github/workflows/build_macos_x86_wheels.yml deleted file mode 100644 index db03cc3d512..00000000000 --- a/.github/workflows/build_macos_x86_wheels.yml +++ /dev/null @@ -1,376 +0,0 @@ -name: Build & Test macOS x86_64 - -on: - workflow_dispatch: - inputs: - TAG_NAME: - description: 'Release Version Tag' - required: true - release: - types: [created] - push: - branches: - - main - paths-ignore: - - '**/*.md' - pull_request: - types: [opened, synchronize, reopened, ready_for_review] - branches: - - main - paths-ignore: - - '**/*.md' - -jobs: - build_universal_wheel_on_linux: - name: Build on Linux (cross-compile for macOS x86_64) - runs-on: [self-hosted, linux, x64, ubuntu-latest] - if: ${{ !github.event.pull_request.draft }} - timeout-minutes: 600 - steps: - - name: Install Python build dependencies - run: | - sudo apt-get update - sudo apt-get install -y make build-essential libssl-dev zlib1g-dev \ - libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm \ - libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev \ - libffi-dev liblzma-dev p7zip-full - - name: Upgrade Rust toolchain - run: | - rustup toolchain install nightly-2025-07-07 - rustup default nightly-2025-07-07 - rustup component add rust-src - rustc --version - cargo --version - - name: Install clang++ for Ubuntu - run: | - pwd - uname -a - wget https://apt.llvm.org/llvm.sh - chmod +x llvm.sh - sudo ./llvm.sh 19 - which clang++-19 - clang++-19 --version - sudo apt-get install -y make cmake ccache ninja-build yasm gawk wget - # Install WebAssembly linker (wasm-ld) - sudo apt-get install -y lld-19 - # Create symlink for wasm-ld - if ! 
command -v wasm-ld &> /dev/null; then - sudo ln -sf /usr/bin/wasm-ld-19 /usr/bin/wasm-ld || true - fi - which wasm-ld || echo "wasm-ld not found in PATH" - ccache -s - - name: Update git - run: | - sudo add-apt-repository ppa:git-core/ppa -y - sudo apt-get update - sudo apt-get install -y git - git --version - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: Update submodules - run: | - git submodule update --init --recursive --jobs 4 - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: ubuntu-22.04-x86_64-cross-compile - max-size: 5G - append-timestamp: true - - name: remove old clang and link clang-19 to clang - run: | - sudo rm -f /usr/bin/clang || true - sudo ln -s /usr/bin/clang-19 /usr/bin/clang - sudo rm -f /usr/bin/clang++ || true - sudo ln -s /usr/bin/clang++-19 /usr/bin/clang++ - which clang++ - clang++ --version - - name: Run chdb/build_mac_on_linux.sh - timeout-minutes: 600 - run: | - source ~/.cargo/env - bash ./chdb/build_mac_on_linux.sh x86_64 - continue-on-error: false - - name: Run chdb/build/build_static_lib_mac_on_linux.sh - timeout-minutes: 600 - run: | - source ~/.cargo/env - bash ./chdb/build/build_static_lib_mac_on_linux.sh x86_64 - continue-on-error: false - - name: Check ccache statistics - run: | - ccache -s - ls -lh chdb - df -h - - name: Keep killall ccache and wait for ccache to finish - if: always() - run: | - sleep 60 - while ps -ef | grep ccache | grep -v grep; do \ - killall ccache; \ - sleep 10; \ - done - - name: Upload build artifacts - uses: actions/upload-artifact@v4 - with: - name: macos-x86_64-build-artifacts - path: | - ./libchdb.so - ./libchdb.a - ./chdb/_chdb.abi3.so - ./chdb/libpybind11nonlimitedapi_stubs.dylib - ./chdb/libpybind11nonlimitedapi_chdb_3.8.dylib - ./chdb/libpybind11nonlimitedapi_chdb_3.9.dylib - ./chdb/libpybind11nonlimitedapi_chdb_3.10.dylib - ./chdb/libpybind11nonlimitedapi_chdb_3.11.dylib - ./chdb/libpybind11nonlimitedapi_chdb_3.12.dylib - ./chdb/libpybind11nonlimitedapi_chdb_3.13.dylib - ./chdb/libpybind11nonlimitedapi_chdb_3.14.dylib - retention-days: 1 - - test_on_macos: - name: Test on macOS x86_64 - runs-on: macos-15-intel - needs: build_universal_wheel_on_linux - if: ${{ !github.event.pull_request.draft }} - timeout-minutes: 600 - steps: - - name: Check machine architecture - run: | - echo "=== Machine Architecture Information ===" - echo "Machine type: $(uname -m)" - echo "Architecture: $(arch)" - echo "System info: $(uname -a)" - echo "Hardware info:" - system_profiler SPHardwareDataType | grep "Chip\|Processor" - if sysctl -n hw.optional.arm64 2>/dev/null | grep -q "1"; then - echo "This is an ARM64 (Apple Silicon) machine" - else - echo "This is an x86_64 (Intel) machine" - fi - - name: Setup pyenv - run: | - curl https://pyenv.run | bash - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - - pyenv install 3.9:latest - pyenv install 3.10:latest - pyenv install 3.11:latest - pyenv install 3.12:latest - pyenv install 3.13:latest - pyenv install 3.14:latest - pyenv global 3.9 3.10 3.11 3.12 3.13 3.14 - - echo "Installed versions:" - pyenv versions - - name: Verify pyenv installations - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - echo "Installed Python versions:" - pyenv versions - echo "" - echo "Verifying all required Python versions are available:" - for version in 3.9 3.10 3.11 3.12 3.13 3.14; do - if ! pyenv versions --bare | grep -q "^$version"; then - echo "ERROR: Python $version is not installed!" 
- exit 1 - fi - echo "✓ Python $version is installed" - done - echo "All Python versions verified successfully!" - - name: Install dependencies for all Python versions - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - for version in 3.9 3.10 3.11 3.12 3.13 3.14; do - echo "Installing dependencies for Python $version" - pyenv shell $version - python -m pip install --upgrade pip - python -m pip install setuptools tox pandas pyarrow twine psutil deltalake wheel>=0.40.0 jupyter nbconvert - pyenv shell --unset - done - - name: Remove /usr/local/bin/python3 - run: | - sudo rm -f /usr/local/bin/python3 - - name: Install go for macOS - run: | - brew update - brew install go - go version - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: Update version for release - if: startsWith(github.ref, 'refs/tags/v') - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - pyenv shell 3.9 - - # Install bump-my-version - python -m pip install bump-my-version - TAG_NAME=${GITHUB_REF#refs/tags/v} - bump-my-version replace --new-version $TAG_NAME - echo "Version files updated to $TAG_NAME" - pyenv shell --unset - - name: Download build artifacts - uses: actions/download-artifact@v4 - with: - name: macos-x86_64-build-artifacts - path: ./artifacts - - name: Restore artifacts to original paths - run: | - mv ./artifacts/libchdb.so ./ - mv ./artifacts/libchdb.a ./ - mv ./artifacts/chdb/_chdb.abi3.so ./chdb/ - mv ./artifacts/chdb/libpybind11nonlimitedapi_stubs.dylib ./chdb/ - for v in 8 9 10 11 12 13 14; do - mv ./artifacts/chdb/libpybind11nonlimitedapi_chdb_3.${v}.dylib ./chdb/ - done - ls -lh ./libchdb.so ./libchdb.a - ls -lh ./chdb/*.so ./chdb/*.dylib - - name: Sign macOS binaries - run: | - echo "Signing cross-compiled binaries for macOS..." - codesign -f -s - ./libchdb.so - codesign -f -s - ./chdb/_chdb.abi3.so - codesign -f -s - ./chdb/libpybind11nonlimitedapi_stubs.dylib - for f in ./chdb/libpybind11nonlimitedapi_chdb_3.*.dylib; do - codesign -f -s - "$f" - done - echo "Verifying signatures..." 
- codesign -dvv ./libchdb.so - codesign -dvv ./chdb/_chdb.abi3.so - - name: Run chdb/test_smoke.sh - timeout-minutes: 600 - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - pyenv shell 3.9 - bash gen_manifest.sh - bash -x ./chdb/test_smoke.sh cross-compile - continue-on-error: false - - name: Run chdb/build/test_go_example.sh - timeout-minutes: 600 - run: | - bash ./chdb/build/test_go_example.sh ${{ github.workspace }}/libchdb.a - continue-on-error: false - - name: Run libchdb stub in examples dir - run: | - bash -x ./examples/runStub.sh - bash -x ./examples/runArrowTest.sh - - name: Build wheels - run: | - rm -rf chdb/build/ - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - pyenv shell 3.9 - make wheel - - name: Fix wheel platform tag - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - pyenv shell 3.9 - python -m wheel tags --platform-tag=macosx_10_15_x86_64 --remove dist/*.whl - - name: Verify wheel sizes - run: | - echo "=== Wheel sizes ===" - du -sh dist/* - - name: Setup core dump collection - run: | - mkdir -p tmp/core - sudo sysctl kern.corefile=$PWD/tmp/core/core.%P - sudo sysctl kern.coredump=1 - ulimit -c unlimited - - name: Test wheel on all Python versions - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - for version in 3.9 3.10 3.11 3.12 3.13 3.14; do - echo "Testing chdb on Python $version" - pyenv shell $version - python -m pip install dist/*.whl --force-reinstall - python -c "import chdb; res = chdb.query('select 1112222222,555', 'CSV'); print(f'Python $version: {res}')" - make test - pyenv shell --unset - done - continue-on-error: false - - name: Run notebook tests - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - pyenv shell 3.9 - python -m pip install dist/*.whl --force-reinstall - jupyter nbconvert --to notebook --execute tests/test_data_insertion.ipynb --output test_data_insertion_output.ipynb - pyenv shell --unset - continue-on-error: false - - name: Check and upload core files if present - if: always() - run: | - if ls tmp/core/core.* >/dev/null 2>&1; then - echo "CORE_FILES_FOUND=true" >> $GITHUB_ENV - tar -czvf core-files-macos-x86_64.tar.gz tmp/core/core.* - echo "Core files tar created: core-files-macos-x86_64.tar.gz" - ls -lh core-files-macos-x86_64.tar.gz - else - echo "CORE_FILES_FOUND=false" >> $GITHUB_ENV - echo "No core files found in tmp/core" - fi - continue-on-error: true - - name: Upload core files artifact - if: always() && env.CORE_FILES_FOUND == 'true' - uses: actions/upload-artifact@v4 - with: - name: core-files-macos-x86_64 - path: core-files-macos-x86_64.tar.gz - - name: Show files - run: ls -lh dist - shell: bash - - name: Upload wheels to release - if: startsWith(github.ref, 'refs/tags/v') - run: | - gh release upload ${{ github.ref_name }} dist/*.whl --clobber - env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} - - name: Packege libchdb.so - run: | - cp programs/local/chdb.h chdb.h - cp programs/local/chdb.hpp chdb.hpp - tar -czvf macos-x86_64-libchdb.tar.gz libchdb.so chdb.h chdb.hpp - - name: Package libchdb.a - run: | - cp programs/local/chdb.h chdb.h - cp programs/local/chdb.hpp chdb.hpp - tar -czvf macos-x86_64-libchdb-static.tar.gz libchdb.a chdb.h chdb.hpp - - name: Upload libchdb.so to release - if: startsWith(github.ref, 'refs/tags/v') - run: | - gh release upload ${{ github.ref_name }} macos-x86_64-libchdb.tar.gz --clobber - env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} - - name: Upload libchdb.a to release - if: 
startsWith(github.ref, 'refs/tags/v') - run: | - gh release upload ${{ github.ref_name }} macos-x86_64-libchdb-static.tar.gz --clobber - env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} - - uses: actions/upload-artifact@v4 - with: - name: chdb-artifacts-macos-x86_64 - path: | - ./dist/*.whl - ./macos-x86_64-libchdb.tar.gz - ./macos-x86_64-libchdb-static.tar.gz - overwrite: true - - name: Upload pypi - if: startsWith(github.ref, 'refs/tags/v') - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - pyenv shell 3.9 - python -m twine upload dist/*.whl - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} diff --git a/.github/workflows/build_musllinux_arm64_wheels.yml b/.github/workflows/build_musllinux_arm64_wheels.yml deleted file mode 100644 index 36dc3a1c6fa..00000000000 --- a/.github/workflows/build_musllinux_arm64_wheels.yml +++ /dev/null @@ -1,134 +0,0 @@ -name: Build & Test Linux(musllinux) arm64 - -on: - workflow_dispatch: - inputs: - TAG_NAME: - description: 'Release Version Tag' - required: true - release: - types: [created] - push: - branches: - - main - paths-ignore: - - '**/*.md' - pull_request: - types: [opened, synchronize, reopened, ready_for_review] - branches: - - main - paths-ignore: - - '**/*.md' - -jobs: - build_musllinux_wheels: - name: Build & Test musllinux wheels (Alpine Linux aarch64) - runs-on: [self-hosted, linux, arm64, ubuntu-latest] - if: ${{ !github.event.pull_request.draft }} - steps: - - name: Setup Python and install twine for PyPI upload - run: | - echo "=== Setting up Python for PyPI upload ===" - # Install pip if not available - sudo apt-get update - sudo apt-get install -y python3-pip - python3 --version - python3 -m pip --version - echo "=== Installing twine ===" - python3 -m pip install --upgrade pip --break-system-packages - python3 -m pip install twine --break-system-packages - if ! python3 -m twine --version; then - echo "ERROR: Twine installation failed!" - exit 1 - fi - echo "Twine installed successfully" - - name: Install GitHub CLI - run: | - wget https://github.com/cli/cli/releases/download/v2.82.1/gh_2.82.1_linux_arm64.tar.gz -O gh.tar.gz - tar -xf gh.tar.gz - sudo cp gh_*/bin/gh /usr/local/bin/ - sudo chmod +x /usr/local/bin/gh - if ! gh --version; then - echo "ERROR: GitHub CLI installation failed!" - exit 1 - fi - echo "GitHub CLI installed successfully" - - - name: Setup Docker permissions - run: | - # Ensure Docker is running - sudo systemctl start docker - sudo systemctl enable docker - - # Add current user to docker group - sudo usermod -aG docker $USER - - # Set proper permissions on docker socket - sudo chmod 666 /var/run/docker.sock - - # Verify Docker is working - docker --version - docker info - - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - - name: Configure git safe directory - run: | - git config --global --add safe.directory '*' - - - name: Update submodules - run: | - git submodule update --init --recursive --jobs 4 - - - name: Build chdb wheels in container - run: | - docker run --rm -v ${{ github.workspace }}:/workspace --privileged -e GITHUB_REF=${{ github.ref }} \ - quay.io/pypa/musllinux_1_2_aarch64 /bin/sh /workspace/.github/scripts/build-musllinux-arm64.sh - continue-on-error: false - # Check test success before upload - - name: Verify test completion - run: | - echo "=== Verifying test completion ===" - if [ ! -f ".test_success_marker" ]; then - echo "ERROR: Test success marker file not found!" 
- echo "This indicates that the wheel testing did not complete successfully." - echo "Aborting upload process." - exit 1 - fi - echo "Test success marker found. All tests completed successfully." - echo "Proceeding with wheel upload..." - ls -la ./dist/ - continue-on-error: false - # Upload wheels to release - - name: Upload wheels to release - if: startsWith(github.ref, 'refs/tags/v') - run: | - echo "=== Uploading wheels to release ===" - ls -la ./dist/ - gh release upload ${{ github.ref_name }} ./dist/*.whl --clobber - env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} - continue-on-error: true - - # Upload to PyPI - - name: Upload pypi - if: startsWith(github.ref, 'refs/tags/v') - run: | - echo "=== Uploading to PyPI ===" - ls -la ./dist/ - python3 -m twine upload dist/*.whl - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - - # Upload artifacts - - name: Upload build artifacts - if: always() - uses: actions/upload-artifact@v4 - with: - name: chdb-artifacts-musllinux-aarch64 - path: | - ./dist/*.whl - overwrite: true diff --git a/.github/workflows/build_musllinux_x86_wheels.yml b/.github/workflows/build_musllinux_x86_wheels.yml deleted file mode 100644 index 4106dbdd835..00000000000 --- a/.github/workflows/build_musllinux_x86_wheels.yml +++ /dev/null @@ -1,306 +0,0 @@ -name: Build & Test Linux(musllinux) x86_64 - -on: - workflow_dispatch: - inputs: - TAG_NAME: - description: 'Release Version Tag' - required: true - release: - types: [created] - push: - branches: - - main - paths-ignore: - - '**/*.md' - pull_request: - types: [opened, synchronize, reopened, ready_for_review] - branches: - - main - paths-ignore: - - '**/*.md' - - -jobs: - build_musllinux_wheels: - name: Build & Test musllinux wheels (Alpine Linux x86_64) - runs-on: [self-hosted, linux, x64, ubuntu-latest] - if: ${{ !github.event.pull_request.draft }} - container: - image: quay.io/pypa/musllinux_1_2_x86_64 - options: --privileged - steps: - - name: Check system info - run: | - echo "System: $(uname -m) $(cat /etc/os-release | grep PRETTY_NAME | cut -d'"' -f2)" - if [ -f /lib/ld-musl-x86_64.so.1 ]; then - echo "musl libc x86_64" - elif [ -f /lib/libc.musl-x86_64.so.1 ]; then - echo "musl libc x86_64" - else - echo "Not musl libc" - fi - - echo "=== CPU Information ===" - cat /proc/cpuinfo - echo "" - echo "=== Checking CPU requirements ===" - if grep -q "ssse3" /proc/cpuinfo && grep -q "sse4_1" /proc/cpuinfo && grep -q "sse4_2" /proc/cpuinfo; then - echo "CPU meets minimum requirements" - else - echo "CPU does not meet minimum requirements" - fi - - name: Install Python build dependencies - run: | - apk update - apk add --no-cache make build-base openssl-dev zlib-dev \ - bzip2-dev readline-dev sqlite-dev wget curl llvm \ - ncurses-dev xz-dev tk-dev libxml2-dev \ - libffi-dev linux-headers - - name: Scan SQLite vulnerabilities with grype - run: | - # Install grype and required tools - curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin - - # Update grype vulnerability database - grype db update - - # Check SQLite vulnerabilities in installed packages - echo "Scanning SQLite packages for vulnerabilities..." - GRYPE_RAW_OUTPUT=$(grype dir:/lib/apk/db --scope all-layers 2>/dev/null || true) - echo "Raw grype output:" - echo "$GRYPE_RAW_OUTPUT" - - SQLITE_SCAN_OUTPUT=$(echo "$GRYPE_RAW_OUTPUT" | grep -i sqlite || true) - - if [ -n "$SQLITE_SCAN_OUTPUT" ]; then - echo "SQLite vulnerabilities found in packages! Build should be reviewed." 
- echo "SQLite vulnerability details:" - echo "$SQLITE_SCAN_OUTPUT" - else - echo "No SQLite vulnerabilities found" - fi - continue-on-error: false - - name: Setup pyenv - run: | - curl https://pyenv.run | bash - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - pyenv install 3.8:latest - pyenv install 3.9:latest - pyenv install 3.10:latest - pyenv install 3.11:latest - pyenv install 3.12:latest - pyenv install 3.13:latest - pyenv install 3.14:latest - pyenv global 3.8 3.9 3.10 3.11 3.12 3.13 3.14 - - # Verify installations - echo "Installed versions:" - pyenv versions - - name: Verify pyenv installations - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - echo "Verifying all required Python versions are available:" - for version in 3.8 3.9 3.10 3.11 3.12 3.13 3.14; do - if ! pyenv versions --bare | grep -q "^$version"; then - echo "ERROR: Python $version is not installed!" - exit 1 - fi - echo "Python $version is installed" - done - echo "All Python versions verified successfully!" - - name: Install dependencies for all Python versions - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - for version in 3.8 3.9 3.10 3.11 3.12 3.13 3.14; do - echo "Installing dependencies for Python $version" - pyenv shell $version - python -m pip install --upgrade pip - if [ "$version" = "3.8" ]; then - python -m pip install setuptools tox twine psutil wheel - else - python -m pip install setuptools tox pandas pyarrow twine psutil deltalake wheel - fi - pyenv shell --unset - done - - name: Install clang++ for Alpine - run: | - apk add --no-cache make cmake ccache ninja yasm gawk wget - apk add --no-cache clang20 clang20-dev llvm20 llvm20-dev lld20 - # Install Rust toolchain via rustup for proper target management - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain stable - source $HOME/.cargo/env - rustup toolchain install nightly-2025-07-07 - rustup component add --toolchain nightly-2025-07-07 rust-src - rustc --version - cargo --version - ccache -s - - name: Update git - run: | - apk add --no-cache git - git --version - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: Configure git safe directory - run: | - git config --global --add safe.directory '*' - - name: Update submodules - run: | - git submodule update --init --recursive --jobs 4 - - name: Update version for release - if: startsWith(github.ref, 'refs/tags/v') - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - pyenv shell 3.9 - - # Install bump-my-version - python -m pip install bump-my-version - TAG_NAME=${GITHUB_REF#refs/tags/v} - bump-my-version replace --new-version $TAG_NAME - echo "Version files updated to $TAG_NAME" - pyenv shell --unset - - name: ccache - uses: hendrikmuhs/ccache-action@v1.2 - with: - key: musllinux-1-2-x86_64 - max-size: 5G - append-timestamp: true - - name: setup clang and link clang-20 to clang - run: | - ln -sf /usr/bin/clang-20 /usr/bin/clang - ln -sf /usr/bin/clang++-20 /usr/bin/clang++ - which clang++ - clang++ --version - - name: Run chdb/build-musl.sh - timeout-minutes: 600 - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - source $HOME/.cargo/env - pyenv shell 3.8 - export CC=/usr/bin/clang - export CXX=/usr/bin/clang++ - bash ./chdb/build-musl.sh - pyenv shell 3.9 - bash -x ./chdb/test_smoke.sh - continue-on-error: false - - name: Scan chdb libraries with grype - run: | - echo "Scanning chdb libraries for vulnerabilities..." 
- - FILES_TO_SCAN="$FILES_TO_SCAN $(find chdb/ \( -name "*.so" -o -name "*.dylib" \) 2>/dev/null || true)" - - SQLITE_VULNERABILITIES_FOUND=false - - for file in $FILES_TO_SCAN; do - if [ -f "$file" ]; then - echo "=== Scanning $file ===" - SCAN_OUTPUT=$(grype "$file" 2>/dev/null || true) - echo "$SCAN_OUTPUT" - - if echo "$SCAN_OUTPUT" | grep -qi sqlite; then - echo "SQLite vulnerability found in $file" - SQLITE_VULNERABILITIES_FOUND=true - fi - fi - done - - if [ "$SQLITE_VULNERABILITIES_FOUND" = true ]; then - echo "SQLite vulnerabilities detected in chdb libraries!" - else - echo "No SQLite vulnerabilities found in chdb libraries" - fi - continue-on-error: false - - name: Check ccache statistics - run: | - ccache -s - ls -lh chdb - df -h - - name: Build wheels - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - export CC=/usr/bin/clang - export CXX=/usr/bin/clang++ - pyenv shell 3.8 - make wheel - - name: Install patchelf from github - run: | - wget https://github.com/NixOS/patchelf/releases/download/0.18.0/patchelf-0.18.0-x86_64.tar.gz -O patchelf.tar.gz - tar -xvf patchelf.tar.gz - cp bin/patchelf /usr/bin/ - chmod +x /usr/bin/patchelf - patchelf --version - - name: Audit wheels - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - pyenv shell 3.13 - python -m pip install auditwheel - auditwheel -v repair -w dist/ --plat musllinux_1_2_x86_64 dist/*.whl - continue-on-error: false - - name: Show files - run: | - rm -f dist/*-linux_x86_64.whl - ls -lh dist - shell: bash - - name: Test wheel on all Python versions - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - for version in 3.9 3.10 3.11 3.12 3.13 3.14; do - echo "Testing chdb on Python $version" - pyenv shell $version - python -m pip install dist/*.whl --force-reinstall - python -c "import chdb; res = chdb.query('select 1112222222,555', 'CSV'); print(f'Python $version: {res}')" - make test - pyenv shell --unset - done - continue-on-error: false - - name: Install GitHub CLI - run: | - # Install GitHub CLI from binary release (Alpine Linux compatible) - wget https://github.com/cli/cli/releases/download/v2.82.1/gh_2.82.1_linux_amd64.tar.gz -O gh.tar.gz - tar -xf gh.tar.gz - cp gh_*/bin/gh /usr/local/bin/ - chmod +x /usr/local/bin/gh - if ! gh --version; then - echo "ERROR: GitHub CLI installation failed!" 
- exit 1 - fi - echo "GitHub CLI installed successfully" - - name: Upload wheels to release - if: startsWith(github.ref, 'refs/tags/v') - run: | - gh release upload ${{ github.ref_name }} dist/*.whl --clobber - env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} - - uses: actions/upload-artifact@v4 - with: - name: chdb-artifacts-musllinux-x86_64 - path: | - ./dist/*.whl - overwrite: true - - name: Upload pypi - if: startsWith(github.ref, 'refs/tags/v') - run: | - export PATH="$HOME/.pyenv/bin:$PATH" - eval "$(pyenv init -)" - pyenv shell 3.13 - python -m twine upload dist/*.whl - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - - name: Keep killall ccache and wait for ccache to finish - if: always() - run: | - sleep 60 - while ps -ef | grep ccache | grep -v grep; do \ - killall ccache; \ - sleep 10; \ - done diff --git a/chdb/build.sh b/chdb/build.sh index bdfce37e169..f30481e68c6 100755 --- a/chdb/build.sh +++ b/chdb/build.sh @@ -2,8 +2,8 @@ set -e -# default to build Release -build_type=${1:-Release} +# default to build RelWithDebInfo +build_type=${1:-RelWithDebInfo} DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" @@ -111,68 +111,8 @@ CMAKE_ARGS="-DCMAKE_BUILD_TYPE=${build_type} -DENABLE_THINLTO=0 -DENABLE_TESTS=0 -DCHDB_VERSION=${CHDB_VERSION} \ " -LIBCHDB_SO="libchdb.so" -# Build libchdb.so -cmake ${CMAKE_ARGS} -DENABLE_PYTHON=0 .. -ninja -d keeprsp - BINARY=${BUILD_DIR}/programs/clickhouse -echo -e "\nBINARY: ${BINARY}" -ls -lh ${BINARY} -echo -e "\nldd ${BINARY}" -${LDD} ${BINARY} -rm -f ${BINARY} - -cd ${BUILD_DIR} -ninja -d keeprsp -v > build.log || true -USING_RESPONSE_FILE=$(grep -m 1 'clang++.*-o programs/clickhouse .*' build.log | grep '@CMakeFiles/clickhouse.rsp' || true) - -if [ ! "${USING_RESPONSE_FILE}" == "" ]; then - if [ -f CMakeFiles/clickhouse.rsp ]; then - cp -a CMakeFiles/clickhouse.rsp CMakeFiles/libchdb.rsp - else - echo "CMakeFiles/clickhouse.rsp not found" - exit 1 - fi -fi - -LIBCHDB_CMD=$(grep -m 1 'clang++.*-o programs/clickhouse .*' build.log \ - | sed "s/-o programs\/clickhouse/-fPIC -shared -o ${LIBCHDB_SO}/" \ - | sed 's/^[^&]*&& //' | sed 's/&&.*//' \ - | sed 's/ -Wl,-undefined,error/ -Wl,-undefined,dynamic_lookup/g' \ - | sed 's/ -Xlinker --no-undefined//g' \ - | sed 's/@CMakeFiles\/clickhouse.rsp/@CMakeFiles\/libchdb.rsp/g' \ - ) - -# generate the command to generate libchdb.so -LIBCHDB_CMD=$(echo ${LIBCHDB_CMD} | sed 's/ '${CHDB_PY_MODULE}'/ '${LIBCHDB_SO}'/g') - -if [ ! 
"${USING_RESPONSE_FILE}" == "" ]; then - ${SED_INPLACE} 's/ '${CHDB_PY_MODULE}'/ '${LIBCHDB_SO}'/g' CMakeFiles/libchdb.rsp -fi - -# Control exported symbols for libchdb.so -if [ "$(uname)" == "Darwin" ]; then - # macOS: use exported_symbols_list file - LIBCHDB_CMD="${LIBCHDB_CMD} -Wl,-exported_symbols_list,${CHDB_DIR}/libchdb_export_macos.txt" -else - # Linux: use version script - LIBCHDB_CMD="${LIBCHDB_CMD} -Wl,--version-script=${CHDB_DIR}/libchdb_export.map" -fi - -LIBCHDB_CMD=$(echo ${LIBCHDB_CMD} | sed 's/@CMakeFiles\/clickhouse.rsp/@CMakeFiles\/libchdb.rsp/g') - -# Step 4: -# save the command to a file for debug -echo ${LIBCHDB_CMD} > libchdb_cmd.sh - -# Step 5: -${LIBCHDB_CMD} - -LIBCHDB_DIR=${BUILD_DIR}/ -LIBCHDB=${LIBCHDB_DIR}/${LIBCHDB_SO} -ls -lh ${LIBCHDB} # build chdb python module py_version="3.8" @@ -243,48 +183,10 @@ ls -lh ${CHDB_PY_MODULE} ## check all the so files LIBCHDB_DIR=${BUILD_DIR}/ - PYCHDB=${LIBCHDB_DIR}/${CHDB_PY_MODULE} -LIBCHDB=${LIBCHDB_DIR}/${LIBCHDB_SO} - -if [ ${build_type} == "Debug" ]; then - echo -e "\nDebug build, skip strip" -else - echo -e "\nStrip the binary:" - ${STRIP} --strip-unneeded --remove-section=.comment --remove-section=.note ${PYCHDB} - ${STRIP} --strip-unneeded --remove-section=.comment --remove-section=.note ${LIBCHDB} -fi -echo -e "\nStripe the binary:" - -echo -e "\nPYCHDB: ${PYCHDB}" -ls -lh ${PYCHDB} -echo -e "\nLIBCHDB: ${LIBCHDB}" -ls -lh ${LIBCHDB} -echo -e "\nldd ${PYCHDB}" -${LDD} ${PYCHDB} -echo -e "\nfile info of ${PYCHDB}" -file ${PYCHDB} -echo -e "\nldd ${LIBCHDB}" -${LDD} ${LIBCHDB} -echo -e "\nfile info of ${LIBCHDB}" -file ${LIBCHDB} rm -f ${CHDB_DIR}/*.so cp -a ${PYCHDB} ${CHDB_DIR}/${CHDB_PY_MODULE} -cp -a ${LIBCHDB} ${PROJ_DIR}/${LIBCHDB_SO} - -echo -e "\nSymbols:" -echo -e "\nPyInit in PYCHDB: ${PYCHDB}" -${NM} ${PYCHDB} | grep PyInit || true -echo -e "\nPyInit in LIBCHDB: ${LIBCHDB}" -${NM} ${LIBCHDB} | grep PyInit || echo "PyInit not found in ${LIBCHDB}, it's OK" -echo -e "\nquery_stable in PYCHDB: ${PYCHDB}" -${NM} ${PYCHDB} | grep query_stable || true -echo -e "\nquery_stable in LIBCHDB: ${LIBCHDB}" -${NM} ${LIBCHDB} | grep query_stable || true - -echo -e "\nAfter copy:" -cd ${PROJ_DIR} && pwd ccache -s || true diff --git a/chdb/build_pybind11.sh b/chdb/build_pybind11.sh index 4dbbcfbc637..a1694ca833f 100755 --- a/chdb/build_pybind11.sh +++ b/chdb/build_pybind11.sh @@ -116,7 +116,7 @@ build_pybind11_nonlimitedapi() { } build_all_pybind11_nonlimitedapi() { - local python_versions=("3.8" "3.9" "3.10" "3.11" "3.12" "3.13" "3.14") + local python_versions=("3.8" "3.13") echo "Building pybind11 nonlimitedapi libraries for all Python versions..." 
diff --git a/programs/local/ChdbClient.cpp b/programs/local/ChdbClient.cpp
index fda17b2e0f3..e3d43536aeb 100644
--- a/programs/local/ChdbClient.cpp
+++ b/programs/local/ChdbClient.cpp
@@ -35,6 +35,7 @@ ChdbClient::ChdbClient(EmbeddedServerPtr server_ptr)
     if (!server)
         throw Exception(ErrorCodes::LOGICAL_ERROR, "EmbeddedServer pointer is null");
 
+    query_kind = ClientInfo::QueryKind::INITIAL_QUERY;
     configuration = ConfigHelper::createEmpty();
     layered_configuration = new Poco::Util::LayeredConfiguration();
     layered_configuration->addWriteable(configuration, 0);
@@ -58,18 +59,20 @@ ChdbClient::ChdbClient(EmbeddedServerPtr server_ptr)
 
 std::unique_ptr<ChdbClient> ChdbClient::create(EmbeddedServerPtr server_ptr)
 {
-    if (!server_ptr)
-    {
-        server_ptr = EmbeddedServer::getInstance();
-    }
+    chassert(server_ptr);
+
     return std::make_unique<ChdbClient>(server_ptr);
 }
 
 ChdbClient::~ChdbClient()
 {
-    std::lock_guard lock(client_mutex);
-    cleanup();
-    resetQueryOutputVector();
+    {
+        std::lock_guard lock(client_mutex);
+        cleanup();
+        resetQueryOutputVector();
+        server.reset();
+    }
+    EmbeddedServer::releaseInstance();
 }
 
 void ChdbClient::cleanup()
@@ -252,7 +255,7 @@ CHDB::QueryResultPtr ChdbClient::executeMaterializedQuery(
 #if USE_PYTHON
         python_table_cache->clear();
 #endif
-        return std::make_unique(getExceptionMessage(e, false));
+        return std::make_unique(getExceptionMessage(e, true, true));
     }
     catch (...)
     {
diff --git a/programs/local/ChdbClient.h b/programs/local/ChdbClient.h
index 9aa33ccafff..efff9408197 100644
--- a/programs/local/ChdbClient.h
+++ b/programs/local/ChdbClient.h
@@ -26,7 +26,7 @@ using EmbeddedServerPtr = std::shared_ptr<EmbeddedServer>;
 class ChdbClient : public ClientBase
 {
 public:
-    static std::unique_ptr<ChdbClient> create(EmbeddedServerPtr server_ptr = nullptr);
+    static std::unique_ptr<ChdbClient> create(EmbeddedServerPtr server_ptr);
 
     explicit ChdbClient(EmbeddedServerPtr server_ptr);
     ~ChdbClient() override;
diff --git a/programs/local/EmbeddedServer.cpp b/programs/local/EmbeddedServer.cpp
index fc541d8c16c..15b67b101c5 100644
--- a/programs/local/EmbeddedServer.cpp
+++ b/programs/local/EmbeddedServer.cpp
@@ -56,6 +56,7 @@
 #include
 #include
 #include
+#include
 #include
 #include
 #include
@@ -393,6 +394,7 @@ void EmbeddedServer::cleanup()
 {
     try
     {
+        EventNotifier::shutdown();
         if (global_context)
         {
             global_context->shutdown();
@@ -876,15 +878,15 @@ void EmbeddedServer::applyCmdOptions(ContextMutablePtr context)
         "output-format", config().getString("format", "TSV")));
 }
 
-std::weak_ptr<EmbeddedServer> EmbeddedServer::global_instance;
+std::shared_ptr<EmbeddedServer> EmbeddedServer::global_instance;
 std::mutex EmbeddedServer::instance_mutex;
+size_t EmbeddedServer::client_ref_count = 0;
 
 std::shared_ptr<EmbeddedServer> EmbeddedServer::getInstance(int argc, char ** argv)
 {
     std::lock_guard lock(instance_mutex);
 
-    auto instance = global_instance.lock();
-    if (instance)
+    if (global_instance)
     {
         if (argc > 0 && argv)
         {
@@ -897,31 +899,54 @@ std::shared_ptr<EmbeddedServer> EmbeddedServer::getInstance(int argc, char ** ar
                 break;
             }
         }
-            if (!instance->db_path.empty() && instance->db_path != path)
+            if (!global_instance->db_path.empty() && global_instance->db_path != path)
             {
                 throw DB::Exception(
                     ErrorCodes::BAD_ARGUMENTS,
                     "EmbeddedServer already initialized with path '{}', cannot connect with different path '{}'",
-                    instance->db_path,
+                    global_instance->db_path,
                     path);
             }
         }
-        return instance;
+        ++client_ref_count;
+        return global_instance;
     }
 
-    instance = std::make_shared<EmbeddedServer>();
-    if (argc == 0 || !argv)
+    global_instance = std::make_shared<EmbeddedServer>();
+    try
     {
-        const char * default_argv[] = {"chdb"};
-        instance->initializeWithArgs(1, const_cast<char **>(default_argv));
+        if (argc == 0 || !argv)
+        {
+            const char * default_argv[] = {"chdb"};
+            global_instance->initializeWithArgs(1, const_cast<char **>(default_argv));
+        }
+        else
+        {
+            global_instance->initializeWithArgs(argc, argv);
+        }
     }
-    else
+    catch (...)
     {
-        instance->initializeWithArgs(argc, argv);
+        global_instance.reset();
+        throw;
     }
-    global_instance = instance;
-    return instance;
+    client_ref_count = 1;
+    return global_instance;
+}
+
+void EmbeddedServer::releaseInstance()
+{
+    std::lock_guard lock(instance_mutex);
+
+    if (client_ref_count == 0)
+        return;
+
+    --client_ref_count;
+    if (client_ref_count == 0)
+    {
+        global_instance.reset();
+    }
 }
 
 void EmbeddedServer::initializeWithArgs(int argc, char ** argv)
diff --git a/programs/local/EmbeddedServer.h b/programs/local/EmbeddedServer.h
index 8297e532238..061d9270c13 100644
--- a/programs/local/EmbeddedServer.h
+++ b/programs/local/EmbeddedServer.h
@@ -43,6 +43,8 @@ class EmbeddedServer : public Poco::Util::Application, public IHints<2>, public
 
     static std::shared_ptr<EmbeddedServer> getInstance(int argc = 0, char ** argv = nullptr);
 
+    static void releaseInstance();
+
     std::string getPath() const { return db_path; }
 
 private:
@@ -52,8 +54,9 @@ class EmbeddedServer : public Poco::Util::Application, public IHints<2>, public
     void processConfig();
     void applyCmdOptions(ContextMutablePtr context);
     void initializeWithArgs(int argc, char ** argv);
-    static std::weak_ptr<EmbeddedServer> global_instance;
+    static std::shared_ptr<EmbeddedServer> global_instance;
     static std::mutex instance_mutex;
+    static size_t client_ref_count;
     std::string db_path;
     ServerSettings server_settings;
     std::optional status;
diff --git a/src/Analyzer/Resolve/QueryAnalyzer.cpp b/src/Analyzer/Resolve/QueryAnalyzer.cpp
index 4bc09ecc116..be434c9b924 100644
--- a/src/Analyzer/Resolve/QueryAnalyzer.cpp
+++ b/src/Analyzer/Resolve/QueryAnalyzer.cpp
@@ -186,17 +186,6 @@ void QueryAnalyzer::resolve(QueryTreeNodePtr & node, const QueryTreeNodePtr & ta
                 throw Exception(ErrorCodes::LOGICAL_ERROR,
                     "For query analysis table expression must be empty");
 
-            // chDB(todo): this is a hack to reload UDFs when the query is re-analyzed
-            // the root cause is for chdb, the ClientBase and Server might have different Contexts
-            // the hacking might impact the performance when running stateful query(with arg "path" specified)
-            auto global_context = Context::getGlobalContextInstance();
-            if (global_context->getConfigRef().has("path"))
-            {
-                IUserDefinedSQLObjectsStorage & udf_store
-                    = const_cast<IUserDefinedSQLObjectsStorage &>(global_context->getUserDefinedSQLObjectsStorage());
-                udf_store.reloadObjects();
-            }
-
             resolveQuery(node, scope);
             break;
         }
diff --git a/src/Interpreters/Context.cpp b/src/Interpreters/Context.cpp
index 7c483c29f69..27c89f63e13 100644
--- a/src/Interpreters/Context.cpp
+++ b/src/Interpreters/Context.cpp
@@ -3144,17 +3144,7 @@ void Context::makeSessionContext()
 
 void Context::makeGlobalContext()
 {
-    /// assert(!global_context_instance);
-    if (global_context_instance)
-    {
-        global_context = shared_from_this();
-        global_context_instance = shared_from_this();
-        DatabaseCatalog::init(shared_from_this());
-        return;
-    }
-
     global_context_instance = shared_from_this();
-    database_context_instance = shared_from_this();
     DatabaseCatalog::init(shared_from_this());
     EventNotifier::init();
diff --git a/src/Interpreters/Context.h b/src/Interpreters/Context.h
index 407ceabd523..23af5825473 100644
--- a/src/Interpreters/Context.h
+++ b/src/Interpreters/Context.h
@@ -541,7 +541,6 @@ class ContextData
     bool is_internal_query = false;
 
     inline static ContextPtr global_context_instance;
-    inline static ContextPtr database_context_instance;
 
     /// Temporary data for query execution accounting.
     TemporaryDataOnDiskScopePtr temp_data_on_disk;
diff --git a/tests/gdb_catch_mutex_error.py b/tests/gdb_catch_mutex_error.py
new file mode 100644
index 00000000000..d3d59a8e086
--- /dev/null
+++ b/tests/gdb_catch_mutex_error.py
@@ -0,0 +1,35 @@
+"""
+GDB Python script to catch mutex-related exceptions.
+Usage: gdb -batch -x gdb_commands.gdb --args python test_script.py
+
+This script sets a breakpoint on __cxa_throw and filters for mutex/system_error exceptions.
+"""
+import gdb
+
+
+class MutexErrorCatcher(gdb.Breakpoint):
+    def __init__(self):
+        # Break on __cxa_throw (C++ exception throw)
+        super(MutexErrorCatcher, self).__init__("__cxa_throw", internal=True)
+        self.silent = True
+
+    def stop(self):
+        try:
+            print("\n=== C++ EXCEPTION DETECTED ===")
+            print("Backtrace:")
+            gdb.execute("bt full")
+            print("\n=== Thread info ===")
+            gdb.execute("info threads")
+            print("\n=== END ===")
+            # Force crash to generate core dump
+            print("\n=== FORCING CRASH (SIGABRT) ===")
+            gdb.execute("signal SIGABRT")
+            return True
+        except Exception as e:
+            print(f"Error in stop(): {e}")
+            pass
+        return False
+
+
+MutexErrorCatcher()
+print("Mutex error catcher initialized")
diff --git a/tests/gdb_catch_throw.gdb b/tests/gdb_catch_throw.gdb
new file mode 100644
index 00000000000..1ad8d660181
--- /dev/null
+++ b/tests/gdb_catch_throw.gdb
@@ -0,0 +1,27 @@
+# GDB command script for catching mutex exceptions
+# Usage: gdb -batch -x tests/gdb_catch_throw.gdb --args python test_script.py
+
+set pagination off
+set print thread-events off
+set python print-stack full
+
+# Allow pending breakpoints (for symbols not yet loaded)
+set breakpoint pending on
+
+# Use GDB's built-in catch throw instead of breakpoint on __cxa_throw
+catch throw
+
+# Define commands to run when exception is caught
+commands
+  silent
+  echo \n=== C++ EXCEPTION CAUGHT ===\n
+  bt full
+  echo \n=== Thread info ===\n
+  info threads
+  echo \n=== FORCING CRASH (SIGABRT) ===\n
+  signal SIGABRT
+  quit 1
+end
+
+run
+quit 0
diff --git a/tests/test_issue60.py b/tests/test_issue60.py
index f1d4c976e47..5eacaa7cfe7 100644
--- a/tests/test_issue60.py
+++ b/tests/test_issue60.py
@@ -48,8 +48,13 @@ def run(self):
 
 
 def print_chdb(threadName, delay):
     global result
-    result = chdb.query(query_str, 'CSV')
-    print(result)
+    try:
+        result = chdb.query(query_str, 'CSV')
+        print(f"[{threadName}] Query result:\n{result}")
+    except Exception as e:
+        print(f"[{threadName}] Exception: {type(e).__name__}: {e}")
+        import traceback
+        traceback.print_exc()
     time.sleep(delay)
     print("%s: %s" % (threadName, time.ctime(time.time())))
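
Note on the EmbeddedServer changes above: the diff replaces the previous weak_ptr-based singleton with an explicitly reference-counted shared instance. Each ChdbClient acquires the server through EmbeddedServer::getInstance() (which increments client_ref_count) and releases it in its destructor via EmbeddedServer::releaseInstance(); the last release drops the global shared_ptr so a later getInstance() can rebuild the server from scratch. Below is a minimal, self-contained sketch of that acquire/release pattern, not chdb code; the names Server, acquire and release are illustrative only.

// Minimal sketch of a reference-counted singleton lifecycle, assuming
// simplified illustrative names (Server, acquire, release).
#include <cassert>
#include <memory>
#include <mutex>

class Server
{
public:
    // Each client calls acquire(); the first call creates the shared instance.
    static std::shared_ptr<Server> acquire()
    {
        std::lock_guard<std::mutex> lock(mutex);
        if (!instance)
        {
            instance = std::make_shared<Server>();
            ref_count = 0;
        }
        ++ref_count;
        return instance;
    }

    // Each client calls release() exactly once; the last release drops the
    // global reference so a subsequent acquire() rebuilds the instance.
    static void release()
    {
        std::lock_guard<std::mutex> lock(mutex);
        if (ref_count == 0)
            return;
        if (--ref_count == 0)
            instance.reset();
    }

private:
    static std::shared_ptr<Server> instance;
    static std::mutex mutex;
    static size_t ref_count;
};

std::shared_ptr<Server> Server::instance;
std::mutex Server::mutex;
size_t Server::ref_count = 0;

int main()
{
    auto a = Server::acquire();   // first acquire creates the instance
    auto b = Server::acquire();   // second acquire reuses the same instance
    assert(a == b);
    a.reset();
    Server::release();            // one client gone, instance kept alive
    b.reset();
    Server::release();            // last client gone, global reference dropped
    return 0;
}

This mirrors ChdbClient::~ChdbClient() in the diff, which resets its own server pointer inside the locked scope and then calls EmbeddedServer::releaseInstance().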