diff --git a/.github/workflows/test-integrations-ai.yml b/.github/workflows/test-integrations-ai.yml
index bc89cb9afe..0c107ed1b7 100644
--- a/.github/workflows/test-integrations-ai.yml
+++ b/.github/workflows/test-integrations-ai.yml
@@ -22,81 +22,6 @@ env:
   CACHED_BUILD_PATHS: |
     ${{ github.workspace }}/dist-serverless
 jobs:
-  test-ai-latest:
-    name: AI (latest)
-    timeout-minutes: 30
-    runs-on: ${{ matrix.os }}
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: ["3.9","3.11","3.12"]
-        # python3.6 reached EOL and is no longer being supported on
-        # new versions of hosted runners on Github Actions
-        # ubuntu-20.04 is the last version that supported python3.6
-        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
-        os: [ubuntu-22.04]
-    # Use Docker container only for Python 3.6
-    container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }}
-    steps:
-      - uses: actions/checkout@v4.2.2
-      - uses: actions/setup-python@v5
-        if: ${{ matrix.python-version != '3.6' }}
-        with:
-          python-version: ${{ matrix.python-version }}
-          allow-prereleases: true
-      - name: Setup Test Env
-        run: |
-          pip install "coverage[toml]" tox
-      - name: Erase coverage
-        run: |
-          coverage erase
-      - name: Test anthropic latest
-        run: |
-          set -x # print commands that are executed
-          ./scripts/runtox.sh "py${{ matrix.python-version }}-anthropic-latest"
-      - name: Test cohere latest
-        run: |
-          set -x # print commands that are executed
-          ./scripts/runtox.sh "py${{ matrix.python-version }}-cohere-latest"
-      - name: Test langchain latest
-        run: |
-          set -x # print commands that are executed
-          ./scripts/runtox.sh "py${{ matrix.python-version }}-langchain-latest"
-      - name: Test openai latest
-        run: |
-          set -x # print commands that are executed
-          ./scripts/runtox.sh "py${{ matrix.python-version }}-openai-latest"
-      - name: Test huggingface_hub latest
-        run: |
-          set -x # print commands that are executed
-          ./scripts/runtox.sh "py${{ matrix.python-version }}-huggingface_hub-latest"
-      - name: Generate coverage XML (Python 3.6)
-        if: ${{ !cancelled() && matrix.python-version == '3.6' }}
-        run: |
-          export COVERAGE_RCFILE=.coveragerc36
-          coverage combine .coverage-sentry-*
-          coverage xml --ignore-errors
-      - name: Generate coverage XML
-        if: ${{ !cancelled() && matrix.python-version != '3.6' }}
-        run: |
-          coverage combine .coverage-sentry-*
-          coverage xml
-      - name: Upload coverage to Codecov
-        if: ${{ !cancelled() }}
-        uses: codecov/codecov-action@v5.4.2
-        with:
-          token: ${{ secrets.CODECOV_TOKEN }}
-          files: coverage.xml
-          # make sure no plugins alter our coverage reports
-          plugin: noop
-          verbose: true
-      - name: Upload test results to Codecov
-        if: ${{ !cancelled() }}
-        uses: codecov/test-results-action@v1
-        with:
-          token: ${{ secrets.CODECOV_TOKEN }}
-          files: .junitxml
-          verbose: true
   test-ai-pinned:
     name: AI (pinned)
     timeout-minutes: 30
diff --git a/scripts/populate_tox/config.py b/scripts/populate_tox/config.py
index 5b268e82c1..18dcbad555 100644
--- a/scripts/populate_tox/config.py
+++ b/scripts/populate_tox/config.py
@@ -156,6 +156,22 @@
     "loguru": {
         "package": "loguru",
     },
+    "openai": {
+        "package": "openai",
+        "deps": {
+            "*": ["tiktoken", "pytest-asyncio"],
+            "<1.23": ["httpx<0.28.0"],
+        },
+        "variants": ["openai-notiktoken"],
+    },
+    "openai-notiktoken": {
+        "package": "openai",
+        "deps": {
+            "*": ["pytest-asyncio"],
+            "<1.23": ["httpx<0.28.0"],
+        },
+        "python": ">=3.9",
+    },
     "openfeature": {
         "package": "openfeature-sdk",
     },
diff --git a/scripts/populate_tox/populate_tox.py b/scripts/populate_tox/populate_tox.py
index be91a91f25..76b76cd677 100644
--- a/scripts/populate_tox/populate_tox.py
+++ b/scripts/populate_tox/populate_tox.py
@@ -74,8 +74,6 @@
     "chalice",
     "gcp",
     "httpx",
-    "openai",
-    "openai_notiktoken",
     "pure_eval",
     "quart",
     "ray",
diff --git a/scripts/populate_tox/tox.jinja b/scripts/populate_tox/tox.jinja
index 66de126b90..034da457a1 100644
--- a/scripts/populate_tox/tox.jinja
+++ b/scripts/populate_tox/tox.jinja
@@ -77,13 +77,6 @@ envlist =
     {py3.9,py3.11,py3.12}-httpx-v{0.25,0.27}
     {py3.9,py3.12,py3.13}-httpx-latest
 
-    # OpenAI
-    {py3.9,py3.11,py3.12}-openai-v1.0
-    {py3.9,py3.11,py3.12}-openai-v1.22
-    {py3.9,py3.11,py3.12}-openai-v1.55
-    {py3.9,py3.11,py3.12}-openai-latest
-    {py3.9,py3.11,py3.12}-openai-notiktoken
-
     # OpenTelemetry (OTel)
     {py3.7,py3.9,py3.12,py3.13}-opentelemetry
 
@@ -230,20 +223,6 @@ deps =
     httpx-v0.27: httpx~=0.27.0
     httpx-latest: httpx
 
-    # OpenAI
-    openai: pytest-asyncio
-    openai-v1.0: openai~=1.0.0
-    openai-v1.0: tiktoken
-    openai-v1.0: httpx<0.28.0
-    openai-v1.22: openai~=1.22.0
-    openai-v1.22: tiktoken
-    openai-v1.22: httpx<0.28.0
-    openai-v1.55: openai~=1.55.0
-    openai-v1.55: tiktoken
-    openai-latest: openai
-    openai-latest: tiktoken~=0.6.0
-    openai-notiktoken: openai
-
     # OpenTelemetry (OTel)
     opentelemetry: opentelemetry-distro
 
diff --git a/tox.ini b/tox.ini
index 6a12dc9b68..a46478caf9 100644
--- a/tox.ini
+++ b/tox.ini
@@ -10,7 +10,7 @@
 # The file (and all resulting CI YAMLs) then need to be regenerated via
 # "scripts/generate-test-files.sh".
 #
-# Last generated: 2025-05-06T12:10:52.864239+00:00
+# Last generated: 2025-05-06T12:44:28.174129+00:00
 
 [tox]
 requires =
@@ -77,13 +77,6 @@ envlist =
     {py3.9,py3.11,py3.12}-httpx-v{0.25,0.27}
     {py3.9,py3.12,py3.13}-httpx-latest
 
-    # OpenAI
-    {py3.9,py3.11,py3.12}-openai-v1.0
-    {py3.9,py3.11,py3.12}-openai-v1.22
-    {py3.9,py3.11,py3.12}-openai-v1.55
-    {py3.9,py3.11,py3.12}-openai-latest
-    {py3.9,py3.11,py3.12}-openai-notiktoken
-
     # OpenTelemetry (OTel)
     {py3.7,py3.9,py3.12,py3.13}-opentelemetry
 
@@ -149,6 +142,16 @@ envlist =
     {py3.9,py3.11,py3.12}-langchain-notiktoken-v0.2.17
     {py3.9,py3.12,py3.13}-langchain-notiktoken-v0.3.25
 
+    {py3.9,py3.11,py3.12}-openai-v1.0.1
+    {py3.9,py3.11,py3.12}-openai-v1.26.0
+    {py3.9,py3.11,py3.12}-openai-v1.51.2
+    {py3.9,py3.11,py3.12}-openai-v1.77.0
+
+    {py3.9,py3.11,py3.12}-openai-notiktoken-v1.0.1
+    {py3.9,py3.11,py3.12}-openai-notiktoken-v1.26.0
+    {py3.9,py3.11,py3.12}-openai-notiktoken-v1.51.2
+    {py3.9,py3.11,py3.12}-openai-notiktoken-v1.77.0
+
     {py3.8,py3.10,py3.11}-huggingface_hub-v0.22.2
     {py3.8,py3.10,py3.11}-huggingface_hub-v0.25.2
     {py3.8,py3.12,py3.13}-huggingface_hub-v0.28.1
@@ -402,20 +405,6 @@ deps =
     httpx-v0.27: httpx~=0.27.0
     httpx-latest: httpx
 
-    # OpenAI
-    openai: pytest-asyncio
-    openai-v1.0: openai~=1.0.0
-    openai-v1.0: tiktoken
-    openai-v1.0: httpx<0.28.0
-    openai-v1.22: openai~=1.22.0
-    openai-v1.22: tiktoken
-    openai-v1.22: httpx<0.28.0
-    openai-v1.55: openai~=1.55.0
-    openai-v1.55: tiktoken
-    openai-latest: openai
-    openai-latest: tiktoken~=0.6.0
-    openai-notiktoken: openai
-
     # OpenTelemetry (OTel)
     opentelemetry: opentelemetry-distro
 
@@ -525,6 +514,21 @@ deps =
     langchain-notiktoken-v0.2.17: langchain-community
     langchain-notiktoken-v0.3.25: langchain-community
 
+    openai-v1.0.1: openai==1.0.1
+    openai-v1.26.0: openai==1.26.0
+    openai-v1.51.2: openai==1.51.2
+    openai-v1.77.0: openai==1.77.0
+    openai: tiktoken
+    openai: pytest-asyncio
+    openai-v1.0.1: httpx<0.28.0
+
+    openai-notiktoken-v1.0.1: openai==1.0.1
+    openai-notiktoken-v1.26.0: openai==1.26.0
+    openai-notiktoken-v1.51.2: openai==1.51.2
+    openai-notiktoken-v1.77.0: openai==1.77.0
+    openai-notiktoken: pytest-asyncio
+    openai-notiktoken-v1.0.1: httpx<0.28.0
+
     huggingface_hub-v0.22.2: huggingface_hub==0.22.2
     huggingface_hub-v0.25.2: huggingface_hub==0.25.2
     huggingface_hub-v0.28.1: huggingface_hub==0.28.1