diff --git a/scripts/populate_tox/README.md b/scripts/populate_tox/README.md index c9a3b67ba0..4d71ad914b 100644 --- a/scripts/populate_tox/README.md +++ b/scripts/populate_tox/README.md @@ -46,6 +46,7 @@ integration_name: { }, "python": python_version_specifier, "include": package_version_specifier, + "variants": [another_integration_name, ...], } ``` @@ -153,6 +154,30 @@ be expressed like so: } ``` +### `variants` + +You might want to test a single integration against two different sets of +dependencies. For example, `langchain` should be tested both with and without +`tiktoken`. In that case: + +- add the variant as its own entry in `TEST_SUITE_CONFIG` +- link it from the base variant via the `variants` key + +```python +{ + "langchain": { + "variants": ["langchain_notiktoken"], + ... + }, + "langchain_notiktoken": { + ... + } +} +``` + +Both `langchain` and `langchain_notiktoken` will be run as part of the Langchain +CI workflow. + ## How-Tos diff --git a/scripts/populate_tox/config.py b/scripts/populate_tox/config.py index 4d5d5b14ce..c047584a4d 100644 --- a/scripts/populate_tox/config.py +++ b/scripts/populate_tox/config.py @@ -126,6 +126,23 @@ "huggingface_hub": { "package": "huggingface_hub", }, + "langchain": { + "package": "langchain", + "deps": { + "*": ["openai", "tiktoken"], + "<0.2": ["httpx<0.28.0"], + ">0.2": ["langchain-community"], + }, + "variants": ["langchain_notiktoken"], + }, + "langchain_notiktoken": { + "package": "langchain", + "deps": { + "*": ["openai"], + "<0.2": ["httpx<0.28.0"], + ">0.2": ["langchain-community"], + }, + }, "launchdarkly": { "package": "launchdarkly-server-sdk", }, diff --git a/scripts/populate_tox/populate_tox.py b/scripts/populate_tox/populate_tox.py index 0aeb0f02ef..be91a91f25 100644 --- a/scripts/populate_tox/populate_tox.py +++ b/scripts/populate_tox/populate_tox.py @@ -74,8 +74,6 @@ "chalice", "gcp", "httpx", - "langchain", - "langchain_notiktoken", "openai", "openai_notiktoken", "pure_eval", @@ -648,6 +646,17 @@ def 
main(fail_on_changes: bool = False) -> None: } ) + variants = TEST_SUITE_CONFIG[integration].get("variants") or [] + for variant in variants: + packages[group].append( + { + "name": variant, + "package": package, + "extra": extra, + "releases": test_releases, + } + ) + if fail_on_changes: old_file_hash = get_file_hash() diff --git a/scripts/populate_tox/tox.jinja b/scripts/populate_tox/tox.jinja index 2869da275b..66de126b90 100644 --- a/scripts/populate_tox/tox.jinja +++ b/scripts/populate_tox/tox.jinja @@ -77,12 +77,6 @@ envlist = {py3.9,py3.11,py3.12}-httpx-v{0.25,0.27} {py3.9,py3.12,py3.13}-httpx-latest - # Langchain - {py3.9,py3.11,py3.12}-langchain-v0.1 - {py3.9,py3.11,py3.12}-langchain-v0.3 - {py3.9,py3.11,py3.12}-langchain-latest - {py3.9,py3.11,py3.12}-langchain-notiktoken - # OpenAI {py3.9,py3.11,py3.12}-openai-v1.0 {py3.9,py3.11,py3.12}-openai-v1.22 @@ -236,20 +230,6 @@ deps = httpx-v0.27: httpx~=0.27.0 httpx-latest: httpx - # Langchain - langchain-v0.1: openai~=1.0.0 - langchain-v0.1: langchain~=0.1.11 - langchain-v0.1: tiktoken~=0.6.0 - langchain-v0.1: httpx<0.28.0 - langchain-v0.3: langchain~=0.3.0 - langchain-v0.3: langchain-community - langchain-v0.3: tiktoken - langchain-v0.3: openai - langchain-{latest,notiktoken}: langchain - langchain-{latest,notiktoken}: langchain-openai - langchain-{latest,notiktoken}: openai>=1.6.1 - langchain-latest: tiktoken~=0.6.0 - # OpenAI openai: pytest-asyncio openai-v1.0: openai~=1.0.0 diff --git a/tests/integrations/langchain/test_langchain.py b/tests/integrations/langchain/test_langchain.py index b9e5705b88..6f39491826 100644 --- a/tests/integrations/langchain/test_langchain.py +++ b/tests/integrations/langchain/test_langchain.py @@ -158,6 +158,7 @@ def test_langchain_agent( model_name="gpt-3.5-turbo", temperature=0, openai_api_key="badkey", + streaming=True, ) agent = create_openai_tools_agent(llm, [get_word_length], prompt) @@ -232,6 +233,7 @@ def test_langchain_error(sentry_init, capture_events): 
model_name="gpt-3.5-turbo", temperature=0, openai_api_key="badkey", + streaming=True, ) agent = create_openai_tools_agent(llm, [get_word_length], prompt) @@ -327,6 +329,7 @@ def test_span_origin(sentry_init, capture_events): model_name="gpt-3.5-turbo", temperature=0, openai_api_key="badkey", + streaming=True, ) agent = create_openai_tools_agent(llm, [get_word_length], prompt) diff --git a/tox.ini b/tox.ini index 332f541793..024ddd939c 100644 --- a/tox.ini +++ b/tox.ini @@ -10,7 +10,7 @@ # The file (and all resulting CI YAMLs) then need to be regenerated via # "scripts/generate-test-files.sh". # -# Last generated: 2025-05-06T10:23:50.156629+00:00 +# Last generated: 2025-05-06T12:51:20.834069+00:00 [tox] requires = @@ -77,12 +77,6 @@ envlist = {py3.9,py3.11,py3.12}-httpx-v{0.25,0.27} {py3.9,py3.12,py3.13}-httpx-latest - # Langchain - {py3.9,py3.11,py3.12}-langchain-v0.1 - {py3.9,py3.11,py3.12}-langchain-v0.3 - {py3.9,py3.11,py3.12}-langchain-latest - {py3.9,py3.11,py3.12}-langchain-notiktoken - # OpenAI {py3.9,py3.11,py3.12}-openai-v1.0 {py3.9,py3.11,py3.12}-openai-v1.22 @@ -145,6 +139,16 @@ envlist = {py3.9,py3.11,py3.12}-cohere-v5.11.4 {py3.9,py3.11,py3.12}-cohere-v5.15.0 + {py3.9,py3.10,py3.11}-langchain-v0.0.354 + {py3.9,py3.11,py3.12}-langchain-v0.1.20 + {py3.9,py3.11,py3.12}-langchain-v0.2.17 + {py3.9,py3.12,py3.13}-langchain-v0.3.25 + + {py3.9,py3.10,py3.11}-langchain_notiktoken-v0.0.354 + {py3.9,py3.11,py3.12}-langchain_notiktoken-v0.1.20 + {py3.9,py3.11,py3.12}-langchain_notiktoken-v0.2.17 + {py3.9,py3.12,py3.13}-langchain_notiktoken-v0.3.25 + {py3.8,py3.10,py3.11}-huggingface_hub-v0.22.2 {py3.8,py3.10,py3.11}-huggingface_hub-v0.25.2 {py3.8,py3.12,py3.13}-huggingface_hub-v0.28.1 @@ -398,20 +402,6 @@ deps = httpx-v0.27: httpx~=0.27.0 httpx-latest: httpx - # Langchain - langchain-v0.1: openai~=1.0.0 - langchain-v0.1: langchain~=0.1.11 - langchain-v0.1: tiktoken~=0.6.0 - langchain-v0.1: httpx<0.28.0 - langchain-v0.3: langchain~=0.3.0 - langchain-v0.3: 
langchain-community - langchain-v0.3: tiktoken - langchain-v0.3: openai - langchain-{latest,notiktoken}: langchain - langchain-{latest,notiktoken}: langchain-openai - langchain-{latest,notiktoken}: openai>=1.6.1 - langchain-latest: tiktoken~=0.6.0 - # OpenAI openai: pytest-asyncio openai-v1.0: openai~=1.0.0 @@ -514,6 +504,27 @@ deps = cohere-v5.11.4: cohere==5.11.4 cohere-v5.15.0: cohere==5.15.0 + langchain-v0.0.354: langchain==0.0.354 + langchain-v0.1.20: langchain==0.1.20 + langchain-v0.2.17: langchain==0.2.17 + langchain-v0.3.25: langchain==0.3.25 + langchain: openai + langchain: tiktoken + langchain-v0.0.354: httpx<0.28.0 + langchain-v0.1.20: httpx<0.28.0 + langchain-v0.2.17: langchain-community + langchain-v0.3.25: langchain-community + + langchain_notiktoken-v0.0.354: langchain==0.0.354 + langchain_notiktoken-v0.1.20: langchain==0.1.20 + langchain_notiktoken-v0.2.17: langchain==0.2.17 + langchain_notiktoken-v0.3.25: langchain==0.3.25 + langchain_notiktoken: openai + langchain_notiktoken-v0.0.354: httpx<0.28.0 + langchain_notiktoken-v0.1.20: httpx<0.28.0 + langchain_notiktoken-v0.2.17: langchain-community + langchain_notiktoken-v0.3.25: langchain-community + huggingface_hub-v0.22.2: huggingface_hub==0.22.2 huggingface_hub-v0.25.2: huggingface_hub==0.25.2 huggingface_hub-v0.28.1: huggingface_hub==0.28.1