diff --git a/pyproject.toml.jinja b/pyproject.toml.jinja index 3411fcd..51dbca1 100644 --- a/pyproject.toml.jinja +++ b/pyproject.toml.jinja @@ -6,12 +6,12 @@ readme = "README.md" requires-python = ">=3.12" dependencies = [ "llama-cloud-services>=0.6.69", - "llama-index-workflows>=2.2.0,<3.0.0", + "llama-index-workflows>=2.4.0,<3.0.0", "python-dotenv>=1.1.0", "jsonref>=1.1.0", - "click>=8.2.1,<8.3.0", + "click>=8.2.1,!=8.3.0", "httpx>=0.28.1", - "llama-index-core>=0.14.0", + "llama-index-core>=0.14.0,<0.15.0", ] [project.scripts] @@ -50,4 +50,3 @@ process-file = "{{project_name_snake}}.process_file:workflow" [tool.llamadeploy.ui] directory = "ui" - diff --git a/src/{{ project_name_snake }}/process_file.py b/src/{{ project_name_snake }}/process_file.py index 0bbe4f8..4505cc8 100644 --- a/src/{{ project_name_snake }}/process_file.py +++ b/src/{{ project_name_snake }}/process_file.py @@ -153,9 +153,9 @@ async def record_extracted_data( message=f"Recorded extracted data for file {event.data.file_name}", ) ) - # remove past data when reprocessing the same file + # remove past data when reprocessing the same file. Do not validate the schema in case we've changed it.
if event.data.file_hash: - existing_data = await get_data_client().search( + existing_data = await get_data_client().untyped_search( filter={ "file_hash": { "eq": event.data.file_hash, diff --git a/test-proj/pyproject.toml b/test-proj/pyproject.toml index cf1e067..e3dd1f0 100644 --- a/test-proj/pyproject.toml +++ b/test-proj/pyproject.toml @@ -6,12 +6,12 @@ readme = "README.md" requires-python = ">=3.12" dependencies = [ "llama-cloud-services>=0.6.69", - "llama-index-workflows>=2.2.0,<3.0.0", + "llama-index-workflows>=2.4.0,<3.0.0", "python-dotenv>=1.1.0", "jsonref>=1.1.0", - "click>=8.2.1,<8.3.0", + "click>=8.2.1,!=8.3.0", "httpx>=0.28.1", - "llama-index-core>=0.14.0", + "llama-index-core>=0.14.0,<0.15.0", ] [project.scripts] @@ -50,4 +50,3 @@ process-file = "test_proj.process_file:workflow" [tool.llamadeploy.ui] directory = "ui" - diff --git a/test-proj/src/test_proj/process_file.py b/test-proj/src/test_proj/process_file.py index 0bbe4f8..4505cc8 100644 --- a/test-proj/src/test_proj/process_file.py +++ b/test-proj/src/test_proj/process_file.py @@ -153,9 +153,9 @@ async def record_extracted_data( message=f"Recorded extracted data for file {event.data.file_name}", ) ) - # remove past data when reprocessing the same file + # remove past data when reprocessing the same file. Do not validate the schema in case we've changed it. if event.data.file_hash: - existing_data = await get_data_client().search( + existing_data = await get_data_client().untyped_search( filter={ "file_hash": { "eq": event.data.file_hash,