diff --git a/sdk/ai/azure-ai-projects/assets.json b/sdk/ai/azure-ai-projects/assets.json
index 4f1bbaf6e870..378e18f0bb23 100644
--- a/sdk/ai/azure-ai-projects/assets.json
+++ b/sdk/ai/azure-ai-projects/assets.json
@@ -2,5 +2,5 @@
   "AssetsRepo": "Azure/azure-sdk-assets",
   "AssetsRepoPrefixPath": "python",
   "TagPrefix": "python/ai/azure-ai-projects",
-  "Tag": "python/ai/azure-ai-projects_56eacfd554"
+  "Tag": "python/ai/azure-ai-projects_1d985f7ba5"
 }
diff --git a/sdk/ai/azure-ai-projects/cspell.json b/sdk/ai/azure-ai-projects/cspell.json
index f9f65f043709..12f5733ae5c7 100644
--- a/sdk/ai/azure-ai-projects/cspell.json
+++ b/sdk/ai/azure-ai-projects/cspell.json
@@ -1,31 +1,33 @@
 {
-  "ignoreWords": [
-    "agentic",
-    "aiproject",
-    "azureopenai",
-    "GLEU",
-    "aiservices",
-    "azureai",
-    "GENAI",
-    "UPIA",
-    "ansii",
-    "getconnectionwithcredentials",
-    "quantitive",
-    "balapvbyostoragecanary",
-    "fspath",
-    "aread",
-    "inpainting",
-    "CSDL",
-    "fstring",
-    "aiprojectclient",
-    "Tadmaq",
-    "Udbk",
-    "Ministral",
-    "cogsvc",
-    "evals",
-    "FineTuning",
-    "closefd"
-  ],
+  "ignoreWords": [
+    "agentic",
+    "aiproject",
+    "azureopenai",
+    "GLEU",
+    "aiservices",
+    "azureai",
+    "GENAI",
+    "UPIA",
+    "ansii",
+    "getconnectionwithcredentials",
+    "quantitive",
+    "balapvbyostoragecanary",
+    "fspath",
+    "aread",
+    "inpainting",
+    "CSDL",
+    "fstring",
+    "aiprojectclient",
+    "Tadmaq",
+    "Udbk",
+    "Ministral",
+    "cogsvc",
+    "evals",
+    "FineTuning",
+    "ftjob",
+    "ftchkpt",
+    "closefd"
+  ],
   "ignorePaths": [
     "*.csv",
     "*.json",
diff --git a/sdk/ai/azure-ai-projects/tests/conftest.py b/sdk/ai/azure-ai-projects/tests/conftest.py
index a074884c8063..6e3efea24142 100644
--- a/sdk/ai/azure-ai-projects/tests/conftest.py
+++ b/sdk/ai/azure-ai-projects/tests/conftest.py
@@ -108,6 +108,15 @@ def sanitize_url_paths():
 
     sanitize_url_paths()
 
+    # Sanitize fine-tuning job IDs in URLs and response bodies
+    add_general_regex_sanitizer(regex=r"ftjob-[a-f0-9]+", value="sanitized-ftjob-id")
+
+    # Sanitize file IDs in URLs and response bodies
+    add_general_regex_sanitizer(regex=r"file-[a-f0-9]+", value="sanitized-file-id")
+
+    # Sanitize checkpoint IDs in URLs and response bodies
+    add_general_regex_sanitizer(regex=r"ftchkpt-[a-f0-9]+", value="sanitized-checkpoint-id")
+
     # Sanitize API key from service response (this includes Application Insights connection string)
     add_body_key_sanitizer(json_path="credentials.key", value="sanitized-api-key")
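Editor's note: the three sanitizers added above are plain regex substitutions that the test proxy applies to recorded URLs and bodies. A minimal local sketch of the equivalent transformation (the sample URL and ID below are made up for illustration):

```python
import re

# Hypothetical recorded URL; real ftjob-/file-/ftchkpt- IDs follow the same hex shape.
url = "https://sanitized.services.ai.azure.com/openai/fine_tuning/jobs/ftjob-0a1b2c3d4e5f"

# Equivalent of add_general_regex_sanitizer(regex=r"ftjob-[a-f0-9]+", value="sanitized-ftjob-id")
print(re.sub(r"ftjob-[a-f0-9]+", "sanitized-ftjob-id", url))
# -> https://sanitized.services.ai.azure.com/openai/fine_tuning/jobs/sanitized-ftjob-id
```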
diff --git a/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning.py b/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning.py
index 6af0abfb81f6..03ed19bdd5cb 100644
--- a/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning.py
+++ b/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning.py
@@ -17,15 +17,15 @@
     GLOBAL_STANDARD_TRAINING_TYPE,
     DEVELOPER_TIER_TRAINING_TYPE,
 )
-from devtools_testutils import recorded_by_proxy, is_live_and_not_recording
+from devtools_testutils import (
+    recorded_by_proxy,
+    RecordedTransport,
+    is_live_and_not_recording,
+)
 from azure.mgmt.cognitiveservices import CognitiveServicesManagementClient
 from azure.mgmt.cognitiveservices.models import Deployment, DeploymentProperties, DeploymentModel, Sku
 
 
-@pytest.mark.skipif(
-    condition=(not is_live_and_not_recording()),
-    reason="Skipped because we cannot record network calls with AOAI client",
-)
 class TestFineTuning(TestBase):
 
     def _create_sft_finetuning_job(self, openai_client, train_file_id, validation_file_id, training_type, model_type):
@@ -104,7 +104,7 @@ def _upload_test_files(self, openai_client, job_type):
         training_file_path = test_data_dir / self.test_finetuning_params[job_type]["training_file_name"]
         validation_file_path = test_data_dir / self.test_finetuning_params[job_type]["validation_file_name"]
 
-        with open(training_file_path, "rb") as f:
+        with self.open_with_lf(str(training_file_path), "rb") as f:
             train_file = openai_client.files.create(file=f, purpose="fine-tune")
             train_processed_file = openai_client.files.wait_for_processing(train_file.id)
             assert train_processed_file is not None
@@ -112,7 +112,7 @@ def _upload_test_files(self, openai_client, job_type):
             TestBase.assert_equal_or_not_none(train_processed_file.status, "processed")
             print(f"[_upload_test_files] Uploaded training file: {train_processed_file.id}")
 
-        with open(validation_file_path, "rb") as f:
+        with self.open_with_lf(str(validation_file_path), "rb") as f:
             validation_file = openai_client.files.create(file=f, purpose="fine-tune")
             validation_processed_file = openai_client.files.wait_for_processing(validation_file.id)
             assert validation_processed_file is not None
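Editor's note: `open_with_lf` is called here but not defined in this diff; presumably it lives on `TestBase`. A hedged sketch of one plausible implementation, assuming its purpose is normalizing CRLF to LF so the uploaded JSONL bytes, and therefore the recorded requests, are identical on Windows and Linux:

```python
import io
from contextlib import contextmanager

@contextmanager
def open_with_lf(path: str, mode: str = "rb"):
    """Open a file for upload with line endings normalized to LF (sketch only)."""
    with open(path, mode) as f:
        data = f.read().replace(b"\r\n", b"\n")  # normalize CRLF -> LF
    buffer = io.BytesIO(data)
    buffer.name = path  # the openai SDK reads .name to infer the upload filename
    try:
        yield buffer
    finally:
        buffer.close()
```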
@@ -290,18 +290,14 @@ def _extract_account_name_from_endpoint(self, project_endpoint, test_prefix):
         return endpoint_clean.split(".services.ai.azure.com")[0]
 
     def _test_deploy_and_infer_helper(
-        self, completed_job_id_env_var, deployment_format, deployment_capacity, test_prefix, inference_content, **kwargs
+        self, completed_job_id, deployment_format, deployment_capacity, test_prefix, inference_content, **kwargs
     ):
-        completed_job_id = os.getenv(completed_job_id_env_var)
-        if not completed_job_id:
-            pytest.skip(
-                f"{completed_job_id_env_var} environment variable not set - skipping {test_prefix} deploy and infer test"
-            )
+        pytest.skip(f"completed_job_id parameter not set - skipping {test_prefix} deploy and infer test")
 
-        subscription_id = os.getenv("AZURE_AI_PROJECTS_TESTS_AZURE_SUBSCRIPTION_ID")
-        resource_group = os.getenv("AZURE_AI_PROJECTS_TESTS_AZURE_RESOURCE_GROUP")
-        project_endpoint = os.getenv("AZURE_AI_PROJECTS_TESTS_PROJECT_ENDPOINT")
+        subscription_id = kwargs.get("azure_ai_projects_tests_azure_subscription_id")
+        resource_group = kwargs.get("azure_ai_projects_tests_azure_resource_group")
+        project_endpoint = kwargs.get("azure_ai_projects_tests_project_endpoint")
 
         if not all([subscription_id, resource_group, project_endpoint]):
             pytest.skip(
@@ -350,9 +346,9 @@ def _test_deploy_and_infer_helper(
             print(f"[{test_prefix}] Deployment status: {deployment_operation.status()}")
 
         print(f"[{test_prefix}] Deployment completed successfully")
-
+        print(f"[{test_prefix}] Waiting for 120 seconds for deployment to be fully ready.")
+        time.sleep(120)
         print(f"[{test_prefix}] Testing inference on deployment: {deployment_name}")
-        time.sleep(120)  # Wait for deployment to be fully ready
 
         response = openai_client.responses.create(
             model=deployment_name, input=[{"role": "user", "content": inference_content}]
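Editor's note: moving the 120-second sleep ahead of the log line is cosmetic; the wait itself is still a fixed delay. If deployment-readiness flakiness persists, a bounded retry around the first inference call is a common alternative. A sketch, under the assumption that a not-yet-ready deployment surfaces as an exception from `responses.create`:

```python
import time

def create_response_with_retry(openai_client, deployment_name, content, attempts=12, delay=10):
    """Retry the first inference call instead of sleeping a fixed 120 seconds (sketch only)."""
    for attempt in range(attempts):
        try:
            return openai_client.responses.create(
                model=deployment_name, input=[{"role": "user", "content": content}]
            )
        except Exception as exc:  # narrow this to the SDK's specific error type in real code
            if attempt == attempts - 1:
                raise
            print(f"Deployment not ready yet ({exc}); retrying in {delay}s")
            time.sleep(delay)
```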
@@ -374,57 +370,57 @@ def _test_deploy_and_infer_helper(
     )
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_sft_finetuning_create_job_openai_standard(self, **kwargs):
         self._test_sft_create_job_helper("openai", STANDARD_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_sft_finetuning_create_job_openai_developer(self, **kwargs):
         self._test_sft_create_job_helper("openai", DEVELOPER_TIER_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_sft_finetuning_create_job_openai_globalstandard(self, **kwargs):
         self._test_sft_create_job_helper("openai", GLOBAL_STANDARD_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_sft_finetuning_create_job_oss_globalstandard(self, **kwargs):
         self._test_sft_create_job_helper("oss", GLOBAL_STANDARD_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_dpo_finetuning_create_job_openai_standard(self, **kwargs):
         self._test_dpo_create_job_helper("openai", STANDARD_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_dpo_finetuning_create_job_openai__developer(self, **kwargs):
         self._test_dpo_create_job_helper("openai", DEVELOPER_TIER_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_dpo_finetuning_create_job_openai_globalstandard(self, **kwargs):
         self._test_dpo_create_job_helper("openai", GLOBAL_STANDARD_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_rft_finetuning_create_job_openai_standard(self, **kwargs):
         self._test_rft_create_job_helper("openai", STANDARD_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_rft_finetuning_create_job_openai_globalstandard(self, **kwargs):
         self._test_rft_create_job_helper("openai", GLOBAL_STANDARD_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_rft_finetuning_create_job_openai_developer(self, **kwargs):
         self._test_rft_create_job_helper("openai", DEVELOPER_TIER_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_finetuning_retrieve_sft_job(self, **kwargs):
         with self.create_client(**kwargs) as project_client:
             with project_client.get_openai_client() as openai_client:
@@ -456,7 +452,7 @@ def test_finetuning_retrieve_sft_job(self, **kwargs):
                     self._cleanup_test_file(openai_client, validation_file.id)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_finetuning_retrieve_dpo_job(self, **kwargs):
         with self.create_client(**kwargs) as project_client:
             with project_client.get_openai_client() as openai_client:
@@ -488,7 +484,7 @@ def test_finetuning_retrieve_dpo_job(self, **kwargs):
                     self._cleanup_test_file(openai_client, validation_file.id)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_finetuning_retrieve_rft_job(self, **kwargs):
         with self.create_client(**kwargs) as project_client:
             with project_client.get_openai_client() as openai_client:
@@ -520,7 +516,7 @@ def test_finetuning_retrieve_rft_job(self, **kwargs):
                     self._cleanup_test_file(openai_client, validation_file.id)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_finetuning_list_jobs(self, **kwargs):
         with self.create_client(**kwargs) as project_client:
             with project_client.get_openai_client() as openai_client:
@@ -538,57 +534,57 @@ def test_finetuning_list_jobs(self, **kwargs):
 
             print(f"[test_finetuning_list] Successfully validated list functionality with {len(jobs_list)} jobs")
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_sft_cancel_job_openai_standard(self, **kwargs):
         self._test_cancel_job_helper(SFT_JOB_TYPE, "openai", STANDARD_TRAINING_TYPE, "supervised", **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_sft_cancel_job_openai_globalstandard(self, **kwargs):
         self._test_cancel_job_helper(SFT_JOB_TYPE, "openai", GLOBAL_STANDARD_TRAINING_TYPE, "supervised", **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_sft_cancel_job_openai_developer(self, **kwargs):
         self._test_cancel_job_helper(SFT_JOB_TYPE, "openai", DEVELOPER_TIER_TRAINING_TYPE, "supervised", **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_sft_cancel_job_oss_globalstandard(self, **kwargs):
         self._test_cancel_job_helper(SFT_JOB_TYPE, "oss", GLOBAL_STANDARD_TRAINING_TYPE, "supervised", **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_dpo_cancel_job_openai_standard(self, **kwargs):
         self._test_cancel_job_helper(DPO_JOB_TYPE, "openai", STANDARD_TRAINING_TYPE, "dpo", **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_dpo_cancel_job_openai_globalstandard(self, **kwargs):
         self._test_cancel_job_helper(DPO_JOB_TYPE, "openai", GLOBAL_STANDARD_TRAINING_TYPE, "dpo", **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_dpo_cancel_job_openai_developer(self, **kwargs):
         self._test_cancel_job_helper(DPO_JOB_TYPE, "openai", DEVELOPER_TIER_TRAINING_TYPE, "dpo", **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_rft_cancel_job_openai_standard(self, **kwargs):
         self._test_cancel_job_helper(RFT_JOB_TYPE, "openai", STANDARD_TRAINING_TYPE, "reinforcement", **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_rft_cancel_job_openai_globalstandard(self, **kwargs):
         self._test_cancel_job_helper(RFT_JOB_TYPE, "openai", GLOBAL_STANDARD_TRAINING_TYPE, "reinforcement", **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_rft_cancel_job_openai_developer(self, **kwargs):
         self._test_cancel_job_helper(RFT_JOB_TYPE, "openai", DEVELOPER_TIER_TRAINING_TYPE, "reinforcement", **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_finetuning_list_events(self, **kwargs):
         with self.create_client(**kwargs) as project_client:
@@ -600,17 +596,17 @@ def test_finetuning_list_events(self, **kwargs):
                 fine_tuning_job = self._create_sft_finetuning_job(
                     openai_client, train_file.id, validation_file.id, STANDARD_TRAINING_TYPE, "openai"
                 )
-                print(f"[test_finetuning_sft] Created job: {fine_tuning_job.id}")
+                print(f"[test_finetuning_list_events] Created job: {fine_tuning_job.id}")
 
                 TestBase.validate_fine_tuning_job(fine_tuning_job)
                 TestBase.assert_equal_or_not_none(fine_tuning_job.training_file, train_file.id)
                 TestBase.assert_equal_or_not_none(fine_tuning_job.validation_file, validation_file.id)
 
                 openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id)
-                print(f"[test_finetuning_sft] Cancelled job: {fine_tuning_job.id}")
+                print(f"[test_finetuning_list_events] Cancelled job: {fine_tuning_job.id}")
 
                 events_list = list(openai_client.fine_tuning.jobs.list_events(fine_tuning_job.id))
-                print(f"[test_finetuning_sft] Listed {len(events_list)} events for job: {fine_tuning_job.id}")
+                print(f"[test_finetuning_list_events] Listed {len(events_list)} events for job: {fine_tuning_job.id}")
 
                 assert len(events_list) > 0, "Fine-tuning job should have at least one event"
 
@@ -621,15 +617,19 @@ def test_finetuning_list_events(self, **kwargs):
                     assert event.level is not None, "Event should have a level"
                     assert event.message is not None, "Event should have a message"
                     assert event.type is not None, "Event should have a type"
-                print(f"[test_finetuning_sft] Successfully validated {len(events_list)} events")
+                print(f"[test_finetuning_list_events] Successfully validated {len(events_list)} events")
 
                 self._cleanup_test_file(openai_client, train_file.id)
                 self._cleanup_test_file(openai_client, validation_file.id)
+    @pytest.mark.skipif(
+        condition=(not is_live_and_not_recording()),
+        reason="Skipped because not able to pause any job",
+    )
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_finetuning_pause_job(self, **kwargs):
-        running_job_id = os.getenv("AZURE_AI_PROJECTS_TESTS_RUNNING_FINE_TUNING_JOB_ID")
+        running_job_id = kwargs.get("azure_ai_projects_tests_running_fine_tuning_job_id")
 
         if not running_job_id:
             pytest.skip(
@@ -639,10 +639,10 @@ def test_finetuning_pause_job(self, **kwargs):
 
         with self.create_client(**kwargs) as project_client:
             with project_client.get_openai_client() as openai_client:
-                print(f"[test_finetuning_pause] Testing pause functionality on job: {running_job_id}")
+                print(f"[test_finetuning_pause_job] Testing pause functionality on job: {running_job_id}")
 
                 job = openai_client.fine_tuning.jobs.retrieve(running_job_id)
-                print(f"[test_finetuning_pause] Job status before pause: {job.status}")
+                print(f"[test_finetuning_pause_job] Job status before pause: {job.status}")
 
                 if job.status != "running":
                     pytest.skip(
@@ -650,18 +650,22 @@ def test_finetuning_pause_job(self, **kwargs):
                     )
 
                 paused_job = openai_client.fine_tuning.jobs.pause(running_job_id)
-                print(f"[test_finetuning_pause] Paused job: {paused_job.id}")
+                print(f"[test_finetuning_pause_job] Paused job: {paused_job.id}")
 
                 TestBase.validate_fine_tuning_job(paused_job, expected_job_id=running_job_id)
-                TestBase.assert_equal_or_not_none(paused_job.status, "paused")
-                print(f"[test_finetuning_pause] Job status after pause: {paused_job.status}")
+                TestBase.assert_equal_or_not_none(paused_job.status, "pausing")
+                print(f"[test_finetuning_pause_job] Job status after pause: {paused_job.status}")
 
-                print(f"[test_finetuning_pause] Successfully paused and verified job: {running_job_id}")
+                print(f"[test_finetuning_pause_job] Successfully paused and verified job: {running_job_id}")
+    @pytest.mark.skipif(
+        condition=(not is_live_and_not_recording()),
+        reason="Skipped because not able to pause any job",
+    )
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_finetuning_resume_job(self, **kwargs):
-        paused_job_id = os.getenv("AZURE_AI_PROJECTS_TESTS_PAUSED_FINE_TUNING_JOB_ID")
+        paused_job_id = kwargs.get("azure_ai_projects_tests_paused_fine_tuning_job_id")
 
         if not paused_job_id:
             pytest.skip(
@@ -671,10 +675,10 @@ def test_finetuning_resume_job(self, **kwargs):
 
         with self.create_client(**kwargs) as project_client:
             with project_client.get_openai_client() as openai_client:
-                print(f"[test_finetuning_resume] Testing resume functionality on job: {paused_job_id}")
+                print(f"[test_finetuning_resume_job] Testing resume functionality on job: {paused_job_id}")
 
                 job = openai_client.fine_tuning.jobs.retrieve(paused_job_id)
-                print(f"[test_finetuning_resume] Job status before resume: {job.status}")
+                print(f"[test_finetuning_resume_job] Job status before resume: {job.status}")
 
                 if job.status != "paused":
                     pytest.skip(
@@ -682,15 +686,17 @@ def test_finetuning_resume_job(self, **kwargs):
                     )
 
                 resumed_job = openai_client.fine_tuning.jobs.resume(paused_job_id)
-                print(f"[test_finetuning_resume] Resumed job: {resumed_job.id}")
+                print(f"[test_finetuning_resume_job] Resumed job: {resumed_job.id}")
 
                 TestBase.validate_fine_tuning_job(resumed_job, expected_job_id=paused_job_id)
-                print(f"[test_finetuning_resume] Job status after resume: {resumed_job.status}")
+                TestBase.assert_equal_or_not_none(resumed_job.status, "resuming")
+                print(f"[test_finetuning_resume_job] Job status after resume: {resumed_job.status}")
+                print(f"[test_finetuning_resume_job] Successfully resumed and verified job: {paused_job_id}")
 
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_finetuning_list_checkpoints(self, **kwargs):
-        completed_job_id = os.getenv("AZURE_AI_PROJECTS_TESTS_COMPLETED_OAI_MODEL_SFT_FINE_TUNING_JOB_ID")
+        completed_job_id = kwargs.get("azure_ai_projects_tests_completed_oai_model_sft_fine_tuning_job_id")
 
         if not completed_job_id:
             pytest.skip(
@@ -727,11 +733,16 @@ def test_finetuning_list_checkpoints(self, **kwargs):
                 f"[test_finetuning_list_checkpoints] Successfully validated {len(checkpoints_list)} checkpoints for job: {completed_job_id}"
             )
 
+    @pytest.mark.skipif(
+        condition=(not is_live_and_not_recording()),
+        reason="Skipped because API not sending completed or failed status despite job being complete",
+    )
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_finetuning_deploy_and_infer_oai_model_sft_job(self, **kwargs):
+        completed_job_id = kwargs.get("azure_ai_projects_tests_completed_oai_model_sft_fine_tuning_job_id")
         self._test_deploy_and_infer_helper(
-            "AZURE_AI_PROJECTS_TESTS_COMPLETED_OAI_MODEL_SFT_FINE_TUNING_JOB_ID",
+            completed_job_id,
             "OpenAI",
             50,
             "test_finetuning_deploy_and_infer_oai_model_sft_job",
@@ -739,11 +750,16 @@ def test_finetuning_deploy_and_infer_oai_model_sft_job(self, **kwargs):
             **kwargs,
         )
+    @pytest.mark.skipif(
+        condition=(not is_live_and_not_recording()),
+        reason="Skipped because not able to complete any RFT job",
+    )
     @servicePreparer()
     @recorded_by_proxy
     def test_finetuning_deploy_and_infer_oai_model_rft_job(self, **kwargs):
+        completed_job_id = kwargs.get("azure_ai_projects_tests_completed_oai_model_rft_fine_tuning_job_id")
         self._test_deploy_and_infer_helper(
-            "AZURE_AI_PROJECTS_TESTS_COMPLETED_OAI_MODEL_RFT_FINE_TUNING_JOB_ID",
+            completed_job_id,
             "OpenAI",
             50,
             "test_finetuning_deploy_and_infer_oai_model_rft_job",
@@ -751,11 +767,16 @@ def test_finetuning_deploy_and_infer_oai_model_rft_job(self, **kwargs):
             **kwargs,
         )
 
+    @pytest.mark.skipif(
+        condition=(not is_live_and_not_recording()),
+        reason="Skipped because API not sending completed or failed status despite job being complete",
+    )
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_finetuning_deploy_and_infer_oai_model_dpo_job(self, **kwargs):
+        completed_job_id = kwargs.get("azure_ai_projects_tests_completed_oai_model_dpo_fine_tuning_job_id")
         self._test_deploy_and_infer_helper(
-            "AZURE_AI_PROJECTS_TESTS_COMPLETED_OAI_MODEL_DPO_FINE_TUNING_JOB_ID",
+            completed_job_id,
             "OpenAI",
             50,
             "test_finetuning_deploy_and_infer_oai_model_dpo_job",
@@ -763,11 +784,16 @@ def test_finetuning_deploy_and_infer_oai_model_dpo_job(self, **kwargs):
             **kwargs,
         )
 
+    @pytest.mark.skipif(
+        condition=(not is_live_and_not_recording()),
+        reason="Skipped because API not sending completed or failed status despite job being complete",
+    )
     @servicePreparer()
-    @recorded_by_proxy
+    @recorded_by_proxy(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     def test_finetuning_deploy_and_infer_oss_model_sft_job(self, **kwargs):
+        completed_job_id = kwargs.get("azure_ai_projects_tests_completed_oss_model_sft_fine_tuning_job_id")
         self._test_deploy_and_infer_helper(
-            "AZURE_AI_PROJECTS_TESTS_COMPLETED_OSS_MODEL_SFT_FINE_TUNING_JOB_ID",
+            completed_job_id,
             "Mistral AI",
             50,
             "test_finetuning_deploy_and_infer_oss_model_sft_job",
purpose="fine-tune") validation_processed_file = await openai_client.files.wait_for_processing(validation_file.id) assert validation_processed_file is not None @@ -299,18 +295,14 @@ def _extract_account_name_from_endpoint(self, project_endpoint, test_prefix): return endpoint_clean.split(".services.ai.azure.com")[0] async def _test_deploy_and_infer_helper_async( - self, completed_job_id_env_var, deployment_format, deployment_capacity, test_prefix, inference_content, **kwargs + self, completed_job_id, deployment_format, deployment_capacity, test_prefix, inference_content, **kwargs ): - completed_job_id = os.getenv(completed_job_id_env_var) - if not completed_job_id: - pytest.skip( - f"{completed_job_id_env_var} environment variable not set - skipping {test_prefix} deploy and infer test" - ) + pytest.skip(f"completed_job_id parameter not set - skipping {test_prefix} deploy and infer test") - subscription_id = os.getenv("AZURE_AI_PROJECTS_TESTS_AZURE_SUBSCRIPTION_ID") - resource_group = os.getenv("AZURE_AI_PROJECTS_TESTS_AZURE_RESOURCE_GROUP") - project_endpoint = os.getenv("AZURE_AI_PROJECTS_TESTS_PROJECT_ENDPOINT") + subscription_id = kwargs.get("azure_ai_projects_tests_azure_subscription_id") + resource_group = kwargs.get("azure_ai_projects_tests_azure_resource_group") + project_endpoint = kwargs.get("azure_ai_projects_tests_project_endpoint") if not all([subscription_id, resource_group, project_endpoint]): pytest.skip( @@ -359,9 +351,9 @@ async def _test_deploy_and_infer_helper_async( print(f"[{test_prefix}] Deployment status: {deployment_operation.status()}") print(f"[{test_prefix}] Deployment completed successfully") - + print(f"[{test_prefix}] Waiting for 120 seconds for deployment to be fully ready.") + await asyncio.sleep(120) print(f"[{test_prefix}] Testing inference on deployment: {deployment_name}") - await asyncio.sleep(120) # Wait for deployment to be fully ready response = await openai_client.responses.create( model=deployment_name, input=[{"role": "user", "content": inference_content}] @@ -383,57 +375,57 @@ async def _test_deploy_and_infer_helper_async( ) @servicePreparer() - @recorded_by_proxy_async + @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX) async def test_sft_finetuning_create_job_openai_standard_async(self, **kwargs): await self._test_sft_create_job_helper_async("openai", STANDARD_TRAINING_TYPE, **kwargs) @servicePreparer() - @recorded_by_proxy_async + @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX) async def test_sft_finetuning_create_job_openai_globalstandard_async(self, **kwargs): await self._test_sft_create_job_helper_async("openai", GLOBAL_STANDARD_TRAINING_TYPE, **kwargs) @servicePreparer() - @recorded_by_proxy_async + @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX) async def test_sft_finetuning_create_job_openai_developer_async(self, **kwargs): await self._test_sft_create_job_helper_async("openai", DEVELOPER_TIER_TRAINING_TYPE, **kwargs) @servicePreparer() - @recorded_by_proxy_async + @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX) async def test_sft_finetuning_create_job_oss_globalstandard_async(self, **kwargs): await self._test_sft_create_job_helper_async("oss", GLOBAL_STANDARD_TRAINING_TYPE, **kwargs) @servicePreparer() - @recorded_by_proxy_async + @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX) async def test_dpo_finetuning_create_job_openai_standard_async(self, **kwargs): await 
@@ -383,57 +375,57 @@ async def _test_deploy_and_infer_helper_async(
     )
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_sft_finetuning_create_job_openai_standard_async(self, **kwargs):
         await self._test_sft_create_job_helper_async("openai", STANDARD_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_sft_finetuning_create_job_openai_globalstandard_async(self, **kwargs):
         await self._test_sft_create_job_helper_async("openai", GLOBAL_STANDARD_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_sft_finetuning_create_job_openai_developer_async(self, **kwargs):
         await self._test_sft_create_job_helper_async("openai", DEVELOPER_TIER_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_sft_finetuning_create_job_oss_globalstandard_async(self, **kwargs):
         await self._test_sft_create_job_helper_async("oss", GLOBAL_STANDARD_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_dpo_finetuning_create_job_openai_standard_async(self, **kwargs):
         await self._test_dpo_create_job_helper_async("openai", STANDARD_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_dpo_finetuning_create_job_openai_globalstandard_async(self, **kwargs):
         await self._test_dpo_create_job_helper_async("openai", GLOBAL_STANDARD_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_dpo_finetuning_create_job_openai_developer_async(self, **kwargs):
         await self._test_dpo_create_job_helper_async("openai", DEVELOPER_TIER_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_rft_finetuning_create_job_openai_standard_async(self, **kwargs):
         await self._test_rft_create_job_helper_async("openai", STANDARD_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_rft_finetuning_create_job_openai_globalstandard_async(self, **kwargs):
         await self._test_rft_create_job_helper_async("openai", GLOBAL_STANDARD_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_rft_finetuning_create_job_openai_developer_async(self, **kwargs):
         await self._test_rft_create_job_helper_async("openai", DEVELOPER_TIER_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_finetuning_retrieve_sft_job_async(self, **kwargs):
         project_client = self.create_async_client(**kwargs)
         openai_client = project_client.get_openai_client()
@@ -467,7 +459,7 @@ async def test_finetuning_retrieve_sft_job_async(self, **kwargs):
                     await self._cleanup_test_file_async(openai_client, validation_file.id)
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_finetuning_retrieve_dpo_job_async(self, **kwargs):
         project_client = self.create_async_client(**kwargs)
         openai_client = project_client.get_openai_client()
@@ -501,7 +493,7 @@ async def test_finetuning_retrieve_dpo_job_async(self, **kwargs):
                     await self._cleanup_test_file_async(openai_client, validation_file.id)
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_finetuning_retrieve_rft_job_async(self, **kwargs):
         project_client = self.create_async_client(**kwargs)
         openai_client = project_client.get_openai_client()
@@ -535,7 +527,7 @@ async def test_finetuning_retrieve_rft_job_async(self, **kwargs):
                     await self._cleanup_test_file_async(openai_client, validation_file.id)
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_finetuning_list_jobs_async(self, **kwargs):
         project_client = self.create_async_client(**kwargs)
         openai_client = project_client.get_openai_client()
@@ -558,69 +550,69 @@ async def test_finetuning_list_jobs_async(self, **kwargs):
 
             print(f"[test_finetuning_list] Successfully validated list functionality with {len(jobs_list)} jobs")
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_sft_cancel_job_openai_standard_async(self, **kwargs):
         await self._test_cancel_job_helper_async(SFT_JOB_TYPE, "openai", STANDARD_TRAINING_TYPE, "supervised", **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_sft_cancel_job_openai_developer_async(self, **kwargs):
         await self._test_cancel_job_helper_async(
             SFT_JOB_TYPE, "openai", DEVELOPER_TIER_TRAINING_TYPE, "supervised", **kwargs
         )
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_sft_cancel_job_openai_globalstandard_async(self, **kwargs):
         await self._test_cancel_job_helper_async(
             SFT_JOB_TYPE, "openai", GLOBAL_STANDARD_TRAINING_TYPE, "supervised", **kwargs
         )
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_sft_cancel_job_oss_globalstandard_async(self, **kwargs):
         await self._test_cancel_job_helper_async(
             SFT_JOB_TYPE, "oss", GLOBAL_STANDARD_TRAINING_TYPE, "supervised", **kwargs
         )
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_dpo_cancel_job_openai_standard_async(self, **kwargs):
         await self._test_cancel_job_helper_async(DPO_JOB_TYPE, "openai", STANDARD_TRAINING_TYPE, "dpo", **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_dpo_cancel_job_openai_developer_async(self, **kwargs):
         await self._test_cancel_job_helper_async(DPO_JOB_TYPE, "openai", DEVELOPER_TIER_TRAINING_TYPE, "dpo", **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_dpo_cancel_job_openai_globalstandard_async(self, **kwargs):
         await self._test_cancel_job_helper_async(DPO_JOB_TYPE, "openai", GLOBAL_STANDARD_TRAINING_TYPE, "dpo", **kwargs)
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_rft_cancel_job_openai_standard_async(self, **kwargs):
         await self._test_cancel_job_helper_async(
             RFT_JOB_TYPE, "openai", STANDARD_TRAINING_TYPE, "reinforcement", **kwargs
         )
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_rft_cancel_job_openai_developer_async(self, **kwargs):
         await self._test_cancel_job_helper_async(
             RFT_JOB_TYPE, "openai", DEVELOPER_TIER_TRAINING_TYPE, "reinforcement", **kwargs
         )
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_rft_cancel_job_openai_globalstandard_async(self, **kwargs):
         await self._test_cancel_job_helper_async(
             RFT_JOB_TYPE, "openai", GLOBAL_STANDARD_TRAINING_TYPE, "reinforcement", **kwargs
         )
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_finetuning_list_events_async(self, **kwargs):
         project_client = self.create_async_client(**kwargs)
@@ -633,20 +625,20 @@ async def test_finetuning_list_events_async(self, **kwargs):
                 fine_tuning_job = await self._create_sft_finetuning_job_async(
                     openai_client, train_file.id, validation_file.id, STANDARD_TRAINING_TYPE, "openai"
                 )
-                print(f"[test_finetuning_sft] Created job: {fine_tuning_job.id}")
print(f"[test_finetuning_sft] Created job: {fine_tuning_job.id}") + print(f"[test_finetuning_list_events] Created job: {fine_tuning_job.id}") TestBase.validate_fine_tuning_job(fine_tuning_job) TestBase.assert_equal_or_not_none(fine_tuning_job.training_file, train_file.id) TestBase.assert_equal_or_not_none(fine_tuning_job.validation_file, validation_file.id) await openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id) - print(f"[test_finetuning_sft] Cancelled job: {fine_tuning_job.id}") + print(f"[test_finetuning_list_events] Cancelled job: {fine_tuning_job.id}") events_list_async = openai_client.fine_tuning.jobs.list_events(fine_tuning_job.id) events_list = [] async for event in events_list_async: events_list.append(event) - print(f"[test_finetuning_sft] Listed {len(events_list)} events for job: {fine_tuning_job.id}") + print(f"[test_finetuning_list_events] Listed {len(events_list)} events for job: {fine_tuning_job.id}") assert len(events_list) > 0, "Fine-tuning job should have at least one event" @@ -657,15 +649,19 @@ async def test_finetuning_list_events_async(self, **kwargs): assert event.level is not None, "Event should have a level" assert event.message is not None, "Event should have a message" assert event.type is not None, "Event should have a type" - print(f"[test_finetuning_sft] Successfully validated {len(events_list)} events") + print(f"[test_finetuning_list_events] Successfully validated {len(events_list)} events") await self._cleanup_test_file_async(openai_client, train_file.id) await self._cleanup_test_file_async(openai_client, validation_file.id) + @pytest.mark.skipif( + condition=(not is_live_and_not_recording()), + reason="Skipped because not able to pause any job", + ) @servicePreparer() - @recorded_by_proxy_async + @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX) async def test_finetuning_pause_job_async(self, **kwargs): - running_job_id = os.getenv("AZURE_AI_PROJECTS_TESTS_RUNNING_FINE_TUNING_JOB_ID") + running_job_id = kwargs.get("azure_ai_projects_tests_running_fine_tuning_job_id") if not running_job_id: pytest.skip( @@ -677,10 +673,10 @@ async def test_finetuning_pause_job_async(self, **kwargs): async with project_client: - print(f"[test_finetuning_pause] Testing pause functionality on job: {running_job_id}") + print(f"[test_finetuning_pause_job] Testing pause functionality on job: {running_job_id}") job = await openai_client.fine_tuning.jobs.retrieve(running_job_id) - print(f"[test_finetuning_pause] Job status before pause: {job.status}") + print(f"[test_finetuning_pause_job] Job status before pause: {job.status}") if job.status != "running": pytest.skip( @@ -688,18 +684,21 @@ async def test_finetuning_pause_job_async(self, **kwargs): ) paused_job = await openai_client.fine_tuning.jobs.pause(running_job_id) - print(f"[test_finetuning_pause] Paused job: {paused_job.id}") + print(f"[test_finetuning_pause_job] Paused job: {paused_job.id}") TestBase.validate_fine_tuning_job(paused_job, expected_job_id=running_job_id) - TestBase.assert_equal_or_not_none(paused_job.status, "paused") - print(f"[test_finetuning_pause] Job status after pause: {paused_job.status}") - - print(f"[test_finetuning_pause] Successfully paused and verified job: {running_job_id}") + TestBase.assert_equal_or_not_none(paused_job.status, "pausing") + print(f"[test_finetuning_pause_job] Job status after pause: {paused_job.status}") + print(f"[test_finetuning_pause_job] Successfully paused and verified job: {running_job_id}") + @pytest.mark.skipif( + condition=(not 
+        reason="Skipped because not able to pause any job",
+    )
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_finetuning_resume_job_async(self, **kwargs):
-        paused_job_id = os.getenv("AZURE_AI_PROJECTS_TESTS_PAUSED_FINE_TUNING_JOB_ID")
+        paused_job_id = kwargs.get("azure_ai_projects_tests_paused_fine_tuning_job_id")
 
         if not paused_job_id:
             pytest.skip(
@@ -711,10 +710,10 @@ async def test_finetuning_resume_job_async(self, **kwargs):
 
         async with project_client:
-            print(f"[test_finetuning_resume] Testing resume functionality on job: {paused_job_id}")
+            print(f"[test_finetuning_resume_job] Testing resume functionality on job: {paused_job_id}")
 
             job = await openai_client.fine_tuning.jobs.retrieve(paused_job_id)
-            print(f"[test_finetuning_resume] Job status before resume: {job.status}")
+            print(f"[test_finetuning_resume_job] Job status before resume: {job.status}")
 
             if job.status != "paused":
                 pytest.skip(
@@ -722,15 +721,17 @@ async def test_finetuning_resume_job_async(self, **kwargs):
                 )
 
             resumed_job = await openai_client.fine_tuning.jobs.resume(paused_job_id)
-            print(f"[test_finetuning_resume] Resumed job: {resumed_job.id}")
+            print(f"[test_finetuning_resume_job] Resumed job: {resumed_job.id}")
 
             TestBase.validate_fine_tuning_job(resumed_job, expected_job_id=paused_job_id)
-            print(f"[test_finetuning_resume] Job status after resume: {resumed_job.status}")
+            TestBase.assert_equal_or_not_none(resumed_job.status, "resuming")
+            print(f"[test_finetuning_resume_job] Job status after resume: {resumed_job.status}")
+            print(f"[test_finetuning_resume_job] Successfully resumed and verified job: {paused_job_id}")
 
     @servicePreparer()
-    @recorded_by_proxy_async
+    @recorded_by_proxy_async(RecordedTransport.AZURE_CORE, RecordedTransport.HTTPX)
     async def test_finetuning_list_checkpoints_async(self, **kwargs):
-        completed_job_id = os.getenv("AZURE_AI_PROJECTS_TESTS_COMPLETED_OAI_MODEL_SFT_FINE_TUNING_JOB_ID")
+        completed_job_id = kwargs.get("azure_ai_projects_tests_completed_oai_model_sft_fine_tuning_job_id")
 
         if not completed_job_id:
             pytest.skip(
@@ -770,11 +771,16 @@ async def test_finetuning_list_checkpoints_async(self, **kwargs):
                 f"[test_finetuning_list_checkpoints] Successfully validated {len(checkpoints_list)} checkpoints for job: {completed_job_id}"
             )
 
+    @pytest.mark.skipif(
+        condition=(not is_live_and_not_recording()),
+        reason="Skipped because API not sending completed or failed status despite job being complete",
+    )
     @servicePreparer()
     @recorded_by_proxy_async
     async def test_finetuning_deploy_and_infer_oai_model_sft_job_async(self, **kwargs):
+        completed_job_id = kwargs.get("azure_ai_projects_tests_completed_oai_model_sft_fine_tuning_job_id")
         await self._test_deploy_and_infer_helper_async(
-            "AZURE_AI_PROJECTS_TESTS_COMPLETED_OAI_MODEL_SFT_FINE_TUNING_JOB_ID",
+            completed_job_id,
             "OpenAI",
             50,
             "test_finetuning_deploy_and_infer_oai_model_sft_job",
@@ -782,11 +788,16 @@ async def test_finetuning_deploy_and_infer_oai_model_sft_job_async(self, **kwargs):
             **kwargs,
         )
 
+    @pytest.mark.skipif(
+        condition=(not is_live_and_not_recording()),
+        reason="Skipped because not able to complete any RFT job",
+    )
     @servicePreparer()
     @recorded_by_proxy_async
     async def test_finetuning_deploy_and_infer_oai_model_rft_job_async(self, **kwargs):
+        completed_job_id = kwargs.get("azure_ai_projects_tests_completed_oai_model_rft_fine_tuning_job_id")
         await self._test_deploy_and_infer_helper_async(
"AZURE_AI_PROJECTS_TESTS_COMPLETED_OAI_MODEL_RFT_FINE_TUNING_JOB_ID", + completed_job_id, "OpenAI", 50, "test_finetuning_deploy_and_infer_oai_model_rft_job", @@ -794,11 +805,16 @@ async def test_finetuning_deploy_and_infer_oai_model_rft_job_async(self, **kwarg **kwargs, ) + @pytest.mark.skipif( + condition=(not is_live_and_not_recording()), + reason="Skipped because API not sending completed or failed status despite job being complete", + ) @servicePreparer() @recorded_by_proxy_async async def test_finetuning_deploy_and_infer_oai_model_dpo_job_async(self, **kwargs): + completed_job_id = kwargs.get("azure_ai_projects_tests_completed_oai_model_dpo_fine_tuning_job_id") await self._test_deploy_and_infer_helper_async( - "AZURE_AI_PROJECTS_TESTS_COMPLETED_OAI_MODEL_DPO_FINE_TUNING_JOB_ID", + completed_job_id, "OpenAI", 50, "test_finetuning_deploy_and_infer_oai_model_dpo_job", @@ -806,11 +822,16 @@ async def test_finetuning_deploy_and_infer_oai_model_dpo_job_async(self, **kwarg **kwargs, ) + @pytest.mark.skipif( + condition=(not is_live_and_not_recording()), + reason="Skipped because API not sending completed or failed status despite job being complete", + ) @servicePreparer() @recorded_by_proxy_async async def test_finetuning_deploy_and_infer_oss_model_sft_job_async(self, **kwargs): + completed_job_id = kwargs.get("azure_ai_projects_tests_completed_oss_model_sft_fine_tuning_job_id") await self._test_deploy_and_infer_helper_async( - "AZURE_AI_PROJECTS_TESTS_COMPLETED_OSS_MODEL_SFT_FINE_TUNING_JOB_ID", + completed_job_id, "Mistral AI", 50, "test_finetuning_deploy_and_infer_oss_model_sft_job", diff --git a/sdk/ai/azure-ai-projects/tests/test_base.py b/sdk/ai/azure-ai-projects/tests/test_base.py index 41dc1a545f73..830c8442c38a 100644 --- a/sdk/ai/azure-ai-projects/tests/test_base.py +++ b/sdk/ai/azure-ai-projects/tests/test_base.py @@ -54,6 +54,14 @@ azure_ai_projects_tests_ai_search_index_name="sanitized-index-name", azure_ai_projects_tests_mcp_project_connection_id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/sanitized-resource-group/providers/Microsoft.CognitiveServices/accounts/sanitized-account/projects/sanitized-project/connections/sanitized-mcp-connection", azure_ai_projects_tests_sharepoint_project_connection_id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/sanitized-resource-group/providers/Microsoft.CognitiveServices/accounts/sanitized-account/projects/sanitized-project/connections/sanitized-sharepoint-connection", + azure_ai_projects_tests_completed_oai_model_sft_fine_tuning_job_id="sanitized-ftjob-id", + azure_ai_projects_tests_completed_oai_model_rft_fine_tuning_job_id="sanitized-ftjob-id", + azure_ai_projects_tests_completed_oai_model_dpo_fine_tuning_job_id="sanitized-ftjob-id", + azure_ai_projects_tests_completed_oss_model_sft_fine_tuning_job_id="sanitized-ftjob-id", + azure_ai_projects_tests_running_fine_tuning_job_id="sanitized-ftjob-id", + azure_ai_projects_tests_paused_fine_tuning_job_id="sanitized-ftjob-id", + azure_ai_projects_tests_azure_subscription_id="00000000-0000-0000-0000-000000000000", + azure_ai_projects_tests_azure_resource_group="sanitized-resource-group", azure_ai_projects_tests_ai_search_user_input="What is Azure AI Projects?", azure_ai_projects_tests_sharepoint_user_input="What is SharePoint?", )