diff --git a/.pylintrc b/.pylintrc
index 339e71cb..d0498311 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -26,6 +26,5 @@ reports=no
 max-line-length=79
 
 [MASTER]
-# Ignore anything inside launch/clientlib (since it's documentation)
-ignore=clientlib,api_client
+ignore=api_client,openapi_client
 extension-pkg-whitelist=pydantic
diff --git a/launch/api_client/__init__.py b/launch/api_client/__init__.py
index f6709b0c..99e0c1e4 100644
--- a/launch/api_client/__init__.py
+++ b/launch/api_client/__init__.py
@@ -1,30 +1,17 @@
-# coding: utf-8
+import inspect
 
-# flake8: noqa
+from pydantic import BaseModel
 
-"""
-    launch
-
-    No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)  # noqa: E501
-
-    The version of the OpenAPI document: 1.0.0
-    Generated by: https://openapi-generator.tech
-"""
-
-__version__ = "1.0.5"
-
-# import ApiClient
-from launch.api_client.api_client import ApiClient
-
-# import Configuration
-from launch.api_client.configuration import Configuration
-
-# import exceptions
-from launch.api_client.exceptions import (
-    ApiAttributeError,
-    ApiException,
-    ApiKeyError,
-    ApiTypeError,
-    ApiValueError,
-    OpenApiException,
+from launch.api_client import models
+from launch.api_client.api_client import (  # noqa F401
+    ApiClient,
+    AsyncApis,
+    SyncApis,
 )
+
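+# Resolve forward references on every generated pydantic model at import time.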
+for model in inspect.getmembers(models, inspect.isclass):
+    if model[1].__module__ == "launch.api_client.models":
+        model_class = model[1]
+        if issubclass(model_class, BaseModel):
+            model_class.update_forward_refs()
diff --git a/launch/api_client/api/__init__.py b/launch/api_client/api/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/launch/api_client/api/default_api.py b/launch/api_client/api/default_api.py
new file mode 100644
index 00000000..2acb77b2
--- /dev/null
+++ b/launch/api_client/api/default_api.py
@@ -0,0 +1,719 @@
+# flake8: noqa E501
+from asyncio import get_event_loop
+from typing import TYPE_CHECKING, Awaitable
+
+from fastapi.encoders import jsonable_encoder
+
+from launch.api_client import models as m
+
+if TYPE_CHECKING:
+    from launch.api_client.api_client import ApiClient
+
+
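+# Each _build_for_* helper assembles the HTTP request and returns an awaitable;
+# AsyncDefaultApi awaits it directly, while SyncDefaultApi runs it to completion on the event loop.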
+class _DefaultApi:
+    def __init__(self, api_client: "ApiClient"):
+        self.api_client = api_client
+
+    def _build_for_clone_model_bundle_with_changes_v1_model_bundles_clone_with_changes_post(
+        self, clone_model_bundle_request: m.CloneModelBundleRequest
+    ) -> Awaitable[m.CreateModelBundleResponse]:
+        """
+        Creates a ModelBundle by cloning an existing one and then applying changes on top.
+        """
+        body = jsonable_encoder(clone_model_bundle_request)
+
+        return self.api_client.request(
+            type_=m.CreateModelBundleResponse,
+            method="POST",
+            url="/v1/model-bundles/clone-with-changes",
+            json=body,
+        )
+
+    def _build_for_create_async_inference_task_v1_async_tasks_post(
+        self, model_endpoint_id: str, endpoint_predict_request: m.EndpointPredictRequest
+    ) -> Awaitable[m.CreateAsyncTaskResponse]:
+        """
+        Runs an async inference prediction.
+        """
+        query_params = {"model_endpoint_id": str(model_endpoint_id)}
+
+        body = jsonable_encoder(endpoint_predict_request)
+
+        return self.api_client.request(
+            type_=m.CreateAsyncTaskResponse,
+            method="POST",
+            url="/v1/async-tasks",
+            params=query_params,
+            json=body,
+        )
+
+    def _build_for_create_batch_job_v1_batch_jobs_post(
+        self, create_batch_job_request: m.CreateBatchJobRequest
+    ) -> Awaitable[m.CreateBatchJobResponse]:
+        """
+        Creates a batch job for the current user.
+        """
+        body = jsonable_encoder(create_batch_job_request)
+
+        return self.api_client.request(type_=m.CreateBatchJobResponse, method="POST", url="/v1/batch-jobs", json=body)
+
+    def _build_for_create_model_bundle_v1_model_bundles_post(
+        self, create_model_bundle_request: m.CreateModelBundleRequest
+    ) -> Awaitable[m.CreateModelBundleResponse]:
+        """
+        Creates a ModelBundle for the current user.
+        """
+        body = jsonable_encoder(create_model_bundle_request)
+
+        return self.api_client.request(
+            type_=m.CreateModelBundleResponse, method="POST", url="/v1/model-bundles", json=body
+        )
+
+    def _build_for_create_model_endpoint_v1_model_endpoints_post(
+        self, create_model_endpoint_request: m.CreateModelEndpointRequest
+    ) -> Awaitable[m.CreateModelEndpointResponse]:
+        """
+        Creates a Model for the current user.
+        """
+        body = jsonable_encoder(create_model_endpoint_request)
+
+        return self.api_client.request(
+            type_=m.CreateModelEndpointResponse, method="POST", url="/v1/model-endpoints", json=body
+        )
+
+    def _build_for_create_sync_inference_task_v1_sync_tasks_post(
+        self, model_endpoint_id: str, endpoint_predict_request: m.EndpointPredictRequest
+    ) -> Awaitable[m.SyncEndpointPredictResponse]:
+        """
+        Runs a sync inference prediction.
+        """
+        query_params = {"model_endpoint_id": str(model_endpoint_id)}
+
+        body = jsonable_encoder(endpoint_predict_request)
+
+        return self.api_client.request(
+            type_=m.SyncEndpointPredictResponse,
+            method="POST",
+            url="/v1/sync-tasks",
+            params=query_params,
+            json=body,
+        )
+
+    def _build_for_delete_model_endpoint_v1_model_endpoints_model_endpoint_id_delete(
+        self, model_endpoint_id: str
+    ) -> Awaitable[m.DeleteModelEndpointResponse]:
+        """
+        Deletes the Model Endpoint owned by the current owner.
+        """
+        path_params = {"model_endpoint_id": str(model_endpoint_id)}
+
+        return self.api_client.request(
+            type_=m.DeleteModelEndpointResponse,
+            method="DELETE",
+            url="/v1/model-endpoints/{model_endpoint_id}",
+            path_params=path_params,
+        )
+
+    def _build_for_get_async_inference_task_v1_async_tasks_task_id_get(
+        self, task_id: str
+    ) -> Awaitable[m.GetAsyncTaskResponse]:
+        """
+        Gets the status of an async inference task.
+        """
+        path_params = {"task_id": str(task_id)}
+
+        return self.api_client.request(
+            type_=m.GetAsyncTaskResponse,
+            method="GET",
+            url="/v1/async-tasks/{task_id}",
+            path_params=path_params,
+        )
+
+    def _build_for_get_batch_job_v1_batch_jobs_batch_job_id_get(
+        self, batch_job_id: str
+    ) -> Awaitable[m.GetBatchJobResponse]:
+        """
+        Gets the status of a batch job.
+        """
+        path_params = {"batch_job_id": str(batch_job_id)}
+
+        return self.api_client.request(
+            type_=m.GetBatchJobResponse,
+            method="GET",
+            url="/v1/batch-jobs/{batch_job_id}",
+            path_params=path_params,
+        )
+
+    def _build_for_get_latest_model_bundle_v1_model_bundles_latest_get(
+        self, model_name: str
+    ) -> Awaitable[m.ModelBundleResponse]:
+        """
+        Gets the latest Model Bundle with the given name owned by the current owner.
+        """
+        query_params = {"model_name": str(model_name)}
+
+        return self.api_client.request(
+            type_=m.ModelBundleResponse,
+            method="GET",
+            url="/v1/model-bundles/latest",
+            params=query_params,
+        )
+
+    def _build_for_get_model_bundle_v1_model_bundles_model_bundle_id_get(
+        self, model_bundle_id: str
+    ) -> Awaitable[m.ModelBundleResponse]:
+        """
+        Gets the details for a given ModelBundle owned by the current owner.
+        """
+        path_params = {"model_bundle_id": str(model_bundle_id)}
+
+        return self.api_client.request(
+            type_=m.ModelBundleResponse,
+            method="GET",
+            url="/v1/model-bundles/{model_bundle_id}",
+            path_params=path_params,
+        )
+
+    def _build_for_get_model_endpoint_v1_model_endpoints_model_endpoint_id_get(
+        self, model_endpoint_id: str
+    ) -> Awaitable[m.GetModelEndpointResponse]:
+        """
+        Gets the details for a given Model Endpoint owned by the current owner.
+        """
+        path_params = {"model_endpoint_id": str(model_endpoint_id)}
+
+        return self.api_client.request(
+            type_=m.GetModelEndpointResponse,
+            method="GET",
+            url="/v1/model-endpoints/{model_endpoint_id}",
+            path_params=path_params,
+        )
+
+    def _build_for_get_model_endpoints_api_v1_model_endpoints_api_get(
+        self,
+    ) -> Awaitable[m.Any]:
+        """
+        Shows the API of the Model Endpoints owned by the current owner.
+        """
+        return self.api_client.request(
+            type_=m.Any,
+            method="GET",
+            url="/v1/model-endpoints-api",
+        )
+
+    def _build_for_get_model_endpoints_schema_v1_model_endpoints_schema_json_get(
+        self,
+    ) -> Awaitable[m.Any]:
+        """
+        Lists the schemas of the Model Endpoints owned by the current owner.
+        """
+        return self.api_client.request(
+            type_=m.Any,
+            method="GET",
+            url="/v1/model-endpoints-schema.json",
+        )
+
+    def _build_for_healthcheck_healthcheck_get(
+        self,
+    ) -> Awaitable[m.Any]:
+        """
+        Returns 200 if the app is healthy.
+        """
+        return self.api_client.request(
+            type_=m.Any,
+            method="GET",
+            url="/healthcheck",
+        )
+
+    def _build_for_healthcheck_healthz_get(
+        self,
+    ) -> Awaitable[m.Any]:
+        """
+        Returns 200 if the app is healthy.
+        """
+        return self.api_client.request(
+            type_=m.Any,
+            method="GET",
+            url="/healthz",
+        )
+
+    def _build_for_healthcheck_readyz_get(
+        self,
+    ) -> Awaitable[m.Any]:
+        """
+        Returns 200 if the app is healthy.
+        """
+        return self.api_client.request(
+            type_=m.Any,
+            method="GET",
+            url="/readyz",
+        )
+
+    def _build_for_list_model_bundles_v1_model_bundles_get(
+        self, model_name: str = None, order_by: m.ModelBundleOrderBy = None
+    ) -> Awaitable[m.ListModelBundlesResponse]:
+        """
+        Lists the ModelBundles owned by the current owner.
+        """
+        query_params = {}
+        if model_name is not None:
+            query_params["model_name"] = str(model_name)
+        if order_by is not None:
+            query_params["order_by"] = str(order_by)
+
+        return self.api_client.request(
+            type_=m.ListModelBundlesResponse,
+            method="GET",
+            url="/v1/model-bundles",
+            params=query_params,
+        )
+
+    def _build_for_list_model_endpoints_v1_model_endpoints_get(
+        self, name: str = None, order_by: m.ModelEndpointOrderBy = None
+    ) -> Awaitable[m.ListModelEndpointsResponse]:
+        """
+        Lists the Models owned by the current owner.
+        """
+        query_params = {}
+        if name is not None:
+            query_params["name"] = str(name)
+        if order_by is not None:
+            query_params["order_by"] = str(order_by)
+
+        return self.api_client.request(
+            type_=m.ListModelEndpointsResponse,
+            method="GET",
+            url="/v1/model-endpoints",
+            params=query_params,
+        )
+
+    def _build_for_update_batch_job_v1_batch_jobs_batch_job_id_put(
+        self, batch_job_id: str, update_batch_job_request: m.UpdateBatchJobRequest
+    ) -> Awaitable[m.UpdateBatchJobResponse]:
+        """
+        Updates a batch job.
+        """
+        path_params = {"batch_job_id": str(batch_job_id)}
+
+        body = jsonable_encoder(update_batch_job_request)
+
+        return self.api_client.request(
+            type_=m.UpdateBatchJobResponse,
+            method="PUT",
+            url="/v1/batch-jobs/{batch_job_id}",
+            path_params=path_params,
+            json=body,
+        )
+
+    def _build_for_update_model_endpoint_v1_model_endpoints_model_endpoint_id_put(
+        self, model_endpoint_id: str, update_model_endpoint_request: m.UpdateModelEndpointRequest
+    ) -> Awaitable[m.UpdateModelEndpointResponse]:
+        """
+        Updates the Model Endpoint owned by the current owner.
+        """
+        path_params = {"model_endpoint_id": str(model_endpoint_id)}
+
+        body = jsonable_encoder(update_model_endpoint_request)
+
+        return self.api_client.request(
+            type_=m.UpdateModelEndpointResponse,
+            method="PUT",
+            url="/v1/model-endpoints/{model_endpoint_id}",
+            path_params=path_params,
+            json=body,
+        )
+
+
+class AsyncDefaultApi(_DefaultApi):
+    async def clone_model_bundle_with_changes_v1_model_bundles_clone_with_changes_post(
+        self, clone_model_bundle_request: m.CloneModelBundleRequest
+    ) -> m.CreateModelBundleResponse:
+        """
+        Creates a ModelBundle by cloning an existing one and then applying changes on top.
+        """
+        return await self._build_for_clone_model_bundle_with_changes_v1_model_bundles_clone_with_changes_post(
+            clone_model_bundle_request=clone_model_bundle_request
+        )
+
+    async def create_async_inference_task_v1_async_tasks_post(
+        self, model_endpoint_id: str, endpoint_predict_request: m.EndpointPredictRequest
+    ) -> m.CreateAsyncTaskResponse:
+        """
+        Runs an async inference prediction.
+        """
+        return await self._build_for_create_async_inference_task_v1_async_tasks_post(
+            model_endpoint_id=model_endpoint_id, endpoint_predict_request=endpoint_predict_request
+        )
+
+    async def create_batch_job_v1_batch_jobs_post(
+        self, create_batch_job_request: m.CreateBatchJobRequest
+    ) -> m.CreateBatchJobResponse:
+        """
+        Creates a batch job for the current user.
+        """
+        return await self._build_for_create_batch_job_v1_batch_jobs_post(
+            create_batch_job_request=create_batch_job_request
+        )
+
+    async def create_model_bundle_v1_model_bundles_post(
+        self, create_model_bundle_request: m.CreateModelBundleRequest
+    ) -> m.CreateModelBundleResponse:
+        """
+        Creates a ModelBundle for the current user.
+        """
+        return await self._build_for_create_model_bundle_v1_model_bundles_post(
+            create_model_bundle_request=create_model_bundle_request
+        )
+
+    async def create_model_endpoint_v1_model_endpoints_post(
+        self, create_model_endpoint_request: m.CreateModelEndpointRequest
+    ) -> m.CreateModelEndpointResponse:
+        """
+        Creates a Model for the current user.
+        """
+        return await self._build_for_create_model_endpoint_v1_model_endpoints_post(
+            create_model_endpoint_request=create_model_endpoint_request
+        )
+
+    async def create_sync_inference_task_v1_sync_tasks_post(
+        self, model_endpoint_id: str, endpoint_predict_request: m.EndpointPredictRequest
+    ) -> m.SyncEndpointPredictResponse:
+        """
+        Runs a sync inference prediction.
+        """
+        return await self._build_for_create_sync_inference_task_v1_sync_tasks_post(
+            model_endpoint_id=model_endpoint_id, endpoint_predict_request=endpoint_predict_request
+        )
+
+    async def delete_model_endpoint_v1_model_endpoints_model_endpoint_id_delete(
+        self, model_endpoint_id: str
+    ) -> m.DeleteModelEndpointResponse:
+        """
+        Deletes the Model Endpoint owned by the current owner.
+        """
+        return await self._build_for_delete_model_endpoint_v1_model_endpoints_model_endpoint_id_delete(
+            model_endpoint_id=model_endpoint_id
+        )
+
+    async def get_async_inference_task_v1_async_tasks_task_id_get(self, task_id: str) -> m.GetAsyncTaskResponse:
+        """
+        Gets the status of an async inference task.
+        """
+        return await self._build_for_get_async_inference_task_v1_async_tasks_task_id_get(task_id=task_id)
+
+    async def get_batch_job_v1_batch_jobs_batch_job_id_get(self, batch_job_id: str) -> m.GetBatchJobResponse:
+        """
+        Gets the status of a batch job.
+        """
+        return await self._build_for_get_batch_job_v1_batch_jobs_batch_job_id_get(batch_job_id=batch_job_id)
+
+    async def get_latest_model_bundle_v1_model_bundles_latest_get(self, model_name: str) -> m.ModelBundleResponse:
+        """
+        Gets the latest Model Bundle with the given name owned by the current owner.
+        """
+        return await self._build_for_get_latest_model_bundle_v1_model_bundles_latest_get(model_name=model_name)
+
+    async def get_model_bundle_v1_model_bundles_model_bundle_id_get(
+        self, model_bundle_id: str
+    ) -> m.ModelBundleResponse:
+        """
+        Gets the details for a given ModelBundle owned by the current owner.
+        """
+        return await self._build_for_get_model_bundle_v1_model_bundles_model_bundle_id_get(
+            model_bundle_id=model_bundle_id
+        )
+
+    async def get_model_endpoint_v1_model_endpoints_model_endpoint_id_get(
+        self, model_endpoint_id: str
+    ) -> m.GetModelEndpointResponse:
+        """
+        Gets the details for a given Model Endpoint owned by the current owner.
+        """
+        return await self._build_for_get_model_endpoint_v1_model_endpoints_model_endpoint_id_get(
+            model_endpoint_id=model_endpoint_id
+        )
+
+    async def get_model_endpoints_api_v1_model_endpoints_api_get(
+        self,
+    ) -> m.Any:
+        """
+        Shows the API of the Model Endpoints owned by the current owner.
+        """
+        return await self._build_for_get_model_endpoints_api_v1_model_endpoints_api_get()
+
+    async def get_model_endpoints_schema_v1_model_endpoints_schema_json_get(
+        self,
+    ) -> m.Any:
+        """
+        Lists the schemas of the Model Endpoints owned by the current owner.
+        """
+        return await self._build_for_get_model_endpoints_schema_v1_model_endpoints_schema_json_get()
+
+    async def healthcheck_healthcheck_get(
+        self,
+    ) -> m.Any:
+        """
+        Returns 200 if the app is healthy.
+        """
+        return await self._build_for_healthcheck_healthcheck_get()
+
+    async def healthcheck_healthz_get(
+        self,
+    ) -> m.Any:
+        """
+        Returns 200 if the app is healthy.
+        """
+        return await self._build_for_healthcheck_healthz_get()
+
+    async def healthcheck_readyz_get(
+        self,
+    ) -> m.Any:
+        """
+        Returns 200 if the app is healthy.
+        """
+        return await self._build_for_healthcheck_readyz_get()
+
+    async def list_model_bundles_v1_model_bundles_get(
+        self, model_name: str = None, order_by: m.ModelBundleOrderBy = None
+    ) -> m.ListModelBundlesResponse:
+        """
+        Lists the ModelBundles owned by the current owner.
+        """
+        return await self._build_for_list_model_bundles_v1_model_bundles_get(model_name=model_name, order_by=order_by)
+
+    async def list_model_endpoints_v1_model_endpoints_get(
+        self, name: str = None, order_by: m.ModelEndpointOrderBy = None
+    ) -> m.ListModelEndpointsResponse:
+        """
+        Lists the Models owned by the current owner.
+        """
+        return await self._build_for_list_model_endpoints_v1_model_endpoints_get(name=name, order_by=order_by)
+
+    async def update_batch_job_v1_batch_jobs_batch_job_id_put(
+        self, batch_job_id: str, update_batch_job_request: m.UpdateBatchJobRequest
+    ) -> m.UpdateBatchJobResponse:
+        """
+        Updates a batch job.
+        """
+        return await self._build_for_update_batch_job_v1_batch_jobs_batch_job_id_put(
+            batch_job_id=batch_job_id, update_batch_job_request=update_batch_job_request
+        )
+
+    async def update_model_endpoint_v1_model_endpoints_model_endpoint_id_put(
+        self, model_endpoint_id: str, update_model_endpoint_request: m.UpdateModelEndpointRequest
+    ) -> m.UpdateModelEndpointResponse:
+        """
+        Updates the Model Endpoint owned by the current owner.
+        """
+        return await self._build_for_update_model_endpoint_v1_model_endpoints_model_endpoint_id_put(
+            model_endpoint_id=model_endpoint_id,
+            update_model_endpoint_request=update_model_endpoint_request,
+        )
+
+
+class SyncDefaultApi(_DefaultApi):
+    def clone_model_bundle_with_changes_v1_model_bundles_clone_with_changes_post(
+        self, clone_model_bundle_request: m.CloneModelBundleRequest
+    ) -> m.CreateModelBundleResponse:
+        """
+        Creates a ModelBundle by cloning an existing one and then applying changes on top.
+        """
+        coroutine = self._build_for_clone_model_bundle_with_changes_v1_model_bundles_clone_with_changes_post(
+            clone_model_bundle_request=clone_model_bundle_request
+        )
+        return get_event_loop().run_until_complete(coroutine)
+
+    def create_async_inference_task_v1_async_tasks_post(
+        self, model_endpoint_id: str, endpoint_predict_request: m.EndpointPredictRequest
+    ) -> m.CreateAsyncTaskResponse:
+        """
+        Runs an async inference prediction.
+        """
+        coroutine = self._build_for_create_async_inference_task_v1_async_tasks_post(
+            model_endpoint_id=model_endpoint_id, endpoint_predict_request=endpoint_predict_request
+        )
+        return get_event_loop().run_until_complete(coroutine)
+
+    def create_batch_job_v1_batch_jobs_post(
+        self, create_batch_job_request: m.CreateBatchJobRequest
+    ) -> m.CreateBatchJobResponse:
+        """
+        Creates a batch job for the current user.
+        """
+        coroutine = self._build_for_create_batch_job_v1_batch_jobs_post(
+            create_batch_job_request=create_batch_job_request
+        )
+        return get_event_loop().run_until_complete(coroutine)
+
+    def create_model_bundle_v1_model_bundles_post(
+        self, create_model_bundle_request: m.CreateModelBundleRequest
+    ) -> m.CreateModelBundleResponse:
+        """
+        Creates a ModelBundle for the current user.
+        """
+        coroutine = self._build_for_create_model_bundle_v1_model_bundles_post(
+            create_model_bundle_request=create_model_bundle_request
+        )
+        return get_event_loop().run_until_complete(coroutine)
+
+    def create_model_endpoint_v1_model_endpoints_post(
+        self, create_model_endpoint_request: m.CreateModelEndpointRequest
+    ) -> m.CreateModelEndpointResponse:
+        """
+        Creates a Model for the current user.
+        """
+        coroutine = self._build_for_create_model_endpoint_v1_model_endpoints_post(
+            create_model_endpoint_request=create_model_endpoint_request
+        )
+        return get_event_loop().run_until_complete(coroutine)
+
+    def create_sync_inference_task_v1_sync_tasks_post(
+        self, model_endpoint_id: str, endpoint_predict_request: m.EndpointPredictRequest
+    ) -> m.SyncEndpointPredictResponse:
+        """
+        Runs a sync inference prediction.
+        """
+        coroutine = self._build_for_create_sync_inference_task_v1_sync_tasks_post(
+            model_endpoint_id=model_endpoint_id, endpoint_predict_request=endpoint_predict_request
+        )
+        return get_event_loop().run_until_complete(coroutine)
+
+    def delete_model_endpoint_v1_model_endpoints_model_endpoint_id_delete(
+        self, model_endpoint_id: str
+    ) -> m.DeleteModelEndpointResponse:
+        """
+        Deletes the Model Endpoint owned by the current owner.
+        """
+        coroutine = self._build_for_delete_model_endpoint_v1_model_endpoints_model_endpoint_id_delete(
+            model_endpoint_id=model_endpoint_id
+        )
+        return get_event_loop().run_until_complete(coroutine)
+
+    def get_async_inference_task_v1_async_tasks_task_id_get(self, task_id: str) -> m.GetAsyncTaskResponse:
+        """
+        Gets the status of an async inference task.
+        """
+        coroutine = self._build_for_get_async_inference_task_v1_async_tasks_task_id_get(task_id=task_id)
+        return get_event_loop().run_until_complete(coroutine)
+
+    def get_batch_job_v1_batch_jobs_batch_job_id_get(self, batch_job_id: str) -> m.GetBatchJobResponse:
+        """
+        Gets the status of a batch job.
+        """
+        coroutine = self._build_for_get_batch_job_v1_batch_jobs_batch_job_id_get(batch_job_id=batch_job_id)
+        return get_event_loop().run_until_complete(coroutine)
+
+    def get_latest_model_bundle_v1_model_bundles_latest_get(self, model_name: str) -> m.ModelBundleResponse:
+        """
+        Gets the latest Model Bundle with the given name owned by the current owner.
+        """
+        coroutine = self._build_for_get_latest_model_bundle_v1_model_bundles_latest_get(model_name=model_name)
+        return get_event_loop().run_until_complete(coroutine)
+
+    def get_model_bundle_v1_model_bundles_model_bundle_id_get(self, model_bundle_id: str) -> m.ModelBundleResponse:
+        """
+        Gets the details for a given ModelBundle owned by the current owner.
+        """
+        coroutine = self._build_for_get_model_bundle_v1_model_bundles_model_bundle_id_get(
+            model_bundle_id=model_bundle_id
+        )
+        return get_event_loop().run_until_complete(coroutine)
+
+    def get_model_endpoint_v1_model_endpoints_model_endpoint_id_get(
+        self, model_endpoint_id: str
+    ) -> m.GetModelEndpointResponse:
+        """
+        Gets the details for a given Model Endpoint owned by the current owner.
+        """
+        coroutine = self._build_for_get_model_endpoint_v1_model_endpoints_model_endpoint_id_get(
+            model_endpoint_id=model_endpoint_id
+        )
+        return get_event_loop().run_until_complete(coroutine)
+
+    def get_model_endpoints_api_v1_model_endpoints_api_get(
+        self,
+    ) -> m.Any:
+        """
+        Shows the API of the Model Endpoints owned by the current owner.
+        """
+        coroutine = self._build_for_get_model_endpoints_api_v1_model_endpoints_api_get()
+        return get_event_loop().run_until_complete(coroutine)
+
+    def get_model_endpoints_schema_v1_model_endpoints_schema_json_get(
+        self,
+    ) -> m.Any:
+        """
+        Lists the schemas of the Model Endpoints owned by the current owner.
+        """
+        coroutine = self._build_for_get_model_endpoints_schema_v1_model_endpoints_schema_json_get()
+        return get_event_loop().run_until_complete(coroutine)
+
+    def healthcheck_healthcheck_get(
+        self,
+    ) -> m.Any:
+        """
+        Returns 200 if the app is healthy.
+        """
+        coroutine = self._build_for_healthcheck_healthcheck_get()
+        return get_event_loop().run_until_complete(coroutine)
+
+    def healthcheck_healthz_get(
+        self,
+    ) -> m.Any:
+        """
+        Returns 200 if the app is healthy.
+        """
+        coroutine = self._build_for_healthcheck_healthz_get()
+        return get_event_loop().run_until_complete(coroutine)
+
+    def healthcheck_readyz_get(
+        self,
+    ) -> m.Any:
+        """
+        Returns 200 if the app is healthy.
+        """
+        coroutine = self._build_for_healthcheck_readyz_get()
+        return get_event_loop().run_until_complete(coroutine)
+
+    def list_model_bundles_v1_model_bundles_get(
+        self, model_name: str = None, order_by: m.ModelBundleOrderBy = None
+    ) -> m.ListModelBundlesResponse:
+        """
+        Lists the ModelBundles owned by the current owner.
+        """
+        coroutine = self._build_for_list_model_bundles_v1_model_bundles_get(model_name=model_name, order_by=order_by)
+        return get_event_loop().run_until_complete(coroutine)
+
+    def list_model_endpoints_v1_model_endpoints_get(
+        self, name: str = None, order_by: m.ModelEndpointOrderBy = None
+    ) -> m.ListModelEndpointsResponse:
+        """
+        Lists the Models owned by the current owner.
+        """
+        coroutine = self._build_for_list_model_endpoints_v1_model_endpoints_get(name=name, order_by=order_by)
+        return get_event_loop().run_until_complete(coroutine)
+
+    def update_batch_job_v1_batch_jobs_batch_job_id_put(
+        self, batch_job_id: str, update_batch_job_request: m.UpdateBatchJobRequest
+    ) -> m.UpdateBatchJobResponse:
+        """
+        Updates a batch job.
+        """
+        coroutine = self._build_for_update_batch_job_v1_batch_jobs_batch_job_id_put(
+            batch_job_id=batch_job_id, update_batch_job_request=update_batch_job_request
+        )
+        return get_event_loop().run_until_complete(coroutine)
+
+    def update_model_endpoint_v1_model_endpoints_model_endpoint_id_put(
+        self, model_endpoint_id: str, update_model_endpoint_request: m.UpdateModelEndpointRequest
+    ) -> m.UpdateModelEndpointResponse:
+        """
+        Updates the Model Endpoint owned by the current owner.
+        """
+        coroutine = self._build_for_update_model_endpoint_v1_model_endpoints_model_endpoint_id_put(
+            model_endpoint_id=model_endpoint_id,
+            update_model_endpoint_request=update_model_endpoint_request,
+        )
+        return get_event_loop().run_until_complete(coroutine)
diff --git a/launch/api_client/api_client.py b/launch/api_client/api_client.py
index 61d57dbd..181745a4 100644
--- a/launch/api_client/api_client.py
+++ b/launch/api_client/api_client.py
@@ -1,1543 +1,133 @@
-# coding: utf-8
-"""
-    launch
-
-    No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)  # noqa: E501
-
-    The version of the OpenAPI document: 1.0.0
-    Generated by: https://openapi-generator.tech
-"""
-
-import atexit
-import email
-import enum
-import io
-import json
-import os
-import re
-import tempfile
-import typing
-from dataclasses import dataclass
-from decimal import Decimal
-from multiprocessing.pool import ThreadPool
-from urllib.parse import quote, urlparse
-
-import frozendict
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-from urllib3.fields import RequestField as RequestFieldBase
-
-from launch.api_client import rest
-from launch.api_client.configuration import Configuration
-from launch.api_client.exceptions import ApiTypeError, ApiValueError
-from launch.api_client.schemas import (
-    BinarySchema,
-    BoolClass,
-    FileIO,
-    NoneClass,
-    Schema,
-    Unset,
-    date,
-    datetime,
-    none_type,
-    unset,
+from asyncio import get_event_loop
+from typing import (
+    Any,
+    Awaitable,
+    Callable,
+    Dict,
+    Generic,
+    Type,
+    TypeVar,
+    overload,
 )
 
+from httpx import AsyncClient, Request, Response
+from pydantic import ValidationError, parse_obj_as
 
-class RequestField(RequestFieldBase):
-    def __eq__(self, other):
-        if not isinstance(other, RequestField):
-            return False
-        return self.__dict__ == other.__dict__
-
-
-class JSONEncoder(json.JSONEncoder):
-    compact_separators = (",", ":")
-
-    def default(self, obj):
-        if isinstance(obj, str):
-            return str(obj)
-        elif isinstance(obj, float):
-            return float(obj)
-        elif isinstance(obj, int):
-            return int(obj)
-        elif isinstance(obj, Decimal):
-            if obj.as_tuple().exponent >= 0:
-                return int(obj)
-            return float(obj)
-        elif isinstance(obj, NoneClass):
-            return None
-        elif isinstance(obj, BoolClass):
-            return bool(obj)
-        elif isinstance(obj, (dict, frozendict.frozendict)):
-            return {key: self.default(val) for key, val in obj.items()}
-        elif isinstance(obj, (list, tuple)):
-            return [self.default(item) for item in obj]
-        raise ApiValueError("Unable to prepare type {} for serialization".format(obj.__class__.__name__))
-
-
-class ParameterInType(enum.Enum):
-    QUERY = "query"
-    HEADER = "header"
-    PATH = "path"
-    COOKIE = "cookie"
-
-
-class ParameterStyle(enum.Enum):
-    MATRIX = "matrix"
-    LABEL = "label"
-    FORM = "form"
-    SIMPLE = "simple"
-    SPACE_DELIMITED = "spaceDelimited"
-    PIPE_DELIMITED = "pipeDelimited"
-    DEEP_OBJECT = "deepObject"
-
-
-class PrefixSeparatorIterator:
-    # A class to store prefixes and separators for rfc6570 expansions
-
-    def __init__(self, prefix: str, separator: str):
-        self.prefix = prefix
-        self.separator = separator
-        self.first = True
-        if separator in {".", "|", "%20"}:
-            item_separator = separator
-        else:
-            item_separator = ","
-        self.item_separator = item_separator
-
-    def __iter__(self):
-        return self
-
-    def __next__(self):
-        if self.first:
-            self.first = False
-            return self.prefix
-        return self.separator
-
-
-class ParameterSerializerBase:
-    @classmethod
-    def _get_default_explode(cls, style: ParameterStyle) -> bool:
-        return False
-
-    @staticmethod
-    def __ref6570_item_value(in_data: typing.Any, percent_encode: bool):
-        """
-        Get representation if str/float/int/None/items in list/ values in dict
-        None is returned if an item is undefined, use cases are value=
-        - None
-        - []
-        - {}
-        - [None, None None]
-        - {'a': None, 'b': None}
-        """
-        if type(in_data) in {str, float, int}:
-            if percent_encode:
-                return quote(str(in_data))
-            return str(in_data)
-        elif isinstance(in_data, none_type):
-            # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
-            return None
-        elif isinstance(in_data, list) and not in_data:
-            # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
-            return None
-        elif isinstance(in_data, dict) and not in_data:
-            # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
-            return None
-        raise ApiValueError("Unable to generate a ref6570 item representation of {}".format(in_data))
-
-    @staticmethod
-    def _to_dict(name: str, value: str):
-        return {name: value}
-
-    @classmethod
-    def __ref6570_str_float_int_expansion(
-        cls,
-        variable_name: str,
-        in_data: typing.Any,
-        explode: bool,
-        percent_encode: bool,
-        prefix_separator_iterator: PrefixSeparatorIterator,
-        var_name_piece: str,
-        named_parameter_expansion: bool,
-    ) -> str:
-        item_value = cls.__ref6570_item_value(in_data, percent_encode)
-        if item_value is None or (item_value == "" and prefix_separator_iterator.separator == ";"):
-            return next(prefix_separator_iterator) + var_name_piece
-        value_pair_equals = "=" if named_parameter_expansion else ""
-        return next(prefix_separator_iterator) + var_name_piece + value_pair_equals + item_value
-
-    @classmethod
-    def __ref6570_list_expansion(
-        cls,
-        variable_name: str,
-        in_data: typing.Any,
-        explode: bool,
-        percent_encode: bool,
-        prefix_separator_iterator: PrefixSeparatorIterator,
-        var_name_piece: str,
-        named_parameter_expansion: bool,
-    ) -> str:
-        item_values = [cls.__ref6570_item_value(v, percent_encode) for v in in_data]
-        item_values = [v for v in item_values if v is not None]
-        if not item_values:
-            # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
-            return ""
-        value_pair_equals = "=" if named_parameter_expansion else ""
-        if not explode:
-            return (
-                next(prefix_separator_iterator)
-                + var_name_piece
-                + value_pair_equals
-                + prefix_separator_iterator.item_separator.join(item_values)
-            )
-        # exploded
-        return next(prefix_separator_iterator) + next(prefix_separator_iterator).join(
-            [var_name_piece + value_pair_equals + val for val in item_values]
-        )
-
-    @classmethod
-    def __ref6570_dict_expansion(
-        cls,
-        variable_name: str,
-        in_data: typing.Any,
-        explode: bool,
-        percent_encode: bool,
-        prefix_separator_iterator: PrefixSeparatorIterator,
-        var_name_piece: str,
-        named_parameter_expansion: bool,
-    ) -> str:
-        in_data_transformed = {key: cls.__ref6570_item_value(val, percent_encode) for key, val in in_data.items()}
-        in_data_transformed = {key: val for key, val in in_data_transformed.items() if val is not None}
-        if not in_data_transformed:
-            # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
-            return ""
-        value_pair_equals = "=" if named_parameter_expansion else ""
-        if not explode:
-            return (
-                next(prefix_separator_iterator)
-                + var_name_piece
-                + value_pair_equals
-                + prefix_separator_iterator.item_separator.join(
-                    prefix_separator_iterator.item_separator.join(item_pair)
-                    for item_pair in in_data_transformed.items()
-                )
-            )
-        # exploded
-        return next(prefix_separator_iterator) + next(prefix_separator_iterator).join(
-            [key + "=" + val for key, val in in_data_transformed.items()]
-        )
-
-    @classmethod
-    def _ref6570_expansion(
-        cls,
-        variable_name: str,
-        in_data: typing.Any,
-        explode: bool,
-        percent_encode: bool,
-        prefix_separator_iterator: PrefixSeparatorIterator,
-    ) -> str:
-        """
-        Separator is for separate variables like dict with explode true, not for array item separation
-        """
-        named_parameter_expansion = prefix_separator_iterator.separator in {
-            "&",
-            ";",
-        }
-        var_name_piece = variable_name if named_parameter_expansion else ""
-        if type(in_data) in {str, float, int}:
-            return cls.__ref6570_str_float_int_expansion(
-                variable_name,
-                in_data,
-                explode,
-                percent_encode,
-                prefix_separator_iterator,
-                var_name_piece,
-                named_parameter_expansion,
-            )
-        elif isinstance(in_data, none_type):
-            # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
-            return ""
-        elif isinstance(in_data, list):
-            return cls.__ref6570_list_expansion(
-                variable_name,
-                in_data,
-                explode,
-                percent_encode,
-                prefix_separator_iterator,
-                var_name_piece,
-                named_parameter_expansion,
-            )
-        elif isinstance(in_data, dict):
-            return cls.__ref6570_dict_expansion(
-                variable_name,
-                in_data,
-                explode,
-                percent_encode,
-                prefix_separator_iterator,
-                var_name_piece,
-                named_parameter_expansion,
-            )
-        # bool, bytes, etc
-        raise ApiValueError("Unable to generate a ref6570 representation of {}".format(in_data))
-
-
-class StyleFormSerializer(ParameterSerializerBase):
-    @classmethod
-    def _get_default_explode(cls, style: ParameterStyle) -> bool:
-        if style is ParameterStyle.FORM:
-            return True
-        return super()._get_default_explode(style)
-
-    def _serialize_form(
-        self,
-        in_data: typing.Union[None, int, float, str, bool, dict, list],
-        name: str,
-        explode: bool,
-        percent_encode: bool,
-        prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator] = None,
-    ) -> str:
-        if prefix_separator_iterator is None:
-            prefix_separator_iterator = PrefixSeparatorIterator("", "&")
-        return self._ref6570_expansion(
-            variable_name=name,
-            in_data=in_data,
-            explode=explode,
-            percent_encode=percent_encode,
-            prefix_separator_iterator=prefix_separator_iterator,
-        )
-
-
-class StyleSimpleSerializer(ParameterSerializerBase):
-    def _serialize_simple(
-        self,
-        in_data: typing.Union[None, int, float, str, bool, dict, list],
-        name: str,
-        explode: bool,
-        percent_encode: bool,
-    ) -> str:
-        prefix_separator_iterator = PrefixSeparatorIterator("", ",")
-        return self._ref6570_expansion(
-            variable_name=name,
-            in_data=in_data,
-            explode=explode,
-            percent_encode=percent_encode,
-            prefix_separator_iterator=prefix_separator_iterator,
-        )
-
-
-class JSONDetector:
-    """
-    Works for:
-    application/json
-    application/json; charset=UTF-8
-    application/json-patch+json
-    application/geo+json
-    """
-
-    __json_content_type_pattern = re.compile("application/[^+]*[+]?(json);?.*")
-
-    @classmethod
-    def _content_type_is_json(cls, content_type: str) -> bool:
-        if cls.__json_content_type_pattern.match(content_type):
-            return True
-        return False
-
-
-@dataclass
-class ParameterBase(JSONDetector):
-    name: str
-    in_type: ParameterInType
-    required: bool
-    style: typing.Optional[ParameterStyle]
-    explode: typing.Optional[bool]
-    allow_reserved: typing.Optional[bool]
-    schema: typing.Optional[typing.Type[Schema]]
-    content: typing.Optional[typing.Dict[str, typing.Type[Schema]]]
-
-    __style_to_in_type = {
-        ParameterStyle.MATRIX: {ParameterInType.PATH},
-        ParameterStyle.LABEL: {ParameterInType.PATH},
-        ParameterStyle.FORM: {ParameterInType.QUERY, ParameterInType.COOKIE},
-        ParameterStyle.SIMPLE: {ParameterInType.PATH, ParameterInType.HEADER},
-        ParameterStyle.SPACE_DELIMITED: {ParameterInType.QUERY},
-        ParameterStyle.PIPE_DELIMITED: {ParameterInType.QUERY},
-        ParameterStyle.DEEP_OBJECT: {ParameterInType.QUERY},
-    }
-    __in_type_to_default_style = {
-        ParameterInType.QUERY: ParameterStyle.FORM,
-        ParameterInType.PATH: ParameterStyle.SIMPLE,
-        ParameterInType.HEADER: ParameterStyle.SIMPLE,
-        ParameterInType.COOKIE: ParameterStyle.FORM,
-    }
-    __disallowed_header_names = {"Accept", "Content-Type", "Authorization"}
-    _json_encoder = JSONEncoder()
-
-    @classmethod
-    def __verify_style_to_in_type(cls, style: typing.Optional[ParameterStyle], in_type: ParameterInType):
-        if style is None:
-            return
-        in_type_set = cls.__style_to_in_type[style]
-        if in_type not in in_type_set:
-            raise ValueError(
-                "Invalid style and in_type combination. For style={} only in_type={} are allowed".format(
-                    style, in_type_set
-                )
-            )
-
-    def __init__(
-        self,
-        name: str,
-        in_type: ParameterInType,
-        required: bool = False,
-        style: typing.Optional[ParameterStyle] = None,
-        explode: bool = False,
-        allow_reserved: typing.Optional[bool] = None,
-        schema: typing.Optional[typing.Type[Schema]] = None,
-        content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None,
-    ):
-        if schema is None and content is None:
-            raise ValueError("Value missing; Pass in either schema or content")
-        if schema and content:
-            raise ValueError("Too many values provided. Both schema and content were provided. Only one may be input")
-        if name in self.__disallowed_header_names and in_type is ParameterInType.HEADER:
-            raise ValueError("Invalid name, name may not be one of {}".format(self.__disallowed_header_names))
-        self.__verify_style_to_in_type(style, in_type)
-        if content is None and style is None:
-            style = self.__in_type_to_default_style[in_type]
-        if content is not None and in_type in self.__in_type_to_default_style and len(content) != 1:
-            raise ValueError("Invalid content length, content length must equal 1")
-        self.in_type = in_type
-        self.name = name
-        self.required = required
-        self.style = style
-        self.explode = explode
-        self.allow_reserved = allow_reserved
-        self.schema = schema
-        self.content = content
-
-    def _serialize_json(
-        self,
-        in_data: typing.Union[None, int, float, str, bool, dict, list],
-        eliminate_whitespace: bool = False,
-    ) -> str:
-        if eliminate_whitespace:
-            return json.dumps(in_data, separators=self._json_encoder.compact_separators)
-        return json.dumps(in_data)
-
-
-class PathParameter(ParameterBase, StyleSimpleSerializer):
-    def __init__(
-        self,
-        name: str,
-        required: bool = False,
-        style: typing.Optional[ParameterStyle] = None,
-        explode: bool = False,
-        allow_reserved: typing.Optional[bool] = None,
-        schema: typing.Optional[typing.Type[Schema]] = None,
-        content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None,
-    ):
-        super().__init__(
-            name,
-            in_type=ParameterInType.PATH,
-            required=required,
-            style=style,
-            explode=explode,
-            allow_reserved=allow_reserved,
-            schema=schema,
-            content=content,
-        )
-
-    def __serialize_label(
-        self, in_data: typing.Union[None, int, float, str, bool, dict, list]
-    ) -> typing.Dict[str, str]:
-        prefix_separator_iterator = PrefixSeparatorIterator(".", ".")
-        value = self._ref6570_expansion(
-            variable_name=self.name,
-            in_data=in_data,
-            explode=self.explode,
-            percent_encode=True,
-            prefix_separator_iterator=prefix_separator_iterator,
-        )
-        return self._to_dict(self.name, value)
-
-    def __serialize_matrix(
-        self, in_data: typing.Union[None, int, float, str, bool, dict, list]
-    ) -> typing.Dict[str, str]:
-        prefix_separator_iterator = PrefixSeparatorIterator(";", ";")
-        value = self._ref6570_expansion(
-            variable_name=self.name,
-            in_data=in_data,
-            explode=self.explode,
-            percent_encode=True,
-            prefix_separator_iterator=prefix_separator_iterator,
-        )
-        return self._to_dict(self.name, value)
-
-    def __serialize_simple(
-        self,
-        in_data: typing.Union[None, int, float, str, bool, dict, list],
-    ) -> typing.Dict[str, str]:
-        value = self._serialize_simple(
-            in_data=in_data,
-            name=self.name,
-            explode=self.explode,
-            percent_encode=True,
-        )
-        return self._to_dict(self.name, value)
-
-    def serialize(
-        self,
-        in_data: typing.Union[
-            Schema,
-            Decimal,
-            int,
-            float,
-            str,
-            date,
-            datetime,
-            None,
-            bool,
-            list,
-            tuple,
-            dict,
-            frozendict.frozendict,
-        ],
-    ) -> typing.Dict[str, str]:
-        if self.schema:
-            cast_in_data = self.schema(in_data)
-            cast_in_data = self._json_encoder.default(cast_in_data)
-            """
-            simple -> path
-                path:
-                    returns path_params: dict
-            label -> path
-                returns path_params
-            matrix -> path
-                returns path_params
-            """
-            if self.style:
-                if self.style is ParameterStyle.SIMPLE:
-                    return self.__serialize_simple(cast_in_data)
-                elif self.style is ParameterStyle.LABEL:
-                    return self.__serialize_label(cast_in_data)
-                elif self.style is ParameterStyle.MATRIX:
-                    return self.__serialize_matrix(cast_in_data)
-        # self.content will be length one
-        for content_type, schema in self.content.items():
-            cast_in_data = schema(in_data)
-            cast_in_data = self._json_encoder.default(cast_in_data)
-            if self._content_type_is_json(content_type):
-                value = self._serialize_json(cast_in_data)
-                return self._to_dict(self.name, value)
-            raise NotImplementedError("Serialization of {} has not yet been implemented".format(content_type))
-
-
-class QueryParameter(ParameterBase, StyleFormSerializer):
-    def __init__(
-        self,
-        name: str,
-        required: bool = False,
-        style: typing.Optional[ParameterStyle] = None,
-        explode: typing.Optional[bool] = None,
-        allow_reserved: typing.Optional[bool] = None,
-        schema: typing.Optional[typing.Type[Schema]] = None,
-        content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None,
-    ):
-        used_style = ParameterStyle.FORM if style is None else style
-        used_explode = self._get_default_explode(used_style) if explode is None else explode
-
-        super().__init__(
-            name,
-            in_type=ParameterInType.QUERY,
-            required=required,
-            style=used_style,
-            explode=used_explode,
-            allow_reserved=allow_reserved,
-            schema=schema,
-            content=content,
-        )
-
-    def __serialize_space_delimited(
-        self,
-        in_data: typing.Union[None, int, float, str, bool, dict, list],
-        prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator],
-    ) -> typing.Dict[str, str]:
-        if prefix_separator_iterator is None:
-            prefix_separator_iterator = self.get_prefix_separator_iterator()
-        value = self._ref6570_expansion(
-            variable_name=self.name,
-            in_data=in_data,
-            explode=self.explode,
-            percent_encode=True,
-            prefix_separator_iterator=prefix_separator_iterator,
-        )
-        return self._to_dict(self.name, value)
-
-    def __serialize_pipe_delimited(
-        self,
-        in_data: typing.Union[None, int, float, str, bool, dict, list],
-        prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator],
-    ) -> typing.Dict[str, str]:
-        if prefix_separator_iterator is None:
-            prefix_separator_iterator = self.get_prefix_separator_iterator()
-        value = self._ref6570_expansion(
-            variable_name=self.name,
-            in_data=in_data,
-            explode=self.explode,
-            percent_encode=True,
-            prefix_separator_iterator=prefix_separator_iterator,
-        )
-        return self._to_dict(self.name, value)
-
-    def __serialize_form(
-        self,
-        in_data: typing.Union[None, int, float, str, bool, dict, list],
-        prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator],
-    ) -> typing.Dict[str, str]:
-        if prefix_separator_iterator is None:
-            prefix_separator_iterator = self.get_prefix_separator_iterator()
-        value = self._serialize_form(
-            in_data,
-            name=self.name,
-            explode=self.explode,
-            percent_encode=True,
-            prefix_separator_iterator=prefix_separator_iterator,
-        )
-        return self._to_dict(self.name, value)
-
-    def get_prefix_separator_iterator(
-        self,
-    ) -> typing.Optional[PrefixSeparatorIterator]:
-        if self.style is ParameterStyle.FORM:
-            return PrefixSeparatorIterator("?", "&")
-        elif self.style is ParameterStyle.SPACE_DELIMITED:
-            return PrefixSeparatorIterator("", "%20")
-        elif self.style is ParameterStyle.PIPE_DELIMITED:
-            return PrefixSeparatorIterator("", "|")
-
-    def serialize(
-        self,
-        in_data: typing.Union[
-            Schema,
-            Decimal,
-            int,
-            float,
-            str,
-            date,
-            datetime,
-            None,
-            bool,
-            list,
-            tuple,
-            dict,
-            frozendict.frozendict,
-        ],
-        prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator] = None,
-    ) -> typing.Dict[str, str]:
-        if self.schema:
-            cast_in_data = self.schema(in_data)
-            cast_in_data = self._json_encoder.default(cast_in_data)
-            """
-            form -> query
-                query:
-                    - GET/HEAD/DELETE: could use fields
-                    - PUT/POST: must use urlencode to send parameters
-                    returns fields: tuple
-            spaceDelimited -> query
-                returns fields
-            pipeDelimited -> query
-                returns fields
-            deepObject -> query, https://github.com/OAI/OpenAPI-Specification/issues/1706
-                returns fields
-            """
-            if self.style:
-                # TODO update query ones to omit setting values when [] {} or None is input
-                if self.style is ParameterStyle.FORM:
-                    return self.__serialize_form(cast_in_data, prefix_separator_iterator)
-                elif self.style is ParameterStyle.SPACE_DELIMITED:
-                    return self.__serialize_space_delimited(cast_in_data, prefix_separator_iterator)
-                elif self.style is ParameterStyle.PIPE_DELIMITED:
-                    return self.__serialize_pipe_delimited(cast_in_data, prefix_separator_iterator)
-        # self.content will be length one
-        if prefix_separator_iterator is None:
-            prefix_separator_iterator = self.get_prefix_separator_iterator()
-        for content_type, schema in self.content.items():
-            cast_in_data = schema(in_data)
-            cast_in_data = self._json_encoder.default(cast_in_data)
-            if self._content_type_is_json(content_type):
-                value = self._serialize_json(cast_in_data, eliminate_whitespace=True)
-                return self._to_dict(
-                    self.name,
-                    next(prefix_separator_iterator) + self.name + "=" + quote(value),
-                )
-            raise NotImplementedError("Serialization of {} has not yet been implemented".format(content_type))
-
-
-class CookieParameter(ParameterBase, StyleFormSerializer):
-    def __init__(
-        self,
-        name: str,
-        required: bool = False,
-        style: typing.Optional[ParameterStyle] = None,
-        explode: typing.Optional[bool] = None,
-        allow_reserved: typing.Optional[bool] = None,
-        schema: typing.Optional[typing.Type[Schema]] = None,
-        content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None,
-    ):
-        used_style = ParameterStyle.FORM if style is None and content is None and schema else style
-        used_explode = self._get_default_explode(used_style) if explode is None else explode
-
-        super().__init__(
-            name,
-            in_type=ParameterInType.COOKIE,
-            required=required,
-            style=used_style,
-            explode=used_explode,
-            allow_reserved=allow_reserved,
-            schema=schema,
-            content=content,
-        )
-
-    def serialize(
-        self,
-        in_data: typing.Union[
-            Schema,
-            Decimal,
-            int,
-            float,
-            str,
-            date,
-            datetime,
-            None,
-            bool,
-            list,
-            tuple,
-            dict,
-            frozendict.frozendict,
-        ],
-    ) -> typing.Dict[str, str]:
-        if self.schema:
-            cast_in_data = self.schema(in_data)
-            cast_in_data = self._json_encoder.default(cast_in_data)
-            """
-            form -> cookie
-                returns fields: tuple
-            """
-            if self.style:
-                """
-                TODO add escaping of comma, space, equals
-                or turn encoding on
-                """
-                value = self._serialize_form(
-                    cast_in_data,
-                    explode=self.explode,
-                    name=self.name,
-                    percent_encode=False,
-                    prefix_separator_iterator=PrefixSeparatorIterator("", "&"),
-                )
-                return self._to_dict(self.name, value)
-        # self.content will be length one
-        for content_type, schema in self.content.items():
-            cast_in_data = schema(in_data)
-            cast_in_data = self._json_encoder.default(cast_in_data)
-            if self._content_type_is_json(content_type):
-                value = self._serialize_json(cast_in_data)
-                return self._to_dict(self.name, value)
-            raise NotImplementedError("Serialization of {} has not yet been implemented".format(content_type))
-
-
-class HeaderParameter(ParameterBase, StyleSimpleSerializer):
-    def __init__(
-        self,
-        name: str,
-        required: bool = False,
-        style: typing.Optional[ParameterStyle] = None,
-        explode: bool = False,
-        allow_reserved: typing.Optional[bool] = None,
-        schema: typing.Optional[typing.Type[Schema]] = None,
-        content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None,
-    ):
-        super().__init__(
-            name,
-            in_type=ParameterInType.HEADER,
-            required=required,
-            style=style,
-            explode=explode,
-            allow_reserved=allow_reserved,
-            schema=schema,
-            content=content,
-        )
-
-    @staticmethod
-    def __to_headers(in_data: typing.Tuple[typing.Tuple[str, str], ...]) -> HTTPHeaderDict:
-        data = tuple(t for t in in_data if t)
-        headers = HTTPHeaderDict()
-        if not data:
-            return headers
-        headers.extend(data)
-        return headers
-
-    def serialize(
-        self,
-        in_data: typing.Union[
-            Schema,
-            Decimal,
-            int,
-            float,
-            str,
-            date,
-            datetime,
-            None,
-            bool,
-            list,
-            tuple,
-            dict,
-            frozendict.frozendict,
-        ],
-    ) -> HTTPHeaderDict:
-        if self.schema:
-            cast_in_data = self.schema(in_data)
-            cast_in_data = self._json_encoder.default(cast_in_data)
-            """
-            simple -> header
-                headers: PoolManager needs a mapping, tuple is close
-                    returns headers: dict
-            """
-            if self.style:
-                value = self._serialize_simple(cast_in_data, self.name, self.explode, False)
-                return self.__to_headers(((self.name, value),))
-        # self.content will be length one
-        for content_type, schema in self.content.items():
-            cast_in_data = schema(in_data)
-            cast_in_data = self._json_encoder.default(cast_in_data)
-            if self._content_type_is_json(content_type):
-                value = self._serialize_json(cast_in_data)
-                return self.__to_headers(((self.name, value),))
-            raise NotImplementedError("Serialization of {} has not yet been implemented".format(content_type))
-
-
-class Encoding:
-    def __init__(
-        self,
-        content_type: str,
-        headers: typing.Optional[typing.Dict[str, HeaderParameter]] = None,
-        style: typing.Optional[ParameterStyle] = None,
-        explode: bool = False,
-        allow_reserved: bool = False,
-    ):
-        self.content_type = content_type
-        self.headers = headers
-        self.style = style
-        self.explode = explode
-        self.allow_reserved = allow_reserved
-
-
-@dataclass
-class MediaType:
-    """
-    Used to store request and response body schema information
-    encoding:
-        A map between a property name and its encoding information.
-        The key, being the property name, MUST exist in the schema as a property.
-        The encoding object SHALL only apply to requestBody objects when the media type is
-        multipart or application/x-www-form-urlencoded.
-    """
-
-    schema: typing.Optional[typing.Type[Schema]] = None
-    encoding: typing.Optional[typing.Dict[str, Encoding]] = None
-
-
-@dataclass
-class ApiResponse:
-    response: urllib3.HTTPResponse
-    body: typing.Union[Unset, Schema]
-    headers: typing.Union[Unset, typing.List[HeaderParameter]]
-
-    def __init__(
-        self,
-        response: urllib3.HTTPResponse,
-        body: typing.Union[Unset, typing.Type[Schema]],
-        headers: typing.Union[Unset, typing.List[HeaderParameter]],
-    ):
-        """
-        pycharm needs this to prevent 'Unexpected argument' warnings
-        """
-        self.response = response
-        self.body = body
-        self.headers = headers
-
-
-@dataclass
-class ApiResponseWithoutDeserialization(ApiResponse):
-    response: urllib3.HTTPResponse
-    body: typing.Union[Unset, typing.Type[Schema]] = unset
-    headers: typing.Union[Unset, typing.List[HeaderParameter]] = unset
-
-
-class OpenApiResponse(JSONDetector):
-    __filename_content_disposition_pattern = re.compile('filename="(.+?)"')
-
-    def __init__(
-        self,
-        response_cls: typing.Type[ApiResponse] = ApiResponse,
-        content: typing.Optional[typing.Dict[str, MediaType]] = None,
-        headers: typing.Optional[typing.List[HeaderParameter]] = None,
-    ):
-        self.headers = headers
-        if content is not None and len(content) == 0:
-            raise ValueError("Invalid value for content, the content dict must have >= 1 entry")
-        self.content = content
-        self.response_cls = response_cls
-
-    @staticmethod
-    def __deserialize_json(response: urllib3.HTTPResponse) -> typing.Any:
-        # python must be >= 3.9 so we can pass in bytes into json.loads
-        return json.loads(response.data)
-
-    @staticmethod
-    def __file_name_from_response_url(
-        response_url: typing.Optional[str],
-    ) -> typing.Optional[str]:
-        if response_url is None:
-            return None
-        url_path = urlparse(response_url).path
-        if url_path:
-            path_basename = os.path.basename(url_path)
-            if path_basename:
-                _filename, ext = os.path.splitext(path_basename)
-                if ext:
-                    return path_basename
-        return None
-
-    @classmethod
-    def __file_name_from_content_disposition(cls, content_disposition: typing.Optional[str]) -> typing.Optional[str]:
-        if content_disposition is None:
-            return None
-        match = cls.__filename_content_disposition_pattern.search(content_disposition)
-        if not match:
-            return None
-        return match.group(1)
+from launch.api_client.api.default_api import AsyncDefaultApi, SyncDefaultApi
+from launch.api_client.exceptions import (
+    ResponseHandlingException,
+    UnexpectedResponse,
+)
 
-    def __deserialize_application_octet_stream(
-        self, response: urllib3.HTTPResponse
-    ) -> typing.Union[bytes, io.BufferedReader]:
-        """
-        urllib3 use cases:
-        1. when preload_content=True (stream=False) then supports_chunked_reads is False and bytes are returned
-        2. when preload_content=False (stream=True) then supports_chunked_reads is True and
-            a file will be written and returned
-        """
-        if response.supports_chunked_reads():
-            file_name = self.__file_name_from_content_disposition(
-                response.headers.get("content-disposition")
-            ) or self.__file_name_from_response_url(response.geturl())
+ClientT = TypeVar("ClientT", bound="ApiClient")
 
-            if file_name is None:
-                _fd, path = tempfile.mkstemp()
-            else:
-                path = os.path.join(tempfile.gettempdir(), file_name)
 
-            with open(path, "wb") as new_file:
-                chunk_size = 1024
-                while True:
-                    data = response.read(chunk_size)
-                    if not data:
-                        break
-                    new_file.write(data)
-            # release_conn is needed for streaming connections only
-            response.release_conn()
-            new_file = open(path, "rb")
-            return new_file
-        else:
-            return response.data
+class AsyncApis(Generic[ClientT]):
+    def __init__(self, client: ClientT):
+        self.client = client
 
-    @staticmethod
-    def __deserialize_multipart_form_data(
-        response: urllib3.HTTPResponse,
-    ) -> typing.Dict[str, typing.Any]:
-        msg = email.message_from_bytes(response.data)
-        return {
-            part.get_param("name", header="Content-Disposition"): part.get_payload(decode=True).decode(
-                part.get_content_charset()
-            )
-            if part.get_content_charset()
-            else part.get_payload()
-            for part in msg.get_payload()
-        }
+        self.default_api = AsyncDefaultApi(self.client)
 
-    def deserialize(self, response: urllib3.HTTPResponse, configuration: Configuration) -> ApiResponse:
-        content_type = response.getheader("content-type")
-        deserialized_body = unset
-        streamed = response.supports_chunked_reads()
 
-        deserialized_headers = unset
-        if self.headers is not None:
-            # TODO add header deserialiation here
-            pass
+class SyncApis(Generic[ClientT]):
+    def __init__(self, client: ClientT):
+        self.client = client
 
-        if self.content is not None:
-            if content_type not in self.content:
-                raise ApiValueError(
-                    f"Invalid content_type returned. Content_type='{content_type}' was returned "
-                    f"when only {str(set(self.content))} are defined for status_code={str(response.status)}"
-                )
-            body_schema = self.content[content_type].schema
-            if body_schema is None:
-                # some specs do not define response content media type schemas
-                return self.response_cls(response=response, headers=deserialized_headers, body=unset)
+        self.default_api = SyncDefaultApi(self.client)
 
-            if self._content_type_is_json(content_type):
-                body_data = self.__deserialize_json(response)
-            elif content_type == "application/octet-stream":
-                body_data = self.__deserialize_application_octet_stream(response)
-            elif content_type.startswith("multipart/form-data"):
-                body_data = self.__deserialize_multipart_form_data(response)
-                content_type = "multipart/form-data"
-            else:
-                raise NotImplementedError("Deserialization of {} has not yet been implemented".format(content_type))
-            deserialized_body = body_schema.from_openapi_data_oapg(body_data, _configuration=configuration)
-        elif streamed:
-            response.release_conn()
 
-        return self.response_cls(
-            response=response,
-            headers=deserialized_headers,
-            body=deserialized_body,
-        )
+T = TypeVar("T")
+Send = Callable[[Request], Awaitable[Response]]
+MiddlewareT = Callable[[Request, Send], Awaitable[Response]]
 
 
 class ApiClient:
-    """Generic API client for OpenAPI client library builds.
-
-    OpenAPI generic API client. This client handles the client-
-    server communication, and is invariant across implementations. Specifics of
-    the methods and models for each application are generated from the OpenAPI
-    templates.
-
-    NOTE: This class is auto generated by OpenAPI Generator.
-    Ref: https://openapi-generator.tech
-    Do not edit the class manually.
-
-    :param configuration: .Configuration object for this client
-    :param header_name: a header to pass when making calls to the API.
-    :param header_value: a header value to pass when making calls to
-        the API.
-    :param cookie: a cookie to include in the header when making calls
-        to the API
-    :param pool_threads: The number of threads to use for async requests
-        to the API. More threads means more concurrent API requests.
-    """
-
-    _pool = None
+    def __init__(self, host: str = None, **kwargs: Any) -> None:
+        self.host = host
+        self.middleware: MiddlewareT = BaseMiddleware()
+        self._async_client = AsyncClient(**kwargs)
 
-    def __init__(
+    @overload
+    async def request(
         self,
-        configuration: typing.Optional[Configuration] = None,
-        header_name: typing.Optional[str] = None,
-        header_value: typing.Optional[str] = None,
-        cookie: typing.Optional[str] = None,
-        pool_threads: int = 1,
-    ):
-        if configuration is None:
-            configuration = Configuration()
-        self.configuration = configuration
-        self.pool_threads = pool_threads
-
-        self.rest_client = rest.RESTClientObject(configuration)
-        self.default_headers = HTTPHeaderDict()
-        if header_name is not None:
-            self.default_headers[header_name] = header_value
-        self.cookie = cookie
-        # Set default User-Agent.
-        self.user_agent = "OpenAPI-Generator/1.0.5/python"
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        self.close()
-
-    def close(self):
-        if self._pool:
-            self._pool.close()
-            self._pool.join()
-            self._pool = None
-            if hasattr(atexit, "unregister"):
-                atexit.unregister(self.close)
-
-    @property
-    def pool(self):
-        """Create thread pool on first request
-        avoids instantiating unused threadpool for blocking clients.
-        """
-        if self._pool is None:
-            atexit.register(self.close)
-            self._pool = ThreadPool(self.pool_threads)
-        return self._pool
-
-    @property
-    def user_agent(self):
-        """User agent for this API client"""
-        return self.default_headers["User-Agent"]
-
-    @user_agent.setter
-    def user_agent(self, value):
-        self.default_headers["User-Agent"] = value
-
-    def set_default_header(self, header_name, header_value):
-        self.default_headers[header_name] = header_value
-
-    def __call_api(
-        self,
-        resource_path: str,
+        *,
+        type_: Type[T],
         method: str,
-        headers: typing.Optional[HTTPHeaderDict] = None,
-        body: typing.Optional[typing.Union[str, bytes]] = None,
-        fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
-        auth_settings: typing.Optional[typing.List[str]] = None,
-        stream: bool = False,
-        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
-        host: typing.Optional[str] = None,
-    ) -> urllib3.HTTPResponse:
-        # header parameters
-        used_headers = HTTPHeaderDict(self.default_headers)
-        if self.cookie:
-            headers["Cookie"] = self.cookie
-
-        # auth setting
-        self.update_params_for_auth(used_headers, auth_settings, resource_path, method, body)
-
-        # must happen after cookie setting and auth setting in case user is overriding those
-        if headers:
-            used_headers.update(headers)
-
-        # request url
-        if host is None:
-            url = self.configuration.host + resource_path
-        else:
-            # use server/host defined in path or operation instead
-            url = host + resource_path
-
-        # perform request and return response
-        response = self.request(
-            method,
-            url,
-            headers=used_headers,
-            fields=fields,
-            body=body,
-            stream=stream,
-            timeout=timeout,
-        )
-        return response
+        url: str,
+        path_params: Dict[str, Any] = None,
+        **kwargs: Any,
+    ) -> T:
+        ...
 
-    def call_api(
+    @overload  # noqa F811
+    async def request(
         self,
-        resource_path: str,
+        *,
+        type_: None,
         method: str,
-        headers: typing.Optional[HTTPHeaderDict] = None,
-        body: typing.Optional[typing.Union[str, bytes]] = None,
-        fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
-        auth_settings: typing.Optional[typing.List[str]] = None,
-        async_req: typing.Optional[bool] = None,
-        stream: bool = False,
-        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
-        host: typing.Optional[str] = None,
-    ) -> urllib3.HTTPResponse:
-        """Makes the HTTP request (synchronous) and returns deserialized data.
-
-        To make an async_req request, set the async_req parameter.
-
-        :param resource_path: Path to method endpoint.
-        :param method: Method to call.
-        :param headers: Header parameters to be
-            placed in the request header.
-        :param body: Request body.
-        :param fields: Request post form parameters,
-            for `application/x-www-form-urlencoded`, `multipart/form-data`.
-        :param auth_settings: Auth Settings names for the request.
-        :param async_req: execute request asynchronously
-        :type async_req: bool, optional TODO remove, unused
-        :param stream: if True, the urllib3.HTTPResponse object will
-                                 be returned without reading/decoding response
-                                 data. Also when True, if the openapi spec describes a file download,
-                                 the data will be written to a local filesystme file and the BinarySchema
-                                 instance will also inherit from FileSchema and FileIO
-                                 Default is False.
-        :type stream: bool, optional
-        :param timeout: timeout setting for this request. If one
-                                 number provided, it will be total request
-                                 timeout. It can also be a pair (tuple) of
-                                 (connection, read) timeouts.
-        :param host: api endpoint host
-        :return:
-            If async_req parameter is True,
-            the request will be called asynchronously.
-            The method will return the request thread.
-            If parameter async_req is False or missing,
-            then the method will return the response directly.
-        """
-
-        if not async_req:
-            return self.__call_api(
-                resource_path,
-                method,
-                headers,
-                body,
-                fields,
-                auth_settings,
-                stream,
-                timeout,
-                host,
-            )
-
-        return self.pool.apply_async(
-            self.__call_api,
-            (
-                resource_path,
-                method,
-                headers,
-                body,
-                json,
-                fields,
-                auth_settings,
-                stream,
-                timeout,
-                host,
-            ),
-        )
+        url: str,
+        path_params: Dict[str, Any] = None,
+        **kwargs: Any,
+    ) -> None:
+        ...
 
-    def request(
+    async def request(  # noqa F811
         self,
+        *,
+        type_: Any,
         method: str,
         url: str,
-        headers: typing.Optional[HTTPHeaderDict] = None,
-        fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
-        body: typing.Optional[typing.Union[str, bytes]] = None,
-        stream: bool = False,
-        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
-    ) -> urllib3.HTTPResponse:
-        """Makes the HTTP request using RESTClient."""
-        if method == "GET":
-            return self.rest_client.GET(url, stream=stream, timeout=timeout, headers=headers)
-        elif method == "HEAD":
-            return self.rest_client.HEAD(url, stream=stream, timeout=timeout, headers=headers)
-        elif method == "OPTIONS":
-            return self.rest_client.OPTIONS(
-                url,
-                headers=headers,
-                fields=fields,
-                stream=stream,
-                timeout=timeout,
-                body=body,
-            )
-        elif method == "POST":
-            return self.rest_client.POST(
-                url,
-                headers=headers,
-                fields=fields,
-                stream=stream,
-                timeout=timeout,
-                body=body,
-            )
-        elif method == "PUT":
-            return self.rest_client.PUT(
-                url,
-                headers=headers,
-                fields=fields,
-                stream=stream,
-                timeout=timeout,
-                body=body,
-            )
-        elif method == "PATCH":
-            return self.rest_client.PATCH(
-                url,
-                headers=headers,
-                fields=fields,
-                stream=stream,
-                timeout=timeout,
-                body=body,
-            )
-        elif method == "DELETE":
-            return self.rest_client.DELETE(url, headers=headers, stream=stream, timeout=timeout, body=body)
-        else:
-            raise ApiValueError("http method must be `GET`, `HEAD`, `OPTIONS`," " `POST`, `PATCH`, `PUT` or `DELETE`.")
-
-    def update_params_for_auth(self, headers, auth_settings, resource_path, method, body):
-        """Updates header and query params based on authentication setting.
-
-        :param headers: Header parameters dict to be updated.
-        :param auth_settings: Authentication setting identifiers list.
-        :param resource_path: A string representation of the HTTP request resource path.
-        :param method: A string representation of the HTTP request method.
-        :param body: A object representing the body of the HTTP request.
-            The object type is the return value of _encoder.default().
-        """
-        if not auth_settings:
-            return
-
-        for auth in auth_settings:
-            auth_setting = self.configuration.auth_settings().get(auth)
-            if not auth_setting:
-                continue
-            if auth_setting["in"] == "cookie":
-                headers.add("Cookie", auth_setting["value"])
-            elif auth_setting["in"] == "header":
-                if auth_setting["type"] != "http-signature":
-                    headers.add(auth_setting["key"], auth_setting["value"])
-            elif auth_setting["in"] == "query":
-                """TODO implement auth in query
-                need to pass in prefix_separator_iterator
-                and need to output resource_path with query params added
-                """
-                raise ApiValueError("Auth in query not yet implemented")
-            else:
-                raise ApiValueError("Authentication token must be in `query` or `header`")
-
-
-class Api:
-    """NOTE: This class is auto generated by OpenAPI Generator
-    Ref: https://openapi-generator.tech
-
-    Do not edit the class manually.
-    """
-
-    def __init__(self, api_client: typing.Optional[ApiClient] = None):
-        if api_client is None:
-            api_client = ApiClient()
-        self.api_client = api_client
-
-    @staticmethod
-    def _verify_typed_dict_inputs_oapg(
-        cls: typing.Type[typing_extensions.TypedDict],
-        data: typing.Dict[str, typing.Any],
-    ):
-        """
-        Ensures that:
-        - required keys are present
-        - additional properties are not input
-        - value stored under required keys do not have the value unset
-        Note: detailed value checking is done in schema classes
-        """
-        missing_required_keys = []
-        required_keys_with_unset_values = []
-        for required_key in cls.__required_keys__:
-            if required_key not in data:
-                missing_required_keys.append(required_key)
-                continue
-            value = data[required_key]
-            if value is unset:
-                required_keys_with_unset_values.append(required_key)
-        if missing_required_keys:
-            raise ApiTypeError(
-                "{} missing {} required arguments: {}".format(
-                    cls.__name__,
-                    len(missing_required_keys),
-                    missing_required_keys,
-                )
-            )
-        if required_keys_with_unset_values:
-            raise ApiValueError(
-                "{} contains invalid unset values for {} required keys: {}".format(
-                    cls.__name__,
-                    len(required_keys_with_unset_values),
-                    required_keys_with_unset_values,
-                )
-            )
-
-        disallowed_additional_keys = []
-        for key in data:
-            if key in cls.__required_keys__ or key in cls.__optional_keys__:
-                continue
-            disallowed_additional_keys.append(key)
-        if disallowed_additional_keys:
-            raise ApiTypeError(
-                "{} got {} unexpected keyword arguments: {}".format(
-                    cls.__name__,
-                    len(disallowed_additional_keys),
-                    disallowed_additional_keys,
-                )
-            )
-
-    def _get_host_oapg(
-        self,
-        operation_id: str,
-        servers: typing.Tuple[typing.Dict[str, str], ...] = tuple(),
-        host_index: typing.Optional[int] = None,
-    ) -> typing.Optional[str]:
-        configuration = self.api_client.configuration
+        path_params: Dict[str, Any] = None,
+        **kwargs: Any,
+    ) -> Any:
+        if path_params is None:
+            path_params = {}
+        url = (self.host or "") + url.format(**path_params)
+        request = Request(method, url, **kwargs)
+        return await self.send(request, type_)
+
+    @overload
+    def request_sync(self, *, type_: Type[T], **kwargs: Any) -> T:
+        ...
+
+    @overload  # noqa F811
+    def request_sync(self, *, type_: None, **kwargs: Any) -> None:
+        ...
+
+    def request_sync(self, *, type_: Any, **kwargs: Any) -> Any:  # noqa F811
+        """
+        This method is not used by the generated APIs, but is included for convenience.
+        """
+        return get_event_loop().run_until_complete(self.request(type_=type_, **kwargs))
+
+    async def send(self, request: Request, type_: Type[T]) -> T:
+        response = await self.middleware(request, self.send_inner)
+        if response.status_code in [200, 201]:
+            try:
+                return parse_obj_as(type_, response.json())
+            except ValidationError as e:
+                raise ResponseHandlingException(e)
+        raise UnexpectedResponse.for_response(response)
+
+    async def send_inner(self, request: Request) -> Response:
         try:
-            if host_index is None:
-                index = configuration.server_operation_index.get(operation_id, configuration.server_index)
-            else:
-                index = host_index
-            server_variables = configuration.server_operation_variables.get(
-                operation_id, configuration.server_variables
-            )
-            host = configuration.get_host_from_settings(index, variables=server_variables, servers=servers)
-        except IndexError:
-            if servers:
-                raise ApiValueError("Invalid host index. Must be 0 <= index < %s" % len(servers))
-            host = None
-        return host
-
-
-class SerializedRequestBody(typing_extensions.TypedDict, total=False):
-    body: typing.Union[str, bytes]
-    fields: typing.Tuple[typing.Union[RequestField, typing.Tuple[str, str]], ...]
-
-
-class RequestBody(StyleFormSerializer, JSONDetector):
-    """
-    A request body parameter
-    content: content_type to MediaType Schema info
-    """
-
-    __json_encoder = JSONEncoder()
-
-    def __init__(
-        self,
-        content: typing.Dict[str, MediaType],
-        required: bool = False,
-    ):
-        self.required = required
-        if len(content) == 0:
-            raise ValueError("Invalid value for content, the content dict must have >= 1 entry")
-        self.content = content
-
-    def __serialize_json(self, in_data: typing.Any) -> typing.Dict[str, bytes]:
-        in_data = self.__json_encoder.default(in_data)
-        json_str = json.dumps(in_data, separators=(",", ":"), ensure_ascii=False).encode("utf-8")
-        return dict(body=json_str)
-
-    @staticmethod
-    def __serialize_text_plain(in_data: typing.Any) -> typing.Dict[str, str]:
-        if isinstance(in_data, frozendict.frozendict):
-            raise ValueError("Unable to serialize type frozendict.frozendict to text/plain")
-        elif isinstance(in_data, tuple):
-            raise ValueError("Unable to serialize type tuple to text/plain")
-        elif isinstance(in_data, NoneClass):
-            raise ValueError("Unable to serialize type NoneClass to text/plain")
-        elif isinstance(in_data, BoolClass):
-            raise ValueError("Unable to serialize type BoolClass to text/plain")
-        return dict(body=str(in_data))
-
-    def __multipart_json_item(self, key: str, value: Schema) -> RequestField:
-        json_value = self.__json_encoder.default(value)
-        return RequestField(
-            name=key,
-            data=json.dumps(json_value),
-            headers={"Content-Type": "application/json"},
-        )
-
-    def __multipart_form_item(self, key: str, value: Schema) -> RequestField:
-        if isinstance(value, str):
-            return RequestField(
-                name=key,
-                data=str(value),
-                headers={"Content-Type": "text/plain"},
-            )
-        elif isinstance(value, bytes):
-            return RequestField(
-                name=key,
-                data=value,
-                headers={"Content-Type": "application/octet-stream"},
-            )
-        elif isinstance(value, FileIO):
-            request_field = RequestField(
-                name=key,
-                data=value.read(),
-                filename=os.path.basename(value.name),
-                headers={"Content-Type": "application/octet-stream"},
-            )
-            value.close()
-            return request_field
-        else:
-            return self.__multipart_json_item(key=key, value=value)
-
-    def __serialize_multipart_form_data(self, in_data: Schema) -> typing.Dict[str, typing.Tuple[RequestField, ...]]:
-        if not isinstance(in_data, frozendict.frozendict):
-            raise ValueError(f"Unable to serialize {in_data} to multipart/form-data because it is not a dict of data")
-        """
-        In a multipart/form-data request body, each schema property, or each element of a schema array property,
-        takes a section in the payload with an internal header as defined by RFC7578. The serialization strategy
-        for each property of a multipart/form-data request body can be specified in an associated Encoding Object.
-
-        When passing in multipart types, boundaries MAY be used to separate sections of the content being
-        transferred – thus, the following default Content-Types are defined for multipart:
+            response = await self._async_client.send(request)
+        except Exception as e:
+            raise ResponseHandlingException(e)
+        return response
 
-        If the (object) property is a primitive, or an array of primitive values, the default Content-Type is text/plain
-        If the property is complex, or an array of complex values, the default Content-Type is application/json
-            Question: how is the array of primitives encoded?
-        If the property is a type: string with a contentEncoding, the default Content-Type is application/octet-stream
-        """
-        fields = []
-        for key, value in in_data.items():
-            if isinstance(value, tuple):
-                if value:
-                    # values use explode = True, so the code makes a RequestField for each item with name=key
-                    for item in value:
-                        request_field = self.__multipart_form_item(key=key, value=item)
-                        fields.append(request_field)
-                else:
-                    # send an empty array as json because exploding will not send it
-                    request_field = self.__multipart_json_item(key=key, value=value)
-                    fields.append(request_field)
-            else:
-                request_field = self.__multipart_form_item(key=key, value=value)
-                fields.append(request_field)
+    def add_middleware(self, middleware: MiddlewareT) -> None:
+        current_middleware = self.middleware
 
-        return dict(fields=tuple(fields))
+        async def new_middleware(request: Request, call_next: Send) -> Response:
+            async def inner_send(request: Request) -> Response:
+                return await current_middleware(request, call_next)
 
-    def __serialize_application_octet_stream(self, in_data: BinarySchema) -> typing.Dict[str, bytes]:
-        if isinstance(in_data, bytes):
-            return dict(body=in_data)
-        # FileIO type
-        result = dict(body=in_data.read())
-        in_data.close()
-        return result
+            return await middleware(request, inner_send)
 
-    def __serialize_application_x_www_form_data(self, in_data: typing.Any) -> SerializedRequestBody:
-        """
-        POST submission of form data in body
-        """
-        if not isinstance(in_data, frozendict.frozendict):
-            raise ValueError(
-                f"Unable to serialize {in_data} to application/x-www-form-urlencoded because it is not a dict of data"
-            )
-        cast_in_data = self.__json_encoder.default(in_data)
-        value = self._serialize_form(cast_in_data, name="", explode=True, percent_encode=True)
-        return dict(body=value)
+        self.middleware = new_middleware
 
-    def serialize(self, in_data: typing.Any, content_type: str) -> SerializedRequestBody:
-        """
-        If a str is returned then the result will be assigned to data when making the request
-        If a tuple is returned then the result will be used as fields input in encode_multipart_formdata
-        Return a tuple of
 
-        The key of the return dict is
-        - body for application/json
-        - encode_multipart and fields for multipart/form-data
-        """
-        media_type = self.content[content_type]
-        if isinstance(in_data, media_type.schema):
-            cast_in_data = in_data
-        elif isinstance(in_data, (dict, frozendict.frozendict)) and in_data:
-            cast_in_data = media_type.schema(**in_data)
-        else:
-            cast_in_data = media_type.schema(in_data)
-        # TODO check for and use encoding if it exists
-        # and content_type is multipart or application/x-www-form-urlencoded
-        if self._content_type_is_json(content_type):
-            return self.__serialize_json(cast_in_data)
-        elif content_type == "text/plain":
-            return self.__serialize_text_plain(cast_in_data)
-        elif content_type == "multipart/form-data":
-            return self.__serialize_multipart_form_data(cast_in_data)
-        elif content_type == "application/x-www-form-urlencoded":
-            return self.__serialize_application_x_www_form_data(cast_in_data)
-        elif content_type == "application/octet-stream":
-            return self.__serialize_application_octet_stream(cast_in_data)
-        raise NotImplementedError("Serialization has not yet been implemented for {}".format(content_type))
+class BaseMiddleware:
+    async def __call__(self, request: Request, call_next: Send) -> Response:
+        return await call_next(request)
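A minimal usage sketch for the new httpx-based client: anything callable as (request, call_next) -> response can be registered with add_middleware, and SyncApis wraps a shared ApiClient with the generated synchronous API surface. The class name AuthMiddleware, the host URL, and the token value below are illustrative assumptions, not part of the generated code.

    from httpx import Request, Response

    from launch.api_client.api_client import (
        ApiClient,
        BaseMiddleware,
        Send,
        SyncApis,
    )


    class AuthMiddleware(BaseMiddleware):
        """Hypothetical middleware that adds a bearer token to every request."""

        def __init__(self, token: str):
            self.token = token

        async def __call__(self, request: Request, call_next: Send) -> Response:
            # Mutate the outgoing httpx.Request, then hand off to the next layer.
            request.headers["Authorization"] = f"Bearer {self.token}"
            return await call_next(request)


    client = ApiClient(host="https://launch.example.com")  # placeholder host
    client.add_middleware(AuthMiddleware(token="my-api-key"))  # illustrative token
    apis = SyncApis(client)
    # apis.default_api now exposes the generated operations synchronously.

Middlewares compose in registration order: add_middleware wraps the existing chain, so the most recently added middleware sees the request first.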
diff --git a/launch/api_client/apis/paths/healthcheck.py b/launch/api_client/apis/paths/healthcheck.py
deleted file mode 100644
index 6f11f7bb..00000000
--- a/launch/api_client/apis/paths/healthcheck.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from launch.api_client.paths.healthcheck.get import ApiForget
-
-
-class Healthcheck(
-    ApiForget,
-):
-    pass
diff --git a/launch/api_client/apis/paths/healthz.py b/launch/api_client/apis/paths/healthz.py
deleted file mode 100644
index 62f2a817..00000000
--- a/launch/api_client/apis/paths/healthz.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from launch.api_client.paths.healthz.get import ApiForget
-
-
-class Healthz(
-    ApiForget,
-):
-    pass
diff --git a/launch/api_client/apis/paths/readyz.py b/launch/api_client/apis/paths/readyz.py
deleted file mode 100644
index 35b8e737..00000000
--- a/launch/api_client/apis/paths/readyz.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from launch.api_client.paths.readyz.get import ApiForget
-
-
-class Readyz(
-    ApiForget,
-):
-    pass
diff --git a/launch/api_client/apis/paths/v1_async_tasks.py b/launch/api_client/apis/paths/v1_async_tasks.py
deleted file mode 100644
index 74bb2cba..00000000
--- a/launch/api_client/apis/paths/v1_async_tasks.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from launch.api_client.paths.v1_async_tasks.post import ApiForpost
-
-
-class V1AsyncTasks(
-    ApiForpost,
-):
-    pass
diff --git a/launch/api_client/apis/paths/v1_async_tasks_task_id.py b/launch/api_client/apis/paths/v1_async_tasks_task_id.py
deleted file mode 100644
index 473e4bb4..00000000
--- a/launch/api_client/apis/paths/v1_async_tasks_task_id.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from launch.api_client.paths.v1_async_tasks_task_id.get import ApiForget
-
-
-class V1AsyncTasksTaskId(
-    ApiForget,
-):
-    pass
diff --git a/launch/api_client/apis/paths/v1_batch_jobs.py b/launch/api_client/apis/paths/v1_batch_jobs.py
deleted file mode 100644
index 73b60ebc..00000000
--- a/launch/api_client/apis/paths/v1_batch_jobs.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from launch.api_client.paths.v1_batch_jobs.post import ApiForpost
-
-
-class V1BatchJobs(
-    ApiForpost,
-):
-    pass
diff --git a/launch/api_client/apis/paths/v1_batch_jobs_batch_job_id.py b/launch/api_client/apis/paths/v1_batch_jobs_batch_job_id.py
deleted file mode 100644
index 40bb9f1a..00000000
--- a/launch/api_client/apis/paths/v1_batch_jobs_batch_job_id.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from launch.api_client.paths.v1_batch_jobs_batch_job_id.get import ApiForget
-from launch.api_client.paths.v1_batch_jobs_batch_job_id.put import ApiForput
-
-
-class V1BatchJobsBatchJobId(
-    ApiForget,
-    ApiForput,
-):
-    pass
diff --git a/launch/api_client/apis/paths/v1_model_bundles.py b/launch/api_client/apis/paths/v1_model_bundles.py
deleted file mode 100644
index 205a87dc..00000000
--- a/launch/api_client/apis/paths/v1_model_bundles.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from launch.api_client.paths.v1_model_bundles.get import ApiForget
-from launch.api_client.paths.v1_model_bundles.post import ApiForpost
-
-
-class V1ModelBundles(
-    ApiForget,
-    ApiForpost,
-):
-    pass
diff --git a/launch/api_client/apis/paths/v1_model_bundles_latest.py b/launch/api_client/apis/paths/v1_model_bundles_latest.py
deleted file mode 100644
index e84f6419..00000000
--- a/launch/api_client/apis/paths/v1_model_bundles_latest.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from launch.api_client.paths.v1_model_bundles_latest.get import ApiForget
-
-
-class V1ModelBundlesLatest(
-    ApiForget,
-):
-    pass
diff --git a/launch/api_client/apis/paths/v1_model_endpoints.py b/launch/api_client/apis/paths/v1_model_endpoints.py
deleted file mode 100644
index 070bb511..00000000
--- a/launch/api_client/apis/paths/v1_model_endpoints.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from launch.api_client.paths.v1_model_endpoints.get import ApiForget
-from launch.api_client.paths.v1_model_endpoints.post import ApiForpost
-
-
-class V1ModelEndpoints(
-    ApiForget,
-    ApiForpost,
-):
-    pass
diff --git a/launch/api_client/apis/paths/v1_model_endpoints_api.py b/launch/api_client/apis/paths/v1_model_endpoints_api.py
deleted file mode 100644
index 148b69f8..00000000
--- a/launch/api_client/apis/paths/v1_model_endpoints_api.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from launch.api_client.paths.v1_model_endpoints_api.get import ApiForget
-
-
-class V1ModelEndpointsApi(
-    ApiForget,
-):
-    pass
diff --git a/launch/api_client/apis/paths/v1_model_endpoints_model_endpoint_id.py b/launch/api_client/apis/paths/v1_model_endpoints_model_endpoint_id.py
deleted file mode 100644
index 7f4d3e3a..00000000
--- a/launch/api_client/apis/paths/v1_model_endpoints_model_endpoint_id.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from launch.api_client.paths.v1_model_endpoints_model_endpoint_id.delete import (
-    ApiFordelete,
-)
-from launch.api_client.paths.v1_model_endpoints_model_endpoint_id.get import (
-    ApiForget,
-)
-from launch.api_client.paths.v1_model_endpoints_model_endpoint_id.put import (
-    ApiForput,
-)
-
-
-class V1ModelEndpointsModelEndpointId(
-    ApiForget,
-    ApiForput,
-    ApiFordelete,
-):
-    pass
diff --git a/launch/api_client/apis/paths/v1_sync_tasks.py b/launch/api_client/apis/paths/v1_sync_tasks.py
deleted file mode 100644
index 6f4bfe5d..00000000
--- a/launch/api_client/apis/paths/v1_sync_tasks.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from launch.api_client.paths.v1_sync_tasks.post import ApiForpost
-
-
-class V1SyncTasks(
-    ApiForpost,
-):
-    pass
diff --git a/launch/api_client/exceptions.py b/launch/api_client/exceptions.py
index 41d4f771..45437e1c 100644
--- a/launch/api_client/exceptions.py
+++ b/launch/api_client/exceptions.py
@@ -1,137 +1,46 @@
-# coding: utf-8
+import json
+from typing import Any, Dict, Optional
 
-"""
-    launch
+from httpx import Headers, Response
 
-    No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)  # noqa: E501
+MAX_CONTENT = 200
 
-    The version of the OpenAPI document: 1.0.0
-    Generated by: https://openapi-generator.tech
-"""
 
+class ApiException(Exception):
+    """Base class"""
 
-class OpenApiException(Exception):
-    """The base exception class for all OpenAPIExceptions"""
 
+class UnexpectedResponse(ApiException):
+    def __init__(self, status_code: Optional[int], reason_phrase: str, content: bytes, headers: Headers) -> None:
+        self.status_code = status_code
+        self.reason_phrase = reason_phrase
+        self.content = content
+        self.headers = headers
 
-class ApiTypeError(OpenApiException, TypeError):
-    def __init__(self, msg, path_to_item=None, valid_classes=None, key_type=None):
-        """Raises an exception for TypeErrors
+    @staticmethod
+    def for_response(response: Response) -> "UnexpectedResponse":
+        return UnexpectedResponse(
+            status_code=response.status_code,
+            reason_phrase=response.reason_phrase,
+            content=response.content,
+            headers=response.headers,
+        )
 
-        Args:
-            msg (str): the exception message
-
-        Keyword Args:
-            path_to_item (list): a list of keys an indices to get to the
-                                 current_item
-                                 None if unset
-            valid_classes (tuple): the primitive classes that current item
-                                   should be an instance of
-                                   None if unset
-            key_type (bool): False if our value is a value in a dict
-                             True if it is a key in a dict
-                             False if our item is an item in a list
-                             None if unset
-        """
-        self.path_to_item = path_to_item
-        self.valid_classes = valid_classes
-        self.key_type = key_type
-        full_msg = msg
-        if path_to_item:
-            full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
-        super(ApiTypeError, self).__init__(full_msg)
-
-
-class ApiValueError(OpenApiException, ValueError):
-    def __init__(self, msg, path_to_item=None):
-        """
-        Args:
-            msg (str): the exception message
-
-        Keyword Args:
-            path_to_item (list) the path to the exception in the
-                received_data dict. None if unset
-        """
-
-        self.path_to_item = path_to_item
-        full_msg = msg
-        if path_to_item:
-            full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
-        super(ApiValueError, self).__init__(full_msg)
-
-
-class ApiAttributeError(OpenApiException, AttributeError):
-    def __init__(self, msg, path_to_item=None):
-        """
-        Raised when an attribute reference or assignment fails.
-
-        Args:
-            msg (str): the exception message
-
-        Keyword Args:
-            path_to_item (None/list) the path to the exception in the
-                received_data dict
-        """
-        self.path_to_item = path_to_item
-        full_msg = msg
-        if path_to_item:
-            full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
-        super(ApiAttributeError, self).__init__(full_msg)
-
-
-class ApiKeyError(OpenApiException, KeyError):
-    def __init__(self, msg, path_to_item=None):
-        """
-        Args:
-            msg (str): the exception message
-
-        Keyword Args:
-            path_to_item (None/list) the path to the exception in the
-                received_data dict
-        """
-        self.path_to_item = path_to_item
-        full_msg = msg
-        if path_to_item:
-            full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
-        super(ApiKeyError, self).__init__(full_msg)
-
-
-class ApiException(OpenApiException):
-    def __init__(
-        self,
-        status=None,
-        reason=None,
-        api_response: "launch.api_client.api_client.ApiResponse" = None,
-    ):
-        if api_response:
-            self.status = api_response.response.status
-            self.reason = api_response.response.reason
-            self.body = api_response.response.data
-            self.headers = api_response.response.getheaders()
+    def __str__(self) -> str:
+        status_code_str = f"{self.status_code}" if self.status_code is not None else ""
+        if self.reason_phrase == "" and self.status_code is not None:
+            reason_phrase_str = "(Unrecognized Status Code)"
         else:
-            self.status = status
-            self.reason = reason
-            self.body = None
-            self.headers = None
-
-    def __str__(self):
-        """Custom error messages for exception"""
-        error_message = "({0})\n" "Reason: {1}\n".format(self.status, self.reason)
-        if self.headers:
-            error_message += "HTTP response headers: {0}\n".format(self.headers)
+            reason_phrase_str = f"({self.reason_phrase})"
+        status_str = f"{status_code_str} {reason_phrase_str}".strip()
+        short_content = self.content if len(self.content) <= MAX_CONTENT else self.content[: MAX_CONTENT - 3] + b" ..."
+        raw_content_str = f"Raw response content:\n{short_content!r}"
+        return f"Unexpected Response: {status_str}\n{raw_content_str}"
 
-        if self.body:
-            error_message += "HTTP response body: {0}\n".format(self.body)
+    def structured(self) -> Dict[str, Any]:
+        return json.loads(self.content)
 
-        return error_message
 
-
-def render_path(path_to_item):
-    """Returns a string representation of a path"""
-    result = ""
-    for pth in path_to_item:
-        if isinstance(pth, int):
-            result += "[{0}]".format(pth)
-        else:
-            result += "['{0}']".format(pth)
-    return result
+class ResponseHandlingException(ApiException):
+    def __init__(self, source: Exception):
+        self.source = source
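A hedged sketch of how a caller might handle these exceptions. The host URL is a placeholder, /healthz is one of the paths referenced elsewhere in this change, and decoding the response body as a plain dict is an illustrative choice that assumes the endpoint returns JSON.

    from launch.api_client.api_client import ApiClient
    from launch.api_client.exceptions import (
        ResponseHandlingException,
        UnexpectedResponse,
    )

    client = ApiClient(host="https://launch.example.com")  # placeholder host

    try:
        # request_sync runs the async request() to completion on the event loop.
        health = client.request_sync(type_=dict, method="GET", url="/healthz")
    except UnexpectedResponse as exc:
        # Non-2xx status: the status code, headers, and raw body are preserved,
        # and structured() decodes the body as JSON when possible.
        print(exc.status_code, exc.structured())
    except ResponseHandlingException as exc:
        # Transport or response-validation failure; the original error is kept
        # on .source.
        raise exc.source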
diff --git a/launch/api_client/models.py b/launch/api_client/models.py
new file mode 100644
index 00000000..b59991f1
--- /dev/null
+++ b/launch/api_client/models.py
@@ -0,0 +1,285 @@
+from datetime import datetime
+from enum import Enum
+from typing import Any  # noqa
+from typing import Dict, List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class BatchJobSerializationFormat(str, Enum):
+    JSON = "JSON"
+    PICKLE = "PICKLE"
+
+
+class BatchJobStatus(str, Enum):
+    PENDING = "PENDING"
+    CREATING_ENDPOINT = "CREATING_ENDPOINT"
+    STARTED = "STARTED"
+    SUCCESS = "SUCCESS"
+    FAILURE = "FAILURE"
+    CANCELLED = "CANCELLED"
+    UNDEFINED = "UNDEFINED"
+
+
+class CloneModelBundleRequest(BaseModel):
+    new_app_config: "Optional[Any]" = Field(None, alias="new_app_config")
+    original_model_bundle_id: "str" = Field(..., alias="original_model_bundle_id")
+
+
+class CreateAsyncTaskResponse(BaseModel):
+    task_id: "str" = Field(..., alias="task_id")
+
+
+class CreateBatchJobRequest(BaseModel):
+    input_path: "str" = Field(..., alias="input_path")
+    labels: "Dict[str, str]" = Field(..., alias="labels")
+    model_bundle_id: "str" = Field(..., alias="model_bundle_id")
+    resource_requests: "CreateBatchJobResourceRequests" = Field(..., alias="resource_requests")
+    serialization_format: "BatchJobSerializationFormat" = Field(..., alias="serialization_format")
+
+
+class CreateBatchJobResourceRequests(BaseModel):
+    cpus: "Optional[Any]" = Field(None, alias="cpus")
+    gpu_type: "Optional[GpuType]" = Field(None, alias="gpu_type")
+    gpus: "Optional[int]" = Field(None, alias="gpus")
+    max_workers: "Optional[int]" = Field(None, alias="max_workers")
+    memory: "Optional[Any]" = Field(None, alias="memory")
+    per_worker: "Optional[int]" = Field(None, alias="per_worker")
+    storage: "Optional[Any]" = Field(None, alias="storage")
+
+
+class CreateBatchJobResponse(BaseModel):
+    job_id: "str" = Field(..., alias="job_id")
+
+
+class CreateModelBundleRequest(BaseModel):
+    app_config: "Optional[Any]" = Field(None, alias="app_config")
+    env_params: "ModelBundleEnvironmentParams" = Field(..., alias="env_params")
+    location: "str" = Field(..., alias="location")
+    metadata: "Optional[Any]" = Field(None, alias="metadata")
+    name: "str" = Field(..., alias="name")
+    packaging_type: "Optional[ModelBundlePackagingType]" = Field(None, alias="packaging_type")
+    requirements: "List[str]" = Field(..., alias="requirements")
+    schema_location: "Optional[str]" = Field(None, alias="schema_location")
+
+
+class CreateModelBundleResponse(BaseModel):
+    model_bundle_id: "str" = Field(..., alias="model_bundle_id")
+
+
+class CreateModelEndpointRequest(BaseModel):
+    billing_tags: "Optional[Any]" = Field(None, alias="billing_tags")
+    cpus: "Any" = Field(..., alias="cpus")
+    default_callback_url: "Optional[str]" = Field(None, alias="default_callback_url")
+    endpoint_type: "ModelEndpointType" = Field(..., alias="endpoint_type")
+    gpu_type: "Optional[GpuType]" = Field(None, alias="gpu_type")
+    gpus: "int" = Field(..., alias="gpus")
+    labels: "Dict[str, str]" = Field(..., alias="labels")
+    max_workers: "int" = Field(..., alias="max_workers")
+    memory: "Any" = Field(..., alias="memory")
+    metadata: "Any" = Field(..., alias="metadata")
+    min_workers: "int" = Field(..., alias="min_workers")
+    model_bundle_id: "str" = Field(..., alias="model_bundle_id")
+    name: "str" = Field(..., alias="name")
+    optimize_costs: "Optional[bool]" = Field(None, alias="optimize_costs")
+    per_worker: "int" = Field(..., alias="per_worker")
+    post_inference_hooks: "Optional[List[str]]" = Field(None, alias="post_inference_hooks")
+    prewarm: "Optional[bool]" = Field(None, alias="prewarm")
+    storage: "Optional[Any]" = Field(None, alias="storage")
+
+
+class CreateModelEndpointResponse(BaseModel):
+    endpoint_creation_task_id: "str" = Field(..., alias="endpoint_creation_task_id")
+
+
+class DeleteModelEndpointResponse(BaseModel):
+    deleted: "bool" = Field(..., alias="deleted")
+
+
+class EndpointPredictRequest(BaseModel):
+    args: "Optional[Any]" = Field(None, alias="args")
+    callback_url: "Optional[str]" = Field(None, alias="callback_url")
+    cloudpickle: "Optional[str]" = Field(None, alias="cloudpickle")
+    return_pickled: "Optional[bool]" = Field(None, alias="return_pickled")
+    url: "Optional[str]" = Field(None, alias="url")
+
+
+class GetAsyncTaskResponse(BaseModel):
+    result: "Optional[Any]" = Field(None, alias="result")
+    status: "TaskStatus" = Field(..., alias="status")
+    task_id: "str" = Field(..., alias="task_id")
+    traceback: "Optional[str]" = Field(None, alias="traceback")
+
+
+class GetBatchJobResponse(BaseModel):
+    duration: "Optional[float]" = Field(None, alias="duration")
+    num_tasks_completed: "Optional[int]" = Field(None, alias="num_tasks_completed")
+    num_tasks_pending: "Optional[int]" = Field(None, alias="num_tasks_pending")
+    result: "Optional[str]" = Field(None, alias="result")
+    status: "BatchJobStatus" = Field(..., alias="status")
+
+
+class GetModelEndpointResponse(BaseModel):
+    aws_role: "Optional[str]" = Field(None, alias="aws_role")
+    bundle_name: "str" = Field(..., alias="bundle_name")
+    created_at: "datetime" = Field(..., alias="created_at")
+    created_by: "str" = Field(..., alias="created_by")
+    default_callback_url: "Optional[str]" = Field(None, alias="default_callback_url")
+    deployment_name: "Optional[str]" = Field(None, alias="deployment_name")
+    deployment_state: "Optional[ModelEndpointDeploymentState]" = Field(None, alias="deployment_state")
+    destination: "str" = Field(..., alias="destination")
+    endpoint_type: "ModelEndpointType" = Field(..., alias="endpoint_type")
+    id: "str" = Field(..., alias="id")
+    labels: "Optional[Dict[str, str]]" = Field(None, alias="labels")
+    last_updated_at: "datetime" = Field(..., alias="last_updated_at")
+    metadata: "Optional[Any]" = Field(None, alias="metadata")
+    name: "str" = Field(..., alias="name")
+    post_inference_hooks: "Optional[List[str]]" = Field(None, alias="post_inference_hooks")
+    resource_state: "Optional[ModelEndpointResourceState]" = Field(None, alias="resource_state")
+    results_s3_bucket: "Optional[str]" = Field(None, alias="results_s3_bucket")
+    status: "ModelEndpointStatus" = Field(..., alias="status")
+
+
+class GpuType(str, Enum):
+    TESLA_T4 = "nvidia-tesla-t4"
+    AMPERE_A10 = "nvidia-ampere-a10"
+    A100 = "nvidia-a100"
+
+
+class HTTPValidationError(BaseModel):
+    detail: "Optional[List[ValidationError]]" = Field(None, alias="detail")
+
+
+class ListModelBundlesResponse(BaseModel):
+    model_bundles: "List[ModelBundleResponse]" = Field(..., alias="model_bundles")
+
+
+class ListModelEndpointsResponse(BaseModel):
+    model_endpoints: "List[GetModelEndpointResponse]" = Field(..., alias="model_endpoints")
+
+
+class ModelBundleEnvironmentParams(BaseModel):
+    ecr_repo: "Optional[str]" = Field(None, alias="ecr_repo")
+    framework_type: "ModelBundleFramework" = Field(..., alias="framework_type")
+    image_tag: "Optional[str]" = Field(None, alias="image_tag")
+    pytorch_image_tag: "Optional[str]" = Field(None, alias="pytorch_image_tag")
+    tensorflow_version: "Optional[str]" = Field(None, alias="tensorflow_version")
+
+
+class ModelBundleFramework(str, Enum):
+    PYTORCH = "pytorch"
+    TENSORFLOW = "tensorflow"
+    CUSTOM_BASE_IMAGE = "custom_base_image"
+
+
+class ModelBundleOrderBy(str, Enum):
+    NEWEST = "newest"
+    OLDEST = "oldest"
+
+
+class ModelBundlePackagingType(str, Enum):
+    CLOUDPICKLE = "cloudpickle"
+    ZIP = "zip"
+
+
+class ModelBundleResponse(BaseModel):
+    app_config: "Optional[Any]" = Field(None, alias="app_config")
+    created_at: "datetime" = Field(..., alias="created_at")
+    env_params: "ModelBundleEnvironmentParams" = Field(..., alias="env_params")
+    id: "str" = Field(..., alias="id")
+    location: "str" = Field(..., alias="location")
+    metadata: "Any" = Field(..., alias="metadata")
+    model_artifact_ids: "List[str]" = Field(..., alias="model_artifact_ids")
+    name: "str" = Field(..., alias="name")
+    packaging_type: "ModelBundlePackagingType" = Field(..., alias="packaging_type")
+    requirements: "List[str]" = Field(..., alias="requirements")
+    schema_location: "Optional[str]" = Field(None, alias="schema_location")
+
+
+class ModelEndpointDeploymentState(BaseModel):
+    available_workers: "Optional[int]" = Field(None, alias="available_workers")
+    max_workers: "int" = Field(..., alias="max_workers")
+    min_workers: "int" = Field(..., alias="min_workers")
+    per_worker: "int" = Field(..., alias="per_worker")
+    unavailable_workers: "Optional[int]" = Field(None, alias="unavailable_workers")
+
+
+class ModelEndpointOrderBy(str, Enum):
+    NEWEST = "newest"
+    OLDEST = "oldest"
+    ALPHABETICAL = "alphabetical"
+
+
+class ModelEndpointResourceState(BaseModel):
+    cpus: "Any" = Field(..., alias="cpus")
+    gpu_type: "Optional[GpuType]" = Field(None, alias="gpu_type")
+    gpus: "int" = Field(..., alias="gpus")
+    memory: "Any" = Field(..., alias="memory")
+    optimize_costs: "Optional[bool]" = Field(None, alias="optimize_costs")
+    storage: "Optional[Any]" = Field(None, alias="storage")
+
+
+class ModelEndpointStatus(str, Enum):
+    READY = "READY"
+    UPDATE_PENDING = "UPDATE_PENDING"
+    UPDATE_IN_PROGRESS = "UPDATE_IN_PROGRESS"
+    UPDATE_FAILED = "UPDATE_FAILED"
+    DELETE_IN_PROGRESS = "DELETE_IN_PROGRESS"
+
+
+class ModelEndpointType(str, Enum):
+    ASYNC = "async"
+    SYNC = "sync"
+
+
+class SyncEndpointPredictResponse(BaseModel):
+    result: "Optional[Any]" = Field(None, alias="result")
+    status: "TaskStatus" = Field(..., alias="status")
+    traceback: "Optional[str]" = Field(None, alias="traceback")
+
+
+class TaskStatus(str, Enum):
+    PENDING = "PENDING"
+    STARTED = "STARTED"
+    SUCCESS = "SUCCESS"
+    FAILURE = "FAILURE"
+    UNDEFINED = "UNDEFINED"
+
+
+class UpdateBatchJobRequest(BaseModel):
+    cancel: "bool" = Field(..., alias="cancel")
+
+
+class UpdateBatchJobResponse(BaseModel):
+    success: "bool" = Field(..., alias="success")
+
+
+class UpdateModelEndpointRequest(BaseModel):
+    aws_role: "Optional[str]" = Field(None, alias="aws_role")
+    billing_tags: "Optional[Any]" = Field(None, alias="billing_tags")
+    cpus: "Optional[Any]" = Field(None, alias="cpus")
+    default_callback_url: "Optional[str]" = Field(None, alias="default_callback_url")
+    gpu_type: "Optional[GpuType]" = Field(None, alias="gpu_type")
+    gpus: "Optional[int]" = Field(None, alias="gpus")
+    labels: "Optional[Dict[str, str]]" = Field(None, alias="labels")
+    max_workers: "Optional[int]" = Field(None, alias="max_workers")
+    memory: "Optional[Any]" = Field(None, alias="memory")
+    metadata: "Optional[Any]" = Field(None, alias="metadata")
+    min_workers: "Optional[int]" = Field(None, alias="min_workers")
+    model_bundle_id: "Optional[str]" = Field(None, alias="model_bundle_id")
+    optimize_costs: "Optional[bool]" = Field(None, alias="optimize_costs")
+    per_worker: "Optional[int]" = Field(None, alias="per_worker")
+    post_inference_hooks: "Optional[List[str]]" = Field(None, alias="post_inference_hooks")
+    prewarm: "Optional[bool]" = Field(None, alias="prewarm")
+    results_s3_bucket: "Optional[str]" = Field(None, alias="results_s3_bucket")
+    storage: "Optional[Any]" = Field(None, alias="storage")
+
+
+class UpdateModelEndpointResponse(BaseModel):
+    endpoint_creation_task_id: "str" = Field(..., alias="endpoint_creation_task_id")
+
+
+class ValidationError(BaseModel):
+    loc: "List[Any]" = Field(..., alias="loc")
+    msg: "str" = Field(..., alias="msg")
+    type: "str" = Field(..., alias="type")
diff --git a/launch/api_client/models/__init__.py b/launch/api_client/models/__init__.py
deleted file mode 100644
index 29cf94aa..00000000
--- a/launch/api_client/models/__init__.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# coding: utf-8
-
-# flake8: noqa
-
-# import all models into this package
-# if you have many models here with many references from one model to another this may
-# raise a RecursionError
-# to avoid this, import only the models that you directly need like:
-# from from launch.api_client.model.pet import Pet
-# or import this package, but before doing it, use:
-# import sys
-# sys.setrecursionlimit(n)
-
-from launch.api_client.model.batch_job_serialization_format import (
-    BatchJobSerializationFormat,
-)
-from launch.api_client.model.batch_job_status import BatchJobStatus
-from launch.api_client.model.clone_model_bundle_request import (
-    CloneModelBundleRequest,
-)
-from launch.api_client.model.create_async_task_response import (
-    CreateAsyncTaskResponse,
-)
-from launch.api_client.model.create_batch_job_request import (
-    CreateBatchJobRequest,
-)
-from launch.api_client.model.create_batch_job_resource_requests import (
-    CreateBatchJobResourceRequests,
-)
-from launch.api_client.model.create_batch_job_response import (
-    CreateBatchJobResponse,
-)
-from launch.api_client.model.create_model_bundle_request import (
-    CreateModelBundleRequest,
-)
-from launch.api_client.model.create_model_bundle_response import (
-    CreateModelBundleResponse,
-)
-from launch.api_client.model.create_model_endpoint_request import (
-    CreateModelEndpointRequest,
-)
-from launch.api_client.model.create_model_endpoint_response import (
-    CreateModelEndpointResponse,
-)
-from launch.api_client.model.delete_model_endpoint_response import (
-    DeleteModelEndpointResponse,
-)
-from launch.api_client.model.endpoint_predict_request import (
-    EndpointPredictRequest,
-)
-from launch.api_client.model.get_async_task_response import (
-    GetAsyncTaskResponse,
-)
-from launch.api_client.model.get_batch_job_response import GetBatchJobResponse
-from launch.api_client.model.get_model_endpoint_response import (
-    GetModelEndpointResponse,
-)
-from launch.api_client.model.gpu_type import GpuType
-from launch.api_client.model.http_validation_error import HTTPValidationError
-from launch.api_client.model.list_model_bundles_response import (
-    ListModelBundlesResponse,
-)
-from launch.api_client.model.list_model_endpoints_response import (
-    ListModelEndpointsResponse,
-)
-from launch.api_client.model.model_bundle_environment_params import (
-    ModelBundleEnvironmentParams,
-)
-from launch.api_client.model.model_bundle_framework import ModelBundleFramework
-from launch.api_client.model.model_bundle_order_by import ModelBundleOrderBy
-from launch.api_client.model.model_bundle_packaging_type import (
-    ModelBundlePackagingType,
-)
-from launch.api_client.model.model_bundle_response import ModelBundleResponse
-from launch.api_client.model.model_endpoint_deployment_state import (
-    ModelEndpointDeploymentState,
-)
-from launch.api_client.model.model_endpoint_order_by import (
-    ModelEndpointOrderBy,
-)
-from launch.api_client.model.model_endpoint_resource_state import (
-    ModelEndpointResourceState,
-)
-from launch.api_client.model.model_endpoint_status import ModelEndpointStatus
-from launch.api_client.model.model_endpoint_type import ModelEndpointType
-from launch.api_client.model.request_schema import RequestSchema
-from launch.api_client.model.response_schema import ResponseSchema
-from launch.api_client.model.sync_endpoint_predict_response import (
-    SyncEndpointPredictResponse,
-)
-from launch.api_client.model.task_status import TaskStatus
-from launch.api_client.model.update_batch_job_request import (
-    UpdateBatchJobRequest,
-)
-from launch.api_client.model.update_batch_job_response import (
-    UpdateBatchJobResponse,
-)
-from launch.api_client.model.update_model_endpoint_request import (
-    UpdateModelEndpointRequest,
-)
-from launch.api_client.model.update_model_endpoint_response import (
-    UpdateModelEndpointResponse,
-)
-from launch.api_client.model.validation_error import ValidationError
diff --git a/launch/client.py b/launch/client.py
index 252f302c..ddd64cd2 100644
--- a/launch/client.py
+++ b/launch/client.py
@@ -14,35 +14,6 @@
 from frozendict import frozendict
 from pydantic import BaseModel
 
-from launch.api_client import ApiClient, Configuration
-from launch.api_client.apis.tags.default_api import DefaultApi
-from launch.api_client.model.clone_model_bundle_request import (
-    CloneModelBundleRequest,
-)
-from launch.api_client.model.create_batch_job_request import (
-    CreateBatchJobRequest,
-)
-from launch.api_client.model.create_model_bundle_request import (
-    CreateModelBundleRequest,
-)
-from launch.api_client.model.create_model_endpoint_request import (
-    CreateModelEndpointRequest,
-)
-from launch.api_client.model.endpoint_predict_request import (
-    EndpointPredictRequest,
-)
-from launch.api_client.model.gpu_type import GpuType
-from launch.api_client.model.model_bundle_environment_params import (
-    ModelBundleEnvironmentParams,
-)
-from launch.api_client.model.model_bundle_framework import ModelBundleFramework
-from launch.api_client.model.model_bundle_packaging_type import (
-    ModelBundlePackagingType,
-)
-from launch.api_client.model.model_endpoint_type import ModelEndpointType
-from launch.api_client.model.update_model_endpoint_request import (
-    UpdateModelEndpointRequest,
-)
 from launch.connection import Connection
 from launch.constants import (
     BATCH_TASK_INPUT_SIGNED_URL_PATH,
@@ -64,6 +35,37 @@
     ModelEndpoint,
     SyncEndpoint,
 )
+from launch.openapi_client import ApiClient, Configuration
+from launch.openapi_client.apis.tags.default_api import DefaultApi
+from launch.openapi_client.model.clone_model_bundle_request import (
+    CloneModelBundleRequest,
+)
+from launch.openapi_client.model.create_batch_job_request import (
+    CreateBatchJobRequest,
+)
+from launch.openapi_client.model.create_model_bundle_request import (
+    CreateModelBundleRequest,
+)
+from launch.openapi_client.model.create_model_endpoint_request import (
+    CreateModelEndpointRequest,
+)
+from launch.openapi_client.model.endpoint_predict_request import (
+    EndpointPredictRequest,
+)
+from launch.openapi_client.model.gpu_type import GpuType
+from launch.openapi_client.model.model_bundle_environment_params import (
+    ModelBundleEnvironmentParams,
+)
+from launch.openapi_client.model.model_bundle_framework import (
+    ModelBundleFramework,
+)
+from launch.openapi_client.model.model_bundle_packaging_type import (
+    ModelBundlePackagingType,
+)
+from launch.openapi_client.model.model_endpoint_type import ModelEndpointType
+from launch.openapi_client.model.update_model_endpoint_request import (
+    UpdateModelEndpointRequest,
+)
 from launch.pydantic_schemas import get_model_definitions
 from launch.request_validation import validate_task_request
 
diff --git a/launch/model_endpoint.py b/launch/model_endpoint.py
index 8bf220e3..847ffa34 100644
--- a/launch/model_endpoint.py
+++ b/launch/model_endpoint.py
@@ -10,8 +10,8 @@
 from dataclasses_json import Undefined, dataclass_json
 from deprecation import deprecated
 
-from launch.api_client import ApiClient
-from launch.api_client.apis.tags.default_api import DefaultApi
+from launch.openapi_client import ApiClient
+from launch.openapi_client.apis.tags.default_api import DefaultApi
 from launch.request_validation import validate_task_request
 
 TASK_PENDING_STATE = "PENDING"
diff --git a/launch/openapi_client/__init__.py b/launch/openapi_client/__init__.py
new file mode 100644
index 00000000..be53ca86
--- /dev/null
+++ b/launch/openapi_client/__init__.py
@@ -0,0 +1,30 @@
+# coding: utf-8
+
+# flake8: noqa
+
+"""
+    launch
+
+    No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)  # noqa: E501
+
+    The version of the OpenAPI document: 1.0.0
+    Generated by: https://openapi-generator.tech
+"""
+
+__version__ = "1.0.5"
+
+# import ApiClient
+from launch.openapi_client.api_client import ApiClient
+
+# import Configuration
+from launch.openapi_client.configuration import Configuration
+
+# import exceptions
+from launch.openapi_client.exceptions import (
+    ApiAttributeError,
+    ApiException,
+    ApiKeyError,
+    ApiTypeError,
+    ApiValueError,
+    OpenApiException,
+)
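+
+# Illustrative imports (a sketch): downstream modules such as launch/client.py and
+# launch/model_endpoint.py consume this package via, e.g.
+#   from launch.openapi_client import ApiClient, Configuration
+#   from launch.openapi_client.apis.tags.default_api import DefaultApi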
diff --git a/launch/openapi_client/api_client.py b/launch/openapi_client/api_client.py
new file mode 100644
index 00000000..8ac49af2
--- /dev/null
+++ b/launch/openapi_client/api_client.py
@@ -0,0 +1,1543 @@
+# coding: utf-8
+"""
+    launch
+
+    No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)  # noqa: E501
+
+    The version of the OpenAPI document: 1.0.0
+    Generated by: https://openapi-generator.tech
+"""
+
+import atexit
+import email
+import enum
+import io
+import json
+import os
+import re
+import tempfile
+import typing
+from dataclasses import dataclass
+from decimal import Decimal
+from multiprocessing.pool import ThreadPool
+from urllib.parse import quote, urlparse
+
+import frozendict
+import typing_extensions
+import urllib3
+from urllib3._collections import HTTPHeaderDict
+from urllib3.fields import RequestField as RequestFieldBase
+
+from launch.openapi_client import rest
+from launch.openapi_client.configuration import Configuration
+from launch.openapi_client.exceptions import ApiTypeError, ApiValueError
+from launch.openapi_client.schemas import (
+    BinarySchema,
+    BoolClass,
+    FileIO,
+    NoneClass,
+    Schema,
+    Unset,
+    date,
+    datetime,
+    none_type,
+    unset,
+)
+
+
+class RequestField(RequestFieldBase):
+    def __eq__(self, other):
+        if not isinstance(other, RequestField):
+            return False
+        return self.__dict__ == other.__dict__
+
+
+class JSONEncoder(json.JSONEncoder):
+    compact_separators = (",", ":")
+
+    def default(self, obj):
+        if isinstance(obj, str):
+            return str(obj)
+        elif isinstance(obj, float):
+            return float(obj)
+        elif isinstance(obj, int):
+            return int(obj)
+        elif isinstance(obj, Decimal):
+            if obj.as_tuple().exponent >= 0:
+                return int(obj)
+            return float(obj)
+        elif isinstance(obj, NoneClass):
+            return None
+        elif isinstance(obj, BoolClass):
+            return bool(obj)
+        elif isinstance(obj, (dict, frozendict.frozendict)):
+            return {key: self.default(val) for key, val in obj.items()}
+        elif isinstance(obj, (list, tuple)):
+            return [self.default(item) for item in obj]
+        raise ApiValueError("Unable to prepare type {} for serialization".format(obj.__class__.__name__))
+
+
+class ParameterInType(enum.Enum):
+    QUERY = "query"
+    HEADER = "header"
+    PATH = "path"
+    COOKIE = "cookie"
+
+
+class ParameterStyle(enum.Enum):
+    MATRIX = "matrix"
+    LABEL = "label"
+    FORM = "form"
+    SIMPLE = "simple"
+    SPACE_DELIMITED = "spaceDelimited"
+    PIPE_DELIMITED = "pipeDelimited"
+    DEEP_OBJECT = "deepObject"
+
+
+class PrefixSeparatorIterator:
+    # A class to store prefixes and separators for rfc6570 expansions
+
+    def __init__(self, prefix: str, separator: str):
+        self.prefix = prefix
+        self.separator = separator
+        self.first = True
+        if separator in {".", "|", "%20"}:
+            item_separator = separator
+        else:
+            item_separator = ","
+        self.item_separator = item_separator
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        if self.first:
+            self.first = False
+            return self.prefix
+        return self.separator
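+
+    # Illustrative behaviour: the first next() call yields the prefix, every later
+    # call yields the separator, e.g.
+    #   it = PrefixSeparatorIterator("?", "&")
+    #   next(it)  # "?"
+    #   next(it)  # "&"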
+
+
+class ParameterSerializerBase:
+    @classmethod
+    def _get_default_explode(cls, style: ParameterStyle) -> bool:
+        return False
+
+    @staticmethod
+    def __ref6570_item_value(in_data: typing.Any, percent_encode: bool):
+        """
+        Get the representation of a str/float/int value (or of items in a list / values in a dict).
+        None is returned if an item is undefined; use cases are value =
+        - None
+        - []
+        - {}
+        - [None, None, None]
+        - {'a': None, 'b': None}
+        """
+        if type(in_data) in {str, float, int}:
+            if percent_encode:
+                return quote(str(in_data))
+            return str(in_data)
+        elif isinstance(in_data, none_type):
+            # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
+            return None
+        elif isinstance(in_data, list) and not in_data:
+            # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
+            return None
+        elif isinstance(in_data, dict) and not in_data:
+            # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
+            return None
+        raise ApiValueError("Unable to generate a ref6570 item representation of {}".format(in_data))
+
+    @staticmethod
+    def _to_dict(name: str, value: str):
+        return {name: value}
+
+    @classmethod
+    def __ref6570_str_float_int_expansion(
+        cls,
+        variable_name: str,
+        in_data: typing.Any,
+        explode: bool,
+        percent_encode: bool,
+        prefix_separator_iterator: PrefixSeparatorIterator,
+        var_name_piece: str,
+        named_parameter_expansion: bool,
+    ) -> str:
+        item_value = cls.__ref6570_item_value(in_data, percent_encode)
+        if item_value is None or (item_value == "" and prefix_separator_iterator.separator == ";"):
+            return next(prefix_separator_iterator) + var_name_piece
+        value_pair_equals = "=" if named_parameter_expansion else ""
+        return next(prefix_separator_iterator) + var_name_piece + value_pair_equals + item_value
+
+    @classmethod
+    def __ref6570_list_expansion(
+        cls,
+        variable_name: str,
+        in_data: typing.Any,
+        explode: bool,
+        percent_encode: bool,
+        prefix_separator_iterator: PrefixSeparatorIterator,
+        var_name_piece: str,
+        named_parameter_expansion: bool,
+    ) -> str:
+        item_values = [cls.__ref6570_item_value(v, percent_encode) for v in in_data]
+        item_values = [v for v in item_values if v is not None]
+        if not item_values:
+            # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
+            return ""
+        value_pair_equals = "=" if named_parameter_expansion else ""
+        if not explode:
+            return (
+                next(prefix_separator_iterator)
+                + var_name_piece
+                + value_pair_equals
+                + prefix_separator_iterator.item_separator.join(item_values)
+            )
+        # exploded
+        return next(prefix_separator_iterator) + next(prefix_separator_iterator).join(
+            [var_name_piece + value_pair_equals + val for val in item_values]
+        )
+
+    @classmethod
+    def __ref6570_dict_expansion(
+        cls,
+        variable_name: str,
+        in_data: typing.Any,
+        explode: bool,
+        percent_encode: bool,
+        prefix_separator_iterator: PrefixSeparatorIterator,
+        var_name_piece: str,
+        named_parameter_expansion: bool,
+    ) -> str:
+        in_data_transformed = {key: cls.__ref6570_item_value(val, percent_encode) for key, val in in_data.items()}
+        in_data_transformed = {key: val for key, val in in_data_transformed.items() if val is not None}
+        if not in_data_transformed:
+            # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
+            return ""
+        value_pair_equals = "=" if named_parameter_expansion else ""
+        if not explode:
+            return (
+                next(prefix_separator_iterator)
+                + var_name_piece
+                + value_pair_equals
+                + prefix_separator_iterator.item_separator.join(
+                    prefix_separator_iterator.item_separator.join(item_pair)
+                    for item_pair in in_data_transformed.items()
+                )
+            )
+        # exploded
+        return next(prefix_separator_iterator) + next(prefix_separator_iterator).join(
+            [key + "=" + val for key, val in in_data_transformed.items()]
+        )
+
+    @classmethod
+    def _ref6570_expansion(
+        cls,
+        variable_name: str,
+        in_data: typing.Any,
+        explode: bool,
+        percent_encode: bool,
+        prefix_separator_iterator: PrefixSeparatorIterator,
+    ) -> str:
+        """
+        The separator separates variables (e.g. dict entries with explode=true); it is not the array item separator.
+        """
+        named_parameter_expansion = prefix_separator_iterator.separator in {
+            "&",
+            ";",
+        }
+        var_name_piece = variable_name if named_parameter_expansion else ""
+        if type(in_data) in {str, float, int}:
+            return cls.__ref6570_str_float_int_expansion(
+                variable_name,
+                in_data,
+                explode,
+                percent_encode,
+                prefix_separator_iterator,
+                var_name_piece,
+                named_parameter_expansion,
+            )
+        elif isinstance(in_data, none_type):
+            # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
+            return ""
+        elif isinstance(in_data, list):
+            return cls.__ref6570_list_expansion(
+                variable_name,
+                in_data,
+                explode,
+                percent_encode,
+                prefix_separator_iterator,
+                var_name_piece,
+                named_parameter_expansion,
+            )
+        elif isinstance(in_data, dict):
+            return cls.__ref6570_dict_expansion(
+                variable_name,
+                in_data,
+                explode,
+                percent_encode,
+                prefix_separator_iterator,
+                var_name_piece,
+                named_parameter_expansion,
+            )
+        # bool, bytes, etc
+        raise ApiValueError("Unable to generate a ref6570 representation of {}".format(in_data))
+
+
+class StyleFormSerializer(ParameterSerializerBase):
+    @classmethod
+    def _get_default_explode(cls, style: ParameterStyle) -> bool:
+        if style is ParameterStyle.FORM:
+            return True
+        return super()._get_default_explode(style)
+
+    def _serialize_form(
+        self,
+        in_data: typing.Union[None, int, float, str, bool, dict, list],
+        name: str,
+        explode: bool,
+        percent_encode: bool,
+        prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator] = None,
+    ) -> str:
+        if prefix_separator_iterator is None:
+            prefix_separator_iterator = PrefixSeparatorIterator("", "&")
+        return self._ref6570_expansion(
+            variable_name=name,
+            in_data=in_data,
+            explode=explode,
+            percent_encode=percent_encode,
+            prefix_separator_iterator=prefix_separator_iterator,
+        )
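+
+    # Illustrative result: with the default PrefixSeparatorIterator("", "&"),
+    #   self._serialize_form(["a", "b"], name="ids", explode=True, percent_encode=False)
+    # returns "ids=a&ids=b", while explode=False returns "ids=a,b".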
+
+
+class StyleSimpleSerializer(ParameterSerializerBase):
+    def _serialize_simple(
+        self,
+        in_data: typing.Union[None, int, float, str, bool, dict, list],
+        name: str,
+        explode: bool,
+        percent_encode: bool,
+    ) -> str:
+        prefix_separator_iterator = PrefixSeparatorIterator("", ",")
+        return self._ref6570_expansion(
+            variable_name=name,
+            in_data=in_data,
+            explode=explode,
+            percent_encode=percent_encode,
+            prefix_separator_iterator=prefix_separator_iterator,
+        )
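+
+    # Illustrative result:
+    #   self._serialize_simple(["a", "b"], name="id", explode=False, percent_encode=False)
+    # returns "a,b" (simple style does not emit the parameter name).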
+
+
+class JSONDetector:
+    """
+    Works for:
+    application/json
+    application/json; charset=UTF-8
+    application/json-patch+json
+    application/geo+json
+    """
+
+    __json_content_type_pattern = re.compile("application/[^+]*[+]?(json);?.*")
+
+    @classmethod
+    def _content_type_is_json(cls, content_type: str) -> bool:
+        if cls.__json_content_type_pattern.match(content_type):
+            return True
+        return False
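+
+    # For example, _content_type_is_json("application/json; charset=UTF-8") and
+    # _content_type_is_json("application/geo+json") return True, while
+    # _content_type_is_json("text/plain") returns False.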
+
+
+@dataclass
+class ParameterBase(JSONDetector):
+    name: str
+    in_type: ParameterInType
+    required: bool
+    style: typing.Optional[ParameterStyle]
+    explode: typing.Optional[bool]
+    allow_reserved: typing.Optional[bool]
+    schema: typing.Optional[typing.Type[Schema]]
+    content: typing.Optional[typing.Dict[str, typing.Type[Schema]]]
+
+    __style_to_in_type = {
+        ParameterStyle.MATRIX: {ParameterInType.PATH},
+        ParameterStyle.LABEL: {ParameterInType.PATH},
+        ParameterStyle.FORM: {ParameterInType.QUERY, ParameterInType.COOKIE},
+        ParameterStyle.SIMPLE: {ParameterInType.PATH, ParameterInType.HEADER},
+        ParameterStyle.SPACE_DELIMITED: {ParameterInType.QUERY},
+        ParameterStyle.PIPE_DELIMITED: {ParameterInType.QUERY},
+        ParameterStyle.DEEP_OBJECT: {ParameterInType.QUERY},
+    }
+    __in_type_to_default_style = {
+        ParameterInType.QUERY: ParameterStyle.FORM,
+        ParameterInType.PATH: ParameterStyle.SIMPLE,
+        ParameterInType.HEADER: ParameterStyle.SIMPLE,
+        ParameterInType.COOKIE: ParameterStyle.FORM,
+    }
+    __disallowed_header_names = {"Accept", "Content-Type", "Authorization"}
+    _json_encoder = JSONEncoder()
+
+    @classmethod
+    def __verify_style_to_in_type(cls, style: typing.Optional[ParameterStyle], in_type: ParameterInType):
+        if style is None:
+            return
+        in_type_set = cls.__style_to_in_type[style]
+        if in_type not in in_type_set:
+            raise ValueError(
+                "Invalid style and in_type combination. For style={} only in_type={} are allowed".format(
+                    style, in_type_set
+                )
+            )
+
+    def __init__(
+        self,
+        name: str,
+        in_type: ParameterInType,
+        required: bool = False,
+        style: typing.Optional[ParameterStyle] = None,
+        explode: bool = False,
+        allow_reserved: typing.Optional[bool] = None,
+        schema: typing.Optional[typing.Type[Schema]] = None,
+        content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None,
+    ):
+        if schema is None and content is None:
+            raise ValueError("Value missing; Pass in either schema or content")
+        if schema and content:
+            raise ValueError("Too many values provided. Both schema and content were provided. Only one may be input")
+        if name in self.__disallowed_header_names and in_type is ParameterInType.HEADER:
+            raise ValueError("Invalid name, name may not be one of {}".format(self.__disallowed_header_names))
+        self.__verify_style_to_in_type(style, in_type)
+        if content is None and style is None:
+            style = self.__in_type_to_default_style[in_type]
+        if content is not None and in_type in self.__in_type_to_default_style and len(content) != 1:
+            raise ValueError("Invalid content length, content length must equal 1")
+        self.in_type = in_type
+        self.name = name
+        self.required = required
+        self.style = style
+        self.explode = explode
+        self.allow_reserved = allow_reserved
+        self.schema = schema
+        self.content = content
+
+    def _serialize_json(
+        self,
+        in_data: typing.Union[None, int, float, str, bool, dict, list],
+        eliminate_whitespace: bool = False,
+    ) -> str:
+        if eliminate_whitespace:
+            return json.dumps(in_data, separators=self._json_encoder.compact_separators)
+        return json.dumps(in_data)
+
+
+class PathParameter(ParameterBase, StyleSimpleSerializer):
+    def __init__(
+        self,
+        name: str,
+        required: bool = False,
+        style: typing.Optional[ParameterStyle] = None,
+        explode: bool = False,
+        allow_reserved: typing.Optional[bool] = None,
+        schema: typing.Optional[typing.Type[Schema]] = None,
+        content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None,
+    ):
+        super().__init__(
+            name,
+            in_type=ParameterInType.PATH,
+            required=required,
+            style=style,
+            explode=explode,
+            allow_reserved=allow_reserved,
+            schema=schema,
+            content=content,
+        )
+
+    def __serialize_label(
+        self, in_data: typing.Union[None, int, float, str, bool, dict, list]
+    ) -> typing.Dict[str, str]:
+        prefix_separator_iterator = PrefixSeparatorIterator(".", ".")
+        value = self._ref6570_expansion(
+            variable_name=self.name,
+            in_data=in_data,
+            explode=self.explode,
+            percent_encode=True,
+            prefix_separator_iterator=prefix_separator_iterator,
+        )
+        return self._to_dict(self.name, value)
+
+    def __serialize_matrix(
+        self, in_data: typing.Union[None, int, float, str, bool, dict, list]
+    ) -> typing.Dict[str, str]:
+        prefix_separator_iterator = PrefixSeparatorIterator(";", ";")
+        value = self._ref6570_expansion(
+            variable_name=self.name,
+            in_data=in_data,
+            explode=self.explode,
+            percent_encode=True,
+            prefix_separator_iterator=prefix_separator_iterator,
+        )
+        return self._to_dict(self.name, value)
+
+    def __serialize_simple(
+        self,
+        in_data: typing.Union[None, int, float, str, bool, dict, list],
+    ) -> typing.Dict[str, str]:
+        value = self._serialize_simple(
+            in_data=in_data,
+            name=self.name,
+            explode=self.explode,
+            percent_encode=True,
+        )
+        return self._to_dict(self.name, value)
+
+    def serialize(
+        self,
+        in_data: typing.Union[
+            Schema,
+            Decimal,
+            int,
+            float,
+            str,
+            date,
+            datetime,
+            None,
+            bool,
+            list,
+            tuple,
+            dict,
+            frozendict.frozendict,
+        ],
+    ) -> typing.Dict[str, str]:
+        if self.schema:
+            cast_in_data = self.schema(in_data)
+            cast_in_data = self._json_encoder.default(cast_in_data)
+            """
+            simple -> path
+                path:
+                    returns path_params: dict
+            label -> path
+                returns path_params
+            matrix -> path
+                returns path_params
+            """
+            if self.style:
+                if self.style is ParameterStyle.SIMPLE:
+                    return self.__serialize_simple(cast_in_data)
+                elif self.style is ParameterStyle.LABEL:
+                    return self.__serialize_label(cast_in_data)
+                elif self.style is ParameterStyle.MATRIX:
+                    return self.__serialize_matrix(cast_in_data)
+        # self.content will be length one
+        for content_type, schema in self.content.items():
+            cast_in_data = schema(in_data)
+            cast_in_data = self._json_encoder.default(cast_in_data)
+            if self._content_type_is_json(content_type):
+                value = self._serialize_json(cast_in_data)
+                return self._to_dict(self.name, value)
+            raise NotImplementedError("Serialization of {} has not yet been implemented".format(content_type))
+
+
+class QueryParameter(ParameterBase, StyleFormSerializer):
+    def __init__(
+        self,
+        name: str,
+        required: bool = False,
+        style: typing.Optional[ParameterStyle] = None,
+        explode: typing.Optional[bool] = None,
+        allow_reserved: typing.Optional[bool] = None,
+        schema: typing.Optional[typing.Type[Schema]] = None,
+        content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None,
+    ):
+        used_style = ParameterStyle.FORM if style is None else style
+        used_explode = self._get_default_explode(used_style) if explode is None else explode
+
+        super().__init__(
+            name,
+            in_type=ParameterInType.QUERY,
+            required=required,
+            style=used_style,
+            explode=used_explode,
+            allow_reserved=allow_reserved,
+            schema=schema,
+            content=content,
+        )
+
+    def __serialize_space_delimited(
+        self,
+        in_data: typing.Union[None, int, float, str, bool, dict, list],
+        prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator],
+    ) -> typing.Dict[str, str]:
+        if prefix_separator_iterator is None:
+            prefix_separator_iterator = self.get_prefix_separator_iterator()
+        value = self._ref6570_expansion(
+            variable_name=self.name,
+            in_data=in_data,
+            explode=self.explode,
+            percent_encode=True,
+            prefix_separator_iterator=prefix_separator_iterator,
+        )
+        return self._to_dict(self.name, value)
+
+    def __serialize_pipe_delimited(
+        self,
+        in_data: typing.Union[None, int, float, str, bool, dict, list],
+        prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator],
+    ) -> typing.Dict[str, str]:
+        if prefix_separator_iterator is None:
+            prefix_separator_iterator = self.get_prefix_separator_iterator()
+        value = self._ref6570_expansion(
+            variable_name=self.name,
+            in_data=in_data,
+            explode=self.explode,
+            percent_encode=True,
+            prefix_separator_iterator=prefix_separator_iterator,
+        )
+        return self._to_dict(self.name, value)
+
+    def __serialize_form(
+        self,
+        in_data: typing.Union[None, int, float, str, bool, dict, list],
+        prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator],
+    ) -> typing.Dict[str, str]:
+        if prefix_separator_iterator is None:
+            prefix_separator_iterator = self.get_prefix_separator_iterator()
+        value = self._serialize_form(
+            in_data,
+            name=self.name,
+            explode=self.explode,
+            percent_encode=True,
+            prefix_separator_iterator=prefix_separator_iterator,
+        )
+        return self._to_dict(self.name, value)
+
+    def get_prefix_separator_iterator(
+        self,
+    ) -> typing.Optional[PrefixSeparatorIterator]:
+        if self.style is ParameterStyle.FORM:
+            return PrefixSeparatorIterator("?", "&")
+        elif self.style is ParameterStyle.SPACE_DELIMITED:
+            return PrefixSeparatorIterator("", "%20")
+        elif self.style is ParameterStyle.PIPE_DELIMITED:
+            return PrefixSeparatorIterator("", "|")
+
+    def serialize(
+        self,
+        in_data: typing.Union[
+            Schema,
+            Decimal,
+            int,
+            float,
+            str,
+            date,
+            datetime,
+            None,
+            bool,
+            list,
+            tuple,
+            dict,
+            frozendict.frozendict,
+        ],
+        prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator] = None,
+    ) -> typing.Dict[str, str]:
+        if self.schema:
+            cast_in_data = self.schema(in_data)
+            cast_in_data = self._json_encoder.default(cast_in_data)
+            """
+            form -> query
+                query:
+                    - GET/HEAD/DELETE: could use fields
+                    - PUT/POST: must use urlencode to send parameters
+                    returns fields: tuple
+            spaceDelimited -> query
+                returns fields
+            pipeDelimited -> query
+                returns fields
+            deepObject -> query, https://github.com/OAI/OpenAPI-Specification/issues/1706
+                returns fields
+            """
+            if self.style:
+                # TODO update query ones to omit setting values when [] {} or None is input
+                if self.style is ParameterStyle.FORM:
+                    return self.__serialize_form(cast_in_data, prefix_separator_iterator)
+                elif self.style is ParameterStyle.SPACE_DELIMITED:
+                    return self.__serialize_space_delimited(cast_in_data, prefix_separator_iterator)
+                elif self.style is ParameterStyle.PIPE_DELIMITED:
+                    return self.__serialize_pipe_delimited(cast_in_data, prefix_separator_iterator)
+        # self.content will be length one
+        if prefix_separator_iterator is None:
+            prefix_separator_iterator = self.get_prefix_separator_iterator()
+        for content_type, schema in self.content.items():
+            cast_in_data = schema(in_data)
+            cast_in_data = self._json_encoder.default(cast_in_data)
+            if self._content_type_is_json(content_type):
+                value = self._serialize_json(cast_in_data, eliminate_whitespace=True)
+                return self._to_dict(
+                    self.name,
+                    next(prefix_separator_iterator) + self.name + "=" + quote(value),
+                )
+            raise NotImplementedError("Serialization of {} has not yet been implemented".format(content_type))
+
+
+class CookieParameter(ParameterBase, StyleFormSerializer):
+    def __init__(
+        self,
+        name: str,
+        required: bool = False,
+        style: typing.Optional[ParameterStyle] = None,
+        explode: typing.Optional[bool] = None,
+        allow_reserved: typing.Optional[bool] = None,
+        schema: typing.Optional[typing.Type[Schema]] = None,
+        content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None,
+    ):
+        used_style = ParameterStyle.FORM if style is None and content is None and schema else style
+        used_explode = self._get_default_explode(used_style) if explode is None else explode
+
+        super().__init__(
+            name,
+            in_type=ParameterInType.COOKIE,
+            required=required,
+            style=used_style,
+            explode=used_explode,
+            allow_reserved=allow_reserved,
+            schema=schema,
+            content=content,
+        )
+
+    def serialize(
+        self,
+        in_data: typing.Union[
+            Schema,
+            Decimal,
+            int,
+            float,
+            str,
+            date,
+            datetime,
+            None,
+            bool,
+            list,
+            tuple,
+            dict,
+            frozendict.frozendict,
+        ],
+    ) -> typing.Dict[str, str]:
+        if self.schema:
+            cast_in_data = self.schema(in_data)
+            cast_in_data = self._json_encoder.default(cast_in_data)
+            """
+            form -> cookie
+                returns fields: tuple
+            """
+            if self.style:
+                """
+                TODO add escaping of comma, space, equals
+                or turn encoding on
+                """
+                value = self._serialize_form(
+                    cast_in_data,
+                    explode=self.explode,
+                    name=self.name,
+                    percent_encode=False,
+                    prefix_separator_iterator=PrefixSeparatorIterator("", "&"),
+                )
+                return self._to_dict(self.name, value)
+        # self.content will be length one
+        for content_type, schema in self.content.items():
+            cast_in_data = schema(in_data)
+            cast_in_data = self._json_encoder.default(cast_in_data)
+            if self._content_type_is_json(content_type):
+                value = self._serialize_json(cast_in_data)
+                return self._to_dict(self.name, value)
+            raise NotImplementedError("Serialization of {} has not yet been implemented".format(content_type))
+
+
+class HeaderParameter(ParameterBase, StyleSimpleSerializer):
+    def __init__(
+        self,
+        name: str,
+        required: bool = False,
+        style: typing.Optional[ParameterStyle] = None,
+        explode: bool = False,
+        allow_reserved: typing.Optional[bool] = None,
+        schema: typing.Optional[typing.Type[Schema]] = None,
+        content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None,
+    ):
+        super().__init__(
+            name,
+            in_type=ParameterInType.HEADER,
+            required=required,
+            style=style,
+            explode=explode,
+            allow_reserved=allow_reserved,
+            schema=schema,
+            content=content,
+        )
+
+    @staticmethod
+    def __to_headers(in_data: typing.Tuple[typing.Tuple[str, str], ...]) -> HTTPHeaderDict:
+        data = tuple(t for t in in_data if t)
+        headers = HTTPHeaderDict()
+        if not data:
+            return headers
+        headers.extend(data)
+        return headers
+
+    def serialize(
+        self,
+        in_data: typing.Union[
+            Schema,
+            Decimal,
+            int,
+            float,
+            str,
+            date,
+            datetime,
+            None,
+            bool,
+            list,
+            tuple,
+            dict,
+            frozendict.frozendict,
+        ],
+    ) -> HTTPHeaderDict:
+        if self.schema:
+            cast_in_data = self.schema(in_data)
+            cast_in_data = self._json_encoder.default(cast_in_data)
+            """
+            simple -> header
+                headers: PoolManager needs a mapping, tuple is close
+                    returns headers: dict
+            """
+            if self.style:
+                value = self._serialize_simple(cast_in_data, self.name, self.explode, False)
+                return self.__to_headers(((self.name, value),))
+        # self.content will be length one
+        for content_type, schema in self.content.items():
+            cast_in_data = schema(in_data)
+            cast_in_data = self._json_encoder.default(cast_in_data)
+            if self._content_type_is_json(content_type):
+                value = self._serialize_json(cast_in_data)
+                return self.__to_headers(((self.name, value),))
+            raise NotImplementedError("Serialization of {} has not yet been implemented".format(content_type))
+
+
+class Encoding:
+    def __init__(
+        self,
+        content_type: str,
+        headers: typing.Optional[typing.Dict[str, HeaderParameter]] = None,
+        style: typing.Optional[ParameterStyle] = None,
+        explode: bool = False,
+        allow_reserved: bool = False,
+    ):
+        self.content_type = content_type
+        self.headers = headers
+        self.style = style
+        self.explode = explode
+        self.allow_reserved = allow_reserved
+
+
+@dataclass
+class MediaType:
+    """
+    Used to store request and response body schema information
+    encoding:
+        A map between a property name and its encoding information.
+        The key, being the property name, MUST exist in the schema as a property.
+        The encoding object SHALL only apply to requestBody objects when the media type is
+        multipart or application/x-www-form-urlencoded.
+    """
+
+    schema: typing.Optional[typing.Type[Schema]] = None
+    encoding: typing.Optional[typing.Dict[str, Encoding]] = None
+
+
+@dataclass
+class ApiResponse:
+    response: urllib3.HTTPResponse
+    body: typing.Union[Unset, Schema]
+    headers: typing.Union[Unset, typing.List[HeaderParameter]]
+
+    def __init__(
+        self,
+        response: urllib3.HTTPResponse,
+        body: typing.Union[Unset, typing.Type[Schema]],
+        headers: typing.Union[Unset, typing.List[HeaderParameter]],
+    ):
+        """
+        pycharm needs this to prevent 'Unexpected argument' warnings
+        """
+        self.response = response
+        self.body = body
+        self.headers = headers
+
+
+@dataclass
+class ApiResponseWithoutDeserialization(ApiResponse):
+    response: urllib3.HTTPResponse
+    body: typing.Union[Unset, typing.Type[Schema]] = unset
+    headers: typing.Union[Unset, typing.List[HeaderParameter]] = unset
+
+
+class OpenApiResponse(JSONDetector):
+    __filename_content_disposition_pattern = re.compile('filename="(.+?)"')
+
+    def __init__(
+        self,
+        response_cls: typing.Type[ApiResponse] = ApiResponse,
+        content: typing.Optional[typing.Dict[str, MediaType]] = None,
+        headers: typing.Optional[typing.List[HeaderParameter]] = None,
+    ):
+        self.headers = headers
+        if content is not None and len(content) == 0:
+            raise ValueError("Invalid value for content, the content dict must have >= 1 entry")
+        self.content = content
+        self.response_cls = response_cls
+
+    @staticmethod
+    def __deserialize_json(response: urllib3.HTTPResponse) -> typing.Any:
+        # json.loads accepts bytes directly (Python 3.6+)
+        return json.loads(response.data)
+
+    @staticmethod
+    def __file_name_from_response_url(
+        response_url: typing.Optional[str],
+    ) -> typing.Optional[str]:
+        if response_url is None:
+            return None
+        url_path = urlparse(response_url).path
+        if url_path:
+            path_basename = os.path.basename(url_path)
+            if path_basename:
+                _filename, ext = os.path.splitext(path_basename)
+                if ext:
+                    return path_basename
+        return None
+
+    @classmethod
+    def __file_name_from_content_disposition(cls, content_disposition: typing.Optional[str]) -> typing.Optional[str]:
+        if content_disposition is None:
+            return None
+        match = cls.__filename_content_disposition_pattern.search(content_disposition)
+        if not match:
+            return None
+        return match.group(1)
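+
+    # For example, a Content-Disposition header of 'attachment; filename="results.json"'
+    # yields "results.json"; a header without a filename="..." component yields None.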
+
+    def __deserialize_application_octet_stream(
+        self, response: urllib3.HTTPResponse
+    ) -> typing.Union[bytes, io.BufferedReader]:
+        """
+        urllib3 use cases:
+        1. when preload_content=True (stream=False) then supports_chunked_reads is False and bytes are returned
+        2. when preload_content=False (stream=True) then supports_chunked_reads is True and
+            a file will be written and returned
+        """
+        if response.supports_chunked_reads():
+            file_name = self.__file_name_from_content_disposition(
+                response.headers.get("content-disposition")
+            ) or self.__file_name_from_response_url(response.geturl())
+
+            if file_name is None:
+                _fd, path = tempfile.mkstemp()
+            else:
+                path = os.path.join(tempfile.gettempdir(), file_name)
+
+            with open(path, "wb") as new_file:
+                chunk_size = 1024
+                while True:
+                    data = response.read(chunk_size)
+                    if not data:
+                        break
+                    new_file.write(data)
+            # release_conn is needed for streaming connections only
+            response.release_conn()
+            new_file = open(path, "rb")
+            return new_file
+        else:
+            return response.data
+
+    @staticmethod
+    def __deserialize_multipart_form_data(
+        response: urllib3.HTTPResponse,
+    ) -> typing.Dict[str, typing.Any]:
+        msg = email.message_from_bytes(response.data)
+        return {
+            part.get_param("name", header="Content-Disposition"): part.get_payload(decode=True).decode(
+                part.get_content_charset()
+            )
+            if part.get_content_charset()
+            else part.get_payload()
+            for part in msg.get_payload()
+        }
+
+    def deserialize(self, response: urllib3.HTTPResponse, configuration: Configuration) -> ApiResponse:
+        content_type = response.getheader("content-type")
+        deserialized_body = unset
+        streamed = response.supports_chunked_reads()
+
+        deserialized_headers = unset
+        if self.headers is not None:
+            # TODO add header deserialization here
+            pass
+
+        if self.content is not None:
+            if content_type not in self.content:
+                raise ApiValueError(
+                    f"Invalid content_type returned. Content_type='{content_type}' was returned "
+                    f"when only {str(set(self.content))} are defined for status_code={str(response.status)}"
+                )
+            body_schema = self.content[content_type].schema
+            if body_schema is None:
+                # some specs do not define response content media type schemas
+                return self.response_cls(response=response, headers=deserialized_headers, body=unset)
+
+            if self._content_type_is_json(content_type):
+                body_data = self.__deserialize_json(response)
+            elif content_type == "application/octet-stream":
+                body_data = self.__deserialize_application_octet_stream(response)
+            elif content_type.startswith("multipart/form-data"):
+                body_data = self.__deserialize_multipart_form_data(response)
+                content_type = "multipart/form-data"
+            else:
+                raise NotImplementedError("Deserialization of {} has not yet been implemented".format(content_type))
+            deserialized_body = body_schema.from_openapi_data_oapg(body_data, _configuration=configuration)
+        elif streamed:
+            response.release_conn()
+
+        return self.response_cls(
+            response=response,
+            headers=deserialized_headers,
+            body=deserialized_body,
+        )
+
+
+class ApiClient:
+    """Generic API client for OpenAPI client library builds.
+
+    OpenAPI generic API client. This client handles the client-
+    server communication, and is invariant across implementations. Specifics of
+    the methods and models for each application are generated from the OpenAPI
+    templates.
+
+    NOTE: This class is auto generated by OpenAPI Generator.
+    Ref: https://openapi-generator.tech
+    Do not edit the class manually.
+
+    :param configuration: Configuration object for this client
+    :param header_name: a header to pass when making calls to the API.
+    :param header_value: a header value to pass when making calls to
+        the API.
+    :param cookie: a cookie to include in the header when making calls
+        to the API
+    :param pool_threads: The number of threads to use for async requests
+        to the API. More threads means more concurrent API requests.
+    """
+
+    _pool = None
+
+    def __init__(
+        self,
+        configuration: typing.Optional[Configuration] = None,
+        header_name: typing.Optional[str] = None,
+        header_value: typing.Optional[str] = None,
+        cookie: typing.Optional[str] = None,
+        pool_threads: int = 1,
+    ):
+        if configuration is None:
+            configuration = Configuration()
+        self.configuration = configuration
+        self.pool_threads = pool_threads
+
+        self.rest_client = rest.RESTClientObject(configuration)
+        self.default_headers = HTTPHeaderDict()
+        if header_name is not None:
+            self.default_headers[header_name] = header_value
+        self.cookie = cookie
+        # Set default User-Agent.
+        self.user_agent = "OpenAPI-Generator/1.0.5/python"
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self.close()
+
+    def close(self):
+        if self._pool:
+            self._pool.close()
+            self._pool.join()
+            self._pool = None
+            if hasattr(atexit, "unregister"):
+                atexit.unregister(self.close)
+
+    @property
+    def pool(self):
+        """Create thread pool on first request
+        avoids instantiating unused threadpool for blocking clients.
+        """
+        if self._pool is None:
+            atexit.register(self.close)
+            self._pool = ThreadPool(self.pool_threads)
+        return self._pool
+
+    @property
+    def user_agent(self):
+        """User agent for this API client"""
+        return self.default_headers["User-Agent"]
+
+    @user_agent.setter
+    def user_agent(self, value):
+        self.default_headers["User-Agent"] = value
+
+    def set_default_header(self, header_name, header_value):
+        self.default_headers[header_name] = header_value
+
+    def __call_api(
+        self,
+        resource_path: str,
+        method: str,
+        headers: typing.Optional[HTTPHeaderDict] = None,
+        body: typing.Optional[typing.Union[str, bytes]] = None,
+        fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
+        auth_settings: typing.Optional[typing.List[str]] = None,
+        stream: bool = False,
+        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
+        host: typing.Optional[str] = None,
+    ) -> urllib3.HTTPResponse:
+        # header parameters
+        used_headers = HTTPHeaderDict(self.default_headers)
+        if self.cookie:
+            headers["Cookie"] = self.cookie
+
+        # auth setting
+        self.update_params_for_auth(used_headers, auth_settings, resource_path, method, body)
+
+        # must happen after cookie setting and auth setting in case user is overriding those
+        if headers:
+            used_headers.update(headers)
+
+        # request url
+        if host is None:
+            url = self.configuration.host + resource_path
+        else:
+            # use server/host defined in path or operation instead
+            url = host + resource_path
+
+        # perform request and return response
+        response = self.request(
+            method,
+            url,
+            headers=used_headers,
+            fields=fields,
+            body=body,
+            stream=stream,
+            timeout=timeout,
+        )
+        return response
+
+    def call_api(
+        self,
+        resource_path: str,
+        method: str,
+        headers: typing.Optional[HTTPHeaderDict] = None,
+        body: typing.Optional[typing.Union[str, bytes]] = None,
+        fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
+        auth_settings: typing.Optional[typing.List[str]] = None,
+        async_req: typing.Optional[bool] = None,
+        stream: bool = False,
+        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
+        host: typing.Optional[str] = None,
+    ) -> urllib3.HTTPResponse:
+        """Makes the HTTP request (synchronous) and returns deserialized data.
+
+        To make an async_req request, set the async_req parameter.
+
+        :param resource_path: Path to method endpoint.
+        :param method: Method to call.
+        :param headers: Header parameters to be
+            placed in the request header.
+        :param body: Request body.
+        :param fields: Request post form parameters,
+            for `application/x-www-form-urlencoded`, `multipart/form-data`.
+        :param auth_settings: Auth Settings names for the request.
+        :param async_req: execute request asynchronously
+        :type async_req: bool, optional TODO remove, unused
+        :param stream: if True, the urllib3.HTTPResponse object will
+                                 be returned without reading/decoding response
+                                 data. Also when True, if the openapi spec describes a file download,
+                                 the data will be written to a local filesystem file and the BinarySchema
+                                 instance will also inherit from FileSchema and FileIO.
+                                 Default is False.
+        :type stream: bool, optional
+        :param timeout: timeout setting for this request. If one
+                                 number provided, it will be total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :param host: api endpoint host
+        :return:
+            If async_req parameter is True,
+            the request will be called asynchronously.
+            The method will return the request thread.
+            If parameter async_req is False or missing,
+            then the method will return the response directly.
+        """
+
+        if not async_req:
+            return self.__call_api(
+                resource_path,
+                method,
+                headers,
+                body,
+                fields,
+                auth_settings,
+                stream,
+                timeout,
+                host,
+            )
+
+        return self.pool.apply_async(
+            self.__call_api,
+            (
+                resource_path,
+                method,
+                headers,
+                body,
+                fields,
+                auth_settings,
+                stream,
+                timeout,
+                host,
+            ),
+        )
+
+    def request(
+        self,
+        method: str,
+        url: str,
+        headers: typing.Optional[HTTPHeaderDict] = None,
+        fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
+        body: typing.Optional[typing.Union[str, bytes]] = None,
+        stream: bool = False,
+        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
+    ) -> urllib3.HTTPResponse:
+        """Makes the HTTP request using RESTClient."""
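+        # Minimal usage sketch (illustrative only; the /healthcheck path and a reachable
+        # configured host are assumptions, not part of this module):
+        #   client = ApiClient()
+        #   response = client.request("GET", client.configuration.host + "/healthcheck")
+        #   print(response.status)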
+        if method == "GET":
+            return self.rest_client.GET(url, stream=stream, timeout=timeout, headers=headers)
+        elif method == "HEAD":
+            return self.rest_client.HEAD(url, stream=stream, timeout=timeout, headers=headers)
+        elif method == "OPTIONS":
+            return self.rest_client.OPTIONS(
+                url,
+                headers=headers,
+                fields=fields,
+                stream=stream,
+                timeout=timeout,
+                body=body,
+            )
+        elif method == "POST":
+            return self.rest_client.POST(
+                url,
+                headers=headers,
+                fields=fields,
+                stream=stream,
+                timeout=timeout,
+                body=body,
+            )
+        elif method == "PUT":
+            return self.rest_client.PUT(
+                url,
+                headers=headers,
+                fields=fields,
+                stream=stream,
+                timeout=timeout,
+                body=body,
+            )
+        elif method == "PATCH":
+            return self.rest_client.PATCH(
+                url,
+                headers=headers,
+                fields=fields,
+                stream=stream,
+                timeout=timeout,
+                body=body,
+            )
+        elif method == "DELETE":
+            return self.rest_client.DELETE(url, headers=headers, stream=stream, timeout=timeout, body=body)
+        else:
+            raise ApiValueError("http method must be `GET`, `HEAD`, `OPTIONS`, `POST`, `PATCH`, `PUT` or `DELETE`.")
+
+    def update_params_for_auth(self, headers, auth_settings, resource_path, method, body):
+        """Updates header and query params based on authentication setting.
+
+        :param headers: Header parameters dict to be updated.
+        :param auth_settings: Authentication setting identifiers list.
+        :param resource_path: A string representation of the HTTP request resource path.
+        :param method: A string representation of the HTTP request method.
+        :param body: An object representing the body of the HTTP request.
+            The object type is the return value of _encoder.default().
+        """
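+        # Hypothetical shape of one auth setting consumed by the branches below
+        # (the keys mirror the lookups in this method; the concrete type and token are assumptions):
+        #   {"in": "header", "type": "bearer", "key": "Authorization", "value": "Bearer <token>"}
+        # which results in headers.add("Authorization", "Bearer <token>").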
+        if not auth_settings:
+            return
+
+        for auth in auth_settings:
+            auth_setting = self.configuration.auth_settings().get(auth)
+            if not auth_setting:
+                continue
+            if auth_setting["in"] == "cookie":
+                headers.add("Cookie", auth_setting["value"])
+            elif auth_setting["in"] == "header":
+                if auth_setting["type"] != "http-signature":
+                    headers.add(auth_setting["key"], auth_setting["value"])
+            elif auth_setting["in"] == "query":
+                """TODO implement auth in query
+                need to pass in prefix_separator_iterator
+                and need to output resource_path with query params added
+                """
+                raise ApiValueError("Auth in query not yet implemented")
+            else:
+                raise ApiValueError("Authentication token must be in `query` or `header`")
+
+
+class Api:
+    """NOTE: This class is auto generated by OpenAPI Generator
+    Ref: https://openapi-generator.tech
+
+    Do not edit the class manually.
+    """
+
+    def __init__(self, api_client: typing.Optional[ApiClient] = None):
+        if api_client is None:
+            api_client = ApiClient()
+        self.api_client = api_client
+
+    @staticmethod
+    def _verify_typed_dict_inputs_oapg(
+        cls: typing.Type[typing_extensions.TypedDict],
+        data: typing.Dict[str, typing.Any],
+    ):
+        """
+        Ensures that:
+        - required keys are present
+        - no additional (undeclared) properties are passed in
+        - values stored under required keys are not unset
+        Note: detailed value checking is done in schema classes
+        """
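+        # Sketch of the checks below against a hypothetical TypedDict (names are illustrative):
+        #   class TaskKwargs(typing_extensions.TypedDict):
+        #       model_endpoint_id: str
+        #   _verify_typed_dict_inputs_oapg(TaskKwargs, {})
+        #       -> ApiTypeError (missing required key)
+        #   _verify_typed_dict_inputs_oapg(TaskKwargs, {"model_endpoint_id": "x", "extra": 1})
+        #       -> ApiTypeError (unexpected keyword argument)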
+        missing_required_keys = []
+        required_keys_with_unset_values = []
+        for required_key in cls.__required_keys__:
+            if required_key not in data:
+                missing_required_keys.append(required_key)
+                continue
+            value = data[required_key]
+            if value is unset:
+                required_keys_with_unset_values.append(required_key)
+        if missing_required_keys:
+            raise ApiTypeError(
+                "{} missing {} required arguments: {}".format(
+                    cls.__name__,
+                    len(missing_required_keys),
+                    missing_required_keys,
+                )
+            )
+        if required_keys_with_unset_values:
+            raise ApiValueError(
+                "{} contains invalid unset values for {} required keys: {}".format(
+                    cls.__name__,
+                    len(required_keys_with_unset_values),
+                    required_keys_with_unset_values,
+                )
+            )
+
+        disallowed_additional_keys = []
+        for key in data:
+            if key in cls.__required_keys__ or key in cls.__optional_keys__:
+                continue
+            disallowed_additional_keys.append(key)
+        if disallowed_additional_keys:
+            raise ApiTypeError(
+                "{} got {} unexpected keyword arguments: {}".format(
+                    cls.__name__,
+                    len(disallowed_additional_keys),
+                    disallowed_additional_keys,
+                )
+            )
+
+    def _get_host_oapg(
+        self,
+        operation_id: str,
+        servers: typing.Tuple[typing.Dict[str, str], ...] = tuple(),
+        host_index: typing.Optional[int] = None,
+    ) -> typing.Optional[str]:
+        configuration = self.api_client.configuration
+        try:
+            if host_index is None:
+                index = configuration.server_operation_index.get(operation_id, configuration.server_index)
+            else:
+                index = host_index
+            server_variables = configuration.server_operation_variables.get(
+                operation_id, configuration.server_variables
+            )
+            host = configuration.get_host_from_settings(index, variables=server_variables, servers=servers)
+        except IndexError:
+            if servers:
+                raise ApiValueError("Invalid host index. Must be 0 <= index < %s" % len(servers))
+            host = None
+        return host
+
+
+class SerializedRequestBody(typing_extensions.TypedDict, total=False):
+    body: typing.Union[str, bytes]
+    fields: typing.Tuple[typing.Union[RequestField, typing.Tuple[str, str]], ...]
+
+
+class RequestBody(StyleFormSerializer, JSONDetector):
+    """
+    A request body parameter
+    content: content_type to MediaType Schema info
+    """
+
+    __json_encoder = JSONEncoder()
+
+    def __init__(
+        self,
+        content: typing.Dict[str, MediaType],
+        required: bool = False,
+    ):
+        self.required = required
+        if len(content) == 0:
+            raise ValueError("Invalid value for content, the content dict must have >= 1 entry")
+        self.content = content
+
+    def __serialize_json(self, in_data: typing.Any) -> typing.Dict[str, bytes]:
+        in_data = self.__json_encoder.default(in_data)
+        json_str = json.dumps(in_data, separators=(",", ":"), ensure_ascii=False).encode("utf-8")
+        return dict(body=json_str)
+
+    @staticmethod
+    def __serialize_text_plain(in_data: typing.Any) -> typing.Dict[str, str]:
+        if isinstance(in_data, frozendict.frozendict):
+            raise ValueError("Unable to serialize type frozendict.frozendict to text/plain")
+        elif isinstance(in_data, tuple):
+            raise ValueError("Unable to serialize type tuple to text/plain")
+        elif isinstance(in_data, NoneClass):
+            raise ValueError("Unable to serialize type NoneClass to text/plain")
+        elif isinstance(in_data, BoolClass):
+            raise ValueError("Unable to serialize type BoolClass to text/plain")
+        return dict(body=str(in_data))
+
+    def __multipart_json_item(self, key: str, value: Schema) -> RequestField:
+        json_value = self.__json_encoder.default(value)
+        return RequestField(
+            name=key,
+            data=json.dumps(json_value),
+            headers={"Content-Type": "application/json"},
+        )
+
+    def __multipart_form_item(self, key: str, value: Schema) -> RequestField:
+        if isinstance(value, str):
+            return RequestField(
+                name=key,
+                data=str(value),
+                headers={"Content-Type": "text/plain"},
+            )
+        elif isinstance(value, bytes):
+            return RequestField(
+                name=key,
+                data=value,
+                headers={"Content-Type": "application/octet-stream"},
+            )
+        elif isinstance(value, FileIO):
+            request_field = RequestField(
+                name=key,
+                data=value.read(),
+                filename=os.path.basename(value.name),
+                headers={"Content-Type": "application/octet-stream"},
+            )
+            value.close()
+            return request_field
+        else:
+            return self.__multipart_json_item(key=key, value=value)
+
+    def __serialize_multipart_form_data(self, in_data: Schema) -> typing.Dict[str, typing.Tuple[RequestField, ...]]:
+        if not isinstance(in_data, frozendict.frozendict):
+            raise ValueError(f"Unable to serialize {in_data} to multipart/form-data because it is not a dict of data")
+        """
+        In a multipart/form-data request body, each schema property, or each element of a schema array property,
+        takes a section in the payload with an internal header as defined by RFC7578. The serialization strategy
+        for each property of a multipart/form-data request body can be specified in an associated Encoding Object.
+
+        When passing in multipart types, boundaries MAY be used to separate sections of the content being
+        transferred – thus, the following default Content-Types are defined for multipart:
+
+        If the (object) property is a primitive, or an array of primitive values, the default Content-Type is text/plain
+        If the property is complex, or an array of complex values, the default Content-Type is application/json
+            Question: how is the array of primitives encoded?
+        If the property is a type: string with a contentEncoding, the default Content-Type is application/octet-stream
+        """
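+        # Sketch of the explode behaviour implemented below (values are hypothetical):
+        #   in_data = frozendict.frozendict({"tags": ("a", "b"), "config": frozendict.frozendict({})})
+        #   -> "tags" yields one text/plain RequestField per item ("a", then "b"),
+        #      while "config" falls through to a single application/json RequestField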
+        fields = []
+        for key, value in in_data.items():
+            if isinstance(value, tuple):
+                if value:
+                    # values use explode = True, so the code makes a RequestField for each item with name=key
+                    for item in value:
+                        request_field = self.__multipart_form_item(key=key, value=item)
+                        fields.append(request_field)
+                else:
+                    # send an empty array as json because exploding will not send it
+                    request_field = self.__multipart_json_item(key=key, value=value)
+                    fields.append(request_field)
+            else:
+                request_field = self.__multipart_form_item(key=key, value=value)
+                fields.append(request_field)
+
+        return dict(fields=tuple(fields))
+
+    def __serialize_application_octet_stream(self, in_data: BinarySchema) -> typing.Dict[str, bytes]:
+        if isinstance(in_data, bytes):
+            return dict(body=in_data)
+        # FileIO type
+        result = dict(body=in_data.read())
+        in_data.close()
+        return result
+
+    def __serialize_application_x_www_form_data(self, in_data: typing.Any) -> SerializedRequestBody:
+        """
+        POST submission of form data in body
+        """
+        if not isinstance(in_data, frozendict.frozendict):
+            raise ValueError(
+                f"Unable to serialize {in_data} to application/x-www-form-urlencoded because it is not a dict of data"
+            )
+        cast_in_data = self.__json_encoder.default(in_data)
+        value = self._serialize_form(cast_in_data, name="", explode=True, percent_encode=True)
+        return dict(body=value)
+
+    def serialize(self, in_data: typing.Any, content_type: str) -> SerializedRequestBody:
+        """
+        If a str is returned then the result will be assigned to data when making the request
+        If a tuple is returned then the result will be used as fields input in encode_multipart_formdata
+
+        The key of the returned dict depends on the content type:
+        - body for application/json
+        - encode_multipart and fields for multipart/form-data
+        """
+        media_type = self.content[content_type]
+        if isinstance(in_data, media_type.schema):
+            cast_in_data = in_data
+        elif isinstance(in_data, (dict, frozendict.frozendict)) and in_data:
+            cast_in_data = media_type.schema(**in_data)
+        else:
+            cast_in_data = media_type.schema(in_data)
+        # TODO check for and use encoding if it exists
+        # and content_type is multipart or application/x-www-form-urlencoded
+        if self._content_type_is_json(content_type):
+            return self.__serialize_json(cast_in_data)
+        elif content_type == "text/plain":
+            return self.__serialize_text_plain(cast_in_data)
+        elif content_type == "multipart/form-data":
+            return self.__serialize_multipart_form_data(cast_in_data)
+        elif content_type == "application/x-www-form-urlencoded":
+            return self.__serialize_application_x_www_form_data(cast_in_data)
+        elif content_type == "application/octet-stream":
+            return self.__serialize_application_octet_stream(cast_in_data)
+        raise NotImplementedError("Serialization has not yet been implemented for {}".format(content_type))
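+
+
+# Illustrative sketch of serializing a JSON request body; the MediaType wiring and
+# SomeSchema are assumptions for the example, not part of the generated module:
+#   request_body = RequestBody(content={"application/json": MediaType(schema=SomeSchema)})
+#   serialized = request_body.serialize(SomeSchema({"key": "value"}), "application/json")
+#   serialized["body"]  # -> b'{"key":"value"}'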
diff --git a/launch/api_client/apis/__init__.py b/launch/openapi_client/apis/__init__.py
similarity index 100%
rename from launch/api_client/apis/__init__.py
rename to launch/openapi_client/apis/__init__.py
diff --git a/launch/api_client/apis/path_to_api.py b/launch/openapi_client/apis/path_to_api.py
similarity index 65%
rename from launch/api_client/apis/path_to_api.py
rename to launch/openapi_client/apis/path_to_api.py
index 89fa65fe..25a69640 100644
--- a/launch/api_client/apis/path_to_api.py
+++ b/launch/openapi_client/apis/path_to_api.py
@@ -1,38 +1,40 @@
 import typing_extensions
 
-from launch.api_client.apis.paths.healthcheck import Healthcheck
-from launch.api_client.apis.paths.healthz import Healthz
-from launch.api_client.apis.paths.readyz import Readyz
-from launch.api_client.apis.paths.v1_async_tasks import V1AsyncTasks
-from launch.api_client.apis.paths.v1_async_tasks_task_id import (
+from launch.openapi_client.apis.paths.healthcheck import Healthcheck
+from launch.openapi_client.apis.paths.healthz import Healthz
+from launch.openapi_client.apis.paths.readyz import Readyz
+from launch.openapi_client.apis.paths.v1_async_tasks import V1AsyncTasks
+from launch.openapi_client.apis.paths.v1_async_tasks_task_id import (
     V1AsyncTasksTaskId,
 )
-from launch.api_client.apis.paths.v1_batch_jobs import V1BatchJobs
-from launch.api_client.apis.paths.v1_batch_jobs_batch_job_id import (
+from launch.openapi_client.apis.paths.v1_batch_jobs import V1BatchJobs
+from launch.openapi_client.apis.paths.v1_batch_jobs_batch_job_id import (
     V1BatchJobsBatchJobId,
 )
-from launch.api_client.apis.paths.v1_model_bundles import V1ModelBundles
-from launch.api_client.apis.paths.v1_model_bundles_clone_with_changes import (
+from launch.openapi_client.apis.paths.v1_model_bundles import V1ModelBundles
+from launch.openapi_client.apis.paths.v1_model_bundles_clone_with_changes import (
     V1ModelBundlesCloneWithChanges,
 )
-from launch.api_client.apis.paths.v1_model_bundles_latest import (
+from launch.openapi_client.apis.paths.v1_model_bundles_latest import (
     V1ModelBundlesLatest,
 )
-from launch.api_client.apis.paths.v1_model_bundles_model_bundle_id import (
+from launch.openapi_client.apis.paths.v1_model_bundles_model_bundle_id import (
     V1ModelBundlesModelBundleId,
 )
-from launch.api_client.apis.paths.v1_model_endpoints import V1ModelEndpoints
-from launch.api_client.apis.paths.v1_model_endpoints_api import (
+from launch.openapi_client.apis.paths.v1_model_endpoints import (
+    V1ModelEndpoints,
+)
+from launch.openapi_client.apis.paths.v1_model_endpoints_api import (
     V1ModelEndpointsApi,
 )
-from launch.api_client.apis.paths.v1_model_endpoints_model_endpoint_id import (
+from launch.openapi_client.apis.paths.v1_model_endpoints_model_endpoint_id import (
     V1ModelEndpointsModelEndpointId,
 )
-from launch.api_client.apis.paths.v1_model_endpoints_schema_json import (
+from launch.openapi_client.apis.paths.v1_model_endpoints_schema_json import (
     V1ModelEndpointsSchemaJson,
 )
-from launch.api_client.apis.paths.v1_sync_tasks import V1SyncTasks
-from launch.api_client.paths import PathValues
+from launch.openapi_client.apis.paths.v1_sync_tasks import V1SyncTasks
+from launch.openapi_client.paths import PathValues
 
 PathToApi = typing_extensions.TypedDict(
     "PathToApi",
diff --git a/launch/api_client/apis/paths/__init__.py b/launch/openapi_client/apis/paths/__init__.py
similarity index 73%
rename from launch/api_client/apis/paths/__init__.py
rename to launch/openapi_client/apis/paths/__init__.py
index d856e7a6..14b48c6b 100644
--- a/launch/api_client/apis/paths/__init__.py
+++ b/launch/openapi_client/apis/paths/__init__.py
@@ -1,3 +1,3 @@
 # do not import all endpoints into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all endpoints from this module, import them with
-# from launch.api_client.apis.path_to_api import path_to_api
+# from launch.openapi_client.apis.path_to_api import path_to_api
diff --git a/launch/openapi_client/apis/paths/healthcheck.py b/launch/openapi_client/apis/paths/healthcheck.py
new file mode 100644
index 00000000..0f230792
--- /dev/null
+++ b/launch/openapi_client/apis/paths/healthcheck.py
@@ -0,0 +1,7 @@
+from launch.openapi_client.paths.healthcheck.get import ApiForget
+
+
+class Healthcheck(
+    ApiForget,
+):
+    pass
diff --git a/launch/openapi_client/apis/paths/healthz.py b/launch/openapi_client/apis/paths/healthz.py
new file mode 100644
index 00000000..fdc9e14b
--- /dev/null
+++ b/launch/openapi_client/apis/paths/healthz.py
@@ -0,0 +1,7 @@
+from launch.openapi_client.paths.healthz.get import ApiForget
+
+
+class Healthz(
+    ApiForget,
+):
+    pass
diff --git a/launch/openapi_client/apis/paths/readyz.py b/launch/openapi_client/apis/paths/readyz.py
new file mode 100644
index 00000000..01bac0ee
--- /dev/null
+++ b/launch/openapi_client/apis/paths/readyz.py
@@ -0,0 +1,7 @@
+from launch.openapi_client.paths.readyz.get import ApiForget
+
+
+class Readyz(
+    ApiForget,
+):
+    pass
diff --git a/launch/openapi_client/apis/paths/v1_async_tasks.py b/launch/openapi_client/apis/paths/v1_async_tasks.py
new file mode 100644
index 00000000..b24d7301
--- /dev/null
+++ b/launch/openapi_client/apis/paths/v1_async_tasks.py
@@ -0,0 +1,7 @@
+from launch.openapi_client.paths.v1_async_tasks.post import ApiForpost
+
+
+class V1AsyncTasks(
+    ApiForpost,
+):
+    pass
diff --git a/launch/openapi_client/apis/paths/v1_async_tasks_task_id.py b/launch/openapi_client/apis/paths/v1_async_tasks_task_id.py
new file mode 100644
index 00000000..c9f731d4
--- /dev/null
+++ b/launch/openapi_client/apis/paths/v1_async_tasks_task_id.py
@@ -0,0 +1,7 @@
+from launch.openapi_client.paths.v1_async_tasks_task_id.get import ApiForget
+
+
+class V1AsyncTasksTaskId(
+    ApiForget,
+):
+    pass
diff --git a/launch/openapi_client/apis/paths/v1_batch_jobs.py b/launch/openapi_client/apis/paths/v1_batch_jobs.py
new file mode 100644
index 00000000..85128b11
--- /dev/null
+++ b/launch/openapi_client/apis/paths/v1_batch_jobs.py
@@ -0,0 +1,7 @@
+from launch.openapi_client.paths.v1_batch_jobs.post import ApiForpost
+
+
+class V1BatchJobs(
+    ApiForpost,
+):
+    pass
diff --git a/launch/openapi_client/apis/paths/v1_batch_jobs_batch_job_id.py b/launch/openapi_client/apis/paths/v1_batch_jobs_batch_job_id.py
new file mode 100644
index 00000000..9f868c85
--- /dev/null
+++ b/launch/openapi_client/apis/paths/v1_batch_jobs_batch_job_id.py
@@ -0,0 +1,13 @@
+from launch.openapi_client.paths.v1_batch_jobs_batch_job_id.get import (
+    ApiForget,
+)
+from launch.openapi_client.paths.v1_batch_jobs_batch_job_id.put import (
+    ApiForput,
+)
+
+
+class V1BatchJobsBatchJobId(
+    ApiForget,
+    ApiForput,
+):
+    pass
diff --git a/launch/openapi_client/apis/paths/v1_model_bundles.py b/launch/openapi_client/apis/paths/v1_model_bundles.py
new file mode 100644
index 00000000..9c951e27
--- /dev/null
+++ b/launch/openapi_client/apis/paths/v1_model_bundles.py
@@ -0,0 +1,9 @@
+from launch.openapi_client.paths.v1_model_bundles.get import ApiForget
+from launch.openapi_client.paths.v1_model_bundles.post import ApiForpost
+
+
+class V1ModelBundles(
+    ApiForget,
+    ApiForpost,
+):
+    pass
diff --git a/launch/api_client/apis/paths/v1_model_bundles_clone_with_changes.py b/launch/openapi_client/apis/paths/v1_model_bundles_clone_with_changes.py
similarity index 50%
rename from launch/api_client/apis/paths/v1_model_bundles_clone_with_changes.py
rename to launch/openapi_client/apis/paths/v1_model_bundles_clone_with_changes.py
index df0644e3..4e895d48 100644
--- a/launch/api_client/apis/paths/v1_model_bundles_clone_with_changes.py
+++ b/launch/openapi_client/apis/paths/v1_model_bundles_clone_with_changes.py
@@ -1,4 +1,4 @@
-from launch.api_client.paths.v1_model_bundles_clone_with_changes.post import (
+from launch.openapi_client.paths.v1_model_bundles_clone_with_changes.post import (
     ApiForpost,
 )
 
diff --git a/launch/openapi_client/apis/paths/v1_model_bundles_latest.py b/launch/openapi_client/apis/paths/v1_model_bundles_latest.py
new file mode 100644
index 00000000..a04c91a8
--- /dev/null
+++ b/launch/openapi_client/apis/paths/v1_model_bundles_latest.py
@@ -0,0 +1,7 @@
+from launch.openapi_client.paths.v1_model_bundles_latest.get import ApiForget
+
+
+class V1ModelBundlesLatest(
+    ApiForget,
+):
+    pass
diff --git a/launch/api_client/apis/paths/v1_model_bundles_model_bundle_id.py b/launch/openapi_client/apis/paths/v1_model_bundles_model_bundle_id.py
similarity index 50%
rename from launch/api_client/apis/paths/v1_model_bundles_model_bundle_id.py
rename to launch/openapi_client/apis/paths/v1_model_bundles_model_bundle_id.py
index 0e646257..a525a59d 100644
--- a/launch/api_client/apis/paths/v1_model_bundles_model_bundle_id.py
+++ b/launch/openapi_client/apis/paths/v1_model_bundles_model_bundle_id.py
@@ -1,4 +1,4 @@
-from launch.api_client.paths.v1_model_bundles_model_bundle_id.get import (
+from launch.openapi_client.paths.v1_model_bundles_model_bundle_id.get import (
     ApiForget,
 )
 
diff --git a/launch/openapi_client/apis/paths/v1_model_endpoints.py b/launch/openapi_client/apis/paths/v1_model_endpoints.py
new file mode 100644
index 00000000..44f88bf6
--- /dev/null
+++ b/launch/openapi_client/apis/paths/v1_model_endpoints.py
@@ -0,0 +1,9 @@
+from launch.openapi_client.paths.v1_model_endpoints.get import ApiForget
+from launch.openapi_client.paths.v1_model_endpoints.post import ApiForpost
+
+
+class V1ModelEndpoints(
+    ApiForget,
+    ApiForpost,
+):
+    pass
diff --git a/launch/openapi_client/apis/paths/v1_model_endpoints_api.py b/launch/openapi_client/apis/paths/v1_model_endpoints_api.py
new file mode 100644
index 00000000..d4f3d056
--- /dev/null
+++ b/launch/openapi_client/apis/paths/v1_model_endpoints_api.py
@@ -0,0 +1,7 @@
+from launch.openapi_client.paths.v1_model_endpoints_api.get import ApiForget
+
+
+class V1ModelEndpointsApi(
+    ApiForget,
+):
+    pass
diff --git a/launch/openapi_client/apis/paths/v1_model_endpoints_model_endpoint_id.py b/launch/openapi_client/apis/paths/v1_model_endpoints_model_endpoint_id.py
new file mode 100644
index 00000000..66f72f0a
--- /dev/null
+++ b/launch/openapi_client/apis/paths/v1_model_endpoints_model_endpoint_id.py
@@ -0,0 +1,17 @@
+from launch.openapi_client.paths.v1_model_endpoints_model_endpoint_id.delete import (
+    ApiFordelete,
+)
+from launch.openapi_client.paths.v1_model_endpoints_model_endpoint_id.get import (
+    ApiForget,
+)
+from launch.openapi_client.paths.v1_model_endpoints_model_endpoint_id.put import (
+    ApiForput,
+)
+
+
+class V1ModelEndpointsModelEndpointId(
+    ApiForget,
+    ApiForput,
+    ApiFordelete,
+):
+    pass
diff --git a/launch/api_client/apis/paths/v1_model_endpoints_schema_json.py b/launch/openapi_client/apis/paths/v1_model_endpoints_schema_json.py
similarity index 50%
rename from launch/api_client/apis/paths/v1_model_endpoints_schema_json.py
rename to launch/openapi_client/apis/paths/v1_model_endpoints_schema_json.py
index 7c62f95b..d0db7c76 100644
--- a/launch/api_client/apis/paths/v1_model_endpoints_schema_json.py
+++ b/launch/openapi_client/apis/paths/v1_model_endpoints_schema_json.py
@@ -1,4 +1,4 @@
-from launch.api_client.paths.v1_model_endpoints_schema_json.get import (
+from launch.openapi_client.paths.v1_model_endpoints_schema_json.get import (
     ApiForget,
 )
 
diff --git a/launch/openapi_client/apis/paths/v1_sync_tasks.py b/launch/openapi_client/apis/paths/v1_sync_tasks.py
new file mode 100644
index 00000000..c2d68d32
--- /dev/null
+++ b/launch/openapi_client/apis/paths/v1_sync_tasks.py
@@ -0,0 +1,7 @@
+from launch.openapi_client.paths.v1_sync_tasks.post import ApiForpost
+
+
+class V1SyncTasks(
+    ApiForpost,
+):
+    pass
diff --git a/launch/api_client/apis/tag_to_api.py b/launch/openapi_client/apis/tag_to_api.py
similarity index 63%
rename from launch/api_client/apis/tag_to_api.py
rename to launch/openapi_client/apis/tag_to_api.py
index 8b3c0d6d..944183ec 100644
--- a/launch/api_client/apis/tag_to_api.py
+++ b/launch/openapi_client/apis/tag_to_api.py
@@ -1,7 +1,7 @@
 import typing_extensions
 
-from launch.api_client.apis.tags import TagValues
-from launch.api_client.apis.tags.default_api import DefaultApi
+from launch.openapi_client.apis.tags import TagValues
+from launch.openapi_client.apis.tags.default_api import DefaultApi
 
 TagToApi = typing_extensions.TypedDict(
     "TagToApi",
diff --git a/launch/api_client/apis/tags/__init__.py b/launch/openapi_client/apis/tags/__init__.py
similarity index 80%
rename from launch/api_client/apis/tags/__init__.py
rename to launch/openapi_client/apis/tags/__init__.py
index 5595c045..0fd096c1 100644
--- a/launch/api_client/apis/tags/__init__.py
+++ b/launch/openapi_client/apis/tags/__init__.py
@@ -1,6 +1,6 @@
 # do not import all endpoints into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all endpoints from this module, import them with
-# from launch.api_client.apis.tag_to_api import tag_to_api
+# from launch.openapi_client.apis.tag_to_api import tag_to_api
 
 import enum
 
diff --git a/launch/api_client/apis/tags/default_api.py b/launch/openapi_client/apis/tags/default_api.py
similarity index 61%
rename from launch/api_client/apis/tags/default_api.py
rename to launch/openapi_client/apis/tags/default_api.py
index 6a2bb4aa..9c63633a 100644
--- a/launch/api_client/apis/tags/default_api.py
+++ b/launch/openapi_client/apis/tags/default_api.py
@@ -9,61 +9,63 @@
     Generated by: https://openapi-generator.tech
 """
 
-from launch.api_client.paths.healthcheck.get import HealthcheckHealthcheckGet
-from launch.api_client.paths.healthz.get import HealthcheckHealthzGet
-from launch.api_client.paths.readyz.get import HealthcheckReadyzGet
-from launch.api_client.paths.v1_async_tasks.post import (
+from launch.openapi_client.paths.healthcheck.get import (
+    HealthcheckHealthcheckGet,
+)
+from launch.openapi_client.paths.healthz.get import HealthcheckHealthzGet
+from launch.openapi_client.paths.readyz.get import HealthcheckReadyzGet
+from launch.openapi_client.paths.v1_async_tasks.post import (
     CreateAsyncInferenceTaskV1AsyncTasksPost,
 )
-from launch.api_client.paths.v1_async_tasks_task_id.get import (
+from launch.openapi_client.paths.v1_async_tasks_task_id.get import (
     GetAsyncInferenceTaskV1AsyncTasksTaskIdGet,
 )
-from launch.api_client.paths.v1_batch_jobs.post import (
+from launch.openapi_client.paths.v1_batch_jobs.post import (
     CreateBatchJobV1BatchJobsPost,
 )
-from launch.api_client.paths.v1_batch_jobs_batch_job_id.get import (
+from launch.openapi_client.paths.v1_batch_jobs_batch_job_id.get import (
     GetBatchJobV1BatchJobsBatchJobIdGet,
 )
-from launch.api_client.paths.v1_batch_jobs_batch_job_id.put import (
+from launch.openapi_client.paths.v1_batch_jobs_batch_job_id.put import (
     UpdateBatchJobV1BatchJobsBatchJobIdPut,
 )
-from launch.api_client.paths.v1_model_bundles.get import (
+from launch.openapi_client.paths.v1_model_bundles.get import (
     ListModelBundlesV1ModelBundlesGet,
 )
-from launch.api_client.paths.v1_model_bundles.post import (
+from launch.openapi_client.paths.v1_model_bundles.post import (
     CreateModelBundleV1ModelBundlesPost,
 )
-from launch.api_client.paths.v1_model_bundles_clone_with_changes.post import (
+from launch.openapi_client.paths.v1_model_bundles_clone_with_changes.post import (
     CloneModelBundleWithChangesV1ModelBundlesCloneWithChangesPost,
 )
-from launch.api_client.paths.v1_model_bundles_latest.get import (
+from launch.openapi_client.paths.v1_model_bundles_latest.get import (
     GetLatestModelBundleV1ModelBundlesLatestGet,
 )
-from launch.api_client.paths.v1_model_bundles_model_bundle_id.get import (
+from launch.openapi_client.paths.v1_model_bundles_model_bundle_id.get import (
     GetModelBundleV1ModelBundlesModelBundleIdGet,
 )
-from launch.api_client.paths.v1_model_endpoints.get import (
+from launch.openapi_client.paths.v1_model_endpoints.get import (
     ListModelEndpointsV1ModelEndpointsGet,
 )
-from launch.api_client.paths.v1_model_endpoints.post import (
+from launch.openapi_client.paths.v1_model_endpoints.post import (
     CreateModelEndpointV1ModelEndpointsPost,
 )
-from launch.api_client.paths.v1_model_endpoints_api.get import (
+from launch.openapi_client.paths.v1_model_endpoints_api.get import (
     GetModelEndpointsApiV1ModelEndpointsApiGet,
 )
-from launch.api_client.paths.v1_model_endpoints_model_endpoint_id.delete import (
+from launch.openapi_client.paths.v1_model_endpoints_model_endpoint_id.delete import (
     DeleteModelEndpointV1ModelEndpointsModelEndpointIdDelete,
 )
-from launch.api_client.paths.v1_model_endpoints_model_endpoint_id.get import (
+from launch.openapi_client.paths.v1_model_endpoints_model_endpoint_id.get import (
     GetModelEndpointV1ModelEndpointsModelEndpointIdGet,
 )
-from launch.api_client.paths.v1_model_endpoints_model_endpoint_id.put import (
+from launch.openapi_client.paths.v1_model_endpoints_model_endpoint_id.put import (
     UpdateModelEndpointV1ModelEndpointsModelEndpointIdPut,
 )
-from launch.api_client.paths.v1_model_endpoints_schema_json.get import (
+from launch.openapi_client.paths.v1_model_endpoints_schema_json.get import (
     GetModelEndpointsSchemaV1ModelEndpointsSchemaJsonGet,
 )
-from launch.api_client.paths.v1_sync_tasks.post import (
+from launch.openapi_client.paths.v1_sync_tasks.post import (
     CreateSyncInferenceTaskV1SyncTasksPost,
 )
 
diff --git a/launch/api_client/configuration.py b/launch/openapi_client/configuration.py
similarity index 99%
rename from launch/api_client/configuration.py
rename to launch/openapi_client/configuration.py
index 7c17d0c3..b8536513 100644
--- a/launch/api_client/configuration.py
+++ b/launch/openapi_client/configuration.py
@@ -17,7 +17,7 @@
 
 import urllib3
 
-from launch.api_client.exceptions import ApiValueError
+from launch.openapi_client.exceptions import ApiValueError
 
 JSON_SCHEMA_VALIDATION_KEYWORDS = {
     "multipleOf",
@@ -98,7 +98,7 @@ class Configuration(object):
 
         Configure API client with HTTP basic authentication:
 
-    conf = launch.api_client.Configuration(
+    conf = launch.openapi_client.Configuration(
         username='the-user',
         password='the-password',
     )
@@ -161,7 +161,7 @@ def __init__(
         self.logger = {}
         """Logging Settings
         """
-        self.logger["package_logger"] = logging.getLogger("launch.api_client")
+        self.logger["package_logger"] = logging.getLogger("launch.openapi_client")
         self.logger["urllib3_logger"] = logging.getLogger("urllib3")
         self.logger_format = "%(asctime)s %(levelname)s %(message)s"
         """Log format
diff --git a/launch/openapi_client/exceptions.py b/launch/openapi_client/exceptions.py
new file mode 100644
index 00000000..2751e056
--- /dev/null
+++ b/launch/openapi_client/exceptions.py
@@ -0,0 +1,137 @@
+# coding: utf-8
+
+"""
+    launch
+
+    No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)  # noqa: E501
+
+    The version of the OpenAPI document: 1.0.0
+    Generated by: https://openapi-generator.tech
+"""
+
+
+class OpenApiException(Exception):
+    """The base exception class for all OpenAPIExceptions"""
+
+
+class ApiTypeError(OpenApiException, TypeError):
+    def __init__(self, msg, path_to_item=None, valid_classes=None, key_type=None):
+        """Raises an exception for TypeErrors
+
+        Args:
+            msg (str): the exception message
+
+        Keyword Args:
+            path_to_item (list): a list of keys and indices to get to the
+                                 current_item
+                                 None if unset
+            valid_classes (tuple): the primitive classes that current item
+                                   should be an instance of
+                                   None if unset
+            key_type (bool): False if our value is a value in a dict
+                             True if it is a key in a dict
+                             False if our item is an item in a list
+                             None if unset
+        """
+        self.path_to_item = path_to_item
+        self.valid_classes = valid_classes
+        self.key_type = key_type
+        full_msg = msg
+        if path_to_item:
+            full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
+        super(ApiTypeError, self).__init__(full_msg)
+
+
+class ApiValueError(OpenApiException, ValueError):
+    def __init__(self, msg, path_to_item=None):
+        """
+        Args:
+            msg (str): the exception message
+
+        Keyword Args:
+            path_to_item (list) the path to the exception in the
+                received_data dict. None if unset
+        """
+
+        self.path_to_item = path_to_item
+        full_msg = msg
+        if path_to_item:
+            full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
+        super(ApiValueError, self).__init__(full_msg)
+
+
+class ApiAttributeError(OpenApiException, AttributeError):
+    def __init__(self, msg, path_to_item=None):
+        """
+        Raised when an attribute reference or assignment fails.
+
+        Args:
+            msg (str): the exception message
+
+        Keyword Args:
+            path_to_item (None/list) the path to the exception in the
+                received_data dict
+        """
+        self.path_to_item = path_to_item
+        full_msg = msg
+        if path_to_item:
+            full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
+        super(ApiAttributeError, self).__init__(full_msg)
+
+
+class ApiKeyError(OpenApiException, KeyError):
+    def __init__(self, msg, path_to_item=None):
+        """
+        Args:
+            msg (str): the exception message
+
+        Keyword Args:
+            path_to_item (None/list) the path to the exception in the
+                received_data dict
+        """
+        self.path_to_item = path_to_item
+        full_msg = msg
+        if path_to_item:
+            full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
+        super(ApiKeyError, self).__init__(full_msg)
+
+
+class ApiException(OpenApiException):
+    def __init__(
+        self,
+        status=None,
+        reason=None,
+        api_response: "launch.openapi_client.api_client.ApiResponse" = None,
+    ):
+        if api_response:
+            self.status = api_response.response.status
+            self.reason = api_response.response.reason
+            self.body = api_response.response.data
+            self.headers = api_response.response.getheaders()
+        else:
+            self.status = status
+            self.reason = reason
+            self.body = None
+            self.headers = None
+
+    def __str__(self):
+        """Custom error messages for exception"""
+        error_message = "({0})\n" "Reason: {1}\n".format(self.status, self.reason)
+        if self.headers:
+            error_message += "HTTP response headers: {0}\n".format(self.headers)
+
+        if self.body:
+            error_message += "HTTP response body: {0}\n".format(self.body)
+
+        return error_message
+
+
+def render_path(path_to_item):
+    """Returns a string representation of a path"""
+    result = ""
+    for pth in path_to_item:
+        if isinstance(pth, int):
+            result += "[{0}]".format(pth)
+        else:
+            result += "['{0}']".format(pth)
+    return result
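+
+
+# Example (hypothetical path): render_path(["spec", 0, "name"]) returns "['spec'][0]['name']".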
diff --git a/launch/api_client/model/__init__.py b/launch/openapi_client/model/__init__.py
similarity index 83%
rename from launch/api_client/model/__init__.py
rename to launch/openapi_client/model/__init__.py
index 4a66ef07..9439f5e1 100644
--- a/launch/api_client/model/__init__.py
+++ b/launch/openapi_client/model/__init__.py
@@ -2,4 +2,4 @@
 # reference which would not work in python2
 # do not import all models into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all models from one package, import them with
-# from launch.api_client.models import ModelA, ModelB
+# from launch.openapi_client.models import ModelA, ModelB
diff --git a/launch/api_client/model/batch_job_serialization_format.py b/launch/openapi_client/model/batch_job_serialization_format.py
similarity index 94%
rename from launch/api_client/model/batch_job_serialization_format.py
rename to launch/openapi_client/model/batch_job_serialization_format.py
index 7bfc3103..fbc04cd6 100644
--- a/launch/api_client/model/batch_job_serialization_format.py
+++ b/launch/openapi_client/model/batch_job_serialization_format.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class BatchJobSerializationFormat(schemas.EnumBase, schemas.StrSchema):
diff --git a/launch/api_client/model/batch_job_serialization_format.pyi b/launch/openapi_client/model/batch_job_serialization_format.pyi
similarity index 100%
rename from launch/api_client/model/batch_job_serialization_format.pyi
rename to launch/openapi_client/model/batch_job_serialization_format.pyi
diff --git a/launch/api_client/model/batch_job_status.py b/launch/openapi_client/model/batch_job_status.py
similarity index 96%
rename from launch/api_client/model/batch_job_status.py
rename to launch/openapi_client/model/batch_job_status.py
index 052dada8..aa2c3796 100644
--- a/launch/api_client/model/batch_job_status.py
+++ b/launch/openapi_client/model/batch_job_status.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class BatchJobStatus(schemas.EnumBase, schemas.StrSchema):
diff --git a/launch/api_client/model/batch_job_status.pyi b/launch/openapi_client/model/batch_job_status.pyi
similarity index 100%
rename from launch/api_client/model/batch_job_status.pyi
rename to launch/openapi_client/model/batch_job_status.pyi
diff --git a/launch/api_client/model/clone_model_bundle_request.py b/launch/openapi_client/model/clone_model_bundle_request.py
similarity index 98%
rename from launch/api_client/model/clone_model_bundle_request.py
rename to launch/openapi_client/model/clone_model_bundle_request.py
index 75df3b51..deafa0ab 100644
--- a/launch/api_client/model/clone_model_bundle_request.py
+++ b/launch/openapi_client/model/clone_model_bundle_request.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class CloneModelBundleRequest(schemas.DictSchema):
diff --git a/launch/api_client/model/clone_model_bundle_request.pyi b/launch/openapi_client/model/clone_model_bundle_request.pyi
similarity index 100%
rename from launch/api_client/model/clone_model_bundle_request.pyi
rename to launch/openapi_client/model/clone_model_bundle_request.pyi
diff --git a/launch/api_client/model/create_async_task_response.py b/launch/openapi_client/model/create_async_task_response.py
similarity index 98%
rename from launch/api_client/model/create_async_task_response.py
rename to launch/openapi_client/model/create_async_task_response.py
index 54f85f97..0911cf8d 100644
--- a/launch/api_client/model/create_async_task_response.py
+++ b/launch/openapi_client/model/create_async_task_response.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class CreateAsyncTaskResponse(schemas.DictSchema):
diff --git a/launch/api_client/model/create_async_task_response.pyi b/launch/openapi_client/model/create_async_task_response.pyi
similarity index 100%
rename from launch/api_client/model/create_async_task_response.pyi
rename to launch/openapi_client/model/create_async_task_response.pyi
diff --git a/launch/api_client/model/create_batch_job_request.py b/launch/openapi_client/model/create_batch_job_request.py
similarity index 97%
rename from launch/api_client/model/create_batch_job_request.py
rename to launch/openapi_client/model/create_batch_job_request.py
index c90c8903..3b405905 100644
--- a/launch/api_client/model/create_batch_job_request.py
+++ b/launch/openapi_client/model/create_batch_job_request.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class CreateBatchJobRequest(schemas.DictSchema):
@@ -233,9 +233,9 @@ def __new__(
         )
 
 
-from launch.api_client.model.batch_job_serialization_format import (
+from launch.openapi_client.model.batch_job_serialization_format import (
     BatchJobSerializationFormat,
 )
-from launch.api_client.model.create_batch_job_resource_requests import (
+from launch.openapi_client.model.create_batch_job_resource_requests import (
     CreateBatchJobResourceRequests,
 )
diff --git a/launch/api_client/model/create_batch_job_request.pyi b/launch/openapi_client/model/create_batch_job_request.pyi
similarity index 100%
rename from launch/api_client/model/create_batch_job_request.pyi
rename to launch/openapi_client/model/create_batch_job_request.pyi
diff --git a/launch/api_client/model/create_batch_job_resource_requests.py b/launch/openapi_client/model/create_batch_job_resource_requests.py
similarity index 99%
rename from launch/api_client/model/create_batch_job_resource_requests.py
rename to launch/openapi_client/model/create_batch_job_resource_requests.py
index ed223c61..e2b08c0f 100644
--- a/launch/api_client/model/create_batch_job_resource_requests.py
+++ b/launch/openapi_client/model/create_batch_job_resource_requests.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class CreateBatchJobResourceRequests(schemas.DictSchema):
@@ -477,4 +477,4 @@ def __new__(
         )
 
 
-from launch.api_client.model.gpu_type import GpuType
+from launch.openapi_client.model.gpu_type import GpuType
diff --git a/launch/api_client/model/create_batch_job_resource_requests.pyi b/launch/openapi_client/model/create_batch_job_resource_requests.pyi
similarity index 100%
rename from launch/api_client/model/create_batch_job_resource_requests.pyi
rename to launch/openapi_client/model/create_batch_job_resource_requests.pyi
diff --git a/launch/api_client/model/create_batch_job_response.py b/launch/openapi_client/model/create_batch_job_response.py
similarity index 98%
rename from launch/api_client/model/create_batch_job_response.py
rename to launch/openapi_client/model/create_batch_job_response.py
index c160b679..d4c7475f 100644
--- a/launch/api_client/model/create_batch_job_response.py
+++ b/launch/openapi_client/model/create_batch_job_response.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class CreateBatchJobResponse(schemas.DictSchema):
diff --git a/launch/api_client/model/create_batch_job_response.pyi b/launch/openapi_client/model/create_batch_job_response.pyi
similarity index 100%
rename from launch/api_client/model/create_batch_job_response.pyi
rename to launch/openapi_client/model/create_batch_job_response.pyi
diff --git a/launch/api_client/model/create_model_bundle_request.py b/launch/openapi_client/model/create_model_bundle_request.py
similarity index 97%
rename from launch/api_client/model/create_model_bundle_request.py
rename to launch/openapi_client/model/create_model_bundle_request.py
index 255a190b..db57caa1 100644
--- a/launch/api_client/model/create_model_bundle_request.py
+++ b/launch/openapi_client/model/create_model_bundle_request.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class CreateModelBundleRequest(schemas.DictSchema):
@@ -288,9 +288,9 @@ def __new__(
         )
 
 
-from launch.api_client.model.model_bundle_environment_params import (
+from launch.openapi_client.model.model_bundle_environment_params import (
     ModelBundleEnvironmentParams,
 )
-from launch.api_client.model.model_bundle_packaging_type import (
+from launch.openapi_client.model.model_bundle_packaging_type import (
     ModelBundlePackagingType,
 )
diff --git a/launch/api_client/model/create_model_bundle_request.pyi b/launch/openapi_client/model/create_model_bundle_request.pyi
similarity index 100%
rename from launch/api_client/model/create_model_bundle_request.pyi
rename to launch/openapi_client/model/create_model_bundle_request.pyi
diff --git a/launch/api_client/model/create_model_bundle_response.py b/launch/openapi_client/model/create_model_bundle_response.py
similarity index 98%
rename from launch/api_client/model/create_model_bundle_response.py
rename to launch/openapi_client/model/create_model_bundle_response.py
index 23a7a0c0..4bb60b8e 100644
--- a/launch/api_client/model/create_model_bundle_response.py
+++ b/launch/openapi_client/model/create_model_bundle_response.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class CreateModelBundleResponse(schemas.DictSchema):
diff --git a/launch/api_client/model/create_model_bundle_response.pyi b/launch/openapi_client/model/create_model_bundle_response.pyi
similarity index 100%
rename from launch/api_client/model/create_model_bundle_response.pyi
rename to launch/openapi_client/model/create_model_bundle_response.pyi
diff --git a/launch/api_client/model/create_model_endpoint_request.py b/launch/openapi_client/model/create_model_endpoint_request.py
similarity index 99%
rename from launch/api_client/model/create_model_endpoint_request.py
rename to launch/openapi_client/model/create_model_endpoint_request.py
index 570250b6..675b8038 100644
--- a/launch/api_client/model/create_model_endpoint_request.py
+++ b/launch/openapi_client/model/create_model_endpoint_request.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class CreateModelEndpointRequest(schemas.DictSchema):
@@ -780,5 +780,5 @@ def __new__(
         )
 
 
-from launch.api_client.model.gpu_type import GpuType
-from launch.api_client.model.model_endpoint_type import ModelEndpointType
+from launch.openapi_client.model.gpu_type import GpuType
+from launch.openapi_client.model.model_endpoint_type import ModelEndpointType
diff --git a/launch/api_client/model/create_model_endpoint_request.pyi b/launch/openapi_client/model/create_model_endpoint_request.pyi
similarity index 100%
rename from launch/api_client/model/create_model_endpoint_request.pyi
rename to launch/openapi_client/model/create_model_endpoint_request.pyi
diff --git a/launch/api_client/model/create_model_endpoint_response.py b/launch/openapi_client/model/create_model_endpoint_response.py
similarity index 98%
rename from launch/api_client/model/create_model_endpoint_response.py
rename to launch/openapi_client/model/create_model_endpoint_response.py
index d198be40..4ac8ac49 100644
--- a/launch/api_client/model/create_model_endpoint_response.py
+++ b/launch/openapi_client/model/create_model_endpoint_response.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class CreateModelEndpointResponse(schemas.DictSchema):
diff --git a/launch/api_client/model/create_model_endpoint_response.pyi b/launch/openapi_client/model/create_model_endpoint_response.pyi
similarity index 100%
rename from launch/api_client/model/create_model_endpoint_response.pyi
rename to launch/openapi_client/model/create_model_endpoint_response.pyi
diff --git a/launch/api_client/model/delete_model_endpoint_response.py b/launch/openapi_client/model/delete_model_endpoint_response.py
similarity index 98%
rename from launch/api_client/model/delete_model_endpoint_response.py
rename to launch/openapi_client/model/delete_model_endpoint_response.py
index 01024336..77bf0dc5 100644
--- a/launch/api_client/model/delete_model_endpoint_response.py
+++ b/launch/openapi_client/model/delete_model_endpoint_response.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class DeleteModelEndpointResponse(schemas.DictSchema):
diff --git a/launch/api_client/model/delete_model_endpoint_response.pyi b/launch/openapi_client/model/delete_model_endpoint_response.pyi
similarity index 100%
rename from launch/api_client/model/delete_model_endpoint_response.pyi
rename to launch/openapi_client/model/delete_model_endpoint_response.pyi
diff --git a/launch/api_client/model/endpoint_predict_request.py b/launch/openapi_client/model/endpoint_predict_request.py
similarity index 99%
rename from launch/api_client/model/endpoint_predict_request.py
rename to launch/openapi_client/model/endpoint_predict_request.py
index 9814f08b..c5efff9d 100644
--- a/launch/api_client/model/endpoint_predict_request.py
+++ b/launch/openapi_client/model/endpoint_predict_request.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class EndpointPredictRequest(schemas.DictSchema):
diff --git a/launch/api_client/model/endpoint_predict_request.pyi b/launch/openapi_client/model/endpoint_predict_request.pyi
similarity index 100%
rename from launch/api_client/model/endpoint_predict_request.pyi
rename to launch/openapi_client/model/endpoint_predict_request.pyi
diff --git a/launch/api_client/model/get_async_task_response.py b/launch/openapi_client/model/get_async_task_response.py
similarity index 97%
rename from launch/api_client/model/get_async_task_response.py
rename to launch/openapi_client/model/get_async_task_response.py
index 02dc4e20..20df78e8 100644
--- a/launch/api_client/model/get_async_task_response.py
+++ b/launch/openapi_client/model/get_async_task_response.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class GetAsyncTaskResponse(schemas.DictSchema):
@@ -189,4 +189,4 @@ def __new__(
         )
 
 
-from launch.api_client.model.task_status import TaskStatus
+from launch.openapi_client.model.task_status import TaskStatus
diff --git a/launch/api_client/model/get_async_task_response.pyi b/launch/openapi_client/model/get_async_task_response.pyi
similarity index 100%
rename from launch/api_client/model/get_async_task_response.pyi
rename to launch/openapi_client/model/get_async_task_response.pyi
diff --git a/launch/api_client/model/get_batch_job_response.py b/launch/openapi_client/model/get_batch_job_response.py
similarity index 97%
rename from launch/api_client/model/get_batch_job_response.py
rename to launch/openapi_client/model/get_batch_job_response.py
index 9b0b0894..279a3d32 100644
--- a/launch/api_client/model/get_batch_job_response.py
+++ b/launch/openapi_client/model/get_batch_job_response.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class GetBatchJobResponse(schemas.DictSchema):
@@ -203,4 +203,4 @@ def __new__(
         )
 
 
-from launch.api_client.model.batch_job_status import BatchJobStatus
+from launch.openapi_client.model.batch_job_status import BatchJobStatus
diff --git a/launch/api_client/model/get_batch_job_response.pyi b/launch/openapi_client/model/get_batch_job_response.pyi
similarity index 100%
rename from launch/api_client/model/get_batch_job_response.pyi
rename to launch/openapi_client/model/get_batch_job_response.pyi
diff --git a/launch/api_client/model/get_model_endpoint_response.py b/launch/openapi_client/model/get_model_endpoint_response.py
similarity index 97%
rename from launch/api_client/model/get_model_endpoint_response.py
rename to launch/openapi_client/model/get_model_endpoint_response.py
index ea40d0d9..4e57ca51 100644
--- a/launch/api_client/model/get_model_endpoint_response.py
+++ b/launch/openapi_client/model/get_model_endpoint_response.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class GetModelEndpointResponse(schemas.DictSchema):
@@ -521,11 +521,13 @@ def __new__(
         )
 
 
-from launch.api_client.model.model_endpoint_deployment_state import (
+from launch.openapi_client.model.model_endpoint_deployment_state import (
     ModelEndpointDeploymentState,
 )
-from launch.api_client.model.model_endpoint_resource_state import (
+from launch.openapi_client.model.model_endpoint_resource_state import (
     ModelEndpointResourceState,
 )
-from launch.api_client.model.model_endpoint_status import ModelEndpointStatus
-from launch.api_client.model.model_endpoint_type import ModelEndpointType
+from launch.openapi_client.model.model_endpoint_status import (
+    ModelEndpointStatus,
+)
+from launch.openapi_client.model.model_endpoint_type import ModelEndpointType
diff --git a/launch/api_client/model/get_model_endpoint_response.pyi b/launch/openapi_client/model/get_model_endpoint_response.pyi
similarity index 100%
rename from launch/api_client/model/get_model_endpoint_response.pyi
rename to launch/openapi_client/model/get_model_endpoint_response.pyi
diff --git a/launch/api_client/model/gpu_type.py b/launch/openapi_client/model/gpu_type.py
similarity index 95%
rename from launch/api_client/model/gpu_type.py
rename to launch/openapi_client/model/gpu_type.py
index 3d1ee053..2de6e59f 100644
--- a/launch/api_client/model/gpu_type.py
+++ b/launch/openapi_client/model/gpu_type.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class GpuType(schemas.EnumBase, schemas.StrSchema):
diff --git a/launch/api_client/model/gpu_type.pyi b/launch/openapi_client/model/gpu_type.pyi
similarity index 100%
rename from launch/api_client/model/gpu_type.pyi
rename to launch/openapi_client/model/gpu_type.pyi
diff --git a/launch/api_client/model/http_validation_error.py b/launch/openapi_client/model/http_validation_error.py
similarity index 96%
rename from launch/api_client/model/http_validation_error.py
rename to launch/openapi_client/model/http_validation_error.py
index 5ba37eb2..53dfd1d9 100644
--- a/launch/api_client/model/http_validation_error.py
+++ b/launch/openapi_client/model/http_validation_error.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class HTTPValidationError(schemas.DictSchema):
@@ -130,4 +130,4 @@ def __new__(
         )
 
 
-from launch.api_client.model.validation_error import ValidationError
+from launch.openapi_client.model.validation_error import ValidationError
diff --git a/launch/api_client/model/http_validation_error.pyi b/launch/openapi_client/model/http_validation_error.pyi
similarity index 100%
rename from launch/api_client/model/http_validation_error.pyi
rename to launch/openapi_client/model/http_validation_error.pyi
diff --git a/launch/api_client/model/list_model_bundles_response.py b/launch/openapi_client/model/list_model_bundles_response.py
similarity index 96%
rename from launch/api_client/model/list_model_bundles_response.py
rename to launch/openapi_client/model/list_model_bundles_response.py
index a0b7e25b..76e8d603 100644
--- a/launch/api_client/model/list_model_bundles_response.py
+++ b/launch/openapi_client/model/list_model_bundles_response.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class ListModelBundlesResponse(schemas.DictSchema):
@@ -140,4 +140,6 @@ def __new__(
         )
 
 
-from launch.api_client.model.model_bundle_response import ModelBundleResponse
+from launch.openapi_client.model.model_bundle_response import (
+    ModelBundleResponse,
+)
diff --git a/launch/api_client/model/list_model_bundles_response.pyi b/launch/openapi_client/model/list_model_bundles_response.pyi
similarity index 100%
rename from launch/api_client/model/list_model_bundles_response.pyi
rename to launch/openapi_client/model/list_model_bundles_response.pyi
diff --git a/launch/api_client/model/list_model_endpoints_response.py b/launch/openapi_client/model/list_model_endpoints_response.py
similarity index 96%
rename from launch/api_client/model/list_model_endpoints_response.py
rename to launch/openapi_client/model/list_model_endpoints_response.py
index 64bea072..b4f3d30d 100644
--- a/launch/api_client/model/list_model_endpoints_response.py
+++ b/launch/openapi_client/model/list_model_endpoints_response.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class ListModelEndpointsResponse(schemas.DictSchema):
@@ -138,6 +138,6 @@ def __new__(
         )
 
 
-from launch.api_client.model.get_model_endpoint_response import (
+from launch.openapi_client.model.get_model_endpoint_response import (
     GetModelEndpointResponse,
 )
diff --git a/launch/api_client/model/list_model_endpoints_response.pyi b/launch/openapi_client/model/list_model_endpoints_response.pyi
similarity index 100%
rename from launch/api_client/model/list_model_endpoints_response.pyi
rename to launch/openapi_client/model/list_model_endpoints_response.pyi
diff --git a/launch/api_client/model/model_bundle_environment_params.py b/launch/openapi_client/model/model_bundle_environment_params.py
similarity index 97%
rename from launch/api_client/model/model_bundle_environment_params.py
rename to launch/openapi_client/model/model_bundle_environment_params.py
index ec1a7b2b..5b39dbda 100644
--- a/launch/api_client/model/model_bundle_environment_params.py
+++ b/launch/openapi_client/model/model_bundle_environment_params.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class ModelBundleEnvironmentParams(schemas.DictSchema):
@@ -190,4 +190,6 @@ def __new__(
         )
 
 
-from launch.api_client.model.model_bundle_framework import ModelBundleFramework
+from launch.openapi_client.model.model_bundle_framework import (
+    ModelBundleFramework,
+)
diff --git a/launch/api_client/model/model_bundle_environment_params.pyi b/launch/openapi_client/model/model_bundle_environment_params.pyi
similarity index 100%
rename from launch/api_client/model/model_bundle_environment_params.pyi
rename to launch/openapi_client/model/model_bundle_environment_params.pyi
diff --git a/launch/api_client/model/model_bundle_framework.py b/launch/openapi_client/model/model_bundle_framework.py
similarity index 95%
rename from launch/api_client/model/model_bundle_framework.py
rename to launch/openapi_client/model/model_bundle_framework.py
index 66597ae1..5868a7a3 100644
--- a/launch/api_client/model/model_bundle_framework.py
+++ b/launch/openapi_client/model/model_bundle_framework.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class ModelBundleFramework(schemas.EnumBase, schemas.StrSchema):
diff --git a/launch/api_client/model/model_bundle_framework.pyi b/launch/openapi_client/model/model_bundle_framework.pyi
similarity index 100%
rename from launch/api_client/model/model_bundle_framework.pyi
rename to launch/openapi_client/model/model_bundle_framework.pyi
diff --git a/launch/api_client/model/model_bundle_order_by.py b/launch/openapi_client/model/model_bundle_order_by.py
similarity index 95%
rename from launch/api_client/model/model_bundle_order_by.py
rename to launch/openapi_client/model/model_bundle_order_by.py
index 987734ac..58a5683c 100644
--- a/launch/api_client/model/model_bundle_order_by.py
+++ b/launch/openapi_client/model/model_bundle_order_by.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class ModelBundleOrderBy(schemas.EnumBase, schemas.StrSchema):
diff --git a/launch/api_client/model/model_bundle_order_by.pyi b/launch/openapi_client/model/model_bundle_order_by.pyi
similarity index 100%
rename from launch/api_client/model/model_bundle_order_by.pyi
rename to launch/openapi_client/model/model_bundle_order_by.pyi
diff --git a/launch/api_client/model/model_bundle_packaging_type.py b/launch/openapi_client/model/model_bundle_packaging_type.py
similarity index 95%
rename from launch/api_client/model/model_bundle_packaging_type.py
rename to launch/openapi_client/model/model_bundle_packaging_type.py
index a36eab11..d4fba457 100644
--- a/launch/api_client/model/model_bundle_packaging_type.py
+++ b/launch/openapi_client/model/model_bundle_packaging_type.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class ModelBundlePackagingType(schemas.EnumBase, schemas.StrSchema):
diff --git a/launch/api_client/model/model_bundle_packaging_type.pyi b/launch/openapi_client/model/model_bundle_packaging_type.pyi
similarity index 100%
rename from launch/api_client/model/model_bundle_packaging_type.pyi
rename to launch/openapi_client/model/model_bundle_packaging_type.pyi
diff --git a/launch/api_client/model/model_bundle_response.py b/launch/openapi_client/model/model_bundle_response.py
similarity index 98%
rename from launch/api_client/model/model_bundle_response.py
rename to launch/openapi_client/model/model_bundle_response.py
index bd33d7f1..3027a84d 100644
--- a/launch/api_client/model/model_bundle_response.py
+++ b/launch/openapi_client/model/model_bundle_response.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class ModelBundleResponse(schemas.DictSchema):
@@ -381,9 +381,9 @@ def __new__(
         )
 
 
-from launch.api_client.model.model_bundle_environment_params import (
+from launch.openapi_client.model.model_bundle_environment_params import (
     ModelBundleEnvironmentParams,
 )
-from launch.api_client.model.model_bundle_packaging_type import (
+from launch.openapi_client.model.model_bundle_packaging_type import (
     ModelBundlePackagingType,
 )
diff --git a/launch/api_client/model/model_bundle_response.pyi b/launch/openapi_client/model/model_bundle_response.pyi
similarity index 100%
rename from launch/api_client/model/model_bundle_response.pyi
rename to launch/openapi_client/model/model_bundle_response.pyi
diff --git a/launch/api_client/model/model_endpoint_deployment_state.py b/launch/openapi_client/model/model_endpoint_deployment_state.py
similarity index 99%
rename from launch/api_client/model/model_endpoint_deployment_state.py
rename to launch/openapi_client/model/model_endpoint_deployment_state.py
index 076e6dd5..99ec97f3 100644
--- a/launch/api_client/model/model_endpoint_deployment_state.py
+++ b/launch/openapi_client/model/model_endpoint_deployment_state.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class ModelEndpointDeploymentState(schemas.DictSchema):
diff --git a/launch/api_client/model/model_endpoint_deployment_state.pyi b/launch/openapi_client/model/model_endpoint_deployment_state.pyi
similarity index 100%
rename from launch/api_client/model/model_endpoint_deployment_state.pyi
rename to launch/openapi_client/model/model_endpoint_deployment_state.pyi
diff --git a/launch/api_client/model/model_endpoint_order_by.py b/launch/openapi_client/model/model_endpoint_order_by.py
similarity index 95%
rename from launch/api_client/model/model_endpoint_order_by.py
rename to launch/openapi_client/model/model_endpoint_order_by.py
index e2eb86a1..07a304f2 100644
--- a/launch/api_client/model/model_endpoint_order_by.py
+++ b/launch/openapi_client/model/model_endpoint_order_by.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class ModelEndpointOrderBy(schemas.EnumBase, schemas.StrSchema):
diff --git a/launch/api_client/model/model_endpoint_order_by.pyi b/launch/openapi_client/model/model_endpoint_order_by.pyi
similarity index 100%
rename from launch/api_client/model/model_endpoint_order_by.pyi
rename to launch/openapi_client/model/model_endpoint_order_by.pyi
diff --git a/launch/api_client/model/model_endpoint_resource_state.py b/launch/openapi_client/model/model_endpoint_resource_state.py
similarity index 99%
rename from launch/api_client/model/model_endpoint_resource_state.py
rename to launch/openapi_client/model/model_endpoint_resource_state.py
index 32843450..4c9ba848 100644
--- a/launch/api_client/model/model_endpoint_resource_state.py
+++ b/launch/openapi_client/model/model_endpoint_resource_state.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class ModelEndpointResourceState(schemas.DictSchema):
@@ -466,4 +466,4 @@ def __new__(
         )
 
 
-from launch.api_client.model.gpu_type import GpuType
+from launch.openapi_client.model.gpu_type import GpuType
diff --git a/launch/api_client/model/model_endpoint_resource_state.pyi b/launch/openapi_client/model/model_endpoint_resource_state.pyi
similarity index 100%
rename from launch/api_client/model/model_endpoint_resource_state.pyi
rename to launch/openapi_client/model/model_endpoint_resource_state.pyi
diff --git a/launch/api_client/model/model_endpoint_status.py b/launch/openapi_client/model/model_endpoint_status.py
similarity index 96%
rename from launch/api_client/model/model_endpoint_status.py
rename to launch/openapi_client/model/model_endpoint_status.py
index 8703e65a..96943f08 100644
--- a/launch/api_client/model/model_endpoint_status.py
+++ b/launch/openapi_client/model/model_endpoint_status.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class ModelEndpointStatus(schemas.EnumBase, schemas.StrSchema):
diff --git a/launch/api_client/model/model_endpoint_status.pyi b/launch/openapi_client/model/model_endpoint_status.pyi
similarity index 100%
rename from launch/api_client/model/model_endpoint_status.pyi
rename to launch/openapi_client/model/model_endpoint_status.pyi
diff --git a/launch/api_client/model/model_endpoint_type.py b/launch/openapi_client/model/model_endpoint_type.py
similarity index 94%
rename from launch/api_client/model/model_endpoint_type.py
rename to launch/openapi_client/model/model_endpoint_type.py
index 57c909ed..a24ce4be 100644
--- a/launch/api_client/model/model_endpoint_type.py
+++ b/launch/openapi_client/model/model_endpoint_type.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class ModelEndpointType(schemas.EnumBase, schemas.StrSchema):
diff --git a/launch/api_client/model/model_endpoint_type.pyi b/launch/openapi_client/model/model_endpoint_type.pyi
similarity index 100%
rename from launch/api_client/model/model_endpoint_type.pyi
rename to launch/openapi_client/model/model_endpoint_type.pyi
diff --git a/launch/api_client/model/request_schema.py b/launch/openapi_client/model/request_schema.py
similarity index 91%
rename from launch/api_client/model/request_schema.py
rename to launch/openapi_client/model/request_schema.py
index 48c12333..b067de32 100644
--- a/launch/api_client/model/request_schema.py
+++ b/launch/openapi_client/model/request_schema.py
@@ -20,6 +20,6 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 RequestSchema = schemas.AnyTypeSchema
diff --git a/launch/api_client/model/request_schema.pyi b/launch/openapi_client/model/request_schema.pyi
similarity index 100%
rename from launch/api_client/model/request_schema.pyi
rename to launch/openapi_client/model/request_schema.pyi
diff --git a/launch/api_client/model/response_schema.py b/launch/openapi_client/model/response_schema.py
similarity index 91%
rename from launch/api_client/model/response_schema.py
rename to launch/openapi_client/model/response_schema.py
index ac2a8608..8ff8d999 100644
--- a/launch/api_client/model/response_schema.py
+++ b/launch/openapi_client/model/response_schema.py
@@ -20,6 +20,6 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 ResponseSchema = schemas.AnyTypeSchema
diff --git a/launch/api_client/model/response_schema.pyi b/launch/openapi_client/model/response_schema.pyi
similarity index 100%
rename from launch/api_client/model/response_schema.pyi
rename to launch/openapi_client/model/response_schema.pyi
diff --git a/launch/api_client/model/sync_endpoint_predict_response.py b/launch/openapi_client/model/sync_endpoint_predict_response.py
similarity index 97%
rename from launch/api_client/model/sync_endpoint_predict_response.py
rename to launch/openapi_client/model/sync_endpoint_predict_response.py
index 90dd7e21..2e214848 100644
--- a/launch/api_client/model/sync_endpoint_predict_response.py
+++ b/launch/openapi_client/model/sync_endpoint_predict_response.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class SyncEndpointPredictResponse(schemas.DictSchema):
@@ -170,4 +170,4 @@ def __new__(
         )
 
 
-from launch.api_client.model.task_status import TaskStatus
+from launch.openapi_client.model.task_status import TaskStatus
diff --git a/launch/api_client/model/sync_endpoint_predict_response.pyi b/launch/openapi_client/model/sync_endpoint_predict_response.pyi
similarity index 100%
rename from launch/api_client/model/sync_endpoint_predict_response.pyi
rename to launch/openapi_client/model/sync_endpoint_predict_response.pyi
diff --git a/launch/api_client/model/task_status.py b/launch/openapi_client/model/task_status.py
similarity index 96%
rename from launch/api_client/model/task_status.py
rename to launch/openapi_client/model/task_status.py
index 9693fad1..a6c86a4f 100644
--- a/launch/api_client/model/task_status.py
+++ b/launch/openapi_client/model/task_status.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class TaskStatus(schemas.EnumBase, schemas.StrSchema):
diff --git a/launch/api_client/model/task_status.pyi b/launch/openapi_client/model/task_status.pyi
similarity index 100%
rename from launch/api_client/model/task_status.pyi
rename to launch/openapi_client/model/task_status.pyi
diff --git a/launch/api_client/model/update_batch_job_request.py b/launch/openapi_client/model/update_batch_job_request.py
similarity index 98%
rename from launch/api_client/model/update_batch_job_request.py
rename to launch/openapi_client/model/update_batch_job_request.py
index ecf257dd..13b4300f 100644
--- a/launch/api_client/model/update_batch_job_request.py
+++ b/launch/openapi_client/model/update_batch_job_request.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class UpdateBatchJobRequest(schemas.DictSchema):
diff --git a/launch/api_client/model/update_batch_job_request.pyi b/launch/openapi_client/model/update_batch_job_request.pyi
similarity index 100%
rename from launch/api_client/model/update_batch_job_request.pyi
rename to launch/openapi_client/model/update_batch_job_request.pyi
diff --git a/launch/api_client/model/update_batch_job_response.py b/launch/openapi_client/model/update_batch_job_response.py
similarity index 98%
rename from launch/api_client/model/update_batch_job_response.py
rename to launch/openapi_client/model/update_batch_job_response.py
index 7230339e..0bf84145 100644
--- a/launch/api_client/model/update_batch_job_response.py
+++ b/launch/openapi_client/model/update_batch_job_response.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class UpdateBatchJobResponse(schemas.DictSchema):
diff --git a/launch/api_client/model/update_batch_job_response.pyi b/launch/openapi_client/model/update_batch_job_response.pyi
similarity index 100%
rename from launch/api_client/model/update_batch_job_response.pyi
rename to launch/openapi_client/model/update_batch_job_response.pyi
diff --git a/launch/api_client/model/update_model_endpoint_request.py b/launch/openapi_client/model/update_model_endpoint_request.py
similarity index 99%
rename from launch/api_client/model/update_model_endpoint_request.py
rename to launch/openapi_client/model/update_model_endpoint_request.py
index 484c135e..8440d694 100644
--- a/launch/api_client/model/update_model_endpoint_request.py
+++ b/launch/openapi_client/model/update_model_endpoint_request.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class UpdateModelEndpointRequest(schemas.DictSchema):
@@ -767,4 +767,4 @@ def __new__(
         )
 
 
-from launch.api_client.model.gpu_type import GpuType
+from launch.openapi_client.model.gpu_type import GpuType
diff --git a/launch/api_client/model/update_model_endpoint_request.pyi b/launch/openapi_client/model/update_model_endpoint_request.pyi
similarity index 100%
rename from launch/api_client/model/update_model_endpoint_request.pyi
rename to launch/openapi_client/model/update_model_endpoint_request.pyi
diff --git a/launch/api_client/model/update_model_endpoint_response.py b/launch/openapi_client/model/update_model_endpoint_response.py
similarity index 98%
rename from launch/api_client/model/update_model_endpoint_response.py
rename to launch/openapi_client/model/update_model_endpoint_response.py
index 162a8cc6..36e121c0 100644
--- a/launch/api_client/model/update_model_endpoint_response.py
+++ b/launch/openapi_client/model/update_model_endpoint_response.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class UpdateModelEndpointResponse(schemas.DictSchema):
diff --git a/launch/api_client/model/update_model_endpoint_response.pyi b/launch/openapi_client/model/update_model_endpoint_response.pyi
similarity index 100%
rename from launch/api_client/model/update_model_endpoint_response.pyi
rename to launch/openapi_client/model/update_model_endpoint_response.pyi
diff --git a/launch/api_client/model/validation_error.py b/launch/openapi_client/model/validation_error.py
similarity index 99%
rename from launch/api_client/model/validation_error.py
rename to launch/openapi_client/model/validation_error.py
index 3b7ad840..4d8f61fb 100644
--- a/launch/api_client/model/validation_error.py
+++ b/launch/openapi_client/model/validation_error.py
@@ -20,7 +20,7 @@
 import frozendict  # noqa: F401
 import typing_extensions  # noqa: F401
 
-from launch.api_client import schemas  # noqa: F401
+from launch.openapi_client import schemas  # noqa: F401
 
 
 class ValidationError(schemas.DictSchema):
diff --git a/launch/api_client/model/validation_error.pyi b/launch/openapi_client/model/validation_error.pyi
similarity index 100%
rename from launch/api_client/model/validation_error.pyi
rename to launch/openapi_client/model/validation_error.pyi
diff --git a/launch/openapi_client/models/__init__.py b/launch/openapi_client/models/__init__.py
new file mode 100644
index 00000000..65f4e7cd
--- /dev/null
+++ b/launch/openapi_client/models/__init__.py
@@ -0,0 +1,116 @@
+# coding: utf-8
+
+# flake8: noqa
+
+# import all models into this package
+# if you have many models here with many references from one model to another this may
+# raise a RecursionError
+# to avoid this, import only the models that you directly need like:
+# from launch.openapi_client.model.pet import Pet
+# or import this package, but before doing it, use:
+# import sys
+# sys.setrecursionlimit(n)
+
+from launch.openapi_client.model.batch_job_serialization_format import (
+    BatchJobSerializationFormat,
+)
+from launch.openapi_client.model.batch_job_status import BatchJobStatus
+from launch.openapi_client.model.clone_model_bundle_request import (
+    CloneModelBundleRequest,
+)
+from launch.openapi_client.model.create_async_task_response import (
+    CreateAsyncTaskResponse,
+)
+from launch.openapi_client.model.create_batch_job_request import (
+    CreateBatchJobRequest,
+)
+from launch.openapi_client.model.create_batch_job_resource_requests import (
+    CreateBatchJobResourceRequests,
+)
+from launch.openapi_client.model.create_batch_job_response import (
+    CreateBatchJobResponse,
+)
+from launch.openapi_client.model.create_model_bundle_request import (
+    CreateModelBundleRequest,
+)
+from launch.openapi_client.model.create_model_bundle_response import (
+    CreateModelBundleResponse,
+)
+from launch.openapi_client.model.create_model_endpoint_request import (
+    CreateModelEndpointRequest,
+)
+from launch.openapi_client.model.create_model_endpoint_response import (
+    CreateModelEndpointResponse,
+)
+from launch.openapi_client.model.delete_model_endpoint_response import (
+    DeleteModelEndpointResponse,
+)
+from launch.openapi_client.model.endpoint_predict_request import (
+    EndpointPredictRequest,
+)
+from launch.openapi_client.model.get_async_task_response import (
+    GetAsyncTaskResponse,
+)
+from launch.openapi_client.model.get_batch_job_response import (
+    GetBatchJobResponse,
+)
+from launch.openapi_client.model.get_model_endpoint_response import (
+    GetModelEndpointResponse,
+)
+from launch.openapi_client.model.gpu_type import GpuType
+from launch.openapi_client.model.http_validation_error import (
+    HTTPValidationError,
+)
+from launch.openapi_client.model.list_model_bundles_response import (
+    ListModelBundlesResponse,
+)
+from launch.openapi_client.model.list_model_endpoints_response import (
+    ListModelEndpointsResponse,
+)
+from launch.openapi_client.model.model_bundle_environment_params import (
+    ModelBundleEnvironmentParams,
+)
+from launch.openapi_client.model.model_bundle_framework import (
+    ModelBundleFramework,
+)
+from launch.openapi_client.model.model_bundle_order_by import (
+    ModelBundleOrderBy,
+)
+from launch.openapi_client.model.model_bundle_packaging_type import (
+    ModelBundlePackagingType,
+)
+from launch.openapi_client.model.model_bundle_response import (
+    ModelBundleResponse,
+)
+from launch.openapi_client.model.model_endpoint_deployment_state import (
+    ModelEndpointDeploymentState,
+)
+from launch.openapi_client.model.model_endpoint_order_by import (
+    ModelEndpointOrderBy,
+)
+from launch.openapi_client.model.model_endpoint_resource_state import (
+    ModelEndpointResourceState,
+)
+from launch.openapi_client.model.model_endpoint_status import (
+    ModelEndpointStatus,
+)
+from launch.openapi_client.model.model_endpoint_type import ModelEndpointType
+from launch.openapi_client.model.request_schema import RequestSchema
+from launch.openapi_client.model.response_schema import ResponseSchema
+from launch.openapi_client.model.sync_endpoint_predict_response import (
+    SyncEndpointPredictResponse,
+)
+from launch.openapi_client.model.task_status import TaskStatus
+from launch.openapi_client.model.update_batch_job_request import (
+    UpdateBatchJobRequest,
+)
+from launch.openapi_client.model.update_batch_job_response import (
+    UpdateBatchJobResponse,
+)
+from launch.openapi_client.model.update_model_endpoint_request import (
+    UpdateModelEndpointRequest,
+)
+from launch.openapi_client.model.update_model_endpoint_response import (
+    UpdateModelEndpointResponse,
+)
+from launch.openapi_client.model.validation_error import ValidationError
diff --git a/launch/api_client/paths/__init__.py b/launch/openapi_client/paths/__init__.py
similarity index 94%
rename from launch/api_client/paths/__init__.py
rename to launch/openapi_client/paths/__init__.py
index d7e772a6..2fa6bba2 100644
--- a/launch/api_client/paths/__init__.py
+++ b/launch/openapi_client/paths/__init__.py
@@ -1,6 +1,6 @@
 # do not import all endpoints into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all endpoints from this module, import them with
-# from launch.api_client.apis.path_to_api import path_to_api
+# from launch.openapi_client.apis.path_to_api import path_to_api
 
 import enum
 
diff --git a/launch/api_client/paths/healthcheck/__init__.py b/launch/openapi_client/paths/healthcheck/__init__.py
similarity index 66%
rename from launch/api_client/paths/healthcheck/__init__.py
rename to launch/openapi_client/paths/healthcheck/__init__.py
index ae6dcb46..b6b3e4d8 100644
--- a/launch/api_client/paths/healthcheck/__init__.py
+++ b/launch/openapi_client/paths/healthcheck/__init__.py
@@ -1,7 +1,7 @@
 # do not import all endpoints into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all endpoints from this module, import them with
-# from launch.api_client.paths.healthcheck import Api
+# from launch.openapi_client.paths.healthcheck import Api
 
-from launch.api_client.paths import PathValues
+from launch.openapi_client.paths import PathValues
 
 path = PathValues.HEALTHCHECK
diff --git a/launch/api_client/paths/healthcheck/get.py b/launch/openapi_client/paths/healthcheck/get.py
similarity index 98%
rename from launch/api_client/paths/healthcheck/get.py
rename to launch/openapi_client/paths/healthcheck/get.py
index 8b959525..2beb39c1 100644
--- a/launch/api_client/paths/healthcheck/get.py
+++ b/launch/openapi_client/paths/healthcheck/get.py
@@ -20,8 +20,8 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
 
 from . import path
 
diff --git a/launch/api_client/paths/healthcheck/get.pyi b/launch/openapi_client/paths/healthcheck/get.pyi
similarity index 100%
rename from launch/api_client/paths/healthcheck/get.pyi
rename to launch/openapi_client/paths/healthcheck/get.pyi
diff --git a/launch/api_client/paths/healthz/__init__.py b/launch/openapi_client/paths/healthz/__init__.py
similarity index 66%
rename from launch/api_client/paths/healthz/__init__.py
rename to launch/openapi_client/paths/healthz/__init__.py
index 3253e712..a55b8737 100644
--- a/launch/api_client/paths/healthz/__init__.py
+++ b/launch/openapi_client/paths/healthz/__init__.py
@@ -1,7 +1,7 @@
 # do not import all endpoints into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all endpoints from this module, import them with
-# from launch.api_client.paths.healthz import Api
+# from launch.openapi_client.paths.healthz import Api
 
-from launch.api_client.paths import PathValues
+from launch.openapi_client.paths import PathValues
 
 path = PathValues.HEALTHZ
diff --git a/launch/api_client/paths/healthz/get.py b/launch/openapi_client/paths/healthz/get.py
similarity index 98%
rename from launch/api_client/paths/healthz/get.py
rename to launch/openapi_client/paths/healthz/get.py
index 0faead57..1209b307 100644
--- a/launch/api_client/paths/healthz/get.py
+++ b/launch/openapi_client/paths/healthz/get.py
@@ -20,8 +20,8 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
 
 from . import path
 
diff --git a/launch/api_client/paths/healthz/get.pyi b/launch/openapi_client/paths/healthz/get.pyi
similarity index 100%
rename from launch/api_client/paths/healthz/get.pyi
rename to launch/openapi_client/paths/healthz/get.pyi
diff --git a/launch/api_client/paths/readyz/__init__.py b/launch/openapi_client/paths/readyz/__init__.py
similarity index 66%
rename from launch/api_client/paths/readyz/__init__.py
rename to launch/openapi_client/paths/readyz/__init__.py
index 9b49ccf0..3d2d432b 100644
--- a/launch/api_client/paths/readyz/__init__.py
+++ b/launch/openapi_client/paths/readyz/__init__.py
@@ -1,7 +1,7 @@
 # do not import all endpoints into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all endpoints from this module, import them with
-# from launch.api_client.paths.readyz import Api
+# from launch.openapi_client.paths.readyz import Api
 
-from launch.api_client.paths import PathValues
+from launch.openapi_client.paths import PathValues
 
 path = PathValues.READYZ
diff --git a/launch/api_client/paths/readyz/get.py b/launch/openapi_client/paths/readyz/get.py
similarity index 98%
rename from launch/api_client/paths/readyz/get.py
rename to launch/openapi_client/paths/readyz/get.py
index 548ecabd..3a897590 100644
--- a/launch/api_client/paths/readyz/get.py
+++ b/launch/openapi_client/paths/readyz/get.py
@@ -20,8 +20,8 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
 
 from . import path
 
diff --git a/launch/api_client/paths/readyz/get.pyi b/launch/openapi_client/paths/readyz/get.pyi
similarity index 100%
rename from launch/api_client/paths/readyz/get.pyi
rename to launch/openapi_client/paths/readyz/get.pyi
diff --git a/launch/api_client/paths/v1_async_tasks/__init__.py b/launch/openapi_client/paths/v1_async_tasks/__init__.py
similarity index 65%
rename from launch/api_client/paths/v1_async_tasks/__init__.py
rename to launch/openapi_client/paths/v1_async_tasks/__init__.py
index aafe0cc4..f03bddc9 100644
--- a/launch/api_client/paths/v1_async_tasks/__init__.py
+++ b/launch/openapi_client/paths/v1_async_tasks/__init__.py
@@ -1,7 +1,7 @@
 # do not import all endpoints into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all endpoints from this module, import them with
-# from launch.api_client.paths.v1_async_tasks import Api
+# from launch.openapi_client.paths.v1_async_tasks import Api
 
-from launch.api_client.paths import PathValues
+from launch.openapi_client.paths import PathValues
 
 path = PathValues.V1_ASYNCTASKS
diff --git a/launch/api_client/paths/v1_async_tasks/post.py b/launch/openapi_client/paths/v1_async_tasks/post.py
similarity index 97%
rename from launch/api_client/paths/v1_async_tasks/post.py
rename to launch/openapi_client/paths/v1_async_tasks/post.py
index 7a4d2a1c..051630a5 100644
--- a/launch/api_client/paths/v1_async_tasks/post.py
+++ b/launch/openapi_client/paths/v1_async_tasks/post.py
@@ -20,15 +20,17 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
-from launch.api_client.model.create_async_task_response import (
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
+from launch.openapi_client.model.create_async_task_response import (
     CreateAsyncTaskResponse,
 )
-from launch.api_client.model.endpoint_predict_request import (
+from launch.openapi_client.model.endpoint_predict_request import (
     EndpointPredictRequest,
 )
-from launch.api_client.model.http_validation_error import HTTPValidationError
+from launch.openapi_client.model.http_validation_error import (
+    HTTPValidationError,
+)
 
 from . import path
 
diff --git a/launch/api_client/paths/v1_async_tasks/post.pyi b/launch/openapi_client/paths/v1_async_tasks/post.pyi
similarity index 100%
rename from launch/api_client/paths/v1_async_tasks/post.pyi
rename to launch/openapi_client/paths/v1_async_tasks/post.pyi
diff --git a/launch/api_client/paths/v1_async_tasks_task_id/__init__.py b/launch/openapi_client/paths/v1_async_tasks_task_id/__init__.py
similarity index 65%
rename from launch/api_client/paths/v1_async_tasks_task_id/__init__.py
rename to launch/openapi_client/paths/v1_async_tasks_task_id/__init__.py
index 21c8a0fc..d0d72b88 100644
--- a/launch/api_client/paths/v1_async_tasks_task_id/__init__.py
+++ b/launch/openapi_client/paths/v1_async_tasks_task_id/__init__.py
@@ -1,7 +1,7 @@
 # do not import all endpoints into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all endpoints from this module, import them with
-# from launch.api_client.paths.v1_async_tasks_task_id import Api
+# from launch.openapi_client.paths.v1_async_tasks_task_id import Api
 
-from launch.api_client.paths import PathValues
+from launch.openapi_client.paths import PathValues
 
 path = PathValues.V1_ASYNCTASKS_TASK_ID
diff --git a/launch/api_client/paths/v1_async_tasks_task_id/get.py b/launch/openapi_client/paths/v1_async_tasks_task_id/get.py
similarity index 97%
rename from launch/api_client/paths/v1_async_tasks_task_id/get.py
rename to launch/openapi_client/paths/v1_async_tasks_task_id/get.py
index 1faa81d9..97eb47c9 100644
--- a/launch/api_client/paths/v1_async_tasks_task_id/get.py
+++ b/launch/openapi_client/paths/v1_async_tasks_task_id/get.py
@@ -20,12 +20,14 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
-from launch.api_client.model.get_async_task_response import (
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
+from launch.openapi_client.model.get_async_task_response import (
     GetAsyncTaskResponse,
 )
-from launch.api_client.model.http_validation_error import HTTPValidationError
+from launch.openapi_client.model.http_validation_error import (
+    HTTPValidationError,
+)
 
 from . import path
 
diff --git a/launch/api_client/paths/v1_async_tasks_task_id/get.pyi b/launch/openapi_client/paths/v1_async_tasks_task_id/get.pyi
similarity index 100%
rename from launch/api_client/paths/v1_async_tasks_task_id/get.pyi
rename to launch/openapi_client/paths/v1_async_tasks_task_id/get.pyi
diff --git a/launch/api_client/paths/v1_batch_jobs/__init__.py b/launch/openapi_client/paths/v1_batch_jobs/__init__.py
similarity index 66%
rename from launch/api_client/paths/v1_batch_jobs/__init__.py
rename to launch/openapi_client/paths/v1_batch_jobs/__init__.py
index 3d5cef58..09ea71dd 100644
--- a/launch/api_client/paths/v1_batch_jobs/__init__.py
+++ b/launch/openapi_client/paths/v1_batch_jobs/__init__.py
@@ -1,7 +1,7 @@
 # do not import all endpoints into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all endpoints from this module, import them with
-# from launch.api_client.paths.v1_batch_jobs import Api
+# from launch.openapi_client.paths.v1_batch_jobs import Api
 
-from launch.api_client.paths import PathValues
+from launch.openapi_client.paths import PathValues
 
 path = PathValues.V1_BATCHJOBS
diff --git a/launch/api_client/paths/v1_batch_jobs/post.py b/launch/openapi_client/paths/v1_batch_jobs/post.py
similarity index 97%
rename from launch/api_client/paths/v1_batch_jobs/post.py
rename to launch/openapi_client/paths/v1_batch_jobs/post.py
index e23cb7c8..914abb78 100644
--- a/launch/api_client/paths/v1_batch_jobs/post.py
+++ b/launch/openapi_client/paths/v1_batch_jobs/post.py
@@ -20,15 +20,17 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
-from launch.api_client.model.create_batch_job_request import (
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
+from launch.openapi_client.model.create_batch_job_request import (
     CreateBatchJobRequest,
 )
-from launch.api_client.model.create_batch_job_response import (
+from launch.openapi_client.model.create_batch_job_response import (
     CreateBatchJobResponse,
 )
-from launch.api_client.model.http_validation_error import HTTPValidationError
+from launch.openapi_client.model.http_validation_error import (
+    HTTPValidationError,
+)
 
 from . import path
 
diff --git a/launch/api_client/paths/v1_batch_jobs/post.pyi b/launch/openapi_client/paths/v1_batch_jobs/post.pyi
similarity index 100%
rename from launch/api_client/paths/v1_batch_jobs/post.pyi
rename to launch/openapi_client/paths/v1_batch_jobs/post.pyi
diff --git a/launch/api_client/paths/v1_batch_jobs_batch_job_id/__init__.py b/launch/openapi_client/paths/v1_batch_jobs_batch_job_id/__init__.py
similarity index 64%
rename from launch/api_client/paths/v1_batch_jobs_batch_job_id/__init__.py
rename to launch/openapi_client/paths/v1_batch_jobs_batch_job_id/__init__.py
index 3fa32eda..236a4a73 100644
--- a/launch/api_client/paths/v1_batch_jobs_batch_job_id/__init__.py
+++ b/launch/openapi_client/paths/v1_batch_jobs_batch_job_id/__init__.py
@@ -1,7 +1,7 @@
 # do not import all endpoints into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all endpoints from this module, import them with
-# from launch.api_client.paths.v1_batch_jobs_batch_job_id import Api
+# from launch.openapi_client.paths.v1_batch_jobs_batch_job_id import Api
 
-from launch.api_client.paths import PathValues
+from launch.openapi_client.paths import PathValues
 
 path = PathValues.V1_BATCHJOBS_BATCH_JOB_ID
diff --git a/launch/api_client/paths/v1_batch_jobs_batch_job_id/get.py b/launch/openapi_client/paths/v1_batch_jobs_batch_job_id/get.py
similarity index 97%
rename from launch/api_client/paths/v1_batch_jobs_batch_job_id/get.py
rename to launch/openapi_client/paths/v1_batch_jobs_batch_job_id/get.py
index 716c6706..0cd2f9c4 100644
--- a/launch/api_client/paths/v1_batch_jobs_batch_job_id/get.py
+++ b/launch/openapi_client/paths/v1_batch_jobs_batch_job_id/get.py
@@ -20,10 +20,14 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
-from launch.api_client.model.get_batch_job_response import GetBatchJobResponse
-from launch.api_client.model.http_validation_error import HTTPValidationError
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
+from launch.openapi_client.model.get_batch_job_response import (
+    GetBatchJobResponse,
+)
+from launch.openapi_client.model.http_validation_error import (
+    HTTPValidationError,
+)
 
 from . import path
 
diff --git a/launch/api_client/paths/v1_batch_jobs_batch_job_id/get.pyi b/launch/openapi_client/paths/v1_batch_jobs_batch_job_id/get.pyi
similarity index 100%
rename from launch/api_client/paths/v1_batch_jobs_batch_job_id/get.pyi
rename to launch/openapi_client/paths/v1_batch_jobs_batch_job_id/get.pyi
diff --git a/launch/api_client/paths/v1_batch_jobs_batch_job_id/put.py b/launch/openapi_client/paths/v1_batch_jobs_batch_job_id/put.py
similarity index 97%
rename from launch/api_client/paths/v1_batch_jobs_batch_job_id/put.py
rename to launch/openapi_client/paths/v1_batch_jobs_batch_job_id/put.py
index 5c33fbb0..02d44177 100644
--- a/launch/api_client/paths/v1_batch_jobs_batch_job_id/put.py
+++ b/launch/openapi_client/paths/v1_batch_jobs_batch_job_id/put.py
@@ -20,13 +20,15 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
-from launch.api_client.model.http_validation_error import HTTPValidationError
-from launch.api_client.model.update_batch_job_request import (
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
+from launch.openapi_client.model.http_validation_error import (
+    HTTPValidationError,
+)
+from launch.openapi_client.model.update_batch_job_request import (
     UpdateBatchJobRequest,
 )
-from launch.api_client.model.update_batch_job_response import (
+from launch.openapi_client.model.update_batch_job_response import (
     UpdateBatchJobResponse,
 )
 
diff --git a/launch/api_client/paths/v1_batch_jobs_batch_job_id/put.pyi b/launch/openapi_client/paths/v1_batch_jobs_batch_job_id/put.pyi
similarity index 100%
rename from launch/api_client/paths/v1_batch_jobs_batch_job_id/put.pyi
rename to launch/openapi_client/paths/v1_batch_jobs_batch_job_id/put.pyi
diff --git a/launch/api_client/paths/v1_model_bundles/__init__.py b/launch/openapi_client/paths/v1_model_bundles/__init__.py
similarity index 65%
rename from launch/api_client/paths/v1_model_bundles/__init__.py
rename to launch/openapi_client/paths/v1_model_bundles/__init__.py
index 46bf5ae2..bdf1ad22 100644
--- a/launch/api_client/paths/v1_model_bundles/__init__.py
+++ b/launch/openapi_client/paths/v1_model_bundles/__init__.py
@@ -1,7 +1,7 @@
 # do not import all endpoints into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all endpoints from this module, import them with
-# from launch.api_client.paths.v1_model_bundles import Api
+# from launch.openapi_client.paths.v1_model_bundles import Api
 
-from launch.api_client.paths import PathValues
+from launch.openapi_client.paths import PathValues
 
 path = PathValues.V1_MODELBUNDLES
diff --git a/launch/api_client/paths/v1_model_bundles/get.py b/launch/openapi_client/paths/v1_model_bundles/get.py
similarity index 96%
rename from launch/api_client/paths/v1_model_bundles/get.py
rename to launch/openapi_client/paths/v1_model_bundles/get.py
index e917842b..5ec1a307 100644
--- a/launch/api_client/paths/v1_model_bundles/get.py
+++ b/launch/openapi_client/paths/v1_model_bundles/get.py
@@ -20,13 +20,17 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
-from launch.api_client.model.http_validation_error import HTTPValidationError
-from launch.api_client.model.list_model_bundles_response import (
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
+from launch.openapi_client.model.http_validation_error import (
+    HTTPValidationError,
+)
+from launch.openapi_client.model.list_model_bundles_response import (
     ListModelBundlesResponse,
 )
-from launch.api_client.model.model_bundle_order_by import ModelBundleOrderBy
+from launch.openapi_client.model.model_bundle_order_by import (
+    ModelBundleOrderBy,
+)
 
 from . import path
 
diff --git a/launch/api_client/paths/v1_model_bundles/get.pyi b/launch/openapi_client/paths/v1_model_bundles/get.pyi
similarity index 100%
rename from launch/api_client/paths/v1_model_bundles/get.pyi
rename to launch/openapi_client/paths/v1_model_bundles/get.pyi
diff --git a/launch/api_client/paths/v1_model_bundles/post.py b/launch/openapi_client/paths/v1_model_bundles/post.py
similarity index 97%
rename from launch/api_client/paths/v1_model_bundles/post.py
rename to launch/openapi_client/paths/v1_model_bundles/post.py
index 4ff01f10..4b755863 100644
--- a/launch/api_client/paths/v1_model_bundles/post.py
+++ b/launch/openapi_client/paths/v1_model_bundles/post.py
@@ -20,15 +20,17 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
-from launch.api_client.model.create_model_bundle_request import (
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
+from launch.openapi_client.model.create_model_bundle_request import (
     CreateModelBundleRequest,
 )
-from launch.api_client.model.create_model_bundle_response import (
+from launch.openapi_client.model.create_model_bundle_response import (
     CreateModelBundleResponse,
 )
-from launch.api_client.model.http_validation_error import HTTPValidationError
+from launch.openapi_client.model.http_validation_error import (
+    HTTPValidationError,
+)
 
 from . import path
 
diff --git a/launch/api_client/paths/v1_model_bundles/post.pyi b/launch/openapi_client/paths/v1_model_bundles/post.pyi
similarity index 100%
rename from launch/api_client/paths/v1_model_bundles/post.pyi
rename to launch/openapi_client/paths/v1_model_bundles/post.pyi
diff --git a/launch/api_client/paths/v1_model_bundles_clone_with_changes/__init__.py b/launch/openapi_client/paths/v1_model_bundles_clone_with_changes/__init__.py
similarity index 63%
rename from launch/api_client/paths/v1_model_bundles_clone_with_changes/__init__.py
rename to launch/openapi_client/paths/v1_model_bundles_clone_with_changes/__init__.py
index 35511bd5..86365dfd 100644
--- a/launch/api_client/paths/v1_model_bundles_clone_with_changes/__init__.py
+++ b/launch/openapi_client/paths/v1_model_bundles_clone_with_changes/__init__.py
@@ -1,7 +1,7 @@
 # do not import all endpoints into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all endpoints from this module, import them with
-# from launch.api_client.paths.v1_model_bundles_clone_with_changes import Api
+# from launch.openapi_client.paths.v1_model_bundles_clone_with_changes import Api
 
-from launch.api_client.paths import PathValues
+from launch.openapi_client.paths import PathValues
 
 path = PathValues.V1_MODELBUNDLES_CLONEWITHCHANGES
diff --git a/launch/api_client/paths/v1_model_bundles_clone_with_changes/post.py b/launch/openapi_client/paths/v1_model_bundles_clone_with_changes/post.py
similarity index 97%
rename from launch/api_client/paths/v1_model_bundles_clone_with_changes/post.py
rename to launch/openapi_client/paths/v1_model_bundles_clone_with_changes/post.py
index 0108f19d..383103ef 100644
--- a/launch/api_client/paths/v1_model_bundles_clone_with_changes/post.py
+++ b/launch/openapi_client/paths/v1_model_bundles_clone_with_changes/post.py
@@ -20,15 +20,17 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
-from launch.api_client.model.clone_model_bundle_request import (
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
+from launch.openapi_client.model.clone_model_bundle_request import (
     CloneModelBundleRequest,
 )
-from launch.api_client.model.create_model_bundle_response import (
+from launch.openapi_client.model.create_model_bundle_response import (
     CreateModelBundleResponse,
 )
-from launch.api_client.model.http_validation_error import HTTPValidationError
+from launch.openapi_client.model.http_validation_error import (
+    HTTPValidationError,
+)
 
 from . import path
 
diff --git a/launch/api_client/paths/v1_model_bundles_clone_with_changes/post.pyi b/launch/openapi_client/paths/v1_model_bundles_clone_with_changes/post.pyi
similarity index 100%
rename from launch/api_client/paths/v1_model_bundles_clone_with_changes/post.pyi
rename to launch/openapi_client/paths/v1_model_bundles_clone_with_changes/post.pyi
diff --git a/launch/api_client/paths/v1_model_bundles_latest/__init__.py b/launch/openapi_client/paths/v1_model_bundles_latest/__init__.py
similarity index 65%
rename from launch/api_client/paths/v1_model_bundles_latest/__init__.py
rename to launch/openapi_client/paths/v1_model_bundles_latest/__init__.py
index b5e54b32..074d2cc6 100644
--- a/launch/api_client/paths/v1_model_bundles_latest/__init__.py
+++ b/launch/openapi_client/paths/v1_model_bundles_latest/__init__.py
@@ -1,7 +1,7 @@
 # do not import all endpoints into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all endpoints from this module, import them with
-# from launch.api_client.paths.v1_model_bundles_latest import Api
+# from launch.openapi_client.paths.v1_model_bundles_latest import Api
 
-from launch.api_client.paths import PathValues
+from launch.openapi_client.paths import PathValues
 
 path = PathValues.V1_MODELBUNDLES_LATEST
diff --git a/launch/api_client/paths/v1_model_bundles_latest/get.py b/launch/openapi_client/paths/v1_model_bundles_latest/get.py
similarity index 97%
rename from launch/api_client/paths/v1_model_bundles_latest/get.py
rename to launch/openapi_client/paths/v1_model_bundles_latest/get.py
index 0660279f..29c9a499 100644
--- a/launch/api_client/paths/v1_model_bundles_latest/get.py
+++ b/launch/openapi_client/paths/v1_model_bundles_latest/get.py
@@ -20,10 +20,14 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
-from launch.api_client.model.http_validation_error import HTTPValidationError
-from launch.api_client.model.model_bundle_response import ModelBundleResponse
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
+from launch.openapi_client.model.http_validation_error import (
+    HTTPValidationError,
+)
+from launch.openapi_client.model.model_bundle_response import (
+    ModelBundleResponse,
+)
 
 from . import path
 
diff --git a/launch/api_client/paths/v1_model_bundles_latest/get.pyi b/launch/openapi_client/paths/v1_model_bundles_latest/get.pyi
similarity index 100%
rename from launch/api_client/paths/v1_model_bundles_latest/get.pyi
rename to launch/openapi_client/paths/v1_model_bundles_latest/get.pyi
diff --git a/launch/api_client/paths/v1_model_bundles_model_bundle_id/__init__.py b/launch/openapi_client/paths/v1_model_bundles_model_bundle_id/__init__.py
similarity index 64%
rename from launch/api_client/paths/v1_model_bundles_model_bundle_id/__init__.py
rename to launch/openapi_client/paths/v1_model_bundles_model_bundle_id/__init__.py
index d3532d3a..99e031a3 100644
--- a/launch/api_client/paths/v1_model_bundles_model_bundle_id/__init__.py
+++ b/launch/openapi_client/paths/v1_model_bundles_model_bundle_id/__init__.py
@@ -1,7 +1,7 @@
 # do not import all endpoints into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all endpoints from this module, import them with
-# from launch.api_client.paths.v1_model_bundles_model_bundle_id import Api
+# from launch.openapi_client.paths.v1_model_bundles_model_bundle_id import Api
 
-from launch.api_client.paths import PathValues
+from launch.openapi_client.paths import PathValues
 
 path = PathValues.V1_MODELBUNDLES_MODEL_BUNDLE_ID
diff --git a/launch/api_client/paths/v1_model_bundles_model_bundle_id/get.py b/launch/openapi_client/paths/v1_model_bundles_model_bundle_id/get.py
similarity index 97%
rename from launch/api_client/paths/v1_model_bundles_model_bundle_id/get.py
rename to launch/openapi_client/paths/v1_model_bundles_model_bundle_id/get.py
index 11174fab..2103a00b 100644
--- a/launch/api_client/paths/v1_model_bundles_model_bundle_id/get.py
+++ b/launch/openapi_client/paths/v1_model_bundles_model_bundle_id/get.py
@@ -20,10 +20,14 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
-from launch.api_client.model.http_validation_error import HTTPValidationError
-from launch.api_client.model.model_bundle_response import ModelBundleResponse
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
+from launch.openapi_client.model.http_validation_error import (
+    HTTPValidationError,
+)
+from launch.openapi_client.model.model_bundle_response import (
+    ModelBundleResponse,
+)
 
 from . import path
 
diff --git a/launch/api_client/paths/v1_model_bundles_model_bundle_id/get.pyi b/launch/openapi_client/paths/v1_model_bundles_model_bundle_id/get.pyi
similarity index 100%
rename from launch/api_client/paths/v1_model_bundles_model_bundle_id/get.pyi
rename to launch/openapi_client/paths/v1_model_bundles_model_bundle_id/get.pyi
diff --git a/launch/api_client/paths/v1_model_endpoints/__init__.py b/launch/openapi_client/paths/v1_model_endpoints/__init__.py
similarity index 65%
rename from launch/api_client/paths/v1_model_endpoints/__init__.py
rename to launch/openapi_client/paths/v1_model_endpoints/__init__.py
index f58f6045..8a241c32 100644
--- a/launch/api_client/paths/v1_model_endpoints/__init__.py
+++ b/launch/openapi_client/paths/v1_model_endpoints/__init__.py
@@ -1,7 +1,7 @@
 # do not import all endpoints into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all endpoints from this module, import them with
-# from launch.api_client.paths.v1_model_endpoints import Api
+# from launch.openapi_client.paths.v1_model_endpoints import Api
 
-from launch.api_client.paths import PathValues
+from launch.openapi_client.paths import PathValues
 
 path = PathValues.V1_MODELENDPOINTS
diff --git a/launch/api_client/paths/v1_model_endpoints/get.py b/launch/openapi_client/paths/v1_model_endpoints/get.py
similarity index 96%
rename from launch/api_client/paths/v1_model_endpoints/get.py
rename to launch/openapi_client/paths/v1_model_endpoints/get.py
index 03cf6126..c0a2051f 100644
--- a/launch/api_client/paths/v1_model_endpoints/get.py
+++ b/launch/openapi_client/paths/v1_model_endpoints/get.py
@@ -20,13 +20,15 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
-from launch.api_client.model.http_validation_error import HTTPValidationError
-from launch.api_client.model.list_model_endpoints_response import (
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
+from launch.openapi_client.model.http_validation_error import (
+    HTTPValidationError,
+)
+from launch.openapi_client.model.list_model_endpoints_response import (
     ListModelEndpointsResponse,
 )
-from launch.api_client.model.model_endpoint_order_by import (
+from launch.openapi_client.model.model_endpoint_order_by import (
     ModelEndpointOrderBy,
 )
 
diff --git a/launch/api_client/paths/v1_model_endpoints/get.pyi b/launch/openapi_client/paths/v1_model_endpoints/get.pyi
similarity index 100%
rename from launch/api_client/paths/v1_model_endpoints/get.pyi
rename to launch/openapi_client/paths/v1_model_endpoints/get.pyi
diff --git a/launch/api_client/paths/v1_model_endpoints/post.py b/launch/openapi_client/paths/v1_model_endpoints/post.py
similarity index 97%
rename from launch/api_client/paths/v1_model_endpoints/post.py
rename to launch/openapi_client/paths/v1_model_endpoints/post.py
index 43f29056..40d5604e 100644
--- a/launch/api_client/paths/v1_model_endpoints/post.py
+++ b/launch/openapi_client/paths/v1_model_endpoints/post.py
@@ -20,15 +20,17 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
-from launch.api_client.model.create_model_endpoint_request import (
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
+from launch.openapi_client.model.create_model_endpoint_request import (
     CreateModelEndpointRequest,
 )
-from launch.api_client.model.create_model_endpoint_response import (
+from launch.openapi_client.model.create_model_endpoint_response import (
     CreateModelEndpointResponse,
 )
-from launch.api_client.model.http_validation_error import HTTPValidationError
+from launch.openapi_client.model.http_validation_error import (
+    HTTPValidationError,
+)
 
 from . import path
 
diff --git a/launch/api_client/paths/v1_model_endpoints/post.pyi b/launch/openapi_client/paths/v1_model_endpoints/post.pyi
similarity index 100%
rename from launch/api_client/paths/v1_model_endpoints/post.pyi
rename to launch/openapi_client/paths/v1_model_endpoints/post.pyi
diff --git a/launch/api_client/paths/v1_model_endpoints_api/__init__.py b/launch/openapi_client/paths/v1_model_endpoints_api/__init__.py
similarity index 65%
rename from launch/api_client/paths/v1_model_endpoints_api/__init__.py
rename to launch/openapi_client/paths/v1_model_endpoints_api/__init__.py
index c4f1910d..7f145b43 100644
--- a/launch/api_client/paths/v1_model_endpoints_api/__init__.py
+++ b/launch/openapi_client/paths/v1_model_endpoints_api/__init__.py
@@ -1,7 +1,7 @@
 # do not import all endpoints into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all endpoints from this module, import them with
-# from launch.api_client.paths.v1_model_endpoints_api import Api
+# from launch.openapi_client.paths.v1_model_endpoints_api import Api
 
-from launch.api_client.paths import PathValues
+from launch.openapi_client.paths import PathValues
 
 path = PathValues.V1_MODELENDPOINTSAPI
diff --git a/launch/api_client/paths/v1_model_endpoints_api/get.py b/launch/openapi_client/paths/v1_model_endpoints_api/get.py
similarity index 98%
rename from launch/api_client/paths/v1_model_endpoints_api/get.py
rename to launch/openapi_client/paths/v1_model_endpoints_api/get.py
index 24323282..088b43bd 100644
--- a/launch/api_client/paths/v1_model_endpoints_api/get.py
+++ b/launch/openapi_client/paths/v1_model_endpoints_api/get.py
@@ -20,8 +20,8 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
 
 from . import path
 
diff --git a/launch/api_client/paths/v1_model_endpoints_api/get.pyi b/launch/openapi_client/paths/v1_model_endpoints_api/get.pyi
similarity index 100%
rename from launch/api_client/paths/v1_model_endpoints_api/get.pyi
rename to launch/openapi_client/paths/v1_model_endpoints_api/get.pyi
diff --git a/launch/api_client/paths/v1_model_endpoints_model_endpoint_id/__init__.py b/launch/openapi_client/paths/v1_model_endpoints_model_endpoint_id/__init__.py
similarity index 64%
rename from launch/api_client/paths/v1_model_endpoints_model_endpoint_id/__init__.py
rename to launch/openapi_client/paths/v1_model_endpoints_model_endpoint_id/__init__.py
index fb6cdf7c..8051b60b 100644
--- a/launch/api_client/paths/v1_model_endpoints_model_endpoint_id/__init__.py
+++ b/launch/openapi_client/paths/v1_model_endpoints_model_endpoint_id/__init__.py
@@ -1,7 +1,7 @@
 # do not import all endpoints into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all endpoints from this module, import them with
-# from launch.api_client.paths.v1_model_endpoints_model_endpoint_id import Api
+# from launch.openapi_client.paths.v1_model_endpoints_model_endpoint_id import Api
 
-from launch.api_client.paths import PathValues
+from launch.openapi_client.paths import PathValues
 
 path = PathValues.V1_MODELENDPOINTS_MODEL_ENDPOINT_ID
diff --git a/launch/api_client/paths/v1_model_endpoints_model_endpoint_id/delete.py b/launch/openapi_client/paths/v1_model_endpoints_model_endpoint_id/delete.py
similarity index 97%
rename from launch/api_client/paths/v1_model_endpoints_model_endpoint_id/delete.py
rename to launch/openapi_client/paths/v1_model_endpoints_model_endpoint_id/delete.py
index fb76c011..5f2d885f 100644
--- a/launch/api_client/paths/v1_model_endpoints_model_endpoint_id/delete.py
+++ b/launch/openapi_client/paths/v1_model_endpoints_model_endpoint_id/delete.py
@@ -20,12 +20,14 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
-from launch.api_client.model.delete_model_endpoint_response import (
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
+from launch.openapi_client.model.delete_model_endpoint_response import (
     DeleteModelEndpointResponse,
 )
-from launch.api_client.model.http_validation_error import HTTPValidationError
+from launch.openapi_client.model.http_validation_error import (
+    HTTPValidationError,
+)
 
 from . import path
 
diff --git a/launch/api_client/paths/v1_model_endpoints_model_endpoint_id/delete.pyi b/launch/openapi_client/paths/v1_model_endpoints_model_endpoint_id/delete.pyi
similarity index 100%
rename from launch/api_client/paths/v1_model_endpoints_model_endpoint_id/delete.pyi
rename to launch/openapi_client/paths/v1_model_endpoints_model_endpoint_id/delete.pyi
diff --git a/launch/api_client/paths/v1_model_endpoints_model_endpoint_id/get.py b/launch/openapi_client/paths/v1_model_endpoints_model_endpoint_id/get.py
similarity index 97%
rename from launch/api_client/paths/v1_model_endpoints_model_endpoint_id/get.py
rename to launch/openapi_client/paths/v1_model_endpoints_model_endpoint_id/get.py
index 70ec5816..055716d1 100644
--- a/launch/api_client/paths/v1_model_endpoints_model_endpoint_id/get.py
+++ b/launch/openapi_client/paths/v1_model_endpoints_model_endpoint_id/get.py
@@ -20,12 +20,14 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
-from launch.api_client.model.get_model_endpoint_response import (
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
+from launch.openapi_client.model.get_model_endpoint_response import (
     GetModelEndpointResponse,
 )
-from launch.api_client.model.http_validation_error import HTTPValidationError
+from launch.openapi_client.model.http_validation_error import (
+    HTTPValidationError,
+)
 
 from . import path
 
diff --git a/launch/api_client/paths/v1_model_endpoints_model_endpoint_id/get.pyi b/launch/openapi_client/paths/v1_model_endpoints_model_endpoint_id/get.pyi
similarity index 100%
rename from launch/api_client/paths/v1_model_endpoints_model_endpoint_id/get.pyi
rename to launch/openapi_client/paths/v1_model_endpoints_model_endpoint_id/get.pyi
diff --git a/launch/api_client/paths/v1_model_endpoints_model_endpoint_id/put.py b/launch/openapi_client/paths/v1_model_endpoints_model_endpoint_id/put.py
similarity index 97%
rename from launch/api_client/paths/v1_model_endpoints_model_endpoint_id/put.py
rename to launch/openapi_client/paths/v1_model_endpoints_model_endpoint_id/put.py
index ea63b0cb..a248b7b9 100644
--- a/launch/api_client/paths/v1_model_endpoints_model_endpoint_id/put.py
+++ b/launch/openapi_client/paths/v1_model_endpoints_model_endpoint_id/put.py
@@ -20,13 +20,15 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
-from launch.api_client.model.http_validation_error import HTTPValidationError
-from launch.api_client.model.update_model_endpoint_request import (
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
+from launch.openapi_client.model.http_validation_error import (
+    HTTPValidationError,
+)
+from launch.openapi_client.model.update_model_endpoint_request import (
     UpdateModelEndpointRequest,
 )
-from launch.api_client.model.update_model_endpoint_response import (
+from launch.openapi_client.model.update_model_endpoint_response import (
     UpdateModelEndpointResponse,
 )
 
diff --git a/launch/api_client/paths/v1_model_endpoints_model_endpoint_id/put.pyi b/launch/openapi_client/paths/v1_model_endpoints_model_endpoint_id/put.pyi
similarity index 100%
rename from launch/api_client/paths/v1_model_endpoints_model_endpoint_id/put.pyi
rename to launch/openapi_client/paths/v1_model_endpoints_model_endpoint_id/put.pyi
diff --git a/launch/api_client/paths/v1_model_endpoints_schema_json/__init__.py b/launch/openapi_client/paths/v1_model_endpoints_schema_json/__init__.py
similarity index 64%
rename from launch/api_client/paths/v1_model_endpoints_schema_json/__init__.py
rename to launch/openapi_client/paths/v1_model_endpoints_schema_json/__init__.py
index 4dd4f68d..eaf987f0 100644
--- a/launch/api_client/paths/v1_model_endpoints_schema_json/__init__.py
+++ b/launch/openapi_client/paths/v1_model_endpoints_schema_json/__init__.py
@@ -1,7 +1,7 @@
 # do not import all endpoints into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all endpoints from this module, import them with
-# from launch.api_client.paths.v1_model_endpoints_schema_json import Api
+# from launch.openapi_client.paths.v1_model_endpoints_schema_json import Api
 
-from launch.api_client.paths import PathValues
+from launch.openapi_client.paths import PathValues
 
 path = PathValues.V1_MODELENDPOINTSSCHEMA_JSON
diff --git a/launch/api_client/paths/v1_model_endpoints_schema_json/get.py b/launch/openapi_client/paths/v1_model_endpoints_schema_json/get.py
similarity index 98%
rename from launch/api_client/paths/v1_model_endpoints_schema_json/get.py
rename to launch/openapi_client/paths/v1_model_endpoints_schema_json/get.py
index d11fddbc..7bae63cf 100644
--- a/launch/api_client/paths/v1_model_endpoints_schema_json/get.py
+++ b/launch/openapi_client/paths/v1_model_endpoints_schema_json/get.py
@@ -20,8 +20,8 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
 
 from . import path
 
diff --git a/launch/api_client/paths/v1_model_endpoints_schema_json/get.pyi b/launch/openapi_client/paths/v1_model_endpoints_schema_json/get.pyi
similarity index 100%
rename from launch/api_client/paths/v1_model_endpoints_schema_json/get.pyi
rename to launch/openapi_client/paths/v1_model_endpoints_schema_json/get.pyi
diff --git a/launch/api_client/paths/v1_sync_tasks/__init__.py b/launch/openapi_client/paths/v1_sync_tasks/__init__.py
similarity index 66%
rename from launch/api_client/paths/v1_sync_tasks/__init__.py
rename to launch/openapi_client/paths/v1_sync_tasks/__init__.py
index e05b06c3..bc95fbd6 100644
--- a/launch/api_client/paths/v1_sync_tasks/__init__.py
+++ b/launch/openapi_client/paths/v1_sync_tasks/__init__.py
@@ -1,7 +1,7 @@
 # do not import all endpoints into this module because that uses a lot of memory and stack frames
 # if you need the ability to import all endpoints from this module, import them with
-# from launch.api_client.paths.v1_sync_tasks import Api
+# from launch.openapi_client.paths.v1_sync_tasks import Api
 
-from launch.api_client.paths import PathValues
+from launch.openapi_client.paths import PathValues
 
 path = PathValues.V1_SYNCTASKS
diff --git a/launch/api_client/paths/v1_sync_tasks/post.py b/launch/openapi_client/paths/v1_sync_tasks/post.py
similarity index 97%
rename from launch/api_client/paths/v1_sync_tasks/post.py
rename to launch/openapi_client/paths/v1_sync_tasks/post.py
index 0134b5e3..e2f5b667 100644
--- a/launch/api_client/paths/v1_sync_tasks/post.py
+++ b/launch/openapi_client/paths/v1_sync_tasks/post.py
@@ -20,13 +20,15 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client import schemas  # noqa: F401
-from launch.api_client import api_client, exceptions
-from launch.api_client.model.endpoint_predict_request import (
+from launch.openapi_client import schemas  # noqa: F401
+from launch.openapi_client import api_client, exceptions
+from launch.openapi_client.model.endpoint_predict_request import (
     EndpointPredictRequest,
 )
-from launch.api_client.model.http_validation_error import HTTPValidationError
-from launch.api_client.model.sync_endpoint_predict_response import (
+from launch.openapi_client.model.http_validation_error import (
+    HTTPValidationError,
+)
+from launch.openapi_client.model.sync_endpoint_predict_response import (
     SyncEndpointPredictResponse,
 )
 
diff --git a/launch/api_client/paths/v1_sync_tasks/post.pyi b/launch/openapi_client/paths/v1_sync_tasks/post.pyi
similarity index 100%
rename from launch/api_client/paths/v1_sync_tasks/post.pyi
rename to launch/openapi_client/paths/v1_sync_tasks/post.pyi
diff --git a/launch/api_client/rest.py b/launch/openapi_client/rest.py
similarity index 99%
rename from launch/api_client/rest.py
rename to launch/openapi_client/rest.py
index 96d2a3ae..41a3239a 100644
--- a/launch/api_client/rest.py
+++ b/launch/openapi_client/rest.py
@@ -18,7 +18,7 @@
 import urllib3
 from urllib3._collections import HTTPHeaderDict
 
-from launch.api_client.exceptions import ApiException, ApiValueError
+from launch.openapi_client.exceptions import ApiException, ApiValueError
 
 logger = logging.getLogger(__name__)
 
diff --git a/launch/api_client/schemas.py b/launch/openapi_client/schemas.py
similarity index 99%
rename from launch/api_client/schemas.py
rename to launch/openapi_client/schemas.py
index d04c1959..4fbf12d8 100644
--- a/launch/api_client/schemas.py
+++ b/launch/openapi_client/schemas.py
@@ -22,8 +22,8 @@
 import frozendict
 from dateutil.parser.isoparser import _takes_ascii, isoparser
 
-from launch.api_client.configuration import Configuration
-from launch.api_client.exceptions import ApiTypeError, ApiValueError
+from launch.openapi_client.configuration import Configuration
+from launch.openapi_client.exceptions import ApiTypeError, ApiValueError
 
 
 class Unset(object):
diff --git a/pyproject.toml b/pyproject.toml
index 2a335750..14e824c5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -22,7 +22,8 @@ exclude = '''
 [tool.mypy]
 exclude = [
     '^launch/clientlib/',
-    '^launch/api_client/'
+    '^launch/api_client/',
+    '^launch/openapi_client/',
 ]
 
 [tool.poetry]
@@ -88,4 +89,5 @@ extend-select = ['Q']
 flake8-quotes = {inline-quotes = 'double', multiline-quotes = 'double'}
 exclude = [
     "launch/api_client",
+    "launch/openapi_client",
 ]
diff --git a/tests/test_client.py b/tests/test_client.py
index a82f8db3..d6c6ed74 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -12,8 +12,8 @@
 from urllib3 import HTTPResponse
 
 import launch
-from launch.api_client.api_client import ApiResponseWithoutDeserialization
-from launch.api_client.model.list_model_endpoints_response import (
+from launch.openapi_client.api_client import ApiResponseWithoutDeserialization
+from launch.openapi_client.model.list_model_endpoints_response import (
     ListModelEndpointsResponse,
 )
 
diff --git a/tests/test_model_endpoint.py b/tests/test_model_endpoint.py
index 809e7cd9..7945c369 100644
--- a/tests/test_model_endpoint.py
+++ b/tests/test_model_endpoint.py
@@ -7,7 +7,7 @@
 from urllib3 import HTTPResponse
 
 import launch
-from launch.api_client.api_client import ApiResponseWithoutDeserialization
+from launch.openapi_client.api_client import ApiResponseWithoutDeserialization
 
 
 def _get_mock_client():