diff --git a/dataikuapi/dss/project.py b/dataikuapi/dss/project.py index ba29a3157..7da272957 100644 --- a/dataikuapi/dss/project.py +++ b/dataikuapi/dss/project.py @@ -25,6 +25,7 @@ from .analysis import DSSAnalysis from .flow import DSSProjectFlow from .app import DSSAppManifest +from .webapp import DSSWebApp from .codestudio import DSSCodeStudioObject, DSSCodeStudioObjectListItem class DSSProject(object): @@ -383,7 +384,7 @@ def create_s3_dataset(self, dataset_name, connection, path_in_connection, bucket """ Creates a new external S3 dataset in the project and returns a :class:`~dataikuapi.dss.dataset.DSSDataset` to interact with it. - The created dataset doesn not have its format and schema initialized, it is recommend to use + The created dataset does not have its format and schema initialized; it is recommended to use :meth:`~dataikuapi.dss.dataset.DSSDataset.autodetect_settings` on the returned object :param dataset_name: Name of the dataset to create. Must not already exist @@ -473,9 +474,9 @@ def list_streaming_endpoints(self, as_type="listitems"): def get_streaming_endpoint(self, streaming_endpoint_name): """ Get a handle to interact with a specific streaming endpoint - + :param string streaming_endpoint_name: the name of the desired streaming endpoint - + :returns: A :class:`dataikuapi.dss.streaming_endpoint.DSSStreamingEndpoint` streaming endpoint handle """ return DSSStreamingEndpoint(self.client, self.project_key, streaming_endpoint_name) @@ -484,18 +485,18 @@ def create_streaming_endpoint(self, streaming_endpoint_name, type, params=None): """ Create a new streaming endpoint in the project, and return a handle to interact with it. - The precise structure of ``params`` depends on the specific streaming endpoint + The precise structure of ``params`` depends on the specific streaming endpoint type. To know which fields exist for a given streaming endpoint type, create a streaming endpoint from the UI, and use :meth:`get_streaming_endpoint` to retrieve the configuration of the streaming endpoint and inspect it. Then reproduce a similar structure in the :meth:`create_streaming_endpoint` call. Not all settings of a streaming endpoint can be set at creation time (for example partitioning).
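For illustration, a minimal sketch of such a creation call, assuming ``client`` is a ``DSSClient``; the endpoint type and the ``params`` keys are hypothetical placeholders, since the real structure depends on the endpoint type:

.. code-block:: python

    project = client.get_project("MY_PROJECT")
    # The params keys below are illustrative placeholders; inspect an
    # endpoint created from the UI to learn the real structure for your type.
    endpoint = project.create_streaming_endpoint(
        "my_endpoint", "kafka",
        params={"connection": "my_kafka_connection", "topic": "mytopic"})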
After creation, you'll have the ability to modify the streaming endpoint - + :param string streaming_endpoint_name: the name for the new streaming endpoint :param string type: the type of the streaming endpoint :param dict params: the parameters for the type, as a JSON object (defaults to `{}`) - + Returns: A :class:`dataikuapi.dss.streaming_endpoint.DSSStreamingEndpoint` streaming endpoint handle """ @@ -654,11 +655,11 @@ def get_ml_task(self, analysis_id, mltask_id): def list_mltask_queues(self): """ - List non-empty ML task queues in this project - + List non-empty ML task queues in this project + :returns: an iterable :class:`DSSMLTaskQueues` listing of MLTask queues (each a dict) :rtype: :class:`DSSMLTaskQueues` - """ + """ data = self.client._perform_json("GET", "/projects/%s/models/labs/mltask-queues" % self.project_key) return DSSMLTaskQueues(data) @@ -806,9 +807,9 @@ def list_model_evaluation_stores(self): def get_model_evaluation_store(self, mes_id): """ Get a handle to interact with a specific model evaluation store - + :param string mes_id: the id of the desired model evaluation store - + :returns: A :class:`dataikuapi.dss.modelevaluationstore.DSSModelEvaluationStore` model evaluation store handle """ return DSSModelEvaluationStore(self.client, self.project_key, mes_id) @@ -816,7 +817,7 @@ def get_model_evaluation_store(self, mes_id): def create_model_evaluation_store(self, name): """ Create a new model evaluation store in the project, and return a handle to interact with it. - + :param string name: the name for the new model evaluation store :returns: A :class:`dataikuapi.dss.modelevaluationstore.DSSModelEvaluationStore` model evaluation store handle @@ -902,8 +903,8 @@ def start_job(self, definition): """ Create a new job, and return a handle to interact with it - :param dict definition: The definition should contain: - + :param dict definition: The definition should contain: + * the type of job (RECURSIVE_BUILD, NON_RECURSIVE_FORCED_BUILD, RECURSIVE_FORCED_BUILD, RECURSIVE_MISSING_ONLY_BUILD) * a list of outputs to build from the available types: (DATASET, MANAGED_FOLDER, SAVED_MODEL, STREAMING_ENDPOINT) * (Optional) a refreshHiveMetastore field (True or False) to specify whether to re-synchronize the Hive metastore for recomputed HDFS datasets. @@ -918,7 +919,7 @@ def start_job_and_wait(self, definition, no_fail=False): Starts a new job and waits for it to complete. :param dict definition: The definition should contain: - + * the type of job (RECURSIVE_BUILD, NON_RECURSIVE_FORCED_BUILD, RECURSIVE_FORCED_BUILD, RECURSIVE_MISSING_ONLY_BUILD) * a list of outputs to build from the available types: (DATASET, MANAGED_FOLDER, SAVED_MODEL, STREAMING_ENDPOINT) * (Optional) a refreshHiveMetastore field (True or False) to specify whether to re-synchronize the Hive metastore for recomputed HDFS datasets. @@ -953,7 +954,7 @@ def new_job_definition_builder(self, job_type='NON_RECURSIVE_FORCED_BUILD'): ######################################################## # Jupyter Notebooks ######################################################## - + def list_jupyter_notebooks(self, active=False, as_type="object"): """ List the jupyter notebooks of a project. @@ -962,7 +963,7 @@ def list_jupyter_notebooks(self, active=False, as_type="object"): :param bool active: if True, only return currently running jupyter notebooks. :returns: The list of the notebooks. 
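To make the shape of the job ``definition`` dicts accepted by ``start_job`` and ``start_job_and_wait`` concrete, a hedged sketch; the dataset name is hypothetical, and ``"NP"`` is assumed here to denote a non-partitioned output:

.. code-block:: python

    definition = {
        "type": "RECURSIVE_FORCED_BUILD",
        "outputs": [{
            "type": "DATASET",
            "id": "my_output_dataset",  # hypothetical dataset name
            "partition": "NP"           # assumed marker for non-partitioned outputs
        }],
        "refreshHiveMetastore": False   # optional
    }
    job = project.start_job(definition)  # asynchronous, returns a job handle
    # or run synchronously:
    # result = project.start_job_and_wait(definition)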
If "as_type" is "listitems", each one as a :class:`dataikuapi.dss.notebook.DSSJupyterNotebookListItem`, if "as_type" is "objects", each one as a :class:`dataikuapi.dss.notebook.DSSJupyterNotebook` - + :rtype: list of :class:`dataikuapi.dss.notebook.DSSJupyterNotebook` or list of :class:`dataikuapi.dss.notebook.DSSJupyterNotebookListItem` """ notebook_items = self.client._perform_json("GET", "/projects/%s/jupyter-notebooks/" % self.project_key, params={"active": active}) @@ -1005,7 +1006,7 @@ def create_jupyter_notebook(self, notebook_name, notebook_content): def list_continuous_activities(self, as_objects=True): """ List the continuous activities in this project - + Returns: a list of the continuous activities, each one as a JSON object, containing both the definition and the state """ @@ -1018,7 +1019,7 @@ def list_continuous_activities(self, as_objects=True): def get_continuous_activity(self, recipe_id): """ Get a handler to interact with a specific continuous activities - + Returns: A :class:`dataikuapi.dss.continuousactivity.DSSContinuousActivity` job handle """ @@ -1121,9 +1122,9 @@ def list_exported_bundles(self): def export_bundle(self, bundle_id): """ - Creates a new project bundle on the Design node - - :param str bundle_id: bundle id tag + Creates a new project bundle on the Design node + + :param str bundle_id: bundle id tag """ return self.client._perform_json("PUT", "/projects/%s/bundles/exported/%s" % (self.project_key, bundle_id)) @@ -1131,21 +1132,21 @@ def export_bundle(self, bundle_id): def get_exported_bundle_archive_stream(self, bundle_id): """ Download a bundle archive that can be deployed in a DSS automation Node, as a binary stream. - + .. warning:: - + The stream must be closed after use. Use a ``with`` statement to handle closing the stream at the end of the block by default. For example: - - .. code-block:: python - + + .. code-block:: python + with project.get_exported_bundle_archive_stream('v1') as fp: - # use fp - + # use fp + # or explicitly close the stream after use fp = project.get_exported_bundle_archive_stream('v1') - # use fp, then close + # use fp, then close fp.close() - + """ return self.client._perform_raw("GET", "/projects/%s/bundles/exported/%s/archive" % (self.project_key, bundle_id)) @@ -1153,7 +1154,7 @@ def get_exported_bundle_archive_stream(self, bundle_id): def download_exported_bundle_archive_to_file(self, bundle_id, path): """ Download a bundle archive that can be deployed in a DSS automation Node into the given output file. - + :param string path: if "-", will write to /dev/stdout """ if path == "-": @@ -1199,7 +1200,7 @@ def list_imported_bundles(self): def import_bundle_from_archive(self, archive_path): """ Imports a bundle from a zip archive path on the Automation node. - + :param str archive_path: A full path to a zip archive, for example `/home/dataiku/my-bundle-v1.zip` """ return self.client._perform_json("POST", @@ -1208,10 +1209,10 @@ def import_bundle_from_archive(self, archive_path): def import_bundle_from_stream(self, fp): """ - Imports a bundle from a file stream, on the Automation node. - - :param file-like fp: file handler. Usage example: - + Imports a bundle from a file stream, on the Automation node. + + :param file-like fp: file handler. Usage example: + .. 
code-block:: python + + project = client.get_project('MY_PROJECT') @@ -1240,8 +1241,8 @@ def activate_bundle(self, bundle_id, scenarios_to_enable=None): def preload_bundle(self, bundle_id): """ - Preloads a bundle that has been imported on the Automation node - + Preloads a bundle that has been imported on the Automation node + + :param str bundle_id: the bundle_id for an existing imported bundle """ return self.client._perform_json("POST", @@ -1365,7 +1366,7 @@ def new_recipe(self, type, name=None): grouping_recipe_builder = project.new_recipe("grouping") grouping_recipe_builder.with_input("dataset_to_group_on") # Create a new managed dataset for the output in the "filesystem_managed" connection - grouping_recipe_builder.with_new_output("grouped_dataset", "filesystem_managed") + grouping_recipe_builder.with_new_output("grouped_dataset", "filesystem_managed") grouping_recipe_builder.with_group_key("column") recipe = grouping_recipe_builder.build() @@ -1515,6 +1516,28 @@ def get_macro(self, runnable_type): """ return DSSMacro(self.client, self.project_key, runnable_type) + ######################################################## + # Webapps + ######################################################## + + def list_webapps(self): + """ + List the webapp heads of this project + + :returns: the list of the webapps as :class:`dataikuapi.dss.webapp.DSSWebApp` + """ + webapps = self.client._perform_json("GET", "/projects/%s/webapps/" % self.project_key) + return [DSSWebApp(self.client, self.project_key, w["id"]) for w in webapps] + + def get_webapp(self, webapp_id): + """ + Get a handle to interact with a specific webapp + + :param str webapp_id: the identifier of a webapp + :returns: A :class:`dataikuapi.dss.webapp.DSSWebApp` webapp handle + """ + return DSSWebApp(self.client, self.project_key, webapp_id) + ######################################################## # Wiki ######################################################## @@ -1798,7 +1821,7 @@ def set_container_exec_config(self, config_name): def set_k8s_cluster(self, cluster, fallback_cluster=None): """Sets the Kubernetes cluster used by this project - :param str cluster: Identifier of the cluster to use. May use variables expansion. If None, sets the project + :param str cluster: Identifier of the cluster to use. May use variables expansion. If None, sets the project to use the globally-defined cluster :param str fallback_cluster: Identifier of the cluster to use if the variable used for "cluster" does not exist """ @@ -1883,10 +1906,10 @@ def with_refresh_metastore(self, refresh_metastore): def with_output(self, name, object_type=None, object_project_key=None, partition=None): """ Adds an item to build in this job - + :param name: name of the output object :param object_type: type of object to build from: DATASET, MANAGED_FOLDER, SAVED_MODEL, STREAMING_ENDPOINT - :param object_project_key: PROJECT_KEY for the project that contains the object to build + :param object_project_key: PROJECT_KEY for the project that contains the object to build :param partition: specify partition to build """ self.definition['outputs'].append({'type':object_type, 'id':name, 'projectKey':object_project_key, 'partition':partition}) @@ -1901,11 +1924,11 @@ def start(self): Starts the job, and return a :doc:`dataikuapi.dss.job.DSSJob` handle to interact with it.
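A short usage sketch for the ``list_webapps`` and ``get_webapp`` methods added above; the project key and webapp id are hypothetical:

.. code-block:: python

    project = client.get_project("MY_PROJECT")
    # Enumerate the webapps of the project, then grab one by its id.
    for webapp in project.list_webapps():
        print(webapp.webapp_id)
    webapp = project.get_webapp("aBcDeF1")  # hypothetical webapp id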
You need to wait for the returned job to complete - + :return: the :class:`dataikuapi.dss.job.DSSJob` job handle :rtype: :class:`dataikuapi.dss.job.DSSJob` """ - job_def = self.project.client._perform_json("POST", + job_def = self.project.client._perform_json("POST", "/projects/%s/jobs/" % self.project.project_key, body = self.definition) return DSSJob(self.project.client, self.project.project_key, job_def['id']) diff --git a/dataikuapi/dss/webapp.py b/dataikuapi/dss/webapp.py new file mode 100644 index 000000000..07130c0ca --- /dev/null +++ b/dataikuapi/dss/webapp.py @@ -0,0 +1,122 @@ +import sys +from .future import DSSFuture + +if sys.version_info >= (3,0): + import urllib.parse + dku_quote_fn = urllib.parse.quote +else: + import urllib + dku_quote_fn = urllib.quote + + +class DSSWebApp(object): + """ + A handle to manage a webapp + """ + def __init__(self, client, project_key, webapp_id): + """Do not call directly, use :meth:`dataikuapi.dss.project.DSSProject.get_webapp`""" + self.client = client + self.project_key = project_key + self.webapp_id = webapp_id + + def get_state(self): + """ + Return the state of the webapp backend + + :return: the state of the webapp backend + :rtype: :class:`DSSWebAppBackendState` + """ + state = self.client._perform_json("GET", "/projects/%s/webapps/%s/backend/state" % (self.project_key, self.webapp_id)) + return DSSWebAppBackendState(self.client, self.project_key, self.webapp_id, state) + + def stop_backend(self): + """ + Stop the backend of the webapp + """ + self.client._perform_empty("PUT", "/projects/%s/webapps/%s/backend/actions/stop" % (self.project_key, self.webapp_id)) + + def restart_backend(self): + """ + Restart the backend of the webapp + + :returns: a handle to a DSS future to track the progress of the restart + :rtype: :class:`dataikuapi.dss.future.DSSFuture` + """ + future = self.client._perform_json("PUT", "/projects/%s/webapps/%s/backend/actions/restart" % (self.project_key, self.webapp_id)) + return DSSFuture(self.client, future["jobId"]) + + def get_definition(self): + """ + Get a webapp definition + + :returns: a handle to manage the webapp definition + :rtype: :class:`dataikuapi.dss.webapp.DSSWebAppDefinition` + """ + definition = self.client._perform_json("GET", "/projects/%s/webapps/%s/" % (self.project_key, self.webapp_id)) + return DSSWebAppDefinition(self.client, self.project_key, self.webapp_id, definition) + + +class DSSWebAppBackendState(object): + """ + A handle to manage WebApp backend state + """ + def __init__(self, client, project_key, webapp_id, state): + """Do not call directly, use :meth:`dataikuapi.dss.webapp.DSSWebApp.get_state`""" + self.client = client + self.project_key = project_key + self.webapp_id = webapp_id + self.state = state + + def get_state(self): + """ + Returns the dict containing the current state of the webapp backend.
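Putting the :class:`DSSWebApp` backend methods together, a hedged sketch that restarts a stopped backend and waits for it; the webapp id is hypothetical, and ``is_running`` is defined on :class:`DSSWebAppBackendState` below:

.. code-block:: python

    webapp = project.get_webapp("aBcDeF1")  # hypothetical webapp id
    if not webapp.get_state().is_running():
        future = webapp.restart_backend()   # returns a DSSFuture
        future.wait_for_result()            # block until the restart completes
    assert webapp.get_state().is_running()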
+ Warning: this dict is a snapshot and is not updated when the webapp backend state changes + + :returns: a dict + """ + return self.state + + def is_running(self): + """ + Tells whether the webapp backend is running + + :returns: a bool + """ + return "futureInfo" in self.state and \ + "alive" in self.state["futureInfo"] and \ + self.state["futureInfo"]["alive"] + + +class DSSWebAppDefinition(object): + """ + A handle to manage a WebApp definition + """ + def __init__(self, client, project_key, webapp_id, definition): + """Do not call directly, use :meth:`dataikuapi.dss.webapp.DSSWebApp.get_definition`""" + self.client = client + self.project_key = project_key + self.webapp_id = webapp_id + self.definition = definition + + def get_definition(self): + """ + Get the definition + + :returns: the definition of the webapp + """ + return self.definition + + def set_definition(self, definition): + """ + Set the definition + + :param definition: the definition of the webapp + """ + self.definition = definition + + def save(self): + """ + Save the definition to DSS, then reload it so that this object reflects any server-side updates (such as the new version tag). + """ + self.client._perform_json("PUT", "/projects/%s/webapps/%s" % (self.project_key, self.webapp_id), body=self.definition) + self.definition = self.client._perform_json("GET", "/projects/%s/webapps/%s" % (self.project_key, self.webapp_id)) diff --git a/tests/webapps_tests.py b/tests/webapps_tests.py new file mode 100644 index 000000000..8313bf820 --- /dev/null +++ b/tests/webapps_tests.py @@ -0,0 +1,82 @@ +from time import sleep +from dataikuapi.dssclient import DSSClient +from dataikuapi.dss.project import DSSProject +from dataikuapi.dss.webapp import DSSWebApp +from nose.tools import ok_ +from nose.tools import eq_ + +host="http://localhost:8083" +apiKey="CMZBjFkUgcDh08S3awoPyVIweBelxPjy" +testProjectKey="WEBAPPS" +testWebAppPythonId="VCMN2ra" + + +def remove_key(d, key): + r = dict(d) + del r[key] + return r + + +class WebappApi_tests(object): + + def __init__(self): + self.client = None + self.project = None + + def setUp(self): + self.client = DSSClient(host, apiKey) + self.project = DSSProject(self.client, testProjectKey) + + def t01_list_webapps_test(self): + webapps = self.project.list_webapps() + ok_(len(webapps) > 0) + + def t02_get_python_webapp_test(self): + webapp = self.project.get_webapp(testWebAppPythonId) + ok_(webapp is not None) + + def t03_get_definition_test(self): + webapp = self.project.get_webapp(testWebAppPythonId) + definition = webapp.get_definition() + print("Definition " + str(definition)) + ok_(definition is not None) + eq_(definition.webapp_id, testWebAppPythonId) + eq_(definition.get_definition()["id"], testWebAppPythonId) + + def t04_update_python_webapp_test(self): + webapp = self.project.get_webapp(testWebAppPythonId) + definition = webapp.get_definition() + old_def = dict(definition.get_definition()) + definition.save() + eq_(remove_key(definition.get_definition(), "versionTag"), remove_key(old_def, "versionTag")) + eq_(definition.get_definition()["versionTag"]["versionNumber"], old_def["versionTag"]["versionNumber"] + 1) + + def t05_restart_backend_test(self): + """ + WARNING: you should manually stop the backend before this test + """ + webapp = self.project.get_webapp(testWebAppPythonId) + ok_(not webapp.get_state().is_running(), "The backend should be stopped before the test") + future = webapp.restart_backend() + future.wait_for_result() + ok_(webapp.get_state().is_running()) + + def t06_stop_backend_test(self): + """ + WARNING: you should manually start the backend before this test +
""" + webapp = self.project.get_webapp(testWebAppPythonId) + ok_(webapp.get_state().is_running(),"The backend should be started before the test") + webapp.stop_backend() + sleep(2) + eq_(webapp.get_state().is_running(), False) + + def t07_state_consistency_test(self): + webapp = self.project.get_webapp(testWebAppPythonId) + webapp.stop_backend() + eq_(webapp.get_state().is_running(), False) + future = webapp.restart_backend() + future.wait_for_result() + eq_(webapp.get_state().is_running(), True) + webapp.stop_backend() + eq_(webapp.get_state().is_running(), False) \ No newline at end of file