@@ -125,8 +125,7 @@ following sql statements
 Want to increase the performance of your model by tuning the
 parameters? Use hyperparameter tuning directly
 in SQL with the syntax below, choosing different tuners
-from the dask_ml package based on memory and compute constraints and
-for more details refer to the `dask ml documentation <https://ml.dask.org/hyper-parameter-search.html#incremental-hyperparameter-optimization>`_
+based on memory and compute constraints.

 ..
     TODO - add a GPU section to these examples once we have working CREATE EXPERIMENT tests for GPU
@@ -135,7 +134,7 @@ for more details refer to the `dask ml documentation <https://ml.dask.org/hyper-

     CREATE EXPERIMENT my_exp WITH (
         model_class = 'sklearn.ensemble.GradientBoostingClassifier',
-        experiment_class = 'dask_ml.model_selection.GridSearchCV',
+        experiment_class = 'sklearn.model_selection.GridSearchCV',
         tune_parameters = (n_estimators = ARRAY [16, 32, 2],
                            learning_rate = ARRAY [0.1,0.01,0.001],
                            max_depth = ARRAY [3,4,5,10]
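The hunk ends mid-statement, so only the parameter block of the updated ``CREATE EXPERIMENT`` is visible above. For orientation, a complete statement might look like the sketch below; the ``target_column`` and the training query are assumptions modelled on the ``training_data`` / ``label`` example later in this document, not part of this change.

    CREATE EXPERIMENT my_exp WITH (
        model_class = 'sklearn.ensemble.GradientBoostingClassifier',
        experiment_class = 'sklearn.model_selection.GridSearchCV',
        tune_parameters = (n_estimators = ARRAY [16, 32, 2],
                           learning_rate = ARRAY [0.1,0.01,0.001],
                           max_depth = ARRAY [3,4,5,10]
                           ),
        -- assumed target column, matching the boolean ``label`` used below
        target_column = 'label'
    ) AS (
        -- assumed training query over the registered training_data table
        SELECT * FROM training_data
    )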
@@ -258,7 +257,6 @@ and the boolean target ``label``.
     SELECT * FROM training_data

     -- We can now train a model from the sklearn package.
-    -- Make sure to install it together with dask-ml with conda or pip.
     CREATE OR REPLACE MODEL my_model WITH (
         model_class = 'sklearn.ensemble.GradientBoostingClassifier',
         wrap_predict = True,
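Not part of this diff, but as a usage sketch of the model trained above: dask-sql can apply a registered model with the ``PREDICT`` table function. The feature columns selected here are assumptions standing in for whatever ``training_data`` actually contains.

    -- hypothetical follow-up query: apply the trained model to new rows
    SELECT * FROM PREDICT(
        MODEL my_model,
        SELECT x, y FROM training_data
    )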
@@ -282,7 +280,7 @@ and the boolean target ``label``.
     -- experiment to tune different hyperparameters
     CREATE EXPERIMENT my_exp WITH (
         model_class = 'sklearn.ensemble.GradientBoostingClassifier',
-        experiment_class = 'dask_ml.model_selection.GridSearchCV',
+        experiment_class = 'sklearn.model_selection.GridSearchCV',
         tune_parameters = (n_estimators = ARRAY [16, 32, 2],
                            learning_rate = ARRAY [0.1,0.01,0.001],
                            max_depth = ARRAY [3,4,5,10]
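One possible follow-up once the experiment has run (again not part of this change): recent dask-sql versions are expected to register the search results under the experiment name, so the tried hyperparameter combinations and their scores can be inspected with plain SQL. Treat this behaviour as an assumption and verify it against the dask-sql release you are using.

    -- hypothetical inspection query, assuming the experiment results
    -- are registered as a table named after the experiment
    SELECT * FROM my_exp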