Merge pull request #791 from mlrun/development
[Demos] Change to local=False in batch infer v2 demo. (#790)
aviaIguazio authored Feb 13, 2024
2 parents 4620020 + f7e4fd1 commit b2f8c2e
Showing 1 changed file with 1 addition and 10 deletions.
11 changes: 1 addition & 10 deletions batch_inference_v2/batch_inference_v2.ipynb
@@ -478,10 +478,7 @@
 "# Import the `batch_inference_v2` function from the functions hub:\n",
 "batch_inference_function = mlrun.import_function('hub://batch_inference_v2')\n",
 "# you can import the function from the current directory as well: \n",
-"# batch_inference_function = mlrun.import_function(\"function.yaml\")\n",
-"\n",
-"# Set the desired artifact path:\n",
-"artifact_path = \"./\""
+"# batch_inference_function = mlrun.import_function(\"function.yaml\")\n"
 ]
 },
 {
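For readability, the import cell reads roughly as follows after this hunk: the local artifact_path assignment is dropped, so runs fall back to the project's default artifact path. This is a sketch reconstructed from the diff above, not the verbatim notebook source:

    import mlrun

    # Import the `batch_inference_v2` function from the functions hub:
    batch_inference_function = mlrun.import_function("hub://batch_inference_v2")

    # You can also import the function from the current directory:
    # batch_inference_function = mlrun.import_function("function.yaml")
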
@@ -1448,23 +1445,18 @@
 "# 1. Generate data:\n",
 "generate_data_run = demo_function.run(\n",
 " handler=\"generate_data\",\n",
-" artifact_path=artifact_path,\n",
 " returns=[\"training_set : dataset\", \"prediction_set : dataset\"],\n",
-" local=True,\n",
 ")\n",
 "\n",
 "# 2. Train a model:\n",
 "train_run = demo_function.run(\n",
 " handler=\"train\",\n",
-" artifact_path=artifact_path,\n",
 " inputs={\"training_set\": generate_data_run.outputs[\"training_set\"]},\n",
-" local=True,\n",
 ")\n",
 "\n",
 "# 3. Perform batch prediction:\n",
 "batch_inference_run = batch_inference_function.run(\n",
 " handler=\"infer\",\n",
-" artifact_path=artifact_path,\n",
 " inputs={\"dataset\": generate_data_run.outputs[\"prediction_set\"]},\n",
 " params={\n",
 " \"model_path\": train_run.outputs[\"model\"],\n",
@@ -1474,7 +1466,6 @@
 " \"model_endpoint_drift_threshold\": 0.2,\n",
 " \"model_endpoint_possible_drift_threshold\": 0.1,\n",
 " },\n",
-" local=True,\n",
 ")"
 ]
 },
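Taken together, the run cell after this commit reads roughly like the sketch below: with artifact_path and local=True removed, each run uses MLRun's defaults (remote execution and the project's configured artifact path). This is reconstructed from the hunks above; parameters elided from the diff are omitted:

    # 1. Generate data:
    generate_data_run = demo_function.run(
        handler="generate_data",
        returns=["training_set : dataset", "prediction_set : dataset"],
    )

    # 2. Train a model:
    train_run = demo_function.run(
        handler="train",
        inputs={"training_set": generate_data_run.outputs["training_set"]},
    )

    # 3. Perform batch prediction:
    batch_inference_run = batch_inference_function.run(
        handler="infer",
        inputs={"dataset": generate_data_run.outputs["prediction_set"]},
        params={
            "model_path": train_run.outputs["model"],
            # ... parameters not shown in the diff are omitted here ...
            "model_endpoint_drift_threshold": 0.2,
            "model_endpoint_possible_drift_threshold": 0.1,
        },
    )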
