update samples from Release-53 as a part of SDK release

This commit is contained in:
vizhur
2020-05-27 02:45:07 +00:00
parent 12c06a4168
commit 11e8ed2bab
74 changed files with 1085 additions and 713 deletions

View File

@@ -386,4 +386,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
}
}

View File

@@ -21,9 +21,10 @@ image_size = 299
num_channel = 3
def get_class_label_dict():
def get_class_label_dict(labels_dir):
label = []
proto_as_ascii_lines = tf.gfile.GFile("labels.txt").readlines()
labels_path = os.path.join(labels_dir, 'labels.txt')
proto_as_ascii_lines = tf.gfile.GFile(labels_path).readlines()
for l in proto_as_ascii_lines:
label.append(l.rstrip())
return label
@@ -34,14 +35,10 @@ def init():
parser = argparse.ArgumentParser(description="Start a tensorflow model serving")
parser.add_argument('--model_name', dest="model_name", required=True)
parser.add_argument('--labels_name', dest="labels_name", required=True)
parser.add_argument('--labels_dir', dest="labels_dir", required=True)
args, _ = parser.parse_known_args()
workspace = Run.get_context(allow_offline=False).experiment.workspace
label_ds = Dataset.get_by_name(workspace=workspace, name=args.labels_name)
label_ds.download(target_path='.', overwrite=True)
label_dict = get_class_label_dict()
label_dict = get_class_label_dict(args.labels_dir)
classes_num = len(label_dict)
with slim.arg_scope(inception_v3.inception_v3_arg_scope()):

View File

@@ -20,14 +20,8 @@
"metadata": {},
"source": [
"# Use Azure Machine Learning Pipelines for batch prediction\n",
"\n",
"## Note\n",
"This notebook uses public preview functionality (ParallelRunStep). Please install azureml-contrib-pipeline-steps package before running this notebook.\n",
"\n",
"\n",
"In this tutorial, you use Azure Machine Learning service pipelines to run a batch scoring image classification job. The example job uses the pre-trained [Inception-V3](https://arxiv.org/abs/1512.00567) CNN (convolutional neural network) Tensorflow model to classify unlabeled images. Machine learning pipelines optimize your workflow with speed, portability, and reuse so you can focus on your expertise, machine learning, rather than on infrastructure and automation. After building and publishing a pipeline, you can configure a REST endpoint to enable triggering the pipeline from any HTTP library on any platform.\n",
"\n",
"\n",
"In this tutorial, you learn the following tasks:\n",
"\n",
"> * Configure workspace and download sample data\n",
@@ -38,7 +32,7 @@
"> * Build, run, and publish a pipeline\n",
"> * Enable a REST endpoint for the pipeline\n",
"\n",
"If you don\u00e2\u20ac\u2122t have an Azure subscription, create a free account before you begin. Try the [free or paid version of Azure Machine Learning service](https://aka.ms/AMLFree) today."
"If you don't have an Azure subscription, create a free account before you begin. Try the [free or paid version of Azure Machine Learning service](https://aka.ms/AMLFree) today."
]
},
{
@@ -129,7 +123,7 @@
"from azureml.pipeline.core import PipelineData\n",
"\n",
"input_images = Dataset.File.from_files((batchscore_blob, \"batchscoring/images/\"))\n",
"label_ds = Dataset.File.from_files((batchscore_blob, \"batchscoring/labels/*.txt\"))\n",
"label_ds = Dataset.File.from_files((batchscore_blob, \"batchscoring/labels/\"))\n",
"output_dir = PipelineData(name=\"scores\", \n",
" datastore=def_data_store, \n",
" output_path_on_compute=\"batchscoring/results\")"
@@ -149,7 +143,7 @@
"outputs": [],
"source": [
"input_images = input_images.register(workspace = ws, name = \"input_images\")\n",
"label_ds = label_ds.register(workspace = ws, name = \"label_ds\")"
"label_ds = label_ds.register(workspace = ws, name = \"label_ds\", create_new_version=True)"
]
},
{
@@ -260,7 +254,7 @@
"The script `batch_scoring.py` takes the following parameters, which get passed from the `ParallelRunStep` that you create later:\n",
"\n",
"- `--model_name`: the name of the model being used\n",
"- `--labels_name` : the name of the `Dataset` holding the `labels.txt` file \n",
"- `--labels_dir` : the path of the directory containing the `labels.txt` file \n",
"\n",
"The pipelines infrastructure uses the `ArgumentParser` class to pass parameters into pipeline steps. For example, in the code below the first argument `--model_name` is given the property identifier `model_name`. In the `main()` function, this property is accessed using `Model.get_model_path(args.model_name)`."
]
@@ -296,7 +290,8 @@
"from azureml.core.conda_dependencies import CondaDependencies\n",
"from azureml.core.runconfig import DEFAULT_GPU_IMAGE\n",
"\n",
"cd = CondaDependencies.create(pip_packages=[\"tensorflow-gpu==1.15.2\", \"azureml-defaults\"])\n",
"cd = CondaDependencies.create(pip_packages=[\"tensorflow-gpu==1.15.2\",\n",
" \"azureml-core\", \"azureml-dataprep[fuse]\"])\n",
"\n",
"env = Environment(name=\"parallelenv\")\n",
"env.python.conda_dependencies=cd\n",
@@ -317,7 +312,7 @@
"metadata": {},
"outputs": [],
"source": [
"from azureml.contrib.pipeline.steps import ParallelRunConfig\n",
"from azureml.pipeline.steps import ParallelRunConfig\n",
"\n",
"parallel_run_config = ParallelRunConfig(\n",
" environment=env,\n",
@@ -356,18 +351,20 @@
"metadata": {},
"outputs": [],
"source": [
"from azureml.contrib.pipeline.steps import ParallelRunStep\n",
"from azureml.pipeline.steps import ParallelRunStep\n",
"from datetime import datetime\n",
"\n",
"parallel_step_name = \"batchscoring-\" + datetime.now().strftime(\"%Y%m%d%H%M\")\n",
"\n",
"label_config = label_ds.as_named_input(\"labels_input\")\n",
"\n",
"batch_score_step = ParallelRunStep(\n",
" name=parallel_step_name,\n",
" inputs=[input_images.as_named_input(\"input_images\")],\n",
" output=output_dir,\n",
" models=[model],\n",
" arguments=[\"--model_name\", \"inception\",\n",
" \"--labels_name\", \"label_ds\"],\n",
" \"--labels_dir\", label_config],\n",
" side_inputs=[label_config],\n",
" parallel_run_config=parallel_run_config,\n",
" allow_reuse=False\n",
")"

View File

@@ -3,7 +3,7 @@ dependencies:
- pip:
- azureml-sdk
- azureml-pipeline-core
- azureml-contrib-pipeline-steps
- azureml-pipeline-steps
- pandas
- requests
- azureml-widgets