Mirror of https://github.com/Azure/MachineLearningNotebooks.git
Synced 2025-12-19 17:17:04 -05:00
Merge pull request #1779 from Azure/release_update/Release-149
update samples from Release-149 as a part of SDK release
@@ -149,12 +149,7 @@ def get_backtest_pipeline(
         inputs=[forecasts.as_mount()],
         outputs=[data_results],
         source_directory=PROJECT_FOLDER,
-        arguments=[
-            "--forecasts",
-            forecasts,
-            "--output-dir",
-            data_results,
-        ],
+        arguments=["--forecasts", forecasts, "--output-dir", data_results],
         runconfig=run_config,
         compute_target=compute_target,
         allow_reuse=False,
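The hunk above only reflows the keyword arguments of an Azure ML pipeline step. For readers who want to see the surrounding call, here is a minimal, hypothetical sketch of a PythonScriptStep wired with the same parameters; the step name, script name, compute cluster name, source directory, and PipelineData objects are illustrative assumptions, not values taken from the repository.

# Hypothetical sketch only: the names below ("results-analysis", "compute_metrics.py",
# "cpu-cluster", "./project") are placeholders, not the sample's real values.
from azureml.core import Workspace
from azureml.core.runconfig import RunConfiguration
from azureml.pipeline.core import PipelineData
from azureml.pipeline.steps import PythonScriptStep

ws = Workspace.from_config()                          # assumes a config.json is present
datastore = ws.get_default_datastore()
compute_target = ws.compute_targets["cpu-cluster"]    # assumed cluster name
PROJECT_FOLDER = "./project"                          # assumed source directory

forecasts = PipelineData(name="forecasts", datastore=datastore)
data_results = PipelineData(name="results", datastore=datastore)
run_config = RunConfiguration()

results_step = PythonScriptStep(
    name="results-analysis",
    script_name="compute_metrics.py",
    inputs=[forecasts.as_mount()],
    outputs=[data_results],
    source_directory=PROJECT_FOLDER,
    arguments=["--forecasts", forecasts, "--output-dir", data_results],
    runconfig=run_config,
    compute_target=compute_target,
    allow_reuse=False,
)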
@@ -23,11 +23,7 @@ except ImportError:


 def infer_forecasting_dataset_tcn(
-    X_test,
-    y_test,
-    model,
-    output_path,
-    output_dataset_name="results",
+    X_test, y_test, model, output_path, output_dataset_name="results"
 ):

     y_pred, df_all = model.forecast(X_test, y_test)
@@ -71,10 +67,7 @@ def get_model(model_path, model_file_name):
 def get_args():
     parser = argparse.ArgumentParser()
     parser.add_argument(
-        "--model_name",
-        type=str,
-        dest="model_name",
-        help="Model to be loaded",
+        "--model_name", type=str, dest="model_name", help="Model to be loaded"
     )

     parser.add_argument(
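The argparse hunk is likewise only a formatting change. A self-contained sketch of the same pattern is below; the second "--model_path" argument is an assumption added to show how the hunk's trailing parser.add_argument( call would typically continue.

# Standalone sketch of the argument parsing shown in the hunk above.
# "--model_path" is an illustrative addition; only "--model_name" appears in the diff.
import argparse


def get_args():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--model_name", type=str, dest="model_name", help="Model to be loaded"
    )
    parser.add_argument(
        "--model_path", type=str, dest="model_path", help="Path to the model files"
    )
    # parse_known_args ignores any extra arguments the hosting environment passes in
    args, _ = parser.parse_known_args()
    return args


if __name__ == "__main__":
    print(get_args())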
@@ -108,12 +101,7 @@ def get_args():
     return args


-def get_data(
-    run,
-    fitted_model,
-    target_column_name,
-    test_dataset_name,
-):
+def get_data(run, fitted_model, target_column_name, test_dataset_name):

     # get input dataset by name
     test_dataset = Dataset.get_by_name(run.experiment.workspace, test_dataset_name)
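For context, the only call visible inside get_data is Dataset.get_by_name on the run's workspace. A minimal sketch of what such a helper could look like follows; everything after the get_by_name call (the pandas conversion and the target-column split) is an assumption for illustration rather than the script's actual body.

# Hypothetical sketch of a get_data helper, assuming the test set is a registered
# TabularDataset and that the target column is split off for later comparison.
# fitted_model is kept only to match the signature shown in the diff.
from azureml.core import Dataset, Run


def get_data(run, fitted_model, target_column_name, test_dataset_name):
    # get input dataset by name (as in the hunk above)
    test_dataset = Dataset.get_by_name(run.experiment.workspace, test_dataset_name)
    test_df = test_dataset.to_pandas_dataframe()
    # assumption: pull the target column out so the caller gets (features, target)
    y_test = test_df.pop(target_column_name).values
    return test_df, y_test


# Typical usage inside the entry script: the run context supplies the workspace.
run = Run.get_context()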
@@ -159,10 +147,7 @@ if __name__ == "__main__":
     fitted_model = get_model(model_path, model_file_name)

     X_test_df, y_test = get_data(
-        run,
-        fitted_model,
-        target_column_name,
-        test_dataset_name,
+        run, fitted_model, target_column_name, test_dataset_name
     )

     infer_forecasting_dataset_tcn(
@@ -69,17 +69,19 @@
     "# ONNX Model Zoo and save it in the same folder as this tutorial\n",
     "\n",
     "import urllib.request\n",
+    "import os\n",
     "\n",
     "onnx_model_url = \"https://github.com/onnx/models/blob/main/vision/body_analysis/emotion_ferplus/model/emotion-ferplus-7.tar.gz?raw=true\"\n",
     "\n",
     "urllib.request.urlretrieve(onnx_model_url, filename=\"emotion-ferplus-7.tar.gz\")\n",
+    "os.mkdir(\"emotion_ferplus\")\n",
     "\n",
     "# the ! magic command tells our jupyter notebook kernel to run the following line of \n",
     "# code from the command line instead of the notebook kernel\n",
     "\n",
     "# We use tar and xvcf to unzip the files we just retrieved from the ONNX model zoo\n",
     "\n",
-    "!tar xvzf emotion-ferplus-7.tar.gz"
+    "!tar xvzf emotion-ferplus-7.tar.gz -C emotion_ferplus"
    ]
   },
   {
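The notebook change above creates the emotion_ferplus folder with os.mkdir and then un-tars the archive into it via the ! shell magic. A pure-Python equivalent is sketched below; the tarfile/os.makedirs approach is an alternative shown for illustration, not what the notebook itself runs.

# Sketch: download and unpack the emotion-ferplus archive without shell magics.
import os
import tarfile
import urllib.request

onnx_model_url = (
    "https://github.com/onnx/models/blob/main/vision/body_analysis/"
    "emotion_ferplus/model/emotion-ferplus-7.tar.gz?raw=true"
)
archive = "emotion-ferplus-7.tar.gz"
target_dir = "emotion_ferplus"

urllib.request.urlretrieve(onnx_model_url, filename=archive)
os.makedirs(target_dir, exist_ok=True)   # unlike os.mkdir, this tolerates re-runs
with tarfile.open(archive, "r:gz") as tar:
    tar.extractall(path=target_dir)      # equivalent of: !tar xvzf ... -C emotion_ferplus
print(os.listdir(target_dir))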
@@ -130,7 +132,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "model_dir = \"emotion_ferplus\" # replace this with the location of your model files\n",
+    "model_dir = \"emotion_ferplus/model\" # replace this with the location of your model files\n",
     "\n",
     "# leave as is if it's in the same folder as this notebook"
    ]
@@ -496,13 +498,12 @@
     "\n",
     "# to use parsers to read in our model/data\n",
     "import json\n",
-    "import os\n",
     "\n",
     "test_inputs = []\n",
     "test_outputs = []\n",
     "\n",
-    "# read in 3 testing images from .pb files\n",
-    "test_data_size = 3\n",
+    "# read in 1 testing images from .pb files\n",
+    "test_data_size = 1\n",
     "\n",
     "for num in np.arange(test_data_size):\n",
     "    input_test_data = os.path.join(model_dir, 'test_data_set_{0}'.format(num), 'input_0.pb')\n",
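The loop above only builds the path to each test_data_set_{n}/input_0.pb file; the loading code is outside the hunk. In the ONNX Model Zoo archives these .pb files are serialized TensorProto messages, so a plausible way to read them into NumPy arrays is sketched below (the numpy_helper approach and the output_0.pb path are assumptions, not lines from the notebook).

# Sketch: load the Model Zoo's protobuf test tensors into NumPy arrays.
# Assumes the onnx package is installed and model_dir points at the unpacked archive.
import os

import numpy as np
import onnx
from onnx import numpy_helper

model_dir = "emotion_ferplus/model"   # as set earlier in the notebook
test_data_size = 1


def load_tensor(path):
    tensor = onnx.TensorProto()
    with open(path, "rb") as f:
        tensor.ParseFromString(f.read())
    return numpy_helper.to_array(tensor)


test_inputs, test_outputs = [], []
for num in np.arange(test_data_size):
    data_dir = os.path.join(model_dir, "test_data_set_{0}".format(num))
    test_inputs.append(load_tensor(os.path.join(data_dir, "input_0.pb")))
    test_outputs.append(load_tensor(os.path.join(data_dir, "output_0.pb")))  # assumed file name

print(test_inputs[0].shape, test_outputs[0].shape)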
@@ -533,7 +534,7 @@
    },
    "source": [
     "### Show some sample images\n",
-    "We use `matplotlib` to plot 3 test images from the dataset."
+    "We use `matplotlib` to plot 1 test images from the dataset."
    ]
   },
   {
@@ -547,7 +548,7 @@
    "outputs": [],
    "source": [
     "plt.figure(figsize = (20, 20))\n",
-    "for test_image in np.arange(3):\n",
+    "for test_image in np.arange(test_data_size):\n",
     "    test_inputs[test_image].reshape(1, 64, 64)\n",
     "    plt.subplot(1, 8, test_image+1)\n",
     "    plt.axhline('')\n",
@@ -69,10 +69,12 @@
     "# ONNX Model Zoo and save it in the same folder as this tutorial\n",
     "\n",
     "import urllib.request\n",
+    "import os\n",
     "\n",
     "onnx_model_url = \"https://github.com/onnx/models/blob/main/vision/classification/mnist/model/mnist-7.tar.gz?raw=true\"\n",
     "\n",
-    "urllib.request.urlretrieve(onnx_model_url, filename=\"mnist-7.tar.gz\")"
+    "urllib.request.urlretrieve(onnx_model_url, filename=\"mnist-7.tar.gz\")\n",
+    "os.mkdir(\"mnist\")"
    ]
   },
   {
@@ -86,7 +88,7 @@
     "\n",
     "# We use tar and xvcf to unzip the files we just retrieved from the ONNX model zoo\n",
     "\n",
-    "!tar xvzf mnist-7.tar.gz"
+    "!tar xvzf mnist-7.tar.gz -C mnist"
    ]
   },
   {
@@ -137,7 +139,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "model_dir = \"mnist\" # replace this with the location of your model files\n",
+    "model_dir = \"mnist/model\" # replace this with the location of your model files\n",
     "\n",
     "# leave as is if it's in the same folder as this notebook"
    ]
@@ -447,13 +449,12 @@
     "\n",
     "# to use parsers to read in our model/data\n",
     "import json\n",
-    "import os\n",
     "\n",
     "test_inputs = []\n",
     "test_outputs = []\n",
     "\n",
-    "# read in 3 testing images from .pb files\n",
-    "test_data_size = 3\n",
+    "# read in 1 testing images from .pb files\n",
+    "test_data_size = 1\n",
     "\n",
     "for i in np.arange(test_data_size):\n",
     "    input_test_data = os.path.join(model_dir, 'test_data_set_{0}'.format(i), 'input_0.pb')\n",
@@ -486,7 +487,7 @@
    },
    "source": [
     "### Show some sample images\n",
-    "We use `matplotlib` to plot 3 test images from the dataset."
+    "We use `matplotlib` to plot 1 test images from the dataset."
    ]
   },
   {
@@ -500,7 +501,7 @@
    "outputs": [],
    "source": [
     "plt.figure(figsize = (16, 6))\n",
-    "for test_image in np.arange(3):\n",
+    "for test_image in np.arange(test_data_size):\n",
     "    plt.subplot(1, 15, test_image+1)\n",
     "    plt.axhline('')\n",
     "    plt.axvline('')\n",
@@ -8,8 +8,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
     rm -rf /var/lib/apt/lists/* && \
     rm -rf /usr/share/man/*

-RUN conda install -y conda=4.12.0 python=3.7 && conda clean -ay
-RUN pip install ray-on-aml==0.1.6 & \
+RUN conda install -y conda=4.13.0 python=3.7 && conda clean -ay
+RUN pip install ray-on-aml==0.2.1 & \
     pip install --no-cache-dir \
         azureml-defaults \
         azureml-dataset-runtime[fuse,pandas] \