Mirror of https://github.com/Azure/MachineLearningNotebooks.git, synced 2025-12-20 09:37:04 -05:00

Compare commits (22 commits): sdgilley/u ... shwinne1
| Author | SHA1 | Date |
|---|---|---|
| | cf0490ab92 | |
| | 9f0e817c70 | |
| | a4d713d19b | |
| | 91a20a0ff9 | |
| | a0c510bf42 | |
| | 116d57c012 | |
| | 660708db63 | |
| | 206df82f9b | |
| | 7cfb2da5b8 | |
| | e5adb4af3a | |
| | b849267220 | |
| | 9891080b70 | |
| | 2974e86aa0 | |
| | 0a18161193 | |
| | c676cc9969 | |
| | 50f4bc9643 | |
| | f3c7072735 | |
| | 44295d9e16 | |
| | 710fc0bb4b | |
| | c44dba427f | |
| | 8066a9263c | |
| | 054aadffed | |
215
build_nb_index.py
Normal file
@@ -0,0 +1,215 @@
|
||||
# ---------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# ---------------------------------------------------------
|
||||
|
||||
### USAGE
|
||||
#
|
||||
# 1. Add the following metadata elements to the notebook
|
||||
#
|
||||
# "friendly_name": "string", friendly name for notebook
|
||||
# "exclude_from_index": true/false, setting true excludes the notebook from index
|
||||
# "order_index": integer, smaller value moves notebook closer to beginning
|
||||
# "category": "starter", "tutorial", "training", "deployment" or "other"
|
||||
# "tags": [ "featured" ], optional, only supported tag to highlight notebook with :star: symbol
|
||||
# "task": "string", description of notebook task
|
||||
# "datasets": [ "dataset 1", "dataset 2"], list of datasets, can be ["None"]
|
||||
# "compute": [ "compute 1", "compute 2" ], list of computes, can be ["None"]
|
||||
# "deployment": ["deployment 1", "deployment 2"], list of deployment targets, can be ["None"]
|
||||
# "framework": ["fw 1", "fw2"], list of ml framework, can be ["None"]
|
||||
#
|
||||
# 2. Then run
|
||||
#
|
||||
# python build_nb_index.py <root folder of notebooks>
|
||||
#
|
||||
# 3. The script produces an index.md file with tables of notebook indices
|
||||
|
||||
### Example metadata section
|
||||
|
||||
'''
|
||||
"metadata": {
|
||||
"authors": [
|
||||
{
|
||||
"name": "cforbe"
|
||||
}
|
||||
],
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3.6",
|
||||
"language": "python",
|
||||
"name": "python36"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.6.7"
|
||||
},
|
||||
"msauthor": "trbye",
|
||||
"friendly_name": "Prepare data for regression modeling",
|
||||
"exclude_from_index": false,
|
||||
"order_index": 1,
|
||||
"category": "tutorial",
|
||||
"tags": [
|
||||
"featured"
|
||||
],
|
||||
"task": "Regression",
|
||||
"datasets": [
|
||||
"NYC Taxi"
|
||||
],
|
||||
"compute": [
|
||||
"local"
|
||||
],
|
||||
"deployment": [
|
||||
"None"
|
||||
],
|
||||
"framework": [
|
||||
"Azure ML AutoML"
|
||||
]
|
||||
}
|
||||
'''
|
||||
|
||||
import os, json, sys
|
||||
from shutil import copyfile, copytree, rmtree
|
||||
|
||||
|
||||
# Index building walk over notebook folder
|
||||
def post_process(notebooks_dir):
|
||||
indexer = NotebookIndex()
|
||||
n_dest = len(notebooks_dir)
|
||||
for r, d, f in os.walk(notebooks_dir):
|
||||
for file in f:
|
||||
# Handle only notebooks
|
||||
if file.endswith(".ipynb") and not file.endswith('checkpoint.ipynb'):
|
||||
try:
|
||||
file_path = os.path.join(r, file)
|
||||
with open(file_path, 'r') as fin:
|
||||
content = json.load(fin)
|
||||
print(file)
|
||||
indexer.add_to_index(os.path.join(r[n_dest:],file), content["metadata"])
|
||||
except Exception as e:
|
||||
print("Problem: ",str(e))
|
||||
indexer.write_index("./index.md")
|
||||
|
||||
### Customize these to make the index look different
|
||||
|
||||
index_template = '''
|
||||
# Index
|
||||
Azure Machine Learning is a cloud service that you use to train, deploy, automate, and manage machine learning models. This index should assist in navigating the Azure Machine Learning notebook samples and encourage efficient retrieval of topics and content.
|
||||
|
||||
|
||||
## Getting Started
|
||||
|
||||
|Title| Task | Dataset | Training Compute | Deployment Target | ML Framework |
|
||||
|:----|:-----|:-------:|:----------------:|:-----------------:|:------------:|
|
||||
GETTING_STARTED_NBS
|
||||
|
||||
## Tutorials
|
||||
|
||||
|Title| Task | Dataset | Training Compute | Deployment Target | ML Framework |
|
||||
|:----|:-----|:-------:|:----------------:|:-----------------:|:------------:|
|
||||
TUTORIAL_NBS
|
||||
|
||||
## Training
|
||||
|
||||
|Title| Task | Dataset | Training Compute | Deployment Target | ML Framework |
|
||||
|:----|:-----|:-------:|:----------------:|:-----------------:|:------------:|
|
||||
TRAINING_NBS
|
||||
|
||||
## Deployment
|
||||
|
||||
|Title| Task | Dataset | Training Compute | Deployment Target | ML Framework |
|
||||
|:----|:-----|:-------:|:----------------:|:-----------------:|:------------:|
|
||||
DEPLOYMENT_NBS
|
||||
|
||||
## Other Notebooks
|
||||
|Title| Task | Dataset | Training Compute | Deployment Target | ML Framework |
|
||||
|:----|:-----|:-------:|:----------------:|:-----------------:|:------------:|
|
||||
OTHER_NBS
|
||||
'''
|
||||
|
||||
index_row = '''| NB_SYMBOL[NB_NAME](NB_PATH) | NB_TASK | NB_DATASET | NB_COMPUTE | NB_DEPLOYMENT | NB_FRAMEWORK |'''
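# Example of a fully substituted row, taken from the generated index.md later in this change:
# | [Pipeline test](https://github.com/Azure/MachineLearningNotebooks/blob/master//machine-learning-pipelines/pipeline-style-transfer/pipeline-style-transfer.ipynb) | Regression | NYC Taxi | local | None | Azure ML AutoML |
# NB_SYMBOL becomes ":star:" for notebooks tagged "featured" and is removed otherwise.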
|
||||
|
||||
index_file = "index.md"
|
||||
|
||||
nb_types = ["starter", "tutorial", "training", "deployment", "other"]
|
||||
replace_strings = ["GETTING_STARTED_NBS", "TUTORIAL_NBS", "TRAINING_NBS", "DEPLOYMENT_NBS", "OTHER_NBS"]
|
||||
|
||||
class NotebookIndex:
|
||||
def __init__(self):
|
||||
self.index = index_template
|
||||
self.nb_rows = {}
|
||||
for elem in nb_types:
|
||||
self.nb_rows[elem] = []
|
||||
|
||||
def add_to_index(self, path_to_notebook, metadata):
|
||||
repo_url = "https://github.com/Azure/MachineLearningNotebooks/blob/master/"
|
||||
|
||||
if "exclude_from_index" in metadata:
|
||||
if metadata["exclude_from_index"]:
|
||||
return
|
||||
|
||||
if "friendly_name" in metadata:
|
||||
this_row = index_row.replace("NB_NAME",metadata["friendly_name"])
|
||||
else:
|
||||
this_name = os.path.basename(path_to_notebook)
|
||||
this_row = index_row.replace("NB_NAME", this_name[:-6])
|
||||
|
||||
path_to_notebook = path_to_notebook.replace("\\","/")
|
||||
this_row = this_row.replace("NB_PATH", repo_url + path_to_notebook)
|
||||
|
||||
if "task" in metadata:
|
||||
this_row = this_row.replace("NB_TASK", metadata["task"])
|
||||
if "datasets" in metadata:
|
||||
this_row = this_row.replace("NB_DATASET", ", ".join(metadata["datasets"]))
|
||||
if "compute" in metadata:
|
||||
this_row = this_row.replace("NB_COMPUTE", ", ".join(metadata["compute"]))
|
||||
if "deployment" in metadata:
|
||||
this_row = this_row.replace("NB_DEPLOYMENT", ", ".join(metadata["deployment"]))
|
||||
if "framework" in metadata:
|
||||
this_row = this_row.replace("NB_FRAMEWORK", ", ".join(metadata["framework"]))
|
||||
## Fall back
|
||||
this_row = this_row.replace("NB_TASK","")
|
||||
this_row = this_row.replace("NB_DATASET","")
|
||||
this_row = this_row.replace("NB_COMPUTE","")
|
||||
this_row = this_row.replace("NB_DEPLOYMENT","")
|
||||
this_row = this_row.replace("NB_FRAMEWORK","")
|
||||
|
||||
if "tags" in metadata:
|
||||
if "featured" in metadata["tags"]:
|
||||
this_row = this_row.replace("NB_SYMBOL",":star:")
|
||||
## Fall back
|
||||
this_row = this_row.replace("NB_SYMBOL", "")
|
||||
|
||||
index_order = 9999999
|
||||
if "index_order" in metadata:
|
||||
index_order = metadata["order_index"]
|
||||
|
||||
if "category" in metadata:
|
||||
self.nb_rows[metadata["category"]].append((index_order, this_row))
|
||||
else:
|
||||
self.nb_rows["other"].append((index_order, this_row))
|
||||
|
||||
def sort_and_stringify(self,section):
|
||||
sorted_index = sorted(self.nb_rows[section], key = lambda x: x[0])
|
||||
sorted_index = [x[1] for x in sorted_index]
|
||||
## TODO: Make this portable
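## One option (an assumption, not implemented here): join with os.linesep instead of "\n"
## so the generated index.md uses the platform's native line endings.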
|
||||
return "\n".join(sorted_index)
|
||||
|
||||
def write_index(self, index_file):
|
||||
for nb_type, replace_string in zip(nb_types, replace_strings):
|
||||
nb_string = self.sort_and_stringify(nb_type)
|
||||
self.index = self.index.replace(replace_string, nb_string)
|
||||
with open(index_file,"w") as fin:
|
||||
fin.write(self.index)
|
||||
|
||||
try:
|
||||
dest_repo = sys.argv[1]
|
||||
except IndexError:
|
||||
dest_repo = "./MachineLearningNotebooks"
|
||||
|
||||
post_process(dest_repo)
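For reference, a minimal companion sketch (a hypothetical helper, not part of the script above) that checks whether a notebook carries the metadata keys the indexer reads; it can be handy to run before regenerating index.md:

import json

# Keys consumed by NotebookIndex.add_to_index above: "friendly_name" and "category" drive
# the title and section, the others fill the table columns; "order_index" controls ordering.
EXPECTED_KEYS = ["friendly_name", "category", "task", "datasets",
                 "compute", "deployment", "framework", "order_index"]


def check_notebook_metadata(path):
    # Print any indexing-related metadata keys missing from a notebook; return True if none are missing.
    with open(path, 'r') as f:
        metadata = json.load(f).get("metadata", {})
    missing = [k for k in EXPECTED_KEYS if k not in metadata]
    if missing:
        print("{}: missing {}".format(path, ", ".join(missing)))
    return not missing


# Example call (the path is illustrative):
# check_notebook_metadata("./MachineLearningNotebooks/configuration.ipynb")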
|
||||
@@ -214,7 +214,8 @@
|
||||
"* You do not have permission to create a resource group if it's non-existing.\n",
|
||||
"* You are not a subscription owner or contributor and no Azure ML workspaces have ever been created in this subscription\n",
|
||||
"\n",
|
||||
"If workspace creation fails, please work with your IT admin to provide you with the appropriate permissions or to provision the required resources."
|
||||
"If workspace creation fails, please work with your IT admin to provide you with the appropriate permissions or to provision the required resources.\n",
|
||||
"To learn more about the Enterprise SKU, please visit the Pricing and SKU details page."
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -230,11 +231,14 @@
|
||||
"from azureml.core import Workspace\n",
|
||||
"\n",
|
||||
"# Create the workspace using the specified parameters\n",
|
||||
"# To create an Enterprise workspace, please specify the sku = enterprise\n",
|
||||
|
||||
"ws = Workspace.create(name = workspace_name,\n",
|
||||
" subscription_id = subscription_id,\n",
|
||||
" resource_group = resource_group, \n",
|
||||
" location = workspace_region,\n",
|
||||
" create_resource_group = True,\n",
|
||||
" sku = basic,\n",
|
||||
" exist_ok = True)\n",
|
||||
"ws.get_details()\n",
|
||||
"\n",
|
||||
|
||||
@@ -1,5 +1,12 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
""
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
@@ -13,23 +20,20 @@
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
""
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Automated Machine Learning\n",
|
||||
"_**Classification with Deployment using a Bank Marketing Dataset**_\n",
|
||||
"# Unique Descriptive Title\n",
|
||||
"_**Unique Subtitle**_\n",
|
||||
"\n",
|
||||
"Introduction that describes in a customer friendly language, what they will do and accomplish.\n".
|
||||
"## Contents\n",
|
||||
"1. [Introduction](#Introduction)\n",
|
||||
"1. [Setup](#Setup)\n",
|
||||
"1. [Train](#Train)\n",
|
||||
"1. [Results](#Results)\n",
|
||||
"1. [Deploy](#Deploy)\n",
|
||||
"1. [Test](#Test)\n",
|
||||
"1. [Prerequisites](#Prerequisites)\n",
|
||||
"1. [Configuration and Setup](#Setup)\n",
|
||||
"1. [Working with Data](#Working with Data)\n",
|
||||
"1. [Training](#Training)\n",
|
||||
"1. [Productionizing](#Productionizing)\n",
|
||||
"1. [Model Monitoring](#Model Monitoring)\n",
|
||||
"1. [Clean up resources](#Clean up resources)\n",
|
||||
"1. [Next Steps](#Next Steps)\n",
|
||||
"1. [Acknowledgements](#Acknowledgements)"
|
||||
]
|
||||
},
|
||||
@@ -37,21 +41,10 @@
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Introduction\n",
|
||||
"## Configuration\n",
|
||||
"\n",
|
||||
"In this example we use the UCI Bank Marketing dataset to showcase how you can use AutoML for a classification problem and deploy it to an Azure Container Instance (ACI). The classification goal is to predict if the client will subscribe to a term deposit with the bank.\n",
|
||||
"\n",
|
||||
"If you are using an Azure Machine Learning Notebook VM, you are all set. Otherwise, go through the [configuration](../../../configuration.ipynb) notebook first if you haven't already to establish your connection to the AzureML Workspace. \n",
|
||||
"\n",
|
||||
"In this notebook you will learn how to:\n",
|
||||
"1. Create an experiment using an existing workspace.\n",
|
||||
"2. Configure AutoML using `AutoMLConfig`.\n",
|
||||
"3. Train the model using local compute.\n",
|
||||
"4. Explore the results.\n",
|
||||
"5. Register the model.\n",
|
||||
"6. Create a container image.\n",
|
||||
"7. Create an Azure Container Instance (ACI) service.\n",
|
||||
"8. Test the ACI service."
|
||||
"If you are using an Azure Machine Learning Compute Instance, you are all set. Otherwise, go through the [configuration](../../../configuration.ipynb) notebook first if you haven't already to establish your connection to the AzureML Workspace. \n",
|
||||
"Please note that a Basic edition workspace is created by default in the configuration.ipynb file.\n",
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -60,8 +53,7 @@
|
||||
"source": [
|
||||
"## Setup\n",
|
||||
"\n",
|
||||
"As part of the setup you have already created an Azure ML `Workspace` object. For AutoML you will need to create an `Experiment` object, which is a named object in a `Workspace` used to run experiments."
|
||||
]
|
||||
"As part of the setup you have already created an Azure ML `Workspace` object....\n",
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
@@ -69,22 +61,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import json\n",
|
||||
"import logging\n",
|
||||
"\n",
|
||||
"from matplotlib import pyplot as plt\n",
|
||||
"import numpy as np\n",
|
||||
"import pandas as pd\n",
|
||||
"import os\n",
|
||||
"from sklearn import datasets\n",
|
||||
"import azureml.dataprep as dprep\n",
|
||||
"from sklearn.model_selection import train_test_split\n",
|
||||
"\n",
|
||||
"import azureml.core\n",
|
||||
"from azureml.core.experiment import Experiment\n",
|
||||
"from azureml.core.workspace import Workspace\n",
|
||||
"from azureml.train.automl import AutoMLConfig\n",
|
||||
"from azureml.train.automl.run import AutoMLRun"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -93,26 +70,21 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"ws = Workspace.from_config()\n",
|
||||
"tenant_id = os.environ['TENANT_ID’]\n",
|
||||
"client_id = os.environ['CLIENT_ID’]\n",
|
||||
"run = Run.get_context()\n",
|
||||
"secret_name = “{0}-secret”.format(client_id)\n",
|
||||
"secret = run.get_secret(name=secret_name)\n",
|
||||
"sp_auth = ServicePrincipalAuthentication(tenant_id, client_id, secret)\n",
|
||||
"ws = Workspace.from_config(auth=sp_auth)\n",
|
||||
"\n",
|
||||
"# choose a name for experiment\n",
|
||||
"experiment_name = 'automl-classification-bmarketing'\n",
|
||||
"# choose a unique name for experiment\n",
|
||||
"experiment_name = 'unique-name'\n",
|
||||
"# project folder\n",
|
||||
"project_folder = './sample_projects/automl-classification-bankmarketing'\n",
|
||||
"project_folder = './sample_projects/test'\n",
|
||||
"\n",
|
||||
"experiment=Experiment(ws, experiment_name)\n",
|
||||
"\n",
|
||||
"output = {}\n",
|
||||
"output['SDK version'] = azureml.core.VERSION\n",
|
||||
"output['Subscription ID'] = ws.subscription_id\n",
|
||||
"output['Workspace'] = ws.name\n",
|
||||
"output['Resource Group'] = ws.resource_group\n",
|
||||
"output['Location'] = ws.location\n",
|
||||
"output['Project Directory'] = project_folder\n",
|
||||
"output['Experiment Name'] = experiment.name\n",
|
||||
"pd.set_option('display.max_colwidth', -1)\n",
|
||||
"outputDf = pd.DataFrame(data = output, index = [''])\n",
|
||||
"outputDf.T"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -120,580 +92,74 @@
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Create or Attach existing AmlCompute\n",
|
||||
"You will need to create a compute target for your AutoML run. In this tutorial, you create AmlCompute as your training compute resource.\n",
|
||||
"You will need to create a compute target for your run. In this tutorial, you create AmlCompute as your training compute resource.\n",
|
||||
"#### Creation of AmlCompute takes approximately 5 minutes. \n",
|
||||
"If the AmlCompute with that name is already in your workspace this code will skip the creation process.\n",
|
||||
"As with other Azure services, there are limits on certain resources (e.g. AmlCompute) associated with the Azure Machine Learning service. Please read this article on the default limits and how to request more quota."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from azureml.core.compute import AmlCompute\n",
|
||||
"from azureml.core.compute import ComputeTarget\n",
|
||||
"# Working with Data\n",
|
||||
"\n",
|
||||
"# Choose a name for your cluster.\n",
|
||||
"amlcompute_cluster_name = \"automlcl\"\n",
|
||||
"\n",
|
||||
"found = False\n",
|
||||
"# Check if this compute target already exists in the workspace.\n",
|
||||
"cts = ws.compute_targets\n",
|
||||
"if amlcompute_cluster_name in cts and cts[amlcompute_cluster_name].type == 'AmlCompute':\n",
|
||||
" found = True\n",
|
||||
" print('Found existing compute target.')\n",
|
||||
" compute_target = cts[amlcompute_cluster_name]\n",
|
||||
" \n",
|
||||
"if not found:\n",
|
||||
" print('Creating a new compute target...')\n",
|
||||
" provisioning_config = AmlCompute.provisioning_configuration(vm_size = \"STANDARD_D2_V2\", # for GPU, use \"STANDARD_NC6\"\n",
|
||||
" #vm_priority = 'lowpriority', # optional\n",
|
||||
" max_nodes = 6)\n",
|
||||
"\n",
|
||||
" # Create the cluster.\n",
|
||||
" compute_target = ComputeTarget.create(ws, amlcompute_cluster_name, provisioning_config)\n",
|
||||
" \n",
|
||||
" # Can poll for a minimum number of nodes and for a specific timeout.\n",
|
||||
" # If no min_node_count is provided, it will use the scale settings for the cluster.\n",
|
||||
" compute_target.wait_for_completion(show_output = True, min_node_count = None, timeout_in_minutes = 20)\n",
|
||||
" \n",
|
||||
" # For a more detailed view of current AmlCompute status, use get_status()."
|
||||
"Here you would learn how to perform Data labeling and use Open Datasets etc..\n",
|
||||
"To do this first load....\n",
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Data\n",
|
||||
"## Training\n",
|
||||
"\n",
|
||||
"Here load the data in the get_data() script to be utilized in azure compute. To do this first load all the necessary libraries and dependencies to set up paths for the data and to create the conda_Run_config."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"if not os.path.isdir('data'):\n",
|
||||
" os.mkdir('data')\n",
|
||||
" \n",
|
||||
"if not os.path.exists(project_folder):\n",
|
||||
" os.makedirs(project_folder)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from azureml.core.runconfig import RunConfiguration\n",
|
||||
"from azureml.core.conda_dependencies import CondaDependencies\n",
|
||||
"import pkg_resources\n",
|
||||
"\n",
|
||||
"# create a new RunConfig object\n",
|
||||
"conda_run_config = RunConfiguration(framework=\"python\")\n",
|
||||
"\n",
|
||||
"# Set compute target to AmlCompute\n",
|
||||
"conda_run_config.target = compute_target\n",
|
||||
"conda_run_config.environment.docker.enabled = True\n",
|
||||
"conda_run_config.environment.docker.base_image = azureml.core.runconfig.DEFAULT_CPU_IMAGE\n",
|
||||
"\n",
|
||||
"dprep_dependency = 'azureml-dataprep==' + pkg_resources.get_distribution(\"azureml-dataprep\").version\n",
|
||||
"\n",
|
||||
"cd = CondaDependencies.create(pip_packages=['azureml-sdk[automl]', dprep_dependency], conda_packages=['numpy','py-xgboost<=0.80'])\n",
|
||||
"conda_run_config.environment.python.conda_dependencies = cd"
|
||||
"Here you would learn how to train a DNN using...\n",
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Load Data\n",
|
||||
"# Productionizing\n",
|
||||
"\n",
|
||||
"Here we create the script to be run in azure comput for loading the data, we load the bank marketing dataset into X_train and y_train. Next X_train and y_train is returned for training the model."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"data = \"https://automlsamplenotebookdata.blob.core.windows.net/automl-sample-notebook-data/bankmarketing_train.csv\"\n",
|
||||
"dflow = dprep.read_csv(data, infer_column_types=True)\n",
|
||||
"dflow.get_profile()\n",
|
||||
"X_train = dflow.drop_columns(columns=['y'])\n",
|
||||
"y_train = dflow.keep_columns(columns=['y'], validate_column_exists=True)\n",
|
||||
"dflow.head()"
|
||||
"Here you would learn how to deploy your model to ACI to perform...\n",
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Train\n",
|
||||
"# Model Monitoring\n",
|
||||
"\n",
|
||||
"Instantiate a AutoMLConfig object. This defines the settings and data used to run the experiment.\n",
|
||||
"\n",
|
||||
"|Property|Description|\n",
|
||||
"|-|-|\n",
|
||||
"|**task**|classification or regression|\n",
|
||||
"|**primary_metric**|This is the metric that you want to optimize. Classification supports the following primary metrics: <br><i>accuracy</i><br><i>AUC_weighted</i><br><i>average_precision_score_weighted</i><br><i>norm_macro_recall</i><br><i>precision_score_weighted</i>|\n",
|
||||
"|**iteration_timeout_minutes**|Time limit in minutes for each iteration.|\n",
|
||||
"|**iterations**|Number of iterations. In each iteration AutoML trains a specific pipeline with the data.|\n",
|
||||
"|**n_cross_validations**|Number of cross validation splits.|\n",
|
||||
"|**X**|(sparse) array-like, shape = [n_samples, n_features]|\n",
|
||||
"|**y**|(sparse) array-like, shape = [n_samples, ], Multi-class targets.|\n",
|
||||
"|**path**|Relative path to the project folder. AutoML stores configuration files for the experiment under this folder. You can specify a new empty folder.|\n",
|
||||
"\n",
|
||||
"**_You can find more information about primary metrics_** [here](https://docs.microsoft.com/en-us/azure/machine-learning/service/how-to-configure-auto-train#primary-metric)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"automl_settings = {\n",
|
||||
" \"iteration_timeout_minutes\": 5,\n",
|
||||
" \"iterations\": 10,\n",
|
||||
" \"n_cross_validations\": 2,\n",
|
||||
" \"primary_metric\": 'AUC_weighted',\n",
|
||||
" \"preprocess\": True,\n",
|
||||
" \"max_concurrent_iterations\": 5,\n",
|
||||
" \"verbosity\": logging.INFO,\n",
|
||||
"}\n",
|
||||
"\n",
|
||||
"automl_config = AutoMLConfig(task = 'classification',\n",
|
||||
" debug_log = 'automl_errors.log',\n",
|
||||
" path = project_folder,\n",
|
||||
" run_configuration=conda_run_config,\n",
|
||||
" X = X_train,\n",
|
||||
" y = y_train,\n",
|
||||
" **automl_settings\n",
|
||||
" )"
|
||||
"Here you would learn how to detect datadrift etc...\n",
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Call the `submit` method on the experiment object and pass the run configuration. Execution of local runs is synchronous. Depending on the data and the number of iterations this can run for a while.\n",
|
||||
"In this example, we specify `show_output = True` to print currently running iterations to the console."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"remote_run = experiment.submit(automl_config, show_output = True)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"remote_run"
|
||||
"# Clean up resources\n",
|
||||
"\n",
|
||||
"Now, let's clean up the resources we created...\n",
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Results"
|
||||
"# Next Steps\n",
|
||||
"\n",
|
||||
"In this notebook, you’ve done x, y, z. You can learn more with these resources:\n",
|
||||
"+ [SDK reference documentation for `MyClass`]()\n",
|
||||
"+ [About this feature](https://docs.microsoft.com/azure/machine-learning/service/thisfeature)\n",
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"#### Widget for Monitoring Runs\n",
|
||||
"\n",
|
||||
"The widget will first report a \"loading\" status while running the first iteration. After completing the first iteration, an auto-updating graph and table will be shown. The widget will refresh once per minute, so you should see the graph update as child runs complete.\n",
|
||||
"\n",
|
||||
"**Note:** The widget displays a link at the bottom. Use this link to open a web interface to explore the individual run details"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from azureml.widgets import RunDetails\n",
|
||||
"RunDetails(remote_run).show() "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Deploy\n",
|
||||
"\n",
|
||||
"### Retrieve the Best Model\n",
|
||||
"\n",
|
||||
"Below we select the best pipeline from our iterations. The `get_output` method on `automl_classifier` returns the best run and the fitted model for the last invocation. Overloads on `get_output` allow you to retrieve the best run and fitted model for *any* logged metric or for a particular *iteration*."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"best_run, fitted_model = remote_run.get_output()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Register the Fitted Model for Deployment\n",
|
||||
"If neither `metric` nor `iteration` are specified in the `register_model` call, the iteration with the best primary metric is registered."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"description = 'AutoML Model trained on bank marketing data to predict if a client will subscribe to a term deposit'\n",
|
||||
"tags = None\n",
|
||||
"model = remote_run.register_model(description = description, tags = tags)\n",
|
||||
"\n",
|
||||
"print(remote_run.model_id) # This will be written to the script file later in the notebook."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Create Scoring Script\n",
|
||||
"The scoring script is required to generate the image for deployment. It contains the code to do the predictions on input data."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%%writefile score.py\n",
|
||||
"import pickle\n",
|
||||
"import json\n",
|
||||
"import numpy\n",
|
||||
"import azureml.train.automl\n",
|
||||
"from sklearn.externals import joblib\n",
|
||||
"from azureml.core.model import Model\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"def init():\n",
|
||||
" global model\n",
|
||||
" model_path = Model.get_model_path(model_name = '<<modelid>>') # this name is model.id of model that we want to deploy\n",
|
||||
" # deserialize the model file back into a sklearn model\n",
|
||||
" model = joblib.load(model_path)\n",
|
||||
"\n",
|
||||
"def run(rawdata):\n",
|
||||
" try:\n",
|
||||
" data = json.loads(rawdata)['data']\n",
|
||||
" data = numpy.array(data)\n",
|
||||
" result = model.predict(data)\n",
|
||||
" except Exception as e:\n",
|
||||
" result = str(e)\n",
|
||||
" return json.dumps({\"error\": result})\n",
|
||||
" return json.dumps({\"result\":result.tolist()})"
|
||||
]
|
||||
},
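For orientation, the run() function in score.py above expects a JSON payload with a "data" key holding a list of feature rows. A minimal sketch of building such a payload (the values are placeholders; real rows must match the training feature order and width):

import json

# Two illustrative rows only; inside the service this becomes
# numpy.array(json.loads(payload)['data']) and is passed to model.predict(...).
payload = json.dumps({"data": [[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]]})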
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Create a YAML File for the Environment"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"To ensure the fit results are consistent with the training results, the SDK dependency versions need to be the same as the environment that trains the model. Details about retrieving the versions can be found in notebook [12.auto-ml-retrieve-the-training-sdk-versions](12.auto-ml-retrieve-the-training-sdk-versions.ipynb)."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"dependencies = remote_run.get_run_sdk_dependencies(iteration = 1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"for p in ['azureml-train-automl', 'azureml-sdk', 'azureml-core']:\n",
|
||||
" print('{}\\t{}'.format(p, dependencies[p]))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from azureml.core.conda_dependencies import CondaDependencies\n",
|
||||
"\n",
|
||||
"myenv = CondaDependencies.create(conda_packages=['numpy','scikit-learn','py-xgboost<=0.80'],\n",
|
||||
" pip_packages=['azureml-sdk[automl]'])\n",
|
||||
"\n",
|
||||
"conda_env_file_name = 'myenv.yml'\n",
|
||||
"myenv.save_to_file('.', conda_env_file_name)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Substitute the actual version number in the environment file.\n",
|
||||
"# This is not strictly needed in this notebook because the model should have been generated using the current SDK version.\n",
|
||||
"# However, we include this in case this code is used on an experiment from a previous SDK version.\n",
|
||||
"\n",
|
||||
"with open(conda_env_file_name, 'r') as cefr:\n",
|
||||
" content = cefr.read()\n",
|
||||
"\n",
|
||||
"with open(conda_env_file_name, 'w') as cefw:\n",
|
||||
" cefw.write(content.replace(azureml.core.VERSION, dependencies['azureml-sdk']))\n",
|
||||
"\n",
|
||||
"# Substitute the actual model id in the script file.\n",
|
||||
"\n",
|
||||
"script_file_name = 'score.py'\n",
|
||||
"\n",
|
||||
"with open(script_file_name, 'r') as cefr:\n",
|
||||
" content = cefr.read()\n",
|
||||
"\n",
|
||||
"with open(script_file_name, 'w') as cefw:\n",
|
||||
" cefw.write(content.replace('<<modelid>>', remote_run.model_id))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Create a Container Image\n",
|
||||
"\n",
|
||||
"Next use Azure Container Instances for deploying models as a web service for quickly deploying and validating your model\n",
|
||||
"or when testing a model that is under development."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from azureml.core.image import Image, ContainerImage\n",
|
||||
"\n",
|
||||
"image_config = ContainerImage.image_configuration(runtime= \"python\",\n",
|
||||
" execution_script = script_file_name,\n",
|
||||
" conda_file = conda_env_file_name,\n",
|
||||
" tags = {'area': \"bmData\", 'type': \"automl_classification\"},\n",
|
||||
" description = \"Image for automl classification sample\")\n",
|
||||
"\n",
|
||||
"image = Image.create(name = \"automlsampleimage\",\n",
|
||||
" # this is the model object \n",
|
||||
" models = [model],\n",
|
||||
" image_config = image_config, \n",
|
||||
" workspace = ws)\n",
|
||||
"\n",
|
||||
"image.wait_for_creation(show_output = True)\n",
|
||||
"\n",
|
||||
"if image.creation_state == 'Failed':\n",
|
||||
" print(\"Image build log at: \" + image.image_build_log_uri)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Deploy the Image as a Web Service on Azure Container Instance\n",
|
||||
"\n",
|
||||
"Deploy an image that contains the model and other assets needed by the service."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from azureml.core.webservice import AciWebservice\n",
|
||||
"\n",
|
||||
"aciconfig = AciWebservice.deploy_configuration(cpu_cores = 1, \n",
|
||||
" memory_gb = 1, \n",
|
||||
" tags = {'area': \"bmData\", 'type': \"automl_classification\"}, \n",
|
||||
" description = 'sample service for Automl Classification')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from azureml.core.webservice import Webservice\n",
|
||||
"\n",
|
||||
"aci_service_name = 'automl-sample-bankmarketing'\n",
|
||||
"print(aci_service_name)\n",
|
||||
"aci_service = Webservice.deploy_from_image(deployment_config = aciconfig,\n",
|
||||
" image = image,\n",
|
||||
" name = aci_service_name,\n",
|
||||
" workspace = ws)\n",
|
||||
"aci_service.wait_for_deployment(True)\n",
|
||||
"print(aci_service.state)"
|
||||
]
|
||||
},
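Once the service reports a healthy state, one way to smoke-test it from the SDK is sketched below; this assumes the pandas X_test built in the Test section later in this notebook, and relies on Webservice.run forwarding the payload to the scoring endpoint:

import json

# Send two validation rows to the deployed service; the response is whatever
# run() in score.py returns (a JSON string with either "result" or "error").
test_payload = json.dumps({"data": X_test[:2].values.tolist()})
print(aci_service.run(input_data=test_payload))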
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Delete a Web Service\n",
|
||||
"\n",
|
||||
"Deletes the specified web service."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"#aci_service.delete()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Get Logs from a Deployed Web Service\n",
|
||||
"\n",
|
||||
"Gets logs from a deployed web service."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"#aci_service.get_logs()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Test\n",
|
||||
"\n",
|
||||
"Now that the model is trained split our data in the same way the data was split for training (The difference here is the data is being split locally) and then run the test data through the trained model to get the predicted values."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Load the bank marketing datasets.\n",
|
||||
"from sklearn.datasets import load_diabetes\n",
|
||||
"from sklearn.model_selection import train_test_split\n",
|
||||
"from numpy import array"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"data = \"https://automlsamplenotebookdata.blob.core.windows.net/automl-sample-notebook-data/bankmarketing_validate.csv\"\n",
|
||||
"dflow = dprep.read_csv(data, infer_column_types=True)\n",
|
||||
"dflow.get_profile()\n",
|
||||
"X_test = dflow.drop_columns(columns=['y'])\n",
|
||||
"y_test = dflow.keep_columns(columns=['y'], validate_column_exists=True)\n",
|
||||
"dflow.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_test = X_test.to_pandas_dataframe()\n",
|
||||
"y_test = y_test.to_pandas_dataframe()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_pred = fitted_model.predict(X_test)\n",
|
||||
"actual = array(y_test)\n",
|
||||
"actual = actual[:,0]\n",
|
||||
"print(y_pred.shape, \" \", actual.shape)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Calculate metrics for the prediction\n",
|
||||
"\n",
|
||||
"Now visualize the data on a scatter plot to show what our truth (actual) values are compared to the predicted values \n",
|
||||
"from the trained model that was returned."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%matplotlib notebook\n",
|
||||
"test_pred = plt.scatter(actual, y_pred, color='b')\n",
|
||||
"test_test = plt.scatter(actual, actual, color='g')\n",
|
||||
"plt.legend((test_pred, test_test), ('prediction', 'truth'), loc='upper left', fontsize=8)\n",
|
||||
"plt.show()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Acknowledgements"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"This Bank Marketing dataset is made available under the Creative Commons (CCO: Public Domain) License: https://creativecommons.org/publicdomain/zero/1.0/. Any rights in individual contents of the database are licensed under the Database Contents License: https://creativecommons.org/publicdomain/zero/1.0/ and is available at: https://www.kaggle.com/janiobachmann/bank-marketing-dataset .\n",
|
||||
"This dataset is made available under the Creative Commons (CCO: Public Domain) License: https://creativecommons.org/publicdomain/zero/1.0/. Any rights in individual contents of the database are licensed under the Database Contents License: https://creativecommons.org/publicdomain/zero/1.0/ and is available at: https://www.kaggle.com/janiobachmann/bank-marketing-dataset .\n",
|
||||
"\n",
|
||||
"_**Acknowledgements**_\n",
|
||||
"This dataset is originally available within the UCI Machine Learning Database: https://archive.ics.uci.edu/ml/datasets/bank+marketing\n",
|
||||
@@ -705,9 +171,32 @@
|
||||
"metadata": {
|
||||
"authors": [
|
||||
{
|
||||
"name": "v-rasav"
|
||||
"name": "YOUR ALIAS"
|
||||
}
|
||||
],
|
||||
"category": "tutorial",
|
||||
"compute": [
|
||||
"AML Compute"
|
||||
],
|
||||
"datasets": [
|
||||
"MNIST"
|
||||
],
|
||||
"deployment": [
|
||||
"AKS"
|
||||
],
|
||||
"exclude_from_index": false,
|
||||
"framework": [
|
||||
"PyTorch"
|
||||
],
|
||||
"friendly_name": "How to use ModuleStep with AML Pipelines",
|
||||
},
|
||||
"order_index": 14,
|
||||
"star_tag": [],
|
||||
"tags": [
|
||||
"Pipeline Builder"
|
||||
],
|
||||
"task": "Demonstrates the use of ModuleStep"
|
||||
},
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3.6",
|
||||
"language": "python",
|
||||
@@ -729,3 +218,7 @@
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -41,6 +41,8 @@
|
||||
"\n",
|
||||
"Make sure you have executed the [configuration](../../../configuration.ipynb) before running this notebook.\n",
|
||||
"\n",
|
||||
"An Enterprise workspace is required for this notebook. To learn more about creating an Enterprise workspace or upgrading to an Enterprise workspace from the Azure portal, please visit our [Workspace page](https://docs.microsoft.com/en-us/azure/machine-learning/service/concept-workspace#upgrade).\n",
|
||||
|
||||
"In this notebook you will learn how to:\n",
|
||||
"1. Create an experiment using an existing workspace.\n",
|
||||
"2. Configure AutoML using `AutoMLConfig`.\n",
|
||||
@@ -61,61 +63,13 @@
|
||||
"As part of the setup you have already created an Azure ML `Workspace` object. For AutoML you will need to create an `Experiment` object, which is a named object in a `Workspace` used to run experiments."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import json\n",
|
||||
"import logging\n",
|
||||
"\n",
|
||||
"from matplotlib import pyplot as plt\n",
|
||||
"import numpy as np\n",
|
||||
"import pandas as pd\n",
|
||||
"from sklearn import datasets\n",
|
||||
"\n",
|
||||
"import azureml.core\n",
|
||||
"from azureml.core.experiment import Experiment\n",
|
||||
"from azureml.core.workspace import Workspace\n",
|
||||
"from azureml.train.automl import AutoMLConfig\n",
|
||||
"from azureml.train.automl.run import AutoMLRun"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"ws = Workspace.from_config()\n",
|
||||
"\n",
|
||||
"# choose a name for experiment\n",
|
||||
"experiment_name = 'automl-classification-deployment'\n",
|
||||
"# project folder\n",
|
||||
"project_folder = './sample_projects/automl-classification-deployment'\n",
|
||||
"\n",
|
||||
"experiment=Experiment(ws, experiment_name)\n",
|
||||
"\n",
|
||||
"output = {}\n",
|
||||
"output['SDK version'] = azureml.core.VERSION\n",
|
||||
"output['Subscription ID'] = ws.subscription_id\n",
|
||||
"output['Workspace'] = ws.name\n",
|
||||
"output['Resource Group'] = ws.resource_group\n",
|
||||
"output['Location'] = ws.location\n",
|
||||
"output['Project Directory'] = project_folder\n",
|
||||
"output['Experiment Name'] = experiment.name\n",
|
||||
"pd.set_option('display.max_colwidth', -1)\n",
|
||||
"outputDf = pd.DataFrame(data = output, index = [''])\n",
|
||||
"outputDf.T"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Train\n",
|
||||
"\n",
|
||||
"The following steps require an Enterprise workspace to gain access to these features.To learn more about creating an Enterprise workspace or upgrading to an Enterprise workspace from the Azure portal, please visit our [Workspace page](https://docs.microsoft.com/en-us/azure/machine-learning/service/concept-workspace#upgrade).\n",
|
||||
"Instantiate a AutoMLConfig object. This defines the settings and data used to run the experiment.\n",
|
||||
"\n",
|
||||
"|Property|Description|\n",
|
||||
@@ -484,7 +438,7 @@
|
||||
"metadata": {
|
||||
"authors": [
|
||||
{
|
||||
"name": "savitam"
|
||||
"name": "shwinne"
|
||||
}
|
||||
],
|
||||
"kernelspec": {
|
||||
|
||||
@@ -479,7 +479,27 @@
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.6.6"
|
||||
}
|
||||
},
|
||||
"friendly_name": "Testing index",
|
||||
"exclude_from_index": false,
|
||||
"order_index": 1,
|
||||
"category": "tutorial",
|
||||
"tags": [
|
||||
"featured"
|
||||
],
|
||||
"task": "Regression",
|
||||
"datasets": [
|
||||
"NYC Taxi"
|
||||
],
|
||||
"compute": [
|
||||
"local"
|
||||
],
|
||||
"deployment": [
|
||||
"None"
|
||||
],
|
||||
"framework": [
|
||||
"Azure ML AutoML"
|
||||
]
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
|
||||
@@ -470,7 +470,27 @@
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.6.6"
|
||||
}
|
||||
},
|
||||
"friendly_name": "Prepare data for regression modeling",
|
||||
"exclude_from_index": false,
|
||||
"order_index": 1,
|
||||
"category": "deployment",
|
||||
"tags": [
|
||||
"featured"
|
||||
],
|
||||
"task": "Regression",
|
||||
"datasets": [
|
||||
"test"
|
||||
],
|
||||
"compute": [
|
||||
"localtest"
|
||||
],
|
||||
"deployment": [
|
||||
"AKS"
|
||||
],
|
||||
"framework": [
|
||||
"test1"
|
||||
]
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
|
||||
@@ -645,7 +645,26 @@
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.6.7"
|
||||
}
|
||||
},
|
||||
"friendly_name": "Pipeline test",
|
||||
"exclude_from_index": false,
|
||||
"order_index": 1,
|
||||
"category": "training",
|
||||
"tags": [
|
||||
],
|
||||
"task": "Regression",
|
||||
"datasets": [
|
||||
"NYC Taxi"
|
||||
],
|
||||
"compute": [
|
||||
"local"
|
||||
],
|
||||
"deployment": [
|
||||
"None"
|
||||
],
|
||||
"framework": [
|
||||
"Azure ML AutoML"
|
||||
]
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
|
||||
130
index.md
Normal file
@@ -0,0 +1,130 @@
|
||||
|
||||
# Index
|
||||
Azure Machine Learning is a cloud service that you use to train, deploy, automate, and manage machine learning models. This index should assist in navigating the Azure Machine Learning notebook samples and encourage efficient retrieval of topics and content.
|
||||
|
||||
|
||||
## Getting Started
|
||||
|
||||
|Title| Task | Dataset | Training Compute | Deployment Target | ML Framework |
|
||||
|:----|:-----|:-------:|:----------------:|:-----------------:|:------------:|
|
||||
|
||||
|
||||
## Tutorials
|
||||
|
||||
|Title| Task | Dataset | Training Compute | Deployment Target | ML Framework |
|
||||
|:----|:-----|:-------:|:----------------:|:-----------------:|:------------:|
|
||||
|
||||
|
||||
## Training
|
||||
|
||||
|Title| Task | Dataset | Training Compute | Deployment Target | ML Framework |
|
||||
|:----|:-----|:-------:|:----------------:|:-----------------:|:------------:|
|
||||
| [Pipeline test](https://github.com/Azure/MachineLearningNotebooks/blob/master//machine-learning-pipelines/pipeline-style-transfer/pipeline-style-transfer.ipynb) | Regression | NYC Taxi | local | None | Azure ML AutoML |
|
||||
|
||||
## Deployment
|
||||
|
||||
|Title| Task | Dataset | Training Compute | Deployment Target | ML Framework |
|
||||
|:----|:-----|:-------:|:----------------:|:-----------------:|:------------:|
|
||||
| :star:[Prepare data for regression modeling](https://github.com/Azure/MachineLearningNotebooks/blob/master//deployment/production-deploy-to-aks/production-deploy-to-aks.ipynb) | Regression | test | localtest | AKS | test1 |
|
||||
|
||||
## Other Notebooks
|
||||
|Title| Task | Dataset | Training Compute | Deployment Target | ML Framework |
|
||||
|:----|:-----|:-------:|:----------------:|:-----------------:|:------------:|
|
||||
| [auto-ml-classification-bank-marketing](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/classification-bank-marketing/auto-ml-classification-bank-marketing.ipynb) | | | | | |
|
||||
| [auto-ml-classification-credit-card-fraud](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/classification-credit-card-fraud/auto-ml-classification-credit-card-fraud.ipynb) | | | | | |
|
||||
| [auto-ml-classification-with-deployment](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/classification-with-deployment/auto-ml-classification-with-deployment.ipynb) | | | | | |
|
||||
| [auto-ml-classification-with-onnx](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/classification-with-onnx/auto-ml-classification-with-onnx.ipynb) | | | | | |
|
||||
| [auto-ml-classification-with-whitelisting](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/classification-with-whitelisting/auto-ml-classification-with-whitelisting.ipynb) | | | | | |
|
||||
| [auto-ml-dataprep](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/dataprep/auto-ml-dataprep.ipynb) | | | | | |
|
||||
| [auto-ml-dataprep-remote-execution](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/dataprep-remote-execution/auto-ml-dataprep-remote-execution.ipynb) | | | | | |
|
||||
| [auto-ml-exploring-previous-runs](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/exploring-previous-runs/auto-ml-exploring-previous-runs.ipynb) | | | | | |
|
||||
| [auto-ml-forecasting-bike-share](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/forecasting-bike-share/auto-ml-forecasting-bike-share.ipynb) | | | | | |
|
||||
| [auto-ml-forecasting-energy-demand](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/forecasting-energy-demand/auto-ml-forecasting-energy-demand.ipynb) | | | | | |
|
||||
| [auto-ml-forecasting-orange-juice-sales](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/forecasting-orange-juice-sales/auto-ml-forecasting-orange-juice-sales.ipynb) | | | | | |
|
||||
| [auto-ml-missing-data-blacklist-early-termination](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/missing-data-blacklist-early-termination/auto-ml-missing-data-blacklist-early-termination.ipynb) | | | | | |
|
||||
| [auto-ml-model-explanation](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/model-explanation/auto-ml-model-explanation.ipynb) | | | | | |
|
||||
| [auto-ml-regression](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/regression/auto-ml-regression.ipynb) | | | | | |
|
||||
| [auto-ml-regression-concrete-strength](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/regression-concrete-strength/auto-ml-regression-concrete-strength.ipynb) | | | | | |
|
||||
| [auto-ml-regression-hardware-performance](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/regression-hardware-performance/auto-ml-regression-hardware-performance.ipynb) | | | | | |
|
||||
| [auto-ml-remote-amlcompute](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/remote-amlcompute/auto-ml-remote-amlcompute.ipynb) | | | | | |
|
||||
| [auto-ml-remote-amlcompute-with-onnx](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/remote-amlcompute-with-onnx/auto-ml-remote-amlcompute-with-onnx.ipynb) | | | | | |
|
||||
| [auto-ml-sample-weight](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/sample-weight/auto-ml-sample-weight.ipynb) | | | | | |
|
||||
| [auto-ml-sparse-data-train-test-split](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/sparse-data-train-test-split/auto-ml-sparse-data-train-test-split.ipynb) | | | | | |
|
||||
| [auto-ml-sql-energy-demand](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/sql-server/energy-demand/auto-ml-sql-energy-demand.ipynb) | | | | | |
|
||||
| [auto-ml-sql-setup](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/sql-server/setup/auto-ml-sql-setup.ipynb) | | | | | |
|
||||
| [auto-ml-subsampling-local](https://github.com/Azure/MachineLearningNotebooks/blob/master//automated-machine-learning/subsampling/auto-ml-subsampling-local.ipynb) | | | | | |
|
||||
| [build-model-run-history-03](https://github.com/Azure/MachineLearningNotebooks/blob/master//azure-databricks/amlsdk/build-model-run-history-03.ipynb) | | | | | |
|
||||
| [deploy-to-aci-04](https://github.com/Azure/MachineLearningNotebooks/blob/master//azure-databricks/amlsdk/deploy-to-aci-04.ipynb) | | | | | |
|
||||
| [deploy-to-aks-existingimage-05](https://github.com/Azure/MachineLearningNotebooks/blob/master//azure-databricks/amlsdk/deploy-to-aks-existingimage-05.ipynb) | | | | | |
|
||||
| [ingest-data-02](https://github.com/Azure/MachineLearningNotebooks/blob/master//azure-databricks/amlsdk/ingest-data-02.ipynb) | | | | | |
|
||||
| [installation-and-configuration-01](https://github.com/Azure/MachineLearningNotebooks/blob/master//azure-databricks/amlsdk/installation-and-configuration-01.ipynb) | | | | | |
|
||||
| [automl-databricks-local-01](https://github.com/Azure/MachineLearningNotebooks/blob/master//azure-databricks/automl/automl-databricks-local-01.ipynb) | | | | | |
|
||||
| [automl-databricks-local-with-deployment](https://github.com/Azure/MachineLearningNotebooks/blob/master//azure-databricks/automl/automl-databricks-local-with-deployment.ipynb) | | | | | |
|
||||
| [aml-pipelines-use-databricks-as-compute-target](https://github.com/Azure/MachineLearningNotebooks/blob/master//azure-databricks/databricks-as-remote-compute-target/aml-pipelines-use-databricks-as-compute-target.ipynb) | | | | | |
|
||||
| [automl_hdi_local_classification](https://github.com/Azure/MachineLearningNotebooks/blob/master//azure-hdi/automl_hdi_local_classification.ipynb) | | | | | |
|
||||
| [model-register-and-deploy](https://github.com/Azure/MachineLearningNotebooks/blob/master//deploy-to-cloud/model-register-and-deploy.ipynb) | | | | | |
|
||||
| [register-model-deploy-local-advanced](https://github.com/Azure/MachineLearningNotebooks/blob/master//deploy-to-local/register-model-deploy-local-advanced.ipynb) | | | | | |
|
||||
| [register-model-deploy-local](https://github.com/Azure/MachineLearningNotebooks/blob/master//deploy-to-local/register-model-deploy-local.ipynb) | | | | | |
|
||||
| [accelerated-models-object-detection](https://github.com/Azure/MachineLearningNotebooks/blob/master//deployment/accelerated-models/accelerated-models-object-detection.ipynb) | | | | | |
|
||||
| [accelerated-models-quickstart](https://github.com/Azure/MachineLearningNotebooks/blob/master//deployment/accelerated-models/accelerated-models-quickstart.ipynb) | | | | | |
|
||||
| [accelerated-models-training](https://github.com/Azure/MachineLearningNotebooks/blob/master//deployment/accelerated-models/accelerated-models-training.ipynb) | | | | | |
|
||||
| [enable-app-insights-in-production-service](https://github.com/Azure/MachineLearningNotebooks/blob/master//deployment/enable-app-insights-in-production-service/enable-app-insights-in-production-service.ipynb) | | | | | |
|
||||
| [enable-data-collection-for-models-in-aks](https://github.com/Azure/MachineLearningNotebooks/blob/master//deployment/enable-data-collection-for-models-in-aks/enable-data-collection-for-models-in-aks.ipynb) | | | | | |
|
||||
| [onnx-convert-aml-deploy-tinyyolo](https://github.com/Azure/MachineLearningNotebooks/blob/master//deployment/onnx/onnx-convert-aml-deploy-tinyyolo.ipynb) | | | | | |
|
||||
| [onnx-inference-facial-expression-recognition-deploy](https://github.com/Azure/MachineLearningNotebooks/blob/master//deployment/onnx/onnx-inference-facial-expression-recognition-deploy.ipynb) | | | | | |
|
||||
| [onnx-inference-mnist-deploy](https://github.com/Azure/MachineLearningNotebooks/blob/master//deployment/onnx/onnx-inference-mnist-deploy.ipynb) | | | | | |
|
||||
| [onnx-modelzoo-aml-deploy-resnet50](https://github.com/Azure/MachineLearningNotebooks/blob/master//deployment/onnx/onnx-modelzoo-aml-deploy-resnet50.ipynb) | | | | | |
|
||||
| [onnx-train-pytorch-aml-deploy-mnist](https://github.com/Azure/MachineLearningNotebooks/blob/master//deployment/onnx/onnx-train-pytorch-aml-deploy-mnist.ipynb) | | | | | |
|
||||
| [production-deploy-to-aks-gpu](https://github.com/Azure/MachineLearningNotebooks/blob/master//deployment/production-deploy-to-aks-gpu/production-deploy-to-aks-gpu.ipynb) | | | | | |
|
||||
| [register-model-create-image-deploy-service](https://github.com/Azure/MachineLearningNotebooks/blob/master//deployment/register-model-create-image-deploy-service/register-model-create-image-deploy-service.ipynb) | | | | | |
|
||||
| [save-retrieve-explanations-run-history](https://github.com/Azure/MachineLearningNotebooks/blob/master//explain-model/azure-integration/run-history/save-retrieve-explanations-run-history.ipynb) | | | | | |
|
||||
| [train-explain-model-locally-and-deploy](https://github.com/Azure/MachineLearningNotebooks/blob/master//explain-model/azure-integration/scoring-time/train-explain-model-locally-and-deploy.ipynb) | | | | | |
|
||||
| [train-explain-model-on-amlcompute-and-deploy](https://github.com/Azure/MachineLearningNotebooks/blob/master//explain-model/azure-integration/scoring-time/train-explain-model-on-amlcompute-and-deploy.ipynb) | | | | | |
|
||||
| [advanced-feature-transformations-explain-local](https://github.com/Azure/MachineLearningNotebooks/blob/master//explain-model/tabular-data/advanced-feature-transformations-explain-local.ipynb) | | | | | |
|
||||
| [explain-binary-classification-local](https://github.com/Azure/MachineLearningNotebooks/blob/master//explain-model/tabular-data/explain-binary-classification-local.ipynb) | | | | | |
|
||||
| [explain-multiclass-classification-local](https://github.com/Azure/MachineLearningNotebooks/blob/master//explain-model/tabular-data/explain-multiclass-classification-local.ipynb) | | | | | |
|
||||
| [explain-regression-local](https://github.com/Azure/MachineLearningNotebooks/blob/master//explain-model/tabular-data/explain-regression-local.ipynb) | | | | | |
|
||||
| [simple-feature-transformations-explain-local](https://github.com/Azure/MachineLearningNotebooks/blob/master//explain-model/tabular-data/simple-feature-transformations-explain-local.ipynb) | | | | | |
|
||||
| [aml-pipelines-data-transfer](https://github.com/Azure/MachineLearningNotebooks/blob/master//machine-learning-pipelines/intro-to-pipelines/aml-pipelines-data-transfer.ipynb) | | | | | |
|
||||
| [aml-pipelines-getting-started](https://github.com/Azure/MachineLearningNotebooks/blob/master//machine-learning-pipelines/intro-to-pipelines/aml-pipelines-getting-started.ipynb) | | | | | |
|
||||
| [aml-pipelines-how-to-use-azurebatch-to-run-a-windows-executable](https://github.com/Azure/MachineLearningNotebooks/blob/master//machine-learning-pipelines/intro-to-pipelines/aml-pipelines-how-to-use-azurebatch-to-run-a-windows-executable.ipynb) | | | | | |
|
||||
| [aml-pipelines-how-to-use-estimatorstep](https://github.com/Azure/MachineLearningNotebooks/blob/master//machine-learning-pipelines/intro-to-pipelines/aml-pipelines-how-to-use-estimatorstep.ipynb) | | | | | |
|
||||
| [aml-pipelines-parameter-tuning-with-hyperdrive](https://github.com/Azure/MachineLearningNotebooks/blob/master//machine-learning-pipelines/intro-to-pipelines/aml-pipelines-parameter-tuning-with-hyperdrive.ipynb) | | | | | |
|
||||
| [aml-pipelines-publish-and-run-using-rest-endpoint](https://github.com/Azure/MachineLearningNotebooks/blob/master//machine-learning-pipelines/intro-to-pipelines/aml-pipelines-publish-and-run-using-rest-endpoint.ipynb) | | | | | |
|
||||
| [aml-pipelines-setup-schedule-for-a-published-pipeline](https://github.com/Azure/MachineLearningNotebooks/blob/master//machine-learning-pipelines/intro-to-pipelines/aml-pipelines-setup-schedule-for-a-published-pipeline.ipynb) | | | | | |
|
||||
| [aml-pipelines-setup-versioned-pipeline-endpoints](https://github.com/Azure/MachineLearningNotebooks/blob/master//machine-learning-pipelines/intro-to-pipelines/aml-pipelines-setup-versioned-pipeline-endpoints.ipynb) | | | | | |
|
||||
| [aml-pipelines-use-adla-as-compute-target](https://github.com/Azure/MachineLearningNotebooks/blob/master//machine-learning-pipelines/intro-to-pipelines/aml-pipelines-use-adla-as-compute-target.ipynb) | | | | | |
|
||||
| [aml-pipelines-use-databricks-as-compute-target](https://github.com/Azure/MachineLearningNotebooks/blob/master//machine-learning-pipelines/intro-to-pipelines/aml-pipelines-use-databricks-as-compute-target.ipynb) | | | | | |
|
||||
| [aml-pipelines-with-automated-machine-learning-step](https://github.com/Azure/MachineLearningNotebooks/blob/master//machine-learning-pipelines/intro-to-pipelines/aml-pipelines-with-automated-machine-learning-step.ipynb) | | | | | |
|
||||
| [aml-pipelines-with-data-dependency-steps](https://github.com/Azure/MachineLearningNotebooks/blob/master//machine-learning-pipelines/intro-to-pipelines/aml-pipelines-with-data-dependency-steps.ipynb) | | | | | |
|
||||
| [nyc-taxi-data-regression-model-building](https://github.com/Azure/MachineLearningNotebooks/blob/master//machine-learning-pipelines/nyc-taxi-data-regression-model-building/nyc-taxi-data-regression-model-building.ipynb) | | | | | |
|
||||
| [pipeline-batch-scoring](https://github.com/Azure/MachineLearningNotebooks/blob/master//machine-learning-pipelines/pipeline-batch-scoring/pipeline-batch-scoring.ipynb) | | | | | |
|
||||
| [authentication-in-azureml](https://github.com/Azure/MachineLearningNotebooks/blob/master//manage-azureml-service/authentication-in-azureml/authentication-in-azureml.ipynb) | | | | | |
|
||||
| [azure-ml-datadrift](https://github.com/Azure/MachineLearningNotebooks/blob/master//monitor-models/data-drift/azure-ml-datadrift.ipynb) | | | | | |
|
||||
| [logging-api](https://github.com/Azure/MachineLearningNotebooks/blob/master//training/logging-api/logging-api.ipynb) | | | | | |
|
||||
| [manage-runs](https://github.com/Azure/MachineLearningNotebooks/blob/master//training/manage-runs/manage-runs.ipynb) | | | | | |
|
||||
| [train-hyperparameter-tune-deploy-with-sklearn](https://github.com/Azure/MachineLearningNotebooks/blob/master//training/train-hyperparameter-tune-deploy-with-sklearn/train-hyperparameter-tune-deploy-with-sklearn.ipynb) | | | | | |
|
||||
| [train-in-spark](https://github.com/Azure/MachineLearningNotebooks/blob/master//training/train-in-spark/train-in-spark.ipynb) | | | | | |
|
||||
| [train-on-amlcompute](https://github.com/Azure/MachineLearningNotebooks/blob/master//training/train-on-amlcompute/train-on-amlcompute.ipynb) | | | | | |
|
||||
| [train-on-local](https://github.com/Azure/MachineLearningNotebooks/blob/master//training/train-on-local/train-on-local.ipynb) | | | | | |
|
||||
| [train-on-remote-vm](https://github.com/Azure/MachineLearningNotebooks/blob/master//training/train-on-remote-vm/train-on-remote-vm.ipynb) | | | | | |
|
||||
| [train-within-notebook](https://github.com/Azure/MachineLearningNotebooks/blob/master//training/train-within-notebook/train-within-notebook.ipynb) | | | | | |
|
||||
| [using-environments](https://github.com/Azure/MachineLearningNotebooks/blob/master//training/using-environments/using-environments.ipynb) | | | | | |
|
||||
| [distributed-chainer](https://github.com/Azure/MachineLearningNotebooks/blob/master//training-with-deep-learning/distributed-chainer/distributed-chainer.ipynb) | | | | | |
|
||||
| [distributed-cntk-with-custom-docker](https://github.com/Azure/MachineLearningNotebooks/blob/master//training-with-deep-learning/distributed-cntk-with-custom-docker/distributed-cntk-with-custom-docker.ipynb) | | | | | |
|
||||
| [distributed-pytorch-with-horovod](https://github.com/Azure/MachineLearningNotebooks/blob/master//training-with-deep-learning/distributed-pytorch-with-horovod/distributed-pytorch-with-horovod.ipynb) | | | | | |
|
||||
| [distributed-tensorflow-with-horovod](https://github.com/Azure/MachineLearningNotebooks/blob/master//training-with-deep-learning/distributed-tensorflow-with-horovod/distributed-tensorflow-with-horovod.ipynb) | | | | | |
|
||||
| [distributed-tensorflow-with-parameter-server](https://github.com/Azure/MachineLearningNotebooks/blob/master//training-with-deep-learning/distributed-tensorflow-with-parameter-server/distributed-tensorflow-with-parameter-server.ipynb) | | | | | |
|
||||
| [export-run-history-to-tensorboard](https://github.com/Azure/MachineLearningNotebooks/blob/master//training-with-deep-learning/export-run-history-to-tensorboard/export-run-history-to-tensorboard.ipynb) | | | | | |
|
||||
| [how-to-use-estimator](https://github.com/Azure/MachineLearningNotebooks/blob/master//training-with-deep-learning/how-to-use-estimator/how-to-use-estimator.ipynb) | | | | | |
|
||||
| [notebook_example](https://github.com/Azure/MachineLearningNotebooks/blob/master//training-with-deep-learning/how-to-use-estimator/notebook_example.ipynb) | | | | | |
|
||||
| [tensorboard](https://github.com/Azure/MachineLearningNotebooks/blob/master//training-with-deep-learning/tensorboard/tensorboard.ipynb) | | | | | |
|
||||
| [train-hyperparameter-tune-deploy-with-chainer](https://github.com/Azure/MachineLearningNotebooks/blob/master//training-with-deep-learning/train-hyperparameter-tune-deploy-with-chainer/train-hyperparameter-tune-deploy-with-chainer.ipynb) | | | | | |
|
||||
| [train-hyperparameter-tune-deploy-with-keras](https://github.com/Azure/MachineLearningNotebooks/blob/master//training-with-deep-learning/train-hyperparameter-tune-deploy-with-keras/train-hyperparameter-tune-deploy-with-keras.ipynb) | | | | | |
|
||||
| [train-hyperparameter-tune-deploy-with-pytorch](https://github.com/Azure/MachineLearningNotebooks/blob/master//training-with-deep-learning/train-hyperparameter-tune-deploy-with-pytorch/train-hyperparameter-tune-deploy-with-pytorch.ipynb) | | | | | |
|
||||
| [train-hyperparameter-tune-deploy-with-tensorflow](https://github.com/Azure/MachineLearningNotebooks/blob/master//training-with-deep-learning/train-hyperparameter-tune-deploy-with-tensorflow/train-hyperparameter-tune-deploy-with-tensorflow.ipynb) | | | | | |
|
||||
| [train-tensorflow-resume-training](https://github.com/Azure/MachineLearningNotebooks/blob/master//training-with-deep-learning/train-tensorflow-resume-training/train-tensorflow-resume-training.ipynb) | | | | | |
|
||||
| [deploy-model](https://github.com/Azure/MachineLearningNotebooks/blob/master//using-mlflow/deploy-model/deploy-model.ipynb) | | | | | |
|
||||
| [train-and-deploy-pytorch](https://github.com/Azure/MachineLearningNotebooks/blob/master//using-mlflow/train-deploy-pytorch/train-and-deploy-pytorch.ipynb) | | | | | |
|
||||
| [train-local](https://github.com/Azure/MachineLearningNotebooks/blob/master//using-mlflow/train-local/train-local.ipynb) | | | | | |
|
||||
| [train-remote](https://github.com/Azure/MachineLearningNotebooks/blob/master//using-mlflow/train-remote/train-remote.ipynb) | | | | | |
|
||||
190
index2.md
Normal file
190
index2.md
Normal file
@@ -0,0 +1,190 @@
# Index

Azure Machine Learning is a cloud service that you use to train, deploy, automate,
and manage machine learning models. This index should assist in navigating the Azure
Machine Learning notebook samples and encourage efficient retrieval of topics and content.

![Impressions](https://PixelServer20190423114238.azurewebsites.net/api/impressions/MachineLearningNotebooks/index.png)

## Getting Started

|Title| Task | Dataset | Training Compute | Deployment Target | ML Framework | Tags |
|:----|:-----|:-------:|:----------------:|:-----------------:|:------------:|:------------:|

## Tutorials

|Title| Task | Dataset | Training Compute | Deployment Target | ML Framework | Tags |
|:----|:-----|:-------:|:----------------:|:-----------------:|:------------:|:------------:|

## Training

|Title| Task | Dataset | Training Compute | Deployment Target | ML Framework | Tags |
|:----|:-----|:-------:|:----------------:|:-----------------:|:------------:|:------------:|

## Deployment

|Title| Task | Dataset | Training Compute | Deployment Target | ML Framework | Tags |
|:----|:-----|:-------:|:----------------:|:-----------------:|:------------:|:------------:|

## Other Notebooks

|Title| Task | Dataset | Training Compute | Deployment Target | ML Framework | Tags |
|:----|:-----|:-------:|:----------------:|:-----------------:|:------------:|:------------:|
| [Logging APIs](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/track-and-monitor-experiments/logging-api/logging-api.ipynb) | Logging APIs and analyzing results | None | None | None | None | None |
|
||||
| [configuration](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/configuration.ipynb) | | | | | | |
|
||||
| [azure-ml-with-nvidia-rapids](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/contrib/RAPIDS/azure-ml-with-nvidia-rapids.ipynb) | | | | | | |
|
||||
| [auto-ml-classification](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/classification/auto-ml-classification.ipynb) | | | | | | |
|
||||
| [auto-ml-classification-bank-marketing](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/classification-bank-marketing/auto-ml-classification-bank-marketing.ipynb) | | | | | | |
|
||||
| [auto-ml-classification-credit-card-fraud](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/classification-credit-card-fraud/auto-ml-classification-credit-card-fraud.ipynb) | | | | | | |
|
||||
| [auto-ml-classification-with-deployment](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/classification-with-deployment/auto-ml-classification-with-deployment.ipynb) | | | | | | |
|
||||
| [auto-ml-classification-with-onnx](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/classification-with-onnx/auto-ml-classification-with-onnx.ipynb) | | | | | | |
|
||||
| [auto-ml-classification-with-whitelisting](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/classification-with-whitelisting/auto-ml-classification-with-whitelisting.ipynb) | | | | | | |
|
||||
| [auto-ml-dataset](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/dataset/auto-ml-dataset.ipynb) | | | | | | |
|
||||
| [auto-ml-dataset-remote-execution](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/dataset-remote-execution/auto-ml-dataset-remote-execution.ipynb) | | | | | | |
|
||||
| [auto-ml-exploring-previous-runs](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/exploring-previous-runs/auto-ml-exploring-previous-runs.ipynb) | | | | | | |
|
||||
| [auto-ml-forecasting-bike-share](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/forecasting-bike-share/auto-ml-forecasting-bike-share.ipynb) | | | | | | |
|
||||
| [auto-ml-forecasting-energy-demand](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/forecasting-energy-demand/auto-ml-forecasting-energy-demand.ipynb) | | | | | | |
|
||||
| [auto-ml-forecasting-orange-juice-sales](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/forecasting-orange-juice-sales/auto-ml-forecasting-orange-juice-sales.ipynb) | | | | | | |
|
||||
| [auto-ml-missing-data-blacklist-early-termination](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/missing-data-blacklist-early-termination/auto-ml-missing-data-blacklist-early-termination.ipynb) | | | | | | |
|
||||
| [auto-ml-model-explanation](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/model-explanation/auto-ml-model-explanation.ipynb) | | | | | | |
|
||||
| [auto-ml-regression](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/regression/auto-ml-regression.ipynb) | | | | | | |
|
||||
| [auto-ml-regression-concrete-strength](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/regression-concrete-strength/auto-ml-regression-concrete-strength.ipynb) | | | | | | |
|
||||
| [auto-ml-regression-hardware-performance](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/regression-hardware-performance/auto-ml-regression-hardware-performance.ipynb) | | | | | | |
|
||||
| [auto-ml-remote-amlcompute](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/remote-amlcompute/auto-ml-remote-amlcompute.ipynb) | | | | | | |
|
||||
| [auto-ml-remote-amlcompute-with-onnx](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/remote-amlcompute-with-onnx/auto-ml-remote-amlcompute-with-onnx.ipynb) | | | | | | |
|
||||
| [auto-ml-sample-weight](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/sample-weight/auto-ml-sample-weight.ipynb) | | | | | | |
|
||||
| [auto-ml-sparse-data-train-test-split](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/sparse-data-train-test-split/auto-ml-sparse-data-train-test-split.ipynb) | | | | | | |
|
||||
| [auto-ml-sql-energy-demand](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/sql-server/energy-demand/auto-ml-sql-energy-demand.ipynb) | | | | | | |
|
||||
| [auto-ml-sql-setup](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/sql-server/setup/auto-ml-sql-setup.ipynb) | | | | | | |
|
||||
| [auto-ml-subsampling-local](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/automated-machine-learning/subsampling/auto-ml-subsampling-local.ipynb) | | | | | | |
|
||||
| [build-model-run-history-03](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/azure-databricks/amlsdk/build-model-run-history-03.ipynb) | | | | | | |
|
||||
| [deploy-to-aci-04](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/azure-databricks/amlsdk/deploy-to-aci-04.ipynb) | | | | | | |
|
||||
| [deploy-to-aks-existingimage-05](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/azure-databricks/amlsdk/deploy-to-aks-existingimage-05.ipynb) | | | | | | |
|
||||
| [ingest-data-02](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/azure-databricks/amlsdk/ingest-data-02.ipynb) | | | | | | |
|
||||
| [installation-and-configuration-01](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/azure-databricks/amlsdk/installation-and-configuration-01.ipynb) | | | | | | |
|
||||
| [automl-databricks-local-01](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/azure-databricks/automl/automl-databricks-local-01.ipynb) | | | | | | |
|
||||
| [automl-databricks-local-with-deployment](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/azure-databricks/automl/automl-databricks-local-with-deployment.ipynb) | | | | | | |
|
||||
| [aml-pipelines-use-databricks-as-compute-target](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/azure-databricks/databricks-as-remote-compute-target/aml-pipelines-use-databricks-as-compute-target.ipynb) | | | | | | |
|
||||
| [automl_hdi_local_classification](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/azure-hdi/automl_hdi_local_classification.ipynb) | | | | | | |
|
||||
| [model-register-and-deploy](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deploy-to-cloud/model-register-and-deploy.ipynb) | | | | | | |
|
||||
| [register-model-deploy-local-advanced](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deploy-to-local/register-model-deploy-local-advanced.ipynb) | | | | | | |
|
||||
| [register-model-deploy-local](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deploy-to-local/register-model-deploy-local.ipynb) | | | | | | |
|
||||
| [accelerated-models-object-detection](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/accelerated-models/accelerated-models-object-detection.ipynb) | | | | | | |
|
||||
| [accelerated-models-quickstart](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/accelerated-models/accelerated-models-quickstart.ipynb) | | | | | | |
|
||||
| [accelerated-models-training](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/accelerated-models/accelerated-models-training.ipynb) | | | | | | |
|
||||
| [model-register-and-deploy](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/deploy-to-cloud/model-register-and-deploy.ipynb) | | | | | | |
|
||||
| [register-model-deploy-local-advanced](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/deploy-to-local/register-model-deploy-local-advanced.ipynb) | | | | | | |
|
||||
| [register-model-deploy-local](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/deploy-to-local/register-model-deploy-local.ipynb) | | | | | | |
|
||||
| [enable-app-insights-in-production-service](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/enable-app-insights-in-production-service/enable-app-insights-in-production-service.ipynb) | | | | | | |
|
||||
| [enable-data-collection-for-models-in-aks](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/enable-data-collection-for-models-in-aks/enable-data-collection-for-models-in-aks.ipynb) | | | | | | |
|
||||
| [onnx-convert-aml-deploy-tinyyolo](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/onnx/onnx-convert-aml-deploy-tinyyolo.ipynb) | | | | | | |
|
||||
| [onnx-inference-facial-expression-recognition-deploy](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/onnx/onnx-inference-facial-expression-recognition-deploy.ipynb) | | | | | | |
|
||||
| [onnx-inference-mnist-deploy](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/onnx/onnx-inference-mnist-deploy.ipynb) | | | | | | |
|
||||
| [onnx-modelzoo-aml-deploy-resnet50](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/onnx/onnx-modelzoo-aml-deploy-resnet50.ipynb) | | | | | | |
|
||||
| [onnx-train-pytorch-aml-deploy-mnist](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/onnx/onnx-train-pytorch-aml-deploy-mnist.ipynb) | | | | | | |
|
||||
| [production-deploy-to-aks](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/production-deploy-to-aks/production-deploy-to-aks.ipynb) | | | | | | |
|
||||
| [production-deploy-to-aks-gpu](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/production-deploy-to-aks-gpu/production-deploy-to-aks-gpu.ipynb) | | | | | | |
|
||||
| [register-model-create-image-deploy-service](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/register-model-create-image-deploy-service/register-model-create-image-deploy-service.ipynb) | | | | | | |
|
||||
| [explain-model-on-amlcompute](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/explain-model/azure-integration/remote-explanation/explain-model-on-amlcompute.ipynb) | | | | | | |
|
||||
| [save-retrieve-explanations-run-history](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/explain-model/azure-integration/run-history/save-retrieve-explanations-run-history.ipynb) | | | | | | |
|
||||
| [train-explain-model-locally-and-deploy](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/explain-model/azure-integration/scoring-time/train-explain-model-locally-and-deploy.ipynb) | | | | | | |
|
||||
| [train-explain-model-on-amlcompute-and-deploy](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/explain-model/azure-integration/scoring-time/train-explain-model-on-amlcompute-and-deploy.ipynb) | | | | | | |
|
||||
| [advanced-feature-transformations-explain-local](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/explain-model/tabular-data/advanced-feature-transformations-explain-local.ipynb) | | | | | | |
|
||||
| [explain-binary-classification-local](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/explain-model/tabular-data/explain-binary-classification-local.ipynb) | | | | | | |
|
||||
| [explain-multiclass-classification-local](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/explain-model/tabular-data/explain-multiclass-classification-local.ipynb) | | | | | | |
|
||||
| [explain-regression-local](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/explain-model/tabular-data/explain-regression-local.ipynb) | | | | | | |
|
||||
| [simple-feature-transformations-explain-local](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/explain-model/tabular-data/simple-feature-transformations-explain-local.ipynb) | | | | | | |
|
||||
| [aml-pipelines-data-transfer](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/machine-learning-pipelines/intro-to-pipelines/aml-pipelines-data-transfer.ipynb) | | | | | | |
|
||||
| [aml-pipelines-getting-started](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/machine-learning-pipelines/intro-to-pipelines/aml-pipelines-getting-started.ipynb) | | | | | | |
|
||||
| [aml-pipelines-how-to-use-azurebatch-to-run-a-windows-executable](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/machine-learning-pipelines/intro-to-pipelines/aml-pipelines-how-to-use-azurebatch-to-run-a-windows-executable.ipynb) | | | | | | |
|
||||
| [aml-pipelines-how-to-use-estimatorstep](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/machine-learning-pipelines/intro-to-pipelines/aml-pipelines-how-to-use-estimatorstep.ipynb) | | | | | | |
|
||||
| [aml-pipelines-how-to-use-pipeline-drafts](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/machine-learning-pipelines/intro-to-pipelines/aml-pipelines-how-to-use-pipeline-drafts.ipynb) | | | | | | |
|
||||
| [aml-pipelines-parameter-tuning-with-hyperdrive](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/machine-learning-pipelines/intro-to-pipelines/aml-pipelines-parameter-tuning-with-hyperdrive.ipynb) | | | | | | |
|
||||
| [aml-pipelines-publish-and-run-using-rest-endpoint](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/machine-learning-pipelines/intro-to-pipelines/aml-pipelines-publish-and-run-using-rest-endpoint.ipynb) | | | | | | |
|
||||
| [aml-pipelines-setup-schedule-for-a-published-pipeline](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/machine-learning-pipelines/intro-to-pipelines/aml-pipelines-setup-schedule-for-a-published-pipeline.ipynb) | | | | | | |
|
||||
| [aml-pipelines-setup-versioned-pipeline-endpoints](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/machine-learning-pipelines/intro-to-pipelines/aml-pipelines-setup-versioned-pipeline-endpoints.ipynb) | | | | | | |
|
||||
| [aml-pipelines-use-adla-as-compute-target](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/machine-learning-pipelines/intro-to-pipelines/aml-pipelines-use-adla-as-compute-target.ipynb) | | | | | | |
|
||||
| [aml-pipelines-use-databricks-as-compute-target](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/machine-learning-pipelines/intro-to-pipelines/aml-pipelines-use-databricks-as-compute-target.ipynb) | | | | | | |
|
||||
| [aml-pipelines-with-automated-machine-learning-step](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/machine-learning-pipelines/intro-to-pipelines/aml-pipelines-with-automated-machine-learning-step.ipynb) | | | | | | |
|
||||
| [aml-pipelines-with-data-dependency-steps](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/machine-learning-pipelines/intro-to-pipelines/aml-pipelines-with-data-dependency-steps.ipynb) | | | | | | |
|
||||
| [nyc-taxi-data-regression-model-building](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/machine-learning-pipelines/nyc-taxi-data-regression-model-building/nyc-taxi-data-regression-model-building.ipynb) | | | | | | |
|
||||
| [pipeline-batch-scoring](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/machine-learning-pipelines/pipeline-batch-scoring/pipeline-batch-scoring.ipynb) | | | | | | |
|
||||
| [pipeline-style-transfer](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/machine-learning-pipelines/pipeline-style-transfer/pipeline-style-transfer.ipynb) | | | | | | |
|
||||
| [authentication-in-azureml](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/manage-azureml-service/authentication-in-azureml/authentication-in-azureml.ipynb) | | | | | | |
|
||||
| [azure-ml-datadrift](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/monitor-models/data-drift/azure-ml-datadrift.ipynb) | | | | | | |
|
||||
| [manage-runs](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/track-and-monitor-experiments/manage-runs/manage-runs.ipynb) | | | | | | |
|
||||
| [tensorboard](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/track-and-monitor-experiments/tensorboard/tensorboard.ipynb) | | | | | | |
|
||||
| [deploy-model](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/track-and-monitor-experiments/using-mlflow/deploy-model/deploy-model.ipynb) | | | | | | |
|
||||
| [train-and-deploy-pytorch](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/track-and-monitor-experiments/using-mlflow/train-deploy-pytorch/train-and-deploy-pytorch.ipynb) | | | | | | |
|
||||
| [train-local](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/track-and-monitor-experiments/using-mlflow/train-local/train-local.ipynb) | | | | | | |
|
||||
| [train-remote](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/track-and-monitor-experiments/using-mlflow/train-remote/train-remote.ipynb) | | | | | | |
|
||||
| [logging-api](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training/logging-api/logging-api.ipynb) | | | | | | |
|
||||
| [manage-runs](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training/manage-runs/manage-runs.ipynb) | | | | | | |
|
||||
| [train-hyperparameter-tune-deploy-with-sklearn](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training/train-hyperparameter-tune-deploy-with-sklearn/train-hyperparameter-tune-deploy-with-sklearn.ipynb) | | | | | | |
|
||||
| [train-in-spark](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training/train-in-spark/train-in-spark.ipynb) | | | | | | |
|
||||
| [train-on-amlcompute](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training/train-on-amlcompute/train-on-amlcompute.ipynb) | | | | | | |
|
||||
| [train-on-local](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training/train-on-local/train-on-local.ipynb) | | | | | | |
|
||||
| [train-on-remote-vm](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training/train-on-remote-vm/train-on-remote-vm.ipynb) | | | | | | |
|
||||
| [train-within-notebook](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training/train-within-notebook/train-within-notebook.ipynb) | | | | | | |
|
||||
| [using-environments](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training/using-environments/using-environments.ipynb) | | | | | | |
|
||||
| [distributed-chainer](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training-with-deep-learning/distributed-chainer/distributed-chainer.ipynb) | | | | | | |
|
||||
| [distributed-cntk-with-custom-docker](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training-with-deep-learning/distributed-cntk-with-custom-docker/distributed-cntk-with-custom-docker.ipynb) | | | | | | |
|
||||
| [distributed-pytorch-with-horovod](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training-with-deep-learning/distributed-pytorch-with-horovod/distributed-pytorch-with-horovod.ipynb) | | | | | | |
|
||||
| [distributed-tensorflow-with-horovod](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training-with-deep-learning/distributed-tensorflow-with-horovod/distributed-tensorflow-with-horovod.ipynb) | | | | | | |
|
||||
| [distributed-tensorflow-with-parameter-server](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training-with-deep-learning/distributed-tensorflow-with-parameter-server/distributed-tensorflow-with-parameter-server.ipynb) | | | | | | |
|
||||
| [export-run-history-to-tensorboard](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training-with-deep-learning/export-run-history-to-tensorboard/export-run-history-to-tensorboard.ipynb) | | | | | | |
|
||||
| [how-to-use-estimator](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training-with-deep-learning/how-to-use-estimator/how-to-use-estimator.ipynb) | | | | | | |
|
||||
| [notebook_example](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training-with-deep-learning/how-to-use-estimator/notebook_example.ipynb) | | | | | | |
|
||||
| [tensorboard](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training-with-deep-learning/tensorboard/tensorboard.ipynb) | | | | | | |
|
||||
| [train-hyperparameter-tune-deploy-with-chainer](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training-with-deep-learning/train-hyperparameter-tune-deploy-with-chainer/train-hyperparameter-tune-deploy-with-chainer.ipynb) | | | | | | |
|
||||
| [train-hyperparameter-tune-deploy-with-keras](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training-with-deep-learning/train-hyperparameter-tune-deploy-with-keras/train-hyperparameter-tune-deploy-with-keras.ipynb) | | | | | | |
|
||||
| [train-hyperparameter-tune-deploy-with-pytorch](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training-with-deep-learning/train-hyperparameter-tune-deploy-with-pytorch/train-hyperparameter-tune-deploy-with-pytorch.ipynb) | | | | | | |
|
||||
| [train-hyperparameter-tune-deploy-with-tensorflow](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training-with-deep-learning/train-hyperparameter-tune-deploy-with-tensorflow/train-hyperparameter-tune-deploy-with-tensorflow.ipynb) | | | | | | |
|
||||
| [train-tensorflow-resume-training](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/training-with-deep-learning/train-tensorflow-resume-training/train-tensorflow-resume-training.ipynb) | | | | | | |
|
||||
| [new-york-taxi](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/case-studies/new-york-taxi/new-york-taxi.ipynb) | | | | | | |
|
||||
| [new-york-taxi_scale-out](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/case-studies/new-york-taxi/new-york-taxi_scale-out.ipynb) | | | | | | |
|
||||
| [add-column-using-expression](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/add-column-using-expression.ipynb) | | | | | | |
|
||||
| [append-columns-and-rows](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/append-columns-and-rows.ipynb) | | | | | | |
|
||||
| [assertions](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/assertions.ipynb) | | | | | | |
|
||||
| [auto-read-file](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/auto-read-file.ipynb) | | | | | | |
|
||||
| [cache](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/cache.ipynb) | | | | | | |
|
||||
| [column-manipulations](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/column-manipulations.ipynb) | | | | | | |
|
||||
| [column-type-transforms](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/column-type-transforms.ipynb) | | | | | | |
|
||||
| [custom-python-transforms](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/custom-python-transforms.ipynb) | | | | | | |
|
||||
| [data-ingestion](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/data-ingestion.ipynb) | | | | | | |
|
||||
| [data-profile](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/data-profile.ipynb) | | | | | | |
|
||||
| [datastore](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/datastore.ipynb) | | | | | | |
|
||||
| [derive-column-by-example](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/derive-column-by-example.ipynb) | | | | | | |
|
||||
| [external-references](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/external-references.ipynb) | | | | | | |
|
||||
| [filtering](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/filtering.ipynb) | | | | | | |
|
||||
| [fuzzy-group](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/fuzzy-group.ipynb) | | | | | | |
|
||||
| [impute-missing-values](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/impute-missing-values.ipynb) | | | | | | |
|
||||
| [join](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/join.ipynb) | | | | | | |
|
||||
| [label-encoder](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/label-encoder.ipynb) | | | | | | |
|
||||
| [min-max-scaler](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/min-max-scaler.ipynb) | | | | | | |
|
||||
| [one-hot-encoder](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/one-hot-encoder.ipynb) | | | | | | |
|
||||
| [open-save-dataflows](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/open-save-dataflows.ipynb) | | | | | | |
|
||||
| [quantile-transformation](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/quantile-transformation.ipynb) | | | | | | |
|
||||
| [random-split](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/random-split.ipynb) | | | | | | |
|
||||
| [replace-datasource-replace-reference](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/replace-datasource-replace-reference.ipynb) | | | | | | |
|
||||
| [replace-fill-error](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/replace-fill-error.ipynb) | | | | | | |
|
||||
| [secrets](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/secrets.ipynb) | | | | | | |
|
||||
| [semantic-types](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/semantic-types.ipynb) | | | | | | |
|
||||
| [split-column-by-example](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/split-column-by-example.ipynb) | | | | | | |
|
||||
| [subsetting-sampling](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/subsetting-sampling.ipynb) | | | | | | |
|
||||
| [summarize](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/summarize.ipynb) | | | | | | |
|
||||
| [working-with-file-streams](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/working-with-file-streams.ipynb) | | | | | | |
|
||||
| [writing-data](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/how-to-guides/writing-data.ipynb) | | | | | | |
|
||||
| [getting-started](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/dataprep/tutorials/getting-started/getting-started.ipynb) | | | | | | |
|
||||
| [datasets-diff](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/datasets/datasets-diff/datasets-diff.ipynb) | | | | | | |
|
||||
| [file-dataset-img-classification](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/datasets/datasets-tutorial/file-dataset-img-classification.ipynb) | | | | | | |
|
||||
| [tabular-dataset-tutorial](https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/work-with-data/datasets/datasets-tutorial/tabular-dataset-tutorial.ipynb) | | | | | | |
|
||||
| [configuration](https://github.com/Azure/MachineLearningNotebooks/blob/master/setup-environment/configuration.ipynb) | | | | | | |
|
||||
| [img-classification-part1-training](https://github.com/Azure/MachineLearningNotebooks/blob/master/tutorials/img-classification-part1-training.ipynb) | | | | | | |
|
||||
| [img-classification-part2-deploy](https://github.com/Azure/MachineLearningNotebooks/blob/master/tutorials/img-classification-part2-deploy.ipynb) | | | | | | |
|
||||
| [regression-automated-ml](https://github.com/Azure/MachineLearningNotebooks/blob/master/tutorials/regression-automated-ml.ipynb) | | | | | | |
|
||||
| [tutorial-1st-experiment-sdk-train](https://github.com/Azure/MachineLearningNotebooks/blob/master/tutorials/tutorial-1st-experiment-sdk-train.ipynb) | | | | | | |
|
||||
@@ -675,7 +675,7 @@
"name": "python36"
},
"language_info": {
"codemirror_mode": {
"codemdirror_mode": {
"name": "ipython",
"version": 3
},
@@ -686,7 +686,26 @@
"pygments_lexer": "ipython3",
"version": "3.6.6"
},
"msauthor": "roastala"
"friendly_name": "Testing index",
"exclude_from_index": false,
"order_index": 1,
"category": "tutorial",
"tags": [
"featured"
],
"task": "Regression",
"datasets": [
"NYC Taxi"
],
"compute": [
"local"
],
"deployment": [
"None"
],
"framework": [
"Azure ML AutoML"
],
},
"nbformat": 4,
"nbformat_minor": 2

@@ -619,7 +619,27 @@
"pygments_lexer": "ipython3",
"version": "3.6.6"
},
"msauthor": "sgilley"
"msauthor": "sgilley",
"friendly_name": "Image classification tutorial",
"exclude_from_index": false,
"order_index": 1,
"category": "tutorial",
"tags": [
"featured"
],
"task": "Regression",
"datasets": [
"NYC Taxi"
],
"compute": [
"AKS"
],
"deployment": [
"None"
],
"framework": [
"Azure ML AutoML"
]
},
"nbformat": 4,
"nbformat_minor": 2

@@ -631,7 +631,26 @@
"pygments_lexer": "ipython3",
"version": "3.6.7"
},
"msauthor": "trbye"
"friendly_name": "Testing index 2",
"exclude_from_index": true,
"order_index": 1,
"category": "training",
"tags": [
"featured"
],
"task": "Regression",
"datasets": [
"NYC Taxi"
],
"compute": [
"ACI"
],
"deployment": [
"None"
],
"framework": [
"Azure ML AutoML2"
],
},
"nbformat": 4,
"nbformat_minor": 2

@@ -542,7 +542,27 @@
"pygments_lexer": "ipython3",
"version": "3.6.7"
},
"msauthor": "sgilley"
"msauthor": "sgilley",
"friendly_name": "Regression modeling",
"exclude_from_index": false,
"order_index": 1,
"category": "tutorial",
"tags": [
"featured"
],
"task": "Regression",
"datasets": [
"NYC Taxi"
],
"compute": [
"Aml compute", "bla bla"
],
"deployment": [
"None"
],
"framework": [
"AutoML"
]
},
"nbformat": 4,
"nbformat_minor": 2