Compare commits

...

6 Commits

Author SHA1 Message Date
amlrelsa-ms
5019ad6c5a update samples from Release-76 as a part of SDK release 2020-11-03 22:31:02 +00:00
Cody
41a2ebd2b3 Merge pull request #1226 from Azure/lostmygithubaccount-patch-3
Update README.md
2020-11-03 11:25:10 -08:00
Cody
53e3283d1d Update README.md 2020-11-03 11:17:41 -08:00
Harneet Virk
ba9c4c5465 Merge pull request #1225 from Azure/release_update/Release-75
update samples from Release-75 as a part of SDK release
2020-11-03 11:11:11 -08:00
amlrelsa-ms
a6c65f00ec update samples from Release-75 as a part of SDK release 2020-11-03 19:07:12 +00:00
Cody
95072eabc2 Merge pull request #1221 from Azure/lostmygithubaccount-patch-2
Update README.md
2020-11-02 11:52:05 -08:00
3 changed files with 10 additions and 10 deletions

View File

@@ -1,7 +1,5 @@
# Azure Machine Learning service example notebooks # Azure Machine Learning service example notebooks
> a community-driven repository of examples using mlflow for tracking can be found at https://github.com/Azure/azureml-examples
This repository contains example notebooks demonstrating the [Azure Machine Learning](https://azure.microsoft.com/en-us/services/machine-learning-service/) Python SDK which allows you to build, train, deploy and manage machine learning solutions using Azure. The AML SDK allows you the choice of using local or cloud compute resources, while managing and maintaining the complete data science workflow from the cloud. This repository contains example notebooks demonstrating the [Azure Machine Learning](https://azure.microsoft.com/en-us/services/machine-learning-service/) Python SDK which allows you to build, train, deploy and manage machine learning solutions using Azure. The AML SDK allows you the choice of using local or cloud compute resources, while managing and maintaining the complete data science workflow from the cloud.
![Azure ML Workflow](https://raw.githubusercontent.com/MicrosoftDocs/azure-docs/master/articles/machine-learning/media/concept-azure-machine-learning-architecture/workflow.png) ![Azure ML Workflow](https://raw.githubusercontent.com/MicrosoftDocs/azure-docs/master/articles/machine-learning/media/concept-azure-machine-learning-architecture/workflow.png)

View File

@@ -460,8 +460,8 @@
" name=\"Merge Taxi Data\",\n", " name=\"Merge Taxi Data\",\n",
" script_name=\"merge.py\", \n", " script_name=\"merge.py\", \n",
" arguments=[\"--output_merge\", merged_data],\n", " arguments=[\"--output_merge\", merged_data],\n",
" inputs=[cleansed_green_data.parse_parquet_files(file_extension=None),\n", " inputs=[cleansed_green_data.parse_parquet_files(),\n",
" cleansed_yellow_data.parse_parquet_files(file_extension=None)],\n", " cleansed_yellow_data.parse_parquet_files()],\n",
" outputs=[merged_data],\n", " outputs=[merged_data],\n",
" compute_target=aml_compute,\n", " compute_target=aml_compute,\n",
" runconfig=aml_run_config,\n", " runconfig=aml_run_config,\n",
@@ -497,7 +497,7 @@
" name=\"Filter Taxi Data\",\n", " name=\"Filter Taxi Data\",\n",
" script_name=\"filter.py\", \n", " script_name=\"filter.py\", \n",
" arguments=[\"--output_filter\", filtered_data],\n", " arguments=[\"--output_filter\", filtered_data],\n",
" inputs=[merged_data.parse_parquet_files(file_extension=None)],\n", " inputs=[merged_data.parse_parquet_files()],\n",
" outputs=[filtered_data],\n", " outputs=[filtered_data],\n",
" compute_target=aml_compute,\n", " compute_target=aml_compute,\n",
" runconfig = aml_run_config,\n", " runconfig = aml_run_config,\n",
@@ -533,7 +533,7 @@
" name=\"Normalize Taxi Data\",\n", " name=\"Normalize Taxi Data\",\n",
" script_name=\"normalize.py\", \n", " script_name=\"normalize.py\", \n",
" arguments=[\"--output_normalize\", normalized_data],\n", " arguments=[\"--output_normalize\", normalized_data],\n",
" inputs=[filtered_data.parse_parquet_files(file_extension=None)],\n", " inputs=[filtered_data.parse_parquet_files()],\n",
" outputs=[normalized_data],\n", " outputs=[normalized_data],\n",
" compute_target=aml_compute,\n", " compute_target=aml_compute,\n",
" runconfig = aml_run_config,\n", " runconfig = aml_run_config,\n",
@@ -574,7 +574,7 @@
" name=\"Transform Taxi Data\",\n", " name=\"Transform Taxi Data\",\n",
" script_name=\"transform.py\", \n", " script_name=\"transform.py\", \n",
" arguments=[\"--output_transform\", transformed_data],\n", " arguments=[\"--output_transform\", transformed_data],\n",
" inputs=[normalized_data.parse_parquet_files(file_extension=None)],\n", " inputs=[normalized_data.parse_parquet_files()],\n",
" outputs=[transformed_data],\n", " outputs=[transformed_data],\n",
" compute_target=aml_compute,\n", " compute_target=aml_compute,\n",
" runconfig = aml_run_config,\n", " runconfig = aml_run_config,\n",
@@ -614,7 +614,7 @@
" script_name=\"train_test_split.py\", \n", " script_name=\"train_test_split.py\", \n",
" arguments=[\"--output_split_train\", output_split_train,\n", " arguments=[\"--output_split_train\", output_split_train,\n",
" \"--output_split_test\", output_split_test],\n", " \"--output_split_test\", output_split_test],\n",
" inputs=[transformed_data.parse_parquet_files(file_extension=None)],\n", " inputs=[transformed_data.parse_parquet_files()],\n",
" outputs=[output_split_train, output_split_test],\n", " outputs=[output_split_train, output_split_test],\n",
" compute_target=aml_compute,\n", " compute_target=aml_compute,\n",
" runconfig = aml_run_config,\n", " runconfig = aml_run_config,\n",
@@ -690,7 +690,7 @@
" \"n_cross_validations\": 5\n", " \"n_cross_validations\": 5\n",
"}\n", "}\n",
"\n", "\n",
"training_dataset = output_split_train.parse_parquet_files(file_extension=None).keep_columns(['pickup_weekday','pickup_hour', 'distance','passengers', 'vendor', 'cost'])\n", "training_dataset = output_split_train.parse_parquet_files().keep_columns(['pickup_weekday','pickup_hour', 'distance','passengers', 'vendor', 'cost'])\n",
"\n", "\n",
"automl_config = AutoMLConfig(task = 'regression',\n", "automl_config = AutoMLConfig(task = 'regression',\n",
" debug_log = 'automated_ml_errors.log',\n", " debug_log = 'automated_ml_errors.log',\n",

View File

@@ -429,7 +429,8 @@
"dependencies:\n", "dependencies:\n",
"- python=3.6.2\n", "- python=3.6.2\n",
"- pip:\n", "- pip:\n",
" - azureml-defaults==1.13.0\n", " - h5py<=2.10.0\n",
" - azureml-defaults\n",
" - tensorflow-gpu==2.0.0\n", " - tensorflow-gpu==2.0.0\n",
" - keras<=2.3.1\n", " - keras<=2.3.1\n",
" - matplotlib" " - matplotlib"
@@ -981,6 +982,7 @@
"\n", "\n",
"cd = CondaDependencies.create()\n", "cd = CondaDependencies.create()\n",
"cd.add_tensorflow_conda_package()\n", "cd.add_tensorflow_conda_package()\n",
"cd.add_conda_package('h5py<=2.10.0')\n",
"cd.add_conda_package('keras<=2.3.1')\n", "cd.add_conda_package('keras<=2.3.1')\n",
"cd.add_pip_package(\"azureml-defaults\")\n", "cd.add_pip_package(\"azureml-defaults\")\n",
"cd.save_to_file(base_directory='./', conda_file_path='myenv.yml')\n", "cd.save_to_file(base_directory='./', conda_file_path='myenv.yml')\n",