From 2f10c42813ee11174f1daedb715fc7a09910e0a1 Mon Sep 17 00:00:00 2001 From: Elvis Saravia Date: Fri, 28 Feb 2020 09:32:17 +0100 Subject: [PATCH 01/21] Created using Colaboratory --- RNN_PT.ipynb | 2181 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 2181 insertions(+) create mode 100644 RNN_PT.ipynb diff --git a/RNN_PT.ipynb b/RNN_PT.ipynb new file mode 100644 index 0000000..363ccc6 --- /dev/null +++ b/RNN_PT.ipynb @@ -0,0 +1,2181 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "RNN_PT.ipynb", + "provenance": [], + "include_colab_link": true + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + } + },
+ "display": null, + "left": null + } + }, + "8f59433e532748f8a33a47e84c31a281": { + "model_module": "@jupyter-widgets/controls", + "model_name": "DescriptionStyleModel", + "state": { + "_view_name": "StyleView", + "_model_name": "DescriptionStyleModel", + "description_width": "", + "_view_module": "@jupyter-widgets/base", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.2.0", + "_model_module": "@jupyter-widgets/controls" + } + }, + "ee940fd1588a48668c2a34c8f26c1195": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + } + } + } + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "view-in-github", + "colab_type": "text" + }, + "source": [ + "\"Open" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "BVq5mz9s8CPL", + "colab_type": "text" + }, + "source": [ + "## Building RNNs is Fun with PyTorch and Google Colab\n", + "In this tutorial, I will first teach you how to build a recurrent neural network (RNN) with a single layer, consisting of one single neuron, with PyTorch and Google Colab. I will also show you how to implement a simple RNN-based model for image classification.\n", + "\n", + "We will be using Google Colab so we need to manually install the PyTorch library first. 
You can do this by using the following command:" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "hicY8Xs6_dI4", + "colab_type": "code", + "outputId": "af1911cf-b176-4a67-ea43-88e40f6292d9", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 102 + } + }, + "source": [ + "!pip3 install torch torchvision" + ], + "execution_count": 1, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Requirement already satisfied: torch in /usr/local/lib/python3.6/dist-packages (1.4.0)\n", + "Requirement already satisfied: torchvision in /usr/local/lib/python3.6/dist-packages (0.5.0)\n", + "Requirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from torchvision) (1.12.0)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from torchvision) (1.17.5)\n", + "Requirement already satisfied: pillow>=4.1.1 in /usr/local/lib/python3.6/dist-packages (from torchvision) (6.2.2)\n" + ], + "name": "stdout" + } + ] + },
+ { + "cell_type": "markdown", + "metadata": { + "id": "mXs_oWct-qxY", + "colab_type": "text" + }, + "source": [ + "Now we can import the necessary libraries we will use in the tutorial:" + ] + },
+ { + "cell_type": "code", + "metadata": { + "id": "BBuAr4-L8CPN", + "colab_type": "code", + "colab": {} + }, + "source": [ + "import torch\n", + "import torch.nn as nn\n", + "import torch.nn.functional as F\n", + "import os\n", + "import numpy as np" + ], + "execution_count": 0, + "outputs": [] + },
+ { + "cell_type": "markdown", + "metadata": { + "id": "EMQfRrRl8CPV", + "colab_type": "text" + }, + "source": [ + "### RNN with a Single Neuron\n", + "\n", + "\n", + "The idea of this tutorial is to show you the basic operations necessary for building an RNN architecture using PyTorch. This guide assumes you have knowledge of basic RNNs and that you have read the tutorial on [building neural networks from scratch using PyTorch](https://medium.com/dair-ai/a-simple-neural-network-from-scratch-with-pytorch-and-google-colab-c7f3830618e0). I will try to review RNNs wherever possible for those who need a refresher, but I will keep it minimal.\n", + "\n", + "First, let's build the computation graph for a single-layer RNN. 
Again, we are not concerned with the math for now; I just want to show you the PyTorch operations needed to build your RNN models.\n", + "\n", + "For illustration purposes, this is the architecture we are building:\n", + "\n", + "![alt txt](https://docs.google.com/drawings/d/e/2PACX-1vQXBLYvvI1dqAHdLA0hQdsP1PojmCfuSCMK2DXEL0uTvRUqvD1eYK8fsECcNCoekxCbgWJ-k7QF_1s4/pub?w=600&h=400)" + ] + },
+ { + "cell_type": "markdown", + "metadata": { + "id": "kHjE4j2x8CPW", + "colab_type": "text" + }, + "source": [ + "And here is the code:" + ] + },
+ { + "cell_type": "code", + "metadata": { + "id": "qiY3EllI8CPY", + "colab_type": "code", + "colab": {} + }, + "source": [ + "class SingleRNN(nn.Module):\n", + " def __init__(self, n_inputs, n_neurons):\n", + " super(SingleRNN, self).__init__()\n", + " \n", + " self.Wx = torch.randn(n_inputs, n_neurons) # 4 X 1\n", + " self.Wy = torch.randn(n_neurons, n_neurons) # 1 X 1\n", + " \n", + " self.b = torch.zeros(1, n_neurons) # 1 X 1\n", + " \n", + " def forward(self, X0, X1):\n", + " self.Y0 = torch.tanh(torch.mm(X0, self.Wx) + self.b) # 4 X 1\n", + " \n", + " self.Y1 = torch.tanh(torch.mm(self.Y0, self.Wy) +\n", + " torch.mm(X1, self.Wx) + self.b) # 4 X 1\n", + " \n", + " return self.Y0, self.Y1" + ], + "execution_count": 0, + "outputs": [] + },
+ { + "cell_type": "markdown", + "metadata": { + "id": "cP94VBHE8CPc", + "colab_type": "text" + }, + "source": [ + "In the above code, I have implemented a simple one-layer, one-neuron RNN. I initialized two weight matrices, `Wx` and `Wy`, with values from a normal distribution. `Wx` contains connection weights for the inputs of the current time step, while `Wy` contains connection weights for the outputs of the previous time step. We added a bias `b`. The `forward` function computes one output per time step -- two in this case. 
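Concretely, the two lines in `forward` compute `Y0 = tanh(mm(X0, Wx) + b)` and `Y1 = tanh(mm(Y0, Wy) + mm(X1, Wx) + b)`, so the output of the first step is fed back into the computation at the second step. 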
Note that we are using `tanh` as the nonlinearity (activation function).\n", + "\n", + "As for the input, we are providing 4 instances, with each instance spanning two time steps.\n", + "\n", + "For illustration purposes, this is how the data is being fed into the RNN model: \n", + "\n", + "![alt txt](https://docs.google.com/drawings/d/e/2PACX-1vRpQYtOzO1U_3yQLf1885kMaja6MsXtJ8QnlqxrfpTgZmb4WpewJXphGdmotYXDB1VE6zlW6cBY_WqR/pub?w=600&h=600)" + ] + },
+ { + "cell_type": "markdown", + "metadata": { + "id": "ydSxau_eFpwH", + "colab_type": "text" + }, + "source": [ + "And this is the code to test the model:" + ] + },
+ { + "cell_type": "code", + "metadata": { + "id": "7a7-kIhj8CPe", + "colab_type": "code", + "colab": {} + }, + "source": [ + "N_INPUT = 4\n", + "N_NEURONS = 1\n", + "\n", + "X0_batch = torch.tensor([[0,1,2,0], [3,4,5,0], \n", + " [6,7,8,0], [9,0,1,0]],\n", + " dtype = torch.float) #t=0 => 4 X 4\n", + "\n", + "X1_batch = torch.tensor([[9,8,7,0], [0,0,0,0], \n", + " [6,5,4,0], [3,2,1,0]],\n", + " dtype = torch.float) #t=1 => 4 X 4\n", + "\n", + "model = SingleRNN(N_INPUT, N_NEURONS)\n", + "\n", + "Y0_val, Y1_val = model(X0_batch, X1_batch)" + ], + "execution_count": 0, + "outputs": [] + },
+ { + "cell_type": "markdown", + "metadata": { + "id": "uNaFkJUP8CPj", + "colab_type": "text" + }, + "source": [ + "After we have fed the input into the computation graph, we obtain outputs for each time step (`Y0`, `Y1`), which we can now print as follows:" + ] + },
+ { + "cell_type": "code", + "metadata": { + "id": "q-teLNoV8CPl", + "colab_type": "code", + "outputId": "5101559c-2dad-4c71-b12b-58fbe854ba46", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 153 + } + }, + "source": [ + "print(Y0_val)\n", + "print(Y1_val)" + ], + "execution_count": 5, + "outputs": [ + { + "output_type": "stream", + "text": [ + "tensor([[-0.9984],\n", + " [-1.0000],\n", + " [-1.0000],\n", + " [-1.0000]])\n", + "tensor([[-1.0000],\n", + " [-0.2690],\n", + " [-1.0000],\n", + " [-1.0000]])\n" + ], + "name": "stdout" + } + ] + },
+ { + "cell_type": "markdown", + "metadata": { + "id": "n6tcX6ii8CP0", + "colab_type": "text" + }, + "source": [ + "### Increasing Neurons in RNN Layer\n", + "Next, I will show you how to generalize the RNN we have just built so that the single layer supports `n` neurons. In terms of the architecture, nothing really changes since we have already parameterized the number of neurons in the computation graph we have built. However, the size of the output changes when we change the number of units (i.e., neurons) in the RNN layer. 
\n", + "\n", + "Here is an illustration of what we will build:\n", + "\n", + "![alt txt](https://docs.google.com/drawings/d/e/2PACX-1vQov6BGg1fXOb7Bg5zenPh7R5j6VsZJh_D6JevQ_sm_fCxmXORxad3qLIFGG1FojzJig0qdcAQoGYoN/pub?w=600&h=404)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "t5e2Eh5dGvnB", + "colab_type": "text" + }, + "source": [ + "And here is the code:" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "zcEqoDfP8CP2", + "colab_type": "code", + "colab": {} + }, + "source": [ + "class BasicRNN(nn.Module):\n", + " def __init__(self, n_inputs, n_neurons):\n", + " super(BasicRNN, self).__init__()\n", + " \n", + " self.Wx = torch.randn(n_inputs, n_neurons) # n_inputs X n_neurons\n", + " self.Wy = torch.randn(n_neurons, n_neurons) # n_neurons X n_neurons\n", + " \n", + " self.b = torch.zeros(1, n_neurons) # 1 X n_neurons\n", + " \n", + " def forward(self, X0, X1):\n", + " self.Y0 = torch.tanh(torch.mm(X0, self.Wx) + self.b) # batch_size X n_neurons\n", + " \n", + " self.Y1 = torch.tanh(torch.mm(self.Y0, self.Wy) +\n", + " torch.mm(X1, self.Wx) + self.b) # batch_size X n_neurons\n", + " \n", + " return self.Y0, self.Y1" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "xlc4vEf88CP5", + "colab_type": "code", + "colab": {} + }, + "source": [ + "N_INPUT = 3 # number of features in input\n", + "N_NEURONS = 5 # number of units in layer\n", + "\n", + "X0_batch = torch.tensor([[0,1,2], [3,4,5], \n", + " [6,7,8], [9,0,1]],\n", + " dtype = torch.float) #t=0 => 4 X 3\n", + "\n", + "X1_batch = torch.tensor([[9,8,7], [0,0,0], \n", + " [6,5,4], [3,2,1]],\n", + " dtype = torch.float) #t=1 => 4 X 3\n", + "\n", + "model = BasicRNN(N_INPUT, N_NEURONS)\n", + "\n", + "Y0_val, Y1_val = model(X0_batch, X1_batch)" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "c9VNN7jf8CP9", + "colab_type": "text" + }, + "source": [ + "Now when we print the outputs produced for each time step, it is of size (`4 X 5`), which represents the batch size and number of neurons, respectively." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "r1kiEzop8CP-", + "colab_type": "code", + "outputId": "85c311c2-e238-4de8-b81b-926cd41d5542", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 153 + } + }, + "source": [ + "print(Y0_val)\n", + "print(Y1_val)" + ], + "execution_count": 8, + "outputs": [ + { + "output_type": "stream", + "text": [ + "tensor([[ 0.8637, 0.9878, -0.1758, -0.7462, -0.9552],\n", + " [ 0.9995, 1.0000, 0.6667, -0.9998, -0.9814],\n", + " [ 1.0000, 1.0000, 0.9454, -1.0000, -0.9924],\n", + " [-0.8953, 1.0000, 1.0000, -0.9836, 0.9999]])\n", + "tensor([[ 1.0000, 1.0000, 0.9154, -1.0000, 0.9993],\n", + " [ 0.1450, -0.9337, -0.9938, -0.9316, 0.9942],\n", + " [ 0.9997, 1.0000, -0.8217, -1.0000, 0.9984],\n", + " [-0.6636, 0.9985, 0.0432, -0.9922, 0.2622]])\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "BRKjsv2t8CQG", + "colab_type": "text" + }, + "source": [ + "### PyTorch Built-in RNN Cell" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Rcu2H6D88CQH", + "colab_type": "text" + }, + "source": [ + "If you take a closer look at the `BasicRNN` computation graph we have just built, it has a serious flaw. What if we wanted to build an architecture that supports extremely large inputs and outputs. 
+ { + "cell_type": "markdown", + "metadata": { + "id": "BRKjsv2t8CQG", + "colab_type": "text" + }, + "source": [ + "### PyTorch Built-in RNN Cell" + ] + },
+ { + "cell_type": "markdown", + "metadata": { + "id": "Rcu2H6D88CQH", + "colab_type": "text" + }, + "source": [ + "If you take a closer look at the `BasicRNN` computation graph we have just built, it has a serious flaw. What if we wanted to build an architecture that supports extremely large inputs and outputs? The way it is currently built, it would require us to compute the output for every time step individually, and the code would grow with every time step added to the computation graph. Below I will show you how to consolidate and implement this more efficiently and cleanly using the built-in `RNNCell` module.\n", + "\n", + "Let's first try to implement this informally to analyze the role `RNNCell` plays:" + ] + },
+ { + "cell_type": "code", + "metadata": { + "id": "Dp0Wjh4Z8CQI", + "colab_type": "code", + "outputId": "bc101fc6-9060-4240-a82b-b920a4ed3eab", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 136 + } + }, + "source": [ + "rnn = nn.RNNCell(3, 5) # n_input X n_neurons\n", + "\n", + "X_batch = torch.tensor([[[0,1,2], [3,4,5], \n", + " [6,7,8], [9,0,1]],\n", + " [[9,8,7], [0,0,0], \n", + " [6,5,4], [3,2,1]]\n", + " ], dtype = torch.float) # X0 and X1\n", + "\n", + "hx = torch.randn(4, 5) # batch_size X n_neurons\n", + "output = []\n", + "\n", + "# for each time step\n", + "for i in range(2):\n", + " hx = rnn(X_batch[i], hx)\n", + " output.append(hx)\n", + "\n", + "print(output)" + ], + "execution_count": 9, + "outputs": [ + { + "output_type": "stream", + "text": [ + "[tensor([[-0.6909, 0.5620, 0.2721, -0.2241, 0.0939],\n", + " [-0.5744, 0.4671, -0.4800, 0.9956, -0.7457],\n", + " [-0.5976, 0.5328, -0.9615, 1.0000, -0.7355],\n", + " [ 0.9859, -0.6745, -0.9990, 0.9984, 0.2917]], grad_fn=<TanhBackward>), tensor([[-0.7276, 0.3742, -0.9864, 1.0000, -0.8790],\n", + " [ 0.1334, 0.1078, 0.0161, -0.4180, -0.2246],\n", + " [-0.3839, 0.2674, -0.9703, 0.9991, -0.7527],\n", + " [ 0.7661, 0.0675, -0.9386, 0.9704, -0.3403]], grad_fn=<TanhBackward>)]\n" + ], + "name": "stdout" + } + ] + },
+ { + "cell_type": "markdown", + "metadata": { + "id": "cRNUCsXl8CQN", + "colab_type": "text" + }, + "source": [ + "With the above code, we have basically implemented the same model that was implemented in `BasicRNN`. `nn.RNNCell(...)` does all the magic of creating and maintaining the necessary weights and biases for us. `nn.RNNCell` accepts a tensor as input and outputs the next hidden state for each element in the batch. Read more about this module [here](https://pytorch.org/docs/stable/nn.html?highlight=rnncell#torch.nn.RNNCell)." + ] + },
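+ { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To get a feel for what `nn.RNNCell` is maintaining on our behalf, we can peek at its parameters (these attribute names are the module's own; the shapes follow PyTorch's `(hidden, input)` convention):" + ] + },
+ { + "cell_type": "code", + "metadata": {}, + "source": [ + "# input-to-hidden and hidden-to-hidden weights, plus the two bias vectors\n", + "print(rnn.weight_ih.shape) # torch.Size([5, 3])\n", + "print(rnn.weight_hh.shape) # torch.Size([5, 5])\n", + "print(rnn.bias_ih.shape) # torch.Size([5])\n", + "print(rnn.bias_hh.shape) # torch.Size([5])" + ], + "execution_count": 0, + "outputs": [] + },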
+ { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now, let's formally build the computation graph using the same information we used above." + ] + },
+ { + "cell_type": "code", + "metadata": { + "id": "Opob45Zj8CQP", + "colab_type": "code", + "colab": {} + }, + "source": [ + "class CleanBasicRNN(nn.Module):\n", + " def __init__(self, batch_size, n_inputs, n_neurons):\n", + " super(CleanBasicRNN, self).__init__()\n", + " \n", + " self.rnn = nn.RNNCell(n_inputs, n_neurons)\n", + " self.hx = torch.randn(batch_size, n_neurons) # initialize hidden state\n", + " \n", + " def forward(self, X):\n", + " output = []\n", + "\n", + " # for each time step (X holds one slice per time step)\n", + " for i in range(len(X)):\n", + " self.hx = self.rnn(X[i], self.hx)\n", + " output.append(self.hx)\n", + " \n", + " return output, self.hx" + ], + "execution_count": 0, + "outputs": [] + },
+ { + "cell_type": "code", + "metadata": { + "id": "oL1yBNis8CQa", + "colab_type": "code", + "outputId": "7f2ffb2d-e412-4792-a8bb-2687b59d3205", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 204 + } + }, + "source": [ + "FIXED_BATCH_SIZE = 4 # our batch size is fixed for now\n", + "N_INPUT = 3\n", + "N_NEURONS = 5\n", + "\n", + "X_batch = torch.tensor([[[0,1,2], [3,4,5], \n", + " [6,7,8], [9,0,1]],\n", + " [[9,8,7], [0,0,0], \n", + " [6,5,4], [3,2,1]]\n", + " ], dtype = torch.float) # X0 and X1\n", + "\n", + "\n", + "model = CleanBasicRNN(FIXED_BATCH_SIZE, N_INPUT, N_NEURONS)\n", + "output_val, states_val = model(X_batch)\n", + "print(output_val) # contains all outputs for all time steps\n", + "print(states_val) # contains values for the final state, i.e., t=1" + ], + "execution_count": 11, + "outputs": [ + { + "output_type": "stream", + "text": [ + "[tensor([[ 0.4909, 0.6246, 0.2240, 0.4163, 0.9287],\n", + " [-0.3733, -0.3016, 0.9961, 1.0000, 0.9759],\n", + " [-0.3570, -0.6732, 0.9999, 1.0000, 0.9993],\n", + " [-0.9648, -0.5048, -0.5303, 0.9903, 0.3518]], grad_fn=<TanhBackward>), tensor([[-0.6901, -0.8889, 0.9998, 1.0000, 0.9993],\n", + " [ 0.6467, -0.8708, -0.6372, 0.1005, 0.5848],\n", + " [-0.2918, -0.9729, 0.9811, 1.0000, 0.9881],\n", + " [-0.1899, -0.9033, 0.3249, 0.9862, 0.6411]], grad_fn=<TanhBackward>)]\n", + "tensor([[-0.6901, -0.8889, 0.9998, 1.0000, 0.9993],\n", + " [ 0.6467, -0.8708, -0.6372, 0.1005, 0.5848],\n", + " [-0.2918, -0.9729, 0.9811, 1.0000, 0.9881],\n", + " [-0.1899, -0.9033, 0.3249, 0.9862, 0.6411]], grad_fn=<TanhBackward>)\n" + ], + "name": "stdout" + } + ] + },
+ { + "cell_type": "markdown", + "metadata": { + "id": "l240cp4g8CQh", + "colab_type": "text" + }, + "source": [ + "You can see how the code is much cleaner since we don't need to explicitly operate on the weights as in the previous code snippet -- everything is handled implicitly and elegantly behind the scenes by PyTorch." + ] + },
+ { + "cell_type": "markdown", + "metadata": { + "id": "n3zn-Ydx8CQi", + "colab_type": "text" + }, + "source": [ + "### RNN for Image Classification" + ] + },
+ { + "cell_type": "markdown", + "metadata": { + "id": "eTJHs-S-8CQk", + "colab_type": "text" + }, + "source": [ + "![alt txt](https://docs.google.com/drawings/d/e/2PACX-1vSiMstqkE9hTYmhPD3KMeFRNNKYA2NnrCayahBOEL1TalRqaWF7rH8a7O-nP9c-mKOdZRsWtmAGZfNN/pub?w=969&h=368)\n", + "\n", + "Now that you have learned how to build a simple RNN from scratch and using the built-in `RNNCell` module provided in PyTorch, let's do something more sophisticated.\n", + "\n", + "Let's try to build an image classifier using the MNIST dataset. The MNIST dataset consists of images that contain hand-written digits from 0 to 9. 
Essentially, we want to build a classifier to predict the digits displayed in a set of images. I know this sounds strange, but you will be surprised by how well RNNs perform on this image classification task.\n", + "\n", + "In addition, we will also be using the `RNN` module instead of the `RNNCell` module, since we want to generalize the computation graph to support any number of layers as well. We will only use one layer in the following computation graph, but you can experiment with the code later on by adding more layers." + ] + },
+ { + "cell_type": "markdown", + "metadata": { + "id": "4vnhd2vR8CQo", + "colab_type": "text" + }, + "source": [ + "#### Importing the dataset \n", + "Before building the RNN-based computation graph, let's import the MNIST dataset, split it into test and train portions, do a few transformations, and further explore it. You will need the following PyTorch libraries and lines of code to download and import the MNIST dataset to Google Colab." + ] + },
+ { + "cell_type": "code", + "metadata": { + "id": "6FViVH8w8CQr", + "colab_type": "code", + "colab": {} + }, + "source": [ + "import torchvision\n", + "import torchvision.transforms as transforms" + ], + "execution_count": 0, + "outputs": [] + },
+ { + "cell_type": "code", + "metadata": { + "id": "yrNdklIv8CQv", + "colab_type": "code", + "outputId": "4358c5d4-d002-4318-a653-863d541c1b93", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 383 + } + }, + "source": [ + "BATCH_SIZE = 64\n", + "\n", + "# list all transformations\n", + "transform = transforms.Compose(\n", + " [transforms.ToTensor()])\n", + "\n", + "# download and load training dataset\n", + "trainset = torchvision.datasets.MNIST(root='./data', train=True,\n", + " download=True, transform=transform)\n", + "trainloader = torch.utils.data.DataLoader(trainset, batch_size=BATCH_SIZE,\n", + " shuffle=True, num_workers=2)\n", + "\n", + "# download and load testing dataset\n", + "testset = torchvision.datasets.MNIST(root='./data', train=False,\n", + " download=True, transform=transform)\n", + "testloader = torch.utils.data.DataLoader(testset, batch_size=BATCH_SIZE,\n", + " shuffle=False, num_workers=2)" + ], + "execution_count": 13, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Downloading http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz to ./data/MNIST/raw/train-images-idx3-ubyte.gz\n", + "Extracting ./data/MNIST/raw/train-images-idx3-ubyte.gz to ./data/MNIST/raw\n", + "Downloading http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz to ./data/MNIST/raw/train-labels-idx1-ubyte.gz\n", + "Extracting ./data/MNIST/raw/train-labels-idx1-ubyte.gz to ./data/MNIST/raw\n", + "Downloading http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz to ./data/MNIST/raw/t10k-images-idx3-ubyte.gz\n", + "Extracting ./data/MNIST/raw/t10k-images-idx3-ubyte.gz to ./data/MNIST/raw\n", + "Downloading http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz to ./data/MNIST/raw/t10k-labels-idx1-ubyte.gz\n", + "Extracting ./data/MNIST/raw/t10k-labels-idx1-ubyte.gz to ./data/MNIST/raw\n", + "Processing...\n", + "Done!\n" + ], + "name": "stdout" + } + ] + },
+ { + "cell_type": "markdown", + "metadata": { + "id": "Ixg3XnDM8CQy", + "colab_type": "text" + }, + "source": [ + "The code above loads and prepares the dataset to be fed into the computation graph we will build later on. Take a few minutes to play around with the code and understand what is happening. Notice that we needed to provide a batch size. This is because `trainloader` and `testloader` are iterators, which makes it easier to iterate over the dataset and train our RNN model with minibatches." + ] + },
+ { + "cell_type": "markdown", + "metadata": { + "id": "2f0fVaIV8CQ3", + "colab_type": "text" + }, + "source": [ + "#### Exploring the dataset\n", + "Here are a few lines of code to explore the dataset. I won't cover much of what's going on here, but you can take some time and look at it by yourself." + ] + },
+ { + "cell_type": "code", + "metadata": { + "id": "TEKzVTOY8CQ4", + "colab_type": "code", + "outputId": "50ff5622-a7e5-4e27-9a2c-873e5d7256f0", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 269 + } + }, + "source": [ + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "\n", + "# function to show an image\n", + "def imshow(img):\n", + " #img = img / 2 + 0.5 # unnormalize\n", + " npimg = img.numpy()\n", + " plt.imshow(np.transpose(npimg, (1, 2, 0)))\n", + "\n", + "# get some random training images\n", + "dataiter = iter(trainloader)\n", + "images, labels = dataiter.next()\n", + "\n", + "# show images\n", + "imshow(torchvision.utils.make_grid(images))" + ], + "execution_count": 14, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/plain": [ + "[figure: grid of sample hand-written MNIST digit images]" + ] + }, + "metadata": { + "tags": [] + } + } + ] + },
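+ { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Sketching the model\n", + "To see where this is heading: with the `RNN` module, each `28 X 28` image can be treated as a sequence of 28 rows with 28 features per row. Here is a minimal sketch of such a computation graph; the hidden size of 150 units and the names (`ImageRNN`, `N_NEURONS`, etc.) are illustrative choices, one possible shape of the model rather than the final implementation:" + ] + },
+ { + "cell_type": "code", + "metadata": {}, + "source": [ + "N_STEPS = 28 # rows of the image, treated as time steps\n", + "N_INPUTS = 28 # pixels per row\n", + "N_NEURONS = 150 # illustrative number of units in the RNN layer\n", + "N_OUTPUTS = 10 # one class per digit (0-9)\n", + "\n", + "class ImageRNN(nn.Module):\n", + " def __init__(self, n_inputs, n_neurons, n_outputs):\n", + " super(ImageRNN, self).__init__()\n", + " \n", + " self.n_neurons = n_neurons\n", + " self.basic_rnn = nn.RNN(n_inputs, n_neurons) # pass num_layers=k here to stack layers\n", + " self.FC = nn.Linear(n_neurons, n_outputs)\n", + " \n", + " def forward(self, X):\n", + " # nn.RNN expects input of shape (n_steps, batch_size, n_inputs)\n", + " X = X.permute(1, 0, 2)\n", + " hx = torch.zeros(1, X.size(1), self.n_neurons) # initial hidden state\n", + " out, hx = self.basic_rnn(X, hx)\n", + " return self.FC(hx.squeeze(0)) # logits, one row per image\n", + "\n", + "model = ImageRNN(N_INPUTS, N_NEURONS, N_OUTPUTS)\n", + "logits = model(images.view(-1, N_STEPS, N_INPUTS))\n", + "print(logits.shape) # torch.Size([64, 10])" + ], + "execution_count": 0, + "outputs": [] + },
+ { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Only the final hidden state is passed to the linear layer here; the per-step outputs in `out` remain available if you would rather classify from every time step instead." + ] + }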
[... remainder of base64-encoded PNG image data omitted ...]\n",
+       "text/plain": [
+        "<Figure>"
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Y7NloDy08CQ9", + "colab_type": "text" + }, + "source": [ + "#### Model\n", + "Let's construct the computation graph. Below are the parameters:" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "dqjWHTo08CQ-", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# parameters \n", + "N_STEPS = 28\n", + "N_INPUTS = 28\n", + "N_NEURONS = 150\n", + "N_OUTPUTS = 10\n", + "N_EPHOCS = 10" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "gpFu75e88CRB", + "colab_type": "text" + }, + "source": [ + "And finally, here is a figure of the RNN-based classification model we are building:\n", + "\n", + "![alt txt](https://docs.google.com/drawings/d/e/2PACX-1vQWhELhewvq_bHgqwf4vwDb5B9DN9-jAxeTF9Y73zr-OsW6OXC-ngxAfojivXyZEhjzLXceTZU2Ncz3/pub?w=550&h=600)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "HukX7ZjP8CRC", + "colab_type": "text" + }, + "source": [ + "And here is the code for the model:" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "14jNZxeg8CRD", + "colab_type": "code", + "colab": {} + }, + "source": [ + "class ImageRNN(nn.Module):\n", + " def __init__(self, batch_size, n_steps, n_inputs, n_neurons, n_outputs):\n", + " super(ImageRNN, self).__init__()\n", + " \n", + " self.n_neurons = n_neurons\n", + " self.batch_size = batch_size\n", + " self.n_steps = n_steps\n", + " self.n_inputs = n_inputs\n", + " self.n_outputs = n_outputs\n", + " \n", + " self.basic_rnn = nn.RNN(self.n_inputs, self.n_neurons) \n", + " \n", + " self.FC = nn.Linear(self.n_neurons, self.n_outputs)\n", + " \n", + " def init_hidden(self,):\n", + " # (num_layers, batch_size, n_neurons)\n", + " return (torch.zeros(1, self.batch_size, self.n_neurons))\n", + " \n", + " def forward(self, X):\n", + " # transforms X to dimensions: n_steps X batch_size X n_inputs\n", + " X = X.permute(1, 0, 2) \n", + " \n", + " self.batch_size = X.size(1)\n", + " self.hidden = self.init_hidden()\n", + " \n", + " # lstm_out => n_steps, batch_size, n_neurons (hidden states for each time step)\n", + " # self.hidden => 1, batch_size, n_neurons (final state from each lstm_out)\n", + " lstm_out, self.hidden = self.basic_rnn(X, self.hidden) \n", + " out = self.FC(self.hidden)\n", + " \n", + " return out.view(-1, self.n_outputs) # batch_size X n_output" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "8Yw8fFlp8CRJ", + "colab_type": "text" + }, + "source": [ + "The `ImageRNN` model is doing the following:\n", + "- The initialization function `__init__(...)` declares a few variables, and then a basic RNN layer `basic_rnn` followed by a fully-connected layer `self.FC`.\n", + "- The `init_hidden` function initializes hidden weights with zero values. \n", + "The `forward` function accepts an input of size `n_steps X batch_size X n_neurons`. Then the data flows through the RNN layer and then through the fully-connected layer. \n", + "- The output are the log probabilities of the model." 
+    ]
+   },
+   {
+    "cell_type": "markdown",
+    "metadata": {
+     "id": "bJUCKm_U8CRK",
+     "colab_type": "text"
+    },
+    "source": [
+     "#### Testing the model with some samples\n",
+     "A very good practice, encouraged throughout the PyTorch documentation and one that I highly recommend, is to always test the model with a portion of the dataset before the actual training. This ensures that you have specified the dimensions correctly and that the model is outputting the information you expect. Below I show an example of how to test your model:"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "metadata": {
+     "id": "P6W0jwK08CRM",
+     "colab_type": "code",
+     "outputId": "51d9f522-1558-4628-88f5-911c2ae505c7",
+     "colab": {
+      "base_uri": "https://localhost:8080/",
+      "height": 357
+     }
+    },
+    "source": [
+     "dataiter = iter(trainloader)\n",
+     "images, labels = dataiter.next()\n",
+     "model = ImageRNN(BATCH_SIZE, N_STEPS, N_INPUTS, N_NEURONS, N_OUTPUTS)\n",
+     "logits = model(images.view(-1, 28, 28))\n",
+     "print(logits[0:10])"
+    ],
+    "execution_count": 17,
+    "outputs": [
+     {
+      "output_type": "stream",
+      "text": [
+       "tensor([[-0.0918,  0.0176,  0.0474, -0.0849, -0.0141,  0.1235,  0.0070,  0.0719,\n",
+       "          0.0072, -0.0563],\n",
+       "        [-0.1017,  0.0276,  0.0580, -0.0783, -0.0150,  0.1127,  0.0171,  0.0749,\n",
+       "          0.0092, -0.0654],\n",
+       "        [-0.0941,  0.0234,  0.0402, -0.0825, -0.0402,  0.1208,  0.0117,  0.0753,\n",
+       "          0.0188, -0.0701],\n",
+       "        [-0.1021,  0.0160,  0.0551, -0.0886, -0.0132,  0.1113,  0.0136,  0.0634,\n",
+       "         -0.0021, -0.0533],\n",
+       "        [-0.0874,  0.0227,  0.0468, -0.0806, -0.0118,  0.1191,  0.0105,  0.0696,\n",
+       "          0.0077, -0.0581],\n",
+       "        [-0.0929,  0.0256,  0.0636, -0.0729, -0.0147,  0.1028,  0.0197,  0.0766,\n",
+       "          0.0140, -0.0700],\n",
+       "        [-0.0960,  0.0257,  0.0497, -0.0822, -0.0177,  0.1183,  0.0120,  0.0728,\n",
+       "          0.0047, -0.0644],\n",
+       "        [-0.0962,  0.0185,  0.0586, -0.0732, -0.0197,  0.1047,  0.0119,  0.0760,\n",
+       "          0.0104, -0.0641],\n",
+       "        [-0.0880,  0.0151,  0.0478, -0.0839, -0.0210,  0.1098,  0.0152,  0.0732,\n",
+       "          0.0105, -0.0578],\n",
+       "        [-0.0929,  0.0200,  0.0529, -0.0741, -0.0281,  0.1076,  0.0143,  0.0781,\n",
+       "          0.0135, -0.0696]], grad_fn=<ViewBackward>)\n"
+      ],
+      "name": "stdout"
+     }
+    ]
+   },
+   {
+    "cell_type": "markdown",
+    "metadata": {
+     "id": "BKDkckp98CRQ",
+     "colab_type": "text"
+    },
+    "source": [
+     "#### Training"
+    ]
+   },
+   {
+    "cell_type": "markdown",
+    "metadata": {
+     "id": "46Qke-c08CRR",
+     "colab_type": "text"
+    },
+    "source": [
+     "Now let's look at the code for training the image classification model. But first, let's declare a few helper functions needed to train the model:"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "metadata": {
+     "id": "BoDWoBuy8CRS",
+     "colab_type": "code",
+     "colab": {}
+    },
+    "source": [
+     "import torch.optim as optim\n",
+     "\n",
+     "# Device\n",
+     "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n",
+     "\n",
+     "# Model instance\n",
+     "model = ImageRNN(BATCH_SIZE, N_STEPS, N_INPUTS, N_NEURONS, N_OUTPUTS)\n",
+     "criterion = nn.CrossEntropyLoss()\n",
+     "optimizer = optim.Adam(model.parameters(), lr=0.001)\n",
+     "\n",
+     "def get_accuracy(logit, target, batch_size):\n",
+     "    ''' Obtain accuracy for training round '''\n",
+     "    corrects = (torch.max(logit, 1)[1].view(target.size()).data == target.data).sum()\n",
+     "    accuracy = 100.0 * corrects / batch_size\n",
+     "    return accuracy.item()"
+    ],
+    "execution_count": 0,
+    "outputs": []
+   },
+   {
+    "cell_type": "markdown",
+    "metadata": {
+     "id": "uvCzj28o8CRX",
+     "colab_type": "text"
+    },
+    "source": [
+     "Before training a model in PyTorch, you can programmatically specify what device you want to use during training; the `torch.device(...)` function tells the program that we want to use the GPU if one is available, otherwise the CPU will be the default device.\n",
+     "\n",
+     "Then we create an instance of the model, `ImageRNN(...)`, with the proper parameters. The criterion represents the function we will use to compute the loss of the model. The `nn.CrossEntropyLoss()` function basically applies a log softmax followed by a negative log likelihood loss operation over the output of the model. To compute the loss, the function needs both the raw output scores (logits) and the targets. We will see later in our code how to provide these to the criterion.\n",
+     "\n",
+     "For training, we also need an optimization algorithm which helps to update the weights based on the current loss. This is achieved with the `optim.Adam` optimization function, which requires the model parameters and a learning rate. Alternatively, you can also use `optim.SGD` or any other optimization algorithm that's available.\n",
+     "\n",
+     "The `get_accuracy(...)` function simply computes the accuracy of the model given the output scores and the target values.\n",
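+     "\n",
+     "One quick aside on the criterion (a sanity check of my own, assuming the `logits` and `labels` from the test cell above are still in scope): `nn.CrossEntropyLoss()` really is a log softmax followed by the negative log likelihood loss:\n",
+     "\n",
+     "```python\n",
+     "import torch.nn.functional as F\n",
+     "\n",
+     "loss_a = nn.CrossEntropyLoss()(logits, labels)\n",
+     "loss_b = F.nll_loss(F.log_softmax(logits, dim=1), labels)\n",
+     "print(torch.allclose(loss_a, loss_b))  # True\n",
+     "```\n",
+     "\n",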
"As an exercise, you can write code to test the `get_accuracy(...)` function as we did with the model before.\n",
+     "\n",
+     "Let's put everything together and train our image classification model:"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "metadata": {
+     "id": "LaAmGhZT8CRX",
+     "colab_type": "code",
+     "outputId": "a53631d3-ce78-4dca-d41a-85d1f2cf9e41",
+     "colab": {
+      "base_uri": "https://localhost:8080/",
+      "height": 187
+     }
+    },
+    "source": [
+     "for epoch in range(N_EPOCHS):  # loop over the dataset multiple times\n",
+     "    train_running_loss = 0.0\n",
+     "    train_acc = 0.0\n",
+     "    model.train()\n",
+     "    \n",
+     "    # TRAINING ROUND\n",
+     "    for i, data in enumerate(trainloader):\n",
+     "        # zero the parameter gradients\n",
+     "        optimizer.zero_grad()\n",
+     "        \n",
+     "        # reset hidden states\n",
+     "        model.hidden = model.init_hidden()\n",
+     "        \n",
+     "        # get the inputs\n",
+     "        inputs, labels = data\n",
+     "        inputs = inputs.view(-1, 28, 28)\n",
+     "\n",
+     "        # forward + backward + optimize\n",
+     "        outputs = model(inputs)\n",
+     "\n",
+     "        loss = criterion(outputs, labels)\n",
+     "        loss.backward()\n",
+     "        optimizer.step()\n",
+     "\n",
+     "        train_running_loss += loss.detach().item()\n",
+     "        train_acc += get_accuracy(outputs, labels, BATCH_SIZE)\n",
+     "    \n",
+     "    model.eval()\n",
+     "    print('Epoch: %d | Loss: %.4f | Train Accuracy: %.2f'\n",
+     "          % (epoch, train_running_loss / i, train_acc / i))"
+    ],
+    "execution_count": 19,
+    "outputs": [
+     {
+      "output_type": "stream",
+      "text": [
+       "Epoch: 0 | Loss: 0.8915 | Train Accuracy: 70.15\n",
+       "Epoch: 1 | Loss: 0.4040 | Train Accuracy: 87.86\n",
+       "Epoch: 2 | Loss: 0.2492 | Train Accuracy: 92.83\n",
+       "Epoch: 3 | Loss: 0.1951 | Train Accuracy: 94.30\n",
+       "Epoch: 4 | Loss: 0.1736 | Train Accuracy: 94.93\n",
+       "Epoch: 5 | Loss: 0.1598 | Train Accuracy: 95.43\n",
+       "Epoch: 6 | Loss: 0.1394 | Train Accuracy: 95.97\n",
+       "Epoch: 7 | Loss: 0.1339 | Train Accuracy: 96.15\n",
+       "Epoch: 8 | Loss: 0.1283 | Train Accuracy: 96.30\n",
+       "Epoch: 9 | Loss: 0.1250 | Train Accuracy: 96.39\n"
+      ],
+      "name": "stdout"
+     }
+    ]
+   },
+   {
+    "cell_type": "markdown",
+    "metadata": {
+     "id": "93-DMqi58CRf",
+     "colab_type": "text"
+    },
+    "source": [
+     "We can also compute the accuracy on the test dataset to see how well the model performs on the image classification task. As you can see below, our RNN model is performing very well on the MNIST classification task.\n"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "metadata": {
+     "id": "BzeqYT4N8CRi",
+     "colab_type": "code",
+     "outputId": "fbeff58d-756a-474c-975a-172b30ee36b3",
+     "colab": {
+      "base_uri": "https://localhost:8080/",
+      "height": 34
+     }
+    },
+    "source": [
+     "test_acc = 0.0\n",
+     "for i, data in enumerate(testloader, 0):\n",
+     "    inputs, labels = data\n",
+     "    inputs = inputs.view(-1, 28, 28)\n",
+     "\n",
+     "    outputs = model(inputs)\n",
+     "\n",
+     "    test_acc += get_accuracy(outputs, labels, BATCH_SIZE)\n",
+     "    \n",
+     "print('Test Accuracy: %.2f' % (test_acc / i))"
+    ],
+    "execution_count": 20,
+    "outputs": [
+     {
+      "output_type": "stream",
+      "text": [
+       "Test Accuracy: 96.50\n"
+      ],
+      "name": "stdout"
+     }
+    ]
+   },
+   {
+    "cell_type": "markdown",
+    "metadata": {
+     "id": "FD98PsQi8CRv",
+     "colab_type": "text"
+    },
+    "source": [
+     "### Final Words\n",
+     "Notice that we are not using a GPU in this tutorial, since the models we are building are relatively simple.
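 The general recipe is short; as a minimal sketch of my own (reusing the `device` object we created earlier), you would allocate the hidden state on `device` inside `init_hidden` and move the model and each batch over:\n\n```python\n# inside ImageRNN.init_hidden, allocate the zeros on the target device:\n#     return torch.zeros(1, self.batch_size, self.n_neurons, device=device)\n\nmodel = model.to(device)                         # move the parameters\nfor inputs, labels in trainloader:\n    inputs = inputs.view(-1, 28, 28).to(device)  # move each batch\n    labels = labels.to(device)\n    outputs = model(inputs)                      # forward pass now runs on the GPU\n```\n\n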
As an exercise, you can take a look at the [PyTorch documentation](https://pytorch.org/docs/stable/notes/cuda.html) to learn how to program specific operations to execute on the GPU. You can then try to optimize the code to run on the GPU. If you need help with this, reach out to me on [Twitter](https://twitter.com/omarsar0).\n", + "\n", + "That's it for this tutorial. Congratulations! You are now able to implement a basic RNN in PyTorch. You also learned how to apply RNNs to solve a real-world, image classification problem.\n", + "\n", + "In the next tutorial, we will do more advanced things with RNNs and try to solve even more complex problems, such as sarcasm detection and sentiment classification. Until next time!" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "jKAm1xB08CRx", + "colab_type": "text" + }, + "source": [ + "### References\n", + "- [A Simple Neural Network from Scratch with PyTorch and Google Colab](https://github.com/omarsar/pytorch_intro_neural_network/blob/master/nn.ipynb)\n", + "- [Hands on Machine Learning with Scikit-learn and Tensorflow](http://shop.oreilly.com/product/0636920052289.do)" + ] + } + ] +} \ No newline at end of file From fab255bd9e53276e2e2dc1135ddccea5be3fa5dd Mon Sep 17 00:00:00 2001 From: Elvis Saravia Date: Fri, 28 Feb 2020 09:36:15 +0100 Subject: [PATCH 02/21] updated notebooks --- README.md | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index bcb199a..20918cd 100644 --- a/README.md +++ b/README.md @@ -57,7 +57,16 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac read - + + Building RNNs is Fun with PyTorch + This notebook teaches you how to build a recurrent neural network (RNN) with a single layer, consisting of one single neuron. It also teaches how to implement a simple RNN-based model for image classification. + Neural Networks + Beginner + + + + read + From f27d4efe5f79e8239868541123f18d211d9d3087 Mon Sep 17 00:00:00 2001 From: Elvis Saravia Date: Fri, 28 Feb 2020 09:37:52 +0100 Subject: [PATCH 03/21] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 20918cd..2dc67c1 100644 --- a/README.md +++ b/README.md @@ -51,7 +51,7 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac This comprehensive tutorial aims to introduce the fundamentals of PyTorch building blocks for training neural networks. Neural Networks Beginner - + read From 883bb3293c0a6abd642191b89f3c89ec604312c2 Mon Sep 17 00:00:00 2001 From: Elvis Saravia Date: Fri, 28 Feb 2020 09:38:31 +0100 Subject: [PATCH 04/21] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2dc67c1..53ced7b 100644 --- a/README.md +++ b/README.md @@ -65,7 +65,7 @@ A collection of PyTorch notebooks for studying and practicing deep learning. 
Eac
- read
+ read

From 9477e755cdbf356893b821a319fdeb98f71e55d6 Mon Sep 17 00:00:00 2001
From: Elvis Saravia
Date: Fri, 28 Feb 2020 10:38:20 +0100
Subject: [PATCH 05/21] Created using Colaboratory

---
 RNN_PT.ipynb | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/RNN_PT.ipynb b/RNN_PT.ipynb
index 363ccc6..41ba54c 100644
--- a/RNN_PT.ipynb
+++ b/RNN_PT.ipynb
@@ -1018,6 +1018,8 @@
         "## Building RNNs is Fun with PyTorch and Google Colab\n",
         "In this tutorial, I will first teach you how to build a recurrent neural network (RNN) with a single layer, consisting of one single neuron, with PyTorch and Google Colab. I will also show you how to implement a simple RNN-based model for image classification.\n",
         "\n",
+        "This work is heavily inspired by Aurélien Géron's book called [\"Hands-On Machine Learning with Scikit-Learn and TensorFlow\"](https://www.oreilly.com/library/view/hands-on-machine-learning/9781491962282/). Although his neural network implementations are purely in TensorFlow, I adopted/reused some notation/variable names and implemented things using PyTorch only. I really enjoyed his book and learned a lot from his explanations. His work inspired this tutorial and I strongly recommend the book.\n",
+        "\n",
         "We will be using Google Colab so we need to manually install the PyTorch library first. You can do this by using the following command:"
       ]
     },

From d23f19d08b10f9d70b9369f3858e51cba419a71f Mon Sep 17 00:00:00 2001
From: Elvis Saravia
Date: Fri, 28 Feb 2020 10:47:10 +0100
Subject: [PATCH 06/21] Created using Colaboratory

---
 nn.ipynb | 1470 ++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 1470 insertions(+)
 create mode 100644 nn.ipynb

diff --git a/nn.ipynb b/nn.ipynb
new file mode 100644
index 0000000..f9930dc
--- /dev/null
+++ b/nn.ipynb
@@ -0,0 +1,1470 @@
+{
+ "nbformat": 4,
+ "nbformat_minor": 0,
+ "metadata": {
+  "colab": {
+   "name": "nn.ipynb",
+   "provenance": [],
+   "collapsed_sections": [],
+   "include_colab_link": true
+  },
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  }
+ },
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "id": "view-in-github",
+    "colab_type": "text"
+   },
+   "source": [
+    "\"Open"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "id": "Ee4B4v5tAp1C",
+    "colab_type": "text"
+   },
+   "source": [
+    "## A Simple Neural Network from Scratch with PyTorch and Google Colab"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "id": "w4cEhtf_Ap1E",
+    "colab_type": "text"
+   },
+   "source": [
+    "In this tutorial we will implement a simple neural network from scratch using PyTorch. The idea is to teach you the basics of PyTorch; I will go over some of the basic functionalities and concepts available in PyTorch that will allow you to build your own neural networks. \n",
+    "\n",
+    "This tutorial assumes you have prior knowledge of how a neural network works. Don’t worry! Even if you are not so sure, you will be okay. For advanced PyTorch users, this tutorial may still serve as a refresher. This tutorial is heavily inspired by this [Neural Network implementation](https://repl.it/talk/announcements/Build-a-Neural-Network-in-Python/5457) coded purely using Numpy. In fact, I tried re-implementing the code using PyTorch instead and added my own intuitions and explanations.
Thanks to [Samay](https://repl.it/@shamdasani) for his phenomenal work, I hope this inspires many others as it did with me.\n", + "\n", + "Since we are working on Google Colab, we will need to install the PyTorch library. You can do this by using the following command:" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "SpBiWQF2BrJK", + "colab_type": "code", + "outputId": "858d4853-ca0d-4d5b-f61c-10481b46f309", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 326 + } + }, + "source": [ + "!pip3 install torch torchvision" + ], + "execution_count": 0, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Collecting torch\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/7e/60/66415660aa46b23b5e1b72bc762e816736ce8d7260213e22365af51e8f9c/torch-1.0.0-cp36-cp36m-manylinux1_x86_64.whl (591.8MB)\n", + "\u001b[K 100% |████████████████████████████████| 591.8MB 26kB/s \n", + "tcmalloc: large alloc 1073750016 bytes == 0x61f82000 @ 0x7f400bb202a4 0x591a07 0x5b5d56 0x502e9a 0x506859 0x502209 0x502f3d 0x506859 0x504c28 0x502540 0x502f3d 0x506859 0x504c28 0x502540 0x502f3d 0x506859 0x504c28 0x502540 0x502f3d 0x507641 0x502209 0x502f3d 0x506859 0x504c28 0x502540 0x502f3d 0x507641 0x504c28 0x502540 0x502f3d 0x507641\n", + "\u001b[?25hCollecting torchvision\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/ca/0d/f00b2885711e08bd71242ebe7b96561e6f6d01fdb4b9dcf4d37e2e13c5e1/torchvision-0.2.1-py2.py3-none-any.whl (54kB)\n", + "\u001b[K 100% |████████████████████████████████| 61kB 23.4MB/s \n", + "\u001b[?25hRequirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from torchvision) (1.14.6)\n", + "Collecting pillow>=4.1.1 (from torchvision)\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/92/e3/217dfd0834a51418c602c96b110059c477260c7fee898542b100913947cf/Pillow-5.4.0-cp36-cp36m-manylinux1_x86_64.whl (2.0MB)\n", + "\u001b[K 100% |████████████████████████████████| 2.0MB 6.8MB/s \n", + "\u001b[?25hRequirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from torchvision) (1.11.0)\n", + "Installing collected packages: torch, pillow, torchvision\n", + " Found existing installation: Pillow 4.0.0\n", + " Uninstalling Pillow-4.0.0:\n", + " Successfully uninstalled Pillow-4.0.0\n", + "Successfully installed pillow-5.4.0 torch-1.0.0 torchvision-0.2.1\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "MP9ewMSlC7JU", + "colab_type": "text" + }, + "source": [ + "\n", + "The `torch` module provides all the necessary **tensor** operators you will need to implement your first neural network from scratch in PyTorch. That's right! In PyTorch everything is a Tensor, so this is the first thing you will need to get used to." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "bKmXKSQnAp1G", + "colab_type": "code", + "colab": {} + }, + "source": [ + "import torch\n", + "import torch.nn as nn" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "1EWBBl1nAp1M", + "colab_type": "text" + }, + "source": [ + "## Data\n", + "Let's start by creating some sample data using the `torch.tensor` command. 
In Numpy, this could be done with `np.array`. Both functions serve the same purpose, but in PyTorch everything is a Tensor as opposed to a vector or matrix. We define types in PyTorch using the `dtype=torch.xxx` command. \n",
+    "\n",
+    "In the data below, `X` represents the number of hours studied and the amount of time students spent sleeping, whereas `y` represents grades. The variable `xPredicted` is a single input for which we want to predict a grade using the parameters learned by the neural network. Remember, the neural network wants to learn a mapping between `X` and `y`, so it will try to take a guess from what it has learned from the training data."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "metadata": {
+    "id": "fsAVbHnjAp1P",
+    "colab_type": "code",
+    "colab": {}
+   },
+   "source": [
+    "X = torch.tensor(([2, 9], [1, 5], [3, 6]), dtype=torch.float) # 3 X 2 tensor\n",
+    "y = torch.tensor(([92], [100], [89]), dtype=torch.float) # 3 X 1 tensor\n",
+    "xPredicted = torch.tensor(([4, 8]), dtype=torch.float) # 1 X 2 tensor"
+   ],
+   "execution_count": 0,
+   "outputs": []
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "id": "RC0ru9kCAp1U",
+    "colab_type": "text"
+   },
+   "source": [
+    "You can check the size of the tensors we have just created with the `size` command. This is equivalent to the `shape` command used in tools such as Numpy and Tensorflow."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "metadata": {
+    "id": "sfC-B1BEAp1W",
+    "colab_type": "code",
+    "outputId": "d2ec7994-41ad-41fa-a69c-c0b7123ef7cd",
+    "colab": {
+     "base_uri": "https://localhost:8080/",
+     "height": 51
+    }
+   },
+   "source": [
+    "print(X.size())\n",
+    "print(y.size())"
+   ],
+   "execution_count": 0,
+   "outputs": [
+    {
+     "output_type": "stream",
+     "text": [
+      "torch.Size([3, 2])\n",
+      "torch.Size([3, 1])\n"
+     ],
+     "name": "stdout"
+    }
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "id": "zrND9MS9Ap1f",
+    "colab_type": "text"
+   },
+   "source": [
+    "## Scaling\n",
+    "\n",
+    "Below we are performing some scaling on the sample data. Notice that the `max` function returns both a tensor and the corresponding indices. So we use `_` to capture the indices, which we won't use here, because we are only interested in the max values for the scaling. Perfect! Our data is now in a very nice format our neural network will appreciate later on."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "metadata": {
+    "id": "hlBvtfAmAp1i",
+    "colab_type": "code",
+    "outputId": "23e1d24b-fa29-4173-f884-f44bc8a48cea",
+    "colab": {
+     "base_uri": "https://localhost:8080/",
+     "height": 34
+    }
+   },
+   "source": [
+    "# scale units\n",
+    "X_max, _ = torch.max(X, 0)\n",
+    "xPredicted_max, _ = torch.max(xPredicted, 0)\n",
+    "\n",
+    "X = torch.div(X, X_max)\n",
+    "xPredicted = torch.div(xPredicted, xPredicted_max)\n",
+    "y = y / 100  # max test score is 100\n",
+    "print(xPredicted)"
+   ],
+   "execution_count": 0,
+   "outputs": [
+    {
+     "output_type": "stream",
+     "text": [
+      "tensor([0.5000, 1.0000])\n"
+     ],
+     "name": "stdout"
+    }
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "id": "R1kTs5S5Ap1m",
+    "colab_type": "text"
+   },
+   "source": [
+    "Notice that there are two functions, `max` and `div`, that I didn't discuss above. They do exactly what they imply: `max` finds the maximum value in a vector... I mean tensor; and `div` is basically a nice little function to divide two tensors."
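,
+    "\n",
+    "For instance (a throwaway sketch of my own, separate from the data above), you can watch both functions at work:\n",
+    "\n",
+    "```python\n",
+    "t = torch.tensor(([2, 9], [1, 5], [3, 6]), dtype=torch.float)\n",
+    "vals, idxs = torch.max(t, 0)  # column-wise max values and their row indices\n",
+    "print(vals)                   # tensor([3., 9.])\n",
+    "print(idxs)                   # tensor([2, 0])\n",
+    "print(torch.div(t, vals))     # each column divided by its own max\n",
+    "```"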
" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "xRvMSpEFAp1n", + "colab_type": "text" + }, + "source": [ + "## Model (Computation Graph)\n", + "Once the data has been processed and it is in the proper format, all you need to do now is to define your model. Here is where things begin to change a little as compared to how you would build your neural networks using, say, something like Keras or Tensorflow. However, you will realize quickly as you go along that PyTorch doesn't differ much from other deep learning tools. At the end of the day we are constructing a computation graph, which is used to dictate how data should flow and what type of operations are performed on this information. \n", + "\n", + "For illustration purposes, we are building the following neural network or computation graph:\n", + "\n", + "![alt txt](https://paper.dropbox.com/ep/redirect/image?url=https%3A%2F%2Fsummer-heart-0930.chufeiyun1688.workers.dev%3A443%2Fhttps%2Fd2mxuefqeaa7sj.cloudfront.net%2Fs_B715730329387B3CAE6924F0860FA5BB4EC7C824AA2008B58BBDC14F79BF4C11_1534128639562_nn-01.png&hmac=BS%2F9k8DplT6TcYtYMHbxWgqhIRPp6mKsRADkHqBhels%3D&width=1490)" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "C7pDC5SfAp1p", + "colab_type": "code", + "colab": {} + }, + "source": [ + "class Neural_Network(nn.Module):\n", + " def __init__(self, ):\n", + " super(Neural_Network, self).__init__()\n", + " # parameters\n", + " # TODO: parameters can be parameterized instead of declaring them here\n", + " self.inputSize = 2\n", + " self.outputSize = 1\n", + " self.hiddenSize = 3\n", + " \n", + " # weights\n", + " self.W1 = torch.randn(self.inputSize, self.hiddenSize) # 3 X 2 tensor\n", + " self.W2 = torch.randn(self.hiddenSize, self.outputSize) # 3 X 1 tensor\n", + " \n", + " def forward(self, X):\n", + " self.z = torch.matmul(X, self.W1) # 3 X 3 \".dot\" does not broadcast in PyTorch\n", + " self.z2 = self.sigmoid(self.z) # activation function\n", + " self.z3 = torch.matmul(self.z2, self.W2)\n", + " o = self.sigmoid(self.z3) # final activation function\n", + " return o\n", + " \n", + " def sigmoid(self, s):\n", + " return 1 / (1 + torch.exp(-s))\n", + " \n", + " def sigmoidPrime(self, s):\n", + " # derivative of sigmoid\n", + " return s * (1 - s)\n", + " \n", + " def backward(self, X, y, o):\n", + " self.o_error = y - o # error in output\n", + " self.o_delta = self.o_error * self.sigmoidPrime(o) # derivative of sig to error\n", + " self.z2_error = torch.matmul(self.o_delta, torch.t(self.W2))\n", + " self.z2_delta = self.z2_error * self.sigmoidPrime(self.z2)\n", + " self.W1 += torch.matmul(torch.t(X), self.z2_delta)\n", + " self.W2 += torch.matmul(torch.t(self.z2), self.o_delta)\n", + " \n", + " def train(self, X, y):\n", + " # forward + backward pass for training\n", + " o = self.forward(X)\n", + " self.backward(X, y, o)\n", + " \n", + " def saveWeights(self, model):\n", + " # we will use the PyTorch internal storage functions\n", + " torch.save(model, \"NN\")\n", + " # you can reload model with all the weights and so forth with:\n", + " # torch.load(\"NN\")\n", + " \n", + " def predict(self):\n", + " print (\"Predicted data based on trained weights: \")\n", + " print (\"Input (scaled): \\n\" + str(xPredicted))\n", + " print (\"Output: \\n\" + str(self.forward(xPredicted)))\n", + " " + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "qm5gimnyAp1s", + "colab_type": "text" + }, + "source": [ + "For 
the purpose of this tutorial, we are not going to be talking math stuff; that's for another day. I just want you to get a gist of what it takes to build a neural network from scratch using PyTorch. Let's break down the model which was declared via the class above.\n",
+    "\n",
+    "## Class Header\n",
+    "First, we defined our model via a class because that is the recommended way to build the computation graph. The class header contains the name of the class, `Neural_Network`, and shows that it inherits from `nn.Module`, which indicates that we are defining our own neural network. \n",
+    "\n",
+    "```python\n",
+    "class Neural_Network(nn.Module):\n",
+    "```\n",
+    "\n",
+    "## Initialization\n",
+    "The next step is to define the initializations (`def __init__(self, )`) that will be performed upon creating an instance of the customized neural network. You can declare the parameters of your model here, but typically you would declare the structure of your network in this section -- the size of the hidden layers and so forth. Since we are building the neural network from scratch, we explicitly declared the sizes of the weight matrices: one that stores the parameters from the input to the hidden layer, and one that stores the parameters from the hidden to the output layer. Both weight matrices are initialized with values randomly chosen from a normal distribution via `torch.randn(...)`. Note that we are not using a bias just to keep things as simple as possible. \n",
+    "\n",
+    "```python\n",
+    "def __init__(self, ):\n",
+    "    super(Neural_Network, self).__init__()\n",
+    "    # parameters\n",
+    "    # TODO: parameters can be parameterized instead of declaring them here\n",
+    "    self.inputSize = 2\n",
+    "    self.outputSize = 1\n",
+    "    self.hiddenSize = 3\n",
+    "\n",
+    "    # weights\n",
+    "    self.W1 = torch.randn(self.inputSize, self.hiddenSize) # 2 X 3 tensor\n",
+    "    self.W2 = torch.randn(self.hiddenSize, self.outputSize) # 3 X 1 tensor\n",
+    "```\n",
+    "\n",
+    "## The Forward Function\n",
+    "The `forward` function is where all the magic happens (see below). This is where the data enters and is fed into the computation graph (i.e., the neural network structure we have built). Since we are building a simple neural network with one hidden layer, our forward function looks very simple:\n",
+    "\n",
+    "```python\n",
+    "def forward(self, X):\n",
+    "    self.z = torch.matmul(X, self.W1) \n",
+    "    self.z2 = self.sigmoid(self.z) # activation function\n",
+    "    self.z3 = torch.matmul(self.z2, self.W2)\n",
+    "    o = self.sigmoid(self.z3) # final activation function\n",
+    "    return o\n",
+    "```\n",
+    "\n",
+    "The `forward` function above takes the input `X` and then performs a matrix multiplication (`torch.matmul(...)`) with the first weight matrix `self.W1`. An activation function, `sigmoid`, is then applied to the result. The resulting matrix of the activation is then multiplied with the second weight matrix `self.W2`, and another activation is performed, which renders the output of the neural network or computation graph. The process I described above is simply what's known as a `feedforward pass`. In order for the weights to be optimized during training, we need a backpropagation algorithm. \n",
+    "\n",
+    "## The Backward Function\n",
+    "The `backward` function contains the backpropagation algorithm, where the goal is to essentially minimize the loss with respect to our weights. In other words, the weights need to be updated in such a way that the loss decreases while the neural network is training (well, that is what we hope for).
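 One detail worth an explicit note (my addition): `sigmoidPrime(s)` expects `s` to already be the sigmoid *output*, since the derivative of the sigmoid satisfies `sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x))`; that is why `backward` passes the activations `o` and `self.z2` into it rather than the pre-activations.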
+        "\n",
+        "## The Backward Function\n",
+        "The `backward` function contains the backpropagation algorithm, where the goal is to essentially minimize the loss with respect to our weights. In other words, the weights need to be updated in such a way that the loss decreases while the neural network is training (well, that is what we hope for). All this magic is possible with the gradient descent algorithm, which is declared in the `backward` function. Take a minute or two to inspect what is happening in the code below:\n",
+        "\n",
+        "```python\n",
+        "def backward(self, X, y, o):\n",
+        "    self.o_error = y - o  # error in output\n",
+        "    self.o_delta = self.o_error * self.sigmoidPrime(o) \n",
+        "    self.z2_error = torch.matmul(self.o_delta, torch.t(self.W2))\n",
+        "    self.z2_delta = self.z2_error * self.sigmoidPrime(self.z2)\n",
+        "    self.W1 += torch.matmul(torch.t(X), self.z2_delta)\n",
+        "    self.W2 += torch.matmul(torch.t(self.z2), self.o_delta)\n",
+        "```\n",
+        "\n",
+        "Notice that we are performing a lot of matrix multiplications along with transpose operations via the `torch.matmul(...)` and `torch.t(...)` operations, respectively. The rest is simply gradient descent -- note, though, that the weight updates above use an implicit learning rate of 1; in practice you would usually scale the gradient terms by a small learning rate.\n",
+        "\n",
+        "## Training\n",
+        "All that is left now is to train the neural network. First we create an instance of the computation graph we have just built:\n",
+        "\n",
+        "```python\n",
+        "NN = Neural_Network()\n",
+        "```\n",
+        "\n",
+        "Then we train the model for `1000` rounds. Notice that in PyTorch `NN(X)` automatically calls the `forward` function, so there is no need to explicitly call `NN.forward(X)`. \n",
+        "\n",
+        "After we have obtained the predicted output for every round of training, we compute the loss with the following code:\n",
+        "\n",
+        "```python\n",
+        "torch.mean((y - NN(X))**2).detach().item()\n",
+        "```\n",
+        "\n",
+        "The next step is to start the training (forward + backward) via `NN.train(X, y)`. After we have trained the neural network, we can store the model and output the predicted value of the single instance we declared in the beginning, `xPredicted`. \n",
+        "\n",
+        "Let's train!\n",
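+        "\n",
+        "(Quick aside: the hand-rolled mean squared error above can also be computed with PyTorch's built-in loss module. A minimal equivalent sketch, assuming the same `X` and `y` tensors from earlier:)\n",
+        "\n",
+        "```python\n",
+        "criterion = torch.nn.MSELoss()  # mean reduction by default\n",
+        "NN = Neural_Network()           # fresh instance just for this check\n",
+        "loss = criterion(NN(X), y)      # same value as torch.mean((y - NN(X))**2)\n",
+        "print(loss.item())\n",
+        "```\n",
+        "\n",
+        "Now, on to the actual training run."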
+      ]
+    },
+    {
+      "cell_type": "code",
+      "metadata": {
+        "id": "9sTddOpLAp1w",
+        "colab_type": "code",
+        "outputId": "a02d2b93-34da-4068-f1f2-843f1e30abf8",
+        "colab": {
+          "base_uri": "https://localhost:8080/",
+          "height": 17156
+        }
+      },
+      "source": [
+        "NN = Neural_Network()\n",
+        "for i in range(1000):  # trains the NN 1,000 times\n",
+        "    print (\"#\" + str(i) + \" Loss: \" + str(torch.mean((y - NN(X))**2).detach().item()))  # mean squared error\n",
+        "    NN.train(X, y)\n",
+        "NN.saveWeights(NN)\n",
+        "NN.predict()"
+      ],
+      "execution_count": 0,
+      "outputs": [
+        {
+          "output_type": "stream",
+          "text": [
+            "#0 Loss: 0.28770461678504944\n",
+            "#1 Loss: 0.19437099993228912\n",
+            "#2 Loss: 0.129642054438591\n",
+            "#3 Loss: 0.08898762613534927\n",
+            "#4 Loss: 0.0638350322842598\n",
+            "#5 Loss: 0.04783045873045921\n",
+            "#6 Loss: 0.037219222635030746\n",
+            "#7 Loss: 0.029889358207583427\n",
+            "#8 Loss: 0.024637090042233467\n",
+            "#9 Loss: 0.020752854645252228\n",
+            "... (epochs #10 through #997 trimmed for readability; the loss keeps decreasing smoothly throughout) ...\n",
+            "#998 Loss: 0.0016829235246405005\n",
+            "#999 Loss: 0.0016819849843159318\n",
+            "Predicted data based on trained weights: \n",
+            "Input (scaled): \n",
+            "tensor([0.5000, 1.0000])\n",
+            "Output: \n",
+            "tensor([0.9505])\n"
+          ],
+          "name": "stdout"
+        },
+        {
+          "output_type": "stream",
+          "text": [
+            "/usr/local/lib/python3.6/dist-packages/torch/serialization.py:241: UserWarning: Couldn't retrieve source code for container of type Neural_Network. It won't be checked for correctness upon loading.\n",
+            "  \"type \" + obj.__name__ + \". It won't be checked \"\n"
+          ],
+          "name": "stderr"
+        }
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "L9nBzkgdbjcA",
+        "colab_type": "text"
+      },
+      "source": [
+        "The loss keeps decreasing, which means that the neural network is learning something. That's it. Congratulations! You have just learned how to create and train a neural network from scratch using PyTorch. There are so many things you can do with the shallow network we have just implemented. You can add more hidden layers or try to incorporate the bias terms for practice. I would love to see what you will build from here. Reach out to me on [Twitter](https://twitter.com/omarsar0) if you have any further questions, or leave your comments here. Until next time!"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {
+        "id": "zcms4BCySKXj",
+        "colab_type": "text"
+      },
+      "source": [
+        "## References:\n",
+        "- [PyTorch nn.
Modules](https://pytorch.org/tutorials/beginner/pytorch_with_examples.html#pytorch-custom-nn-modules)\n", + "- [Build a Neural Network with Numpy](https://enlight.nyc/neural-network)\n" + ] + } + ] +} \ No newline at end of file From 19e4c193063723f13492385ab184e70135cbca51 Mon Sep 17 00:00:00 2001 From: Elvis Saravia Date: Fri, 28 Feb 2020 10:52:33 +0100 Subject: [PATCH 07/21] Created using Colaboratory --- nn.ipynb | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nn.ipynb b/nn.ipynb index f9930dc..e7fdd9e 100644 --- a/nn.ipynb +++ b/nn.ipynb @@ -246,7 +246,8 @@ "\n", "For illustration purposes, we are building the following neural network or computation graph:\n", "\n", - "![alt txt](https://paper.dropbox.com/ep/redirect/image?url=https%3A%2F%2Fsummer-heart-0930.chufeiyun1688.workers.dev%3A443%2Fhttps%2Fd2mxuefqeaa7sj.cloudfront.net%2Fs_B715730329387B3CAE6924F0860FA5BB4EC7C824AA2008B58BBDC14F79BF4C11_1534128639562_nn-01.png&hmac=BS%2F9k8DplT6TcYtYMHbxWgqhIRPp6mKsRADkHqBhels%3D&width=1490)" + "\n", + "![alt text](https://drive.google.com/uc?export=view&id=1l-sKpcCJCEUJV1BlAqcVAvLXLpYCInV6)" ] }, { From a2fbe8ff710496cd64f3320b0fa5fbdf69ed6d3b Mon Sep 17 00:00:00 2001 From: Elvis Saravia Date: Fri, 28 Feb 2020 11:34:21 +0100 Subject: [PATCH 08/21] Update README.md --- README.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/README.md b/README.md index 53ced7b..1c15292 100644 --- a/README.md +++ b/README.md @@ -68,5 +68,15 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac read + + A Simple Neural Network from Scratch with PyTorch and Google Colab + In this tutorial we implement a simple neural network from scratch using PyTorch. + Neural Networks + Beginner + + + + read + From 4480b64b2373c400b7db28a3169bfbd589eeda95 Mon Sep 17 00:00:00 2001 From: Elvis Saravia Date: Wed, 18 Mar 2020 23:40:57 +0100 Subject: [PATCH 09/21] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1c15292..0bb8e24 100644 --- a/README.md +++ b/README.md @@ -73,7 +73,7 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac In this tutorial we implement a simple neural network from scratch using PyTorch. Neural Networks Beginner - + read From b3e989ece700bb2dc0f068d066941f1a17d8120c Mon Sep 17 00:00:00 2001 From: Elvis Saravia Date: Sat, 28 Mar 2020 14:33:19 +0100 Subject: [PATCH 10/21] Update README.md --- README.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/README.md b/README.md index 0bb8e24..5dcf4f7 100644 --- a/README.md +++ b/README.md @@ -80,3 +80,17 @@ A collection of PyTorch notebooks for studying and practicing deep learning. 
Eac + +--- + +If this repository helped you in your research, please consider citing us: + +``` +@misc{saravia2020dair, + title={dair.ai: Democratizing Artificial Intelligence Research, Education, and Technologies}, + author={Saravia, Elvis}, + journal={https://github.com/dair-ai}, + number={1}, + year={2020} +} +``` From 952c16636391dde2f84c97b25b5fa3bf1186c09e Mon Sep 17 00:00:00 2001 From: Elvis Saravia Date: Sat, 28 Mar 2020 18:48:21 +0100 Subject: [PATCH 11/21] Update README.md --- README.md | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/README.md b/README.md index 5dcf4f7..f715aba 100644 --- a/README.md +++ b/README.md @@ -81,16 +81,3 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac ---- - -If this repository helped you in your research, please consider citing us: - -``` -@misc{saravia2020dair, - title={dair.ai: Democratizing Artificial Intelligence Research, Education, and Technologies}, - author={Saravia, Elvis}, - journal={https://github.com/dair-ai}, - number={1}, - year={2020} -} -``` From f29afbcf78432141219b5c94d3b2e82c99c25ffe Mon Sep 17 00:00:00 2001 From: Elvis Saravia Date: Wed, 15 Apr 2020 14:16:10 +0200 Subject: [PATCH 12/21] Update README.md --- README.md | 27 ++++++++++++++++++++------- 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index f715aba..6844341 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac Description Category Level - Link + Notebook Blog @@ -18,7 +18,7 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac Machine Learning Beginner - + read @@ -30,7 +30,7 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac Deep Learning Beginner - + read @@ -41,7 +41,7 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac Image Classification Intermediate - + read @@ -52,7 +52,7 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac Neural Networks Beginner - + read @@ -63,7 +63,7 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac Neural Networks Beginner - + read @@ -74,10 +74,23 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac Neural Networks Beginner - + read + + Fine-tuning BERT Language Model for Emotion Classification + In this tutorial we demonstrate how to fine-tune BERT-based model for multiclass emotion classification. + Neural Networks + Advanced + + + + coming soon! + + + + From 1264788f81e2b057e8c80f6c7cd058d866075945 Mon Sep 17 00:00:00 2001 From: Elvis Saravia Date: Wed, 15 Apr 2020 14:16:44 +0200 Subject: [PATCH 13/21] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 6844341..decc813 100644 --- a/README.md +++ b/README.md @@ -82,7 +82,7 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac Fine-tuning BERT Language Model for Emotion Classification In this tutorial we demonstrate how to fine-tune BERT-based model for multiclass emotion classification. 
- Neural Networks + Deep Learning NLP Advanced From 769eaa0cd203cb18afef14007e64a0ffb2d22a43 Mon Sep 17 00:00:00 2001 From: Elvis Saravia Date: Wed, 15 Apr 2020 14:19:11 +0200 Subject: [PATCH 14/21] Update README.md --- README.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/README.md b/README.md index decc813..5f32e5f 100644 --- a/README.md +++ b/README.md @@ -79,6 +79,18 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac read + + NLP Basic + In this tutorial we show the basics of preparing your textual data for NLP. + NLP + Beginner + + + + coming soon! + + + Fine-tuning BERT Language Model for Emotion Classification In this tutorial we demonstrate how to fine-tune BERT-based model for multiclass emotion classification. From a8eadb0cd8b2a1996e26facf7e92c8327305d1d2 Mon Sep 17 00:00:00 2001 From: Elvis Saravia Date: Wed, 15 Apr 2020 14:19:25 +0200 Subject: [PATCH 15/21] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 5f32e5f..cb1b01a 100644 --- a/README.md +++ b/README.md @@ -80,7 +80,7 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac - NLP Basic + NLP Basics In this tutorial we show the basics of preparing your textual data for NLP. NLP Beginner From 6c0704d46b30fdee3983ef8e0b3dba70f1ad3a35 Mon Sep 17 00:00:00 2001 From: Elvis Saravia Date: Wed, 15 Apr 2020 14:21:16 +0200 Subject: [PATCH 16/21] Update README.md --- README.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/README.md b/README.md index cb1b01a..f480d50 100644 --- a/README.md +++ b/README.md @@ -91,6 +91,18 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac + + Neural Machine Translation with Attention using PyTorch + In this notebook we are going to perform neural machine translation using a deep learning based approach and attention mechanism. + Deep Learning NLP + Advanced + + + + coming soon! + + + Fine-tuning BERT Language Model for Emotion Classification In this tutorial we demonstrate how to fine-tune BERT-based model for multiclass emotion classification. @@ -104,5 +116,6 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac + From 8eb3e2dda3c23b0a5f717f440f058365b06e6c33 Mon Sep 17 00:00:00 2001 From: Elvis Saravia Date: Wed, 15 Apr 2020 14:23:38 +0200 Subject: [PATCH 17/21] Update README.md --- README.md | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index f480d50..1258df1 100644 --- a/README.md +++ b/README.md @@ -103,8 +103,20 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac + + Fine-tuning BERT Language Model for English Sentiment Classification + In this tutorial we demonstrate how to fine-tune BERT-based model for sentiment classification. + Deep Learning NLP + Intermediate + + + + coming soon! + + + - Fine-tuning BERT Language Model for Emotion Classification + Fine-tuning BERT Language Model for English Emotion Classification In this tutorial we demonstrate how to fine-tune BERT-based model for multiclass emotion classification. 
Deep Learning NLP Advanced From 68025d9d542176a4405d747b72781f8433219700 Mon Sep 17 00:00:00 2001 From: Elvis Saravia Date: Wed, 15 Apr 2020 14:26:56 +0200 Subject: [PATCH 18/21] Update README.md --- README.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/README.md b/README.md index 1258df1..de32007 100644 --- a/README.md +++ b/README.md @@ -91,6 +91,18 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac + + Deep Learning for NLP + In this notebook we are going to use deep learning (RNN model) for approaching NLP tasks. + Deep Learning NLP + Beginner + + + + coming soon! + + + Neural Machine Translation with Attention using PyTorch In this notebook we are going to perform neural machine translation using a deep learning based approach and attention mechanism. From 2be78913c5f08512239e74be0f04aa7fc122730f Mon Sep 17 00:00:00 2001 From: Elvis Saravia Date: Wed, 15 Apr 2020 14:30:22 +0200 Subject: [PATCH 19/21] Update README.md --- README.md | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index de32007..a22c0a5 100644 --- a/README.md +++ b/README.md @@ -138,7 +138,16 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac coming soon! - + + Text Similarity Search using Pretrained Language Models + In this tutorial we show how to build a simple text similarity search application using pretrained language models and Elasticsearch. + Deep Learning NLP Applications + Advanced + + + + coming soon! + From 07afbfedb4948370ffc2954383bdd616770316e6 Mon Sep 17 00:00:00 2001 From: Elvis Saravia Date: Wed, 15 Apr 2020 14:40:57 +0200 Subject: [PATCH 20/21] Update README.md --- README.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/README.md b/README.md index a22c0a5..a7bc129 100644 --- a/README.md +++ b/README.md @@ -149,6 +149,17 @@ A collection of PyTorch notebooks for studying and practicing deep learning. Eac coming soon! + + Spinal Cord Gray Matter Segmentation Using PyTorch + In this notebook we are going to explore a medical imaging open-source library known as MedicalTorch, which was built on top of PyTorch. + Deep Learning in Medicine + Advanced + + + + coming soon! + + From a5eee7c71d34d94b3711fe24474ee5b6b4b3056f Mon Sep 17 00:00:00 2001 From: Elvis Saravia Date: Thu, 18 Jun 2020 22:02:55 +0200 Subject: [PATCH 21/21] Create FUNDING.yml --- .github/FUNDING.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 .github/FUNDING.yml diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 0000000..7032a31 --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: [dair-ai] +patreon: # Replace with a single Patreon username +open_collective: dairai +ko_fi: # Replace with a single Ko-fi username +tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']