4283 lines
225 KiB
Plaintext
4283 lines
225 KiB
Plaintext
{
|
||
"cells": [
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"**Chapter 9 – Up and running with TensorFlow**"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"_This notebook contains all the sample code and solutions to the exercises in chapter 9._"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"# Setup"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"First, let's make sure this notebook works well in both Python 2 and 3, import a few common modules, ensure Matplotlib plots figures inline and prepare a function to save the figures:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 1,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"# To support both python 2 and python 3\n",
|
||
"from __future__ import division, print_function, unicode_literals\n",
|
||
"\n",
|
||
"# Common imports\n",
|
||
"import numpy as np\n",
|
||
"import numpy.random as rnd\n",
|
||
"import os\n",
|
||
"\n",
|
||
"# to make this notebook's output stable across runs\n",
|
||
"rnd.seed(42)\n",
|
||
"\n",
|
||
"# To plot pretty figures\n",
|
||
"%matplotlib inline\n",
|
||
"import matplotlib\n",
|
||
"import matplotlib.pyplot as plt\n",
|
||
"plt.rcParams['axes.labelsize'] = 14\n",
|
||
"plt.rcParams['xtick.labelsize'] = 12\n",
|
||
"plt.rcParams['ytick.labelsize'] = 12\n",
|
||
"\n",
|
||
"# Where to save the figures\n",
|
||
"PROJECT_ROOT_DIR = \".\"\n",
|
||
"CHAPTER_ID = \"tensorflow\"\n",
|
||
"\n",
|
||
"def save_fig(fig_id, tight_layout=True):\n",
|
||
" path = os.path.join(PROJECT_ROOT_DIR, \"images\", CHAPTER_ID, fig_id + \".png\")\n",
|
||
" print(\"Saving figure\", fig_id)\n",
|
||
" if tight_layout:\n",
|
||
" plt.tight_layout()\n",
|
||
" plt.savefig(path, format='png', dpi=300)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"# Creating and running a graph"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 2,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"import tensorflow as tf\n",
|
||
"\n",
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"x = tf.Variable(3, name=\"x\")\n",
|
||
"y = tf.Variable(4, name=\"y\")\n",
|
||
"f = x*x*y + y + 2"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 3,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"<tf.Tensor 'add_1:0' shape=() dtype=int32>"
|
||
]
|
||
},
|
||
"execution_count": 3,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"f"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 4,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"42\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"sess = tf.Session()\n",
|
||
"sess.run(x.initializer)\n",
|
||
"sess.run(y.initializer)\n",
|
||
"result = sess.run(f)\n",
|
||
"print(result)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 5,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"sess.close()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 6,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"with tf.Session() as sess:\n",
|
||
" x.initializer.run()\n",
|
||
" y.initializer.run()\n",
|
||
" result = f.eval()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 7,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"42"
|
||
]
|
||
},
|
||
"execution_count": 7,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"result"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 8,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"init = tf.global_variables_initializer()\n",
|
||
"\n",
|
||
"with tf.Session() as sess:\n",
|
||
" init.run()\n",
|
||
" result = f.eval()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 9,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"42"
|
||
]
|
||
},
|
||
"execution_count": 9,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"result"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 10,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"init = tf.global_variables_initializer()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 11,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"42\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"sess = tf.InteractiveSession()\n",
|
||
"init.run()\n",
|
||
"result = f.eval()\n",
|
||
"print(result)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 12,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"sess.close()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 13,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"42"
|
||
]
|
||
},
|
||
"execution_count": 13,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"result"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"# Managing graphs"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 14,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"True"
|
||
]
|
||
},
|
||
"execution_count": 14,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"x1 = tf.Variable(1)\n",
|
||
"x1.graph is tf.get_default_graph()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 15,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"True"
|
||
]
|
||
},
|
||
"execution_count": 15,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"graph = tf.Graph()\n",
|
||
"with graph.as_default():\n",
|
||
" x2 = tf.Variable(2)\n",
|
||
"\n",
|
||
"x2.graph is graph"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 16,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true,
|
||
"scrolled": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"False"
|
||
]
|
||
},
|
||
"execution_count": 16,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"x2.graph is tf.get_default_graph()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 17,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"10\n",
|
||
"15\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"w = tf.constant(3)\n",
|
||
"x = w + 2\n",
|
||
"y = x + 5\n",
|
||
"z = x * 3\n",
|
||
"\n",
|
||
"with tf.Session() as sess:\n",
|
||
" print(y.eval()) # 10\n",
|
||
" print(z.eval()) # 15"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 18,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"10\n",
|
||
"15\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"with tf.Session() as sess:\n",
|
||
" y_val, z_val = sess.run([y, z])\n",
|
||
" print(y_val) # 10\n",
|
||
" print(z_val) # 15"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"# Linear Regression"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"## Using the Normal Equation"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 19,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"import numpy as np\n",
|
||
"from sklearn.datasets import fetch_california_housing\n",
|
||
"\n",
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"housing = fetch_california_housing()\n",
|
||
"m, n = housing.data.shape\n",
|
||
"housing_data_plus_bias = np.c_[np.ones((m, 1)), housing.data]\n",
|
||
"\n",
|
||
"X = tf.constant(housing_data_plus_bias, dtype=tf.float32, name=\"X\")\n",
|
||
"y = tf.constant(housing.target.reshape(-1, 1), dtype=tf.float32, name=\"y\")\n",
|
||
"XT = tf.transpose(X)\n",
|
||
"theta = tf.matmul(tf.matmul(tf.matrix_inverse(tf.matmul(XT, X)), XT), y)\n",
|
||
"\n",
|
||
"with tf.Session() as sess:\n",
|
||
" theta_value = theta.eval()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 20,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"array([[ -3.74651413e+01],\n",
|
||
" [ 4.35734153e-01],\n",
|
||
" [ 9.33829229e-03],\n",
|
||
" [ -1.06622010e-01],\n",
|
||
" [ 6.44106984e-01],\n",
|
||
" [ -4.25131839e-06],\n",
|
||
" [ -3.77322501e-03],\n",
|
||
" [ -4.26648885e-01],\n",
|
||
" [ -4.40514028e-01]], dtype=float32)"
|
||
]
|
||
},
|
||
"execution_count": 20,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"theta_value"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"Compare with pure NumPy"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 21,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"[[ -3.69419202e+01]\n",
|
||
" [ 4.36693293e-01]\n",
|
||
" [ 9.43577803e-03]\n",
|
||
" [ -1.07322041e-01]\n",
|
||
" [ 6.45065694e-01]\n",
|
||
" [ -3.97638942e-06]\n",
|
||
" [ -3.78654265e-03]\n",
|
||
" [ -4.21314378e-01]\n",
|
||
" [ -4.34513755e-01]]\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"X = housing_data_plus_bias\n",
|
||
"y = housing.target.reshape(-1, 1)\n",
|
||
"theta_numpy = np.linalg.inv(X.T.dot(X)).dot(X.T).dot(y)\n",
|
||
"\n",
|
||
"print(theta_numpy)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"Compare with Scikit-Learn"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 22,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"[[ -3.69419202e+01]\n",
|
||
" [ 4.36693293e-01]\n",
|
||
" [ 9.43577803e-03]\n",
|
||
" [ -1.07322041e-01]\n",
|
||
" [ 6.45065694e-01]\n",
|
||
" [ -3.97638942e-06]\n",
|
||
" [ -3.78654265e-03]\n",
|
||
" [ -4.21314378e-01]\n",
|
||
" [ -4.34513755e-01]]\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"from sklearn.linear_model import LinearRegression\n",
|
||
"lin_reg = LinearRegression()\n",
|
||
"lin_reg.fit(housing.data, housing.target.reshape(-1, 1))\n",
|
||
"\n",
|
||
"print(np.r_[lin_reg.intercept_.reshape(-1, 1), lin_reg.coef_.T])"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"## Using Batch Gradient Descent"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"Gradient Descent requires scaling the feature vectors first. We could do this using TF, but let's just use Scikit-Learn for now."
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 23,
|
||
"metadata": {
|
||
"collapsed": true,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"from sklearn.preprocessing import StandardScaler\n",
|
||
"scaler = StandardScaler()\n",
|
||
"scaled_housing_data = scaler.fit_transform(housing.data)\n",
|
||
"scaled_housing_data_plus_bias = np.c_[np.ones((m, 1)), scaled_housing_data]"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 24,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"[ 1.00000000e+00 6.60969987e-17 5.50808322e-18 6.60969987e-17\n",
|
||
" -1.06030602e-16 -1.10161664e-17 3.44255201e-18 -1.07958431e-15\n",
|
||
" -8.52651283e-15]\n",
|
||
"[ 0.38915536 0.36424355 0.5116157 ..., -0.06612179 -0.06360587\n",
|
||
" 0.01359031]\n",
|
||
"0.111111111111\n",
|
||
"(20640, 9)\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"print(scaled_housing_data_plus_bias.mean(axis=0))\n",
|
||
"print(scaled_housing_data_plus_bias.mean(axis=1))\n",
|
||
"print(scaled_housing_data_plus_bias.mean())\n",
|
||
"print(scaled_housing_data_plus_bias.shape)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"### Manually computing the gradients"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 25,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"Epoch 0 MSE = 2.75443\n",
|
||
"Epoch 100 MSE = 0.632222\n",
|
||
"Epoch 200 MSE = 0.57278\n",
|
||
"Epoch 300 MSE = 0.558501\n",
|
||
"Epoch 400 MSE = 0.549069\n",
|
||
"Epoch 500 MSE = 0.542288\n",
|
||
"Epoch 600 MSE = 0.537379\n",
|
||
"Epoch 700 MSE = 0.533822\n",
|
||
"Epoch 800 MSE = 0.531243\n",
|
||
"Epoch 900 MSE = 0.529371\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"n_epochs = 1000\n",
|
||
"learning_rate = 0.01\n",
|
||
"\n",
|
||
"X = tf.constant(scaled_housing_data_plus_bias, dtype=tf.float32, name=\"X\")\n",
|
||
"y = tf.constant(housing.target.reshape(-1, 1), dtype=tf.float32, name=\"y\")\n",
|
||
"theta = tf.Variable(tf.random_uniform([n + 1, 1], -1.0, 1.0, seed=42), name=\"theta\")\n",
|
||
"y_pred = tf.matmul(X, theta, name=\"predictions\")\n",
|
||
"error = y_pred - y\n",
|
||
"mse = tf.reduce_mean(tf.square(error), name=\"mse\")\n",
|
||
"gradients = 2/m * tf.matmul(tf.transpose(X), error)\n",
|
||
"training_op = tf.assign(theta, theta - learning_rate * gradients)\n",
|
||
"\n",
|
||
"init = tf.global_variables_initializer()\n",
|
||
"\n",
|
||
"with tf.Session() as sess:\n",
|
||
" sess.run(init)\n",
|
||
"\n",
|
||
" for epoch in range(n_epochs):\n",
|
||
" if epoch % 100 == 0:\n",
|
||
" print(\"Epoch\", epoch, \"MSE =\", mse.eval())\n",
|
||
" sess.run(training_op)\n",
|
||
" \n",
|
||
" best_theta = theta.eval()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 26,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"array([[ 2.06855226e+00],\n",
|
||
" [ 7.74078071e-01],\n",
|
||
" [ 1.31192386e-01],\n",
|
||
" [ -1.17845096e-01],\n",
|
||
" [ 1.64778158e-01],\n",
|
||
" [ 7.44080753e-04],\n",
|
||
" [ -3.91945168e-02],\n",
|
||
" [ -8.61356616e-01],\n",
|
||
" [ -8.23479712e-01]], dtype=float32)"
|
||
]
|
||
},
|
||
"execution_count": 26,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"best_theta"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"### Using autodiff"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Same as above except for the `gradients = ...` line:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 27,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"n_epochs = 1000\n",
|
||
"learning_rate = 0.01\n",
|
||
"\n",
|
||
"X = tf.constant(scaled_housing_data_plus_bias, dtype=tf.float32, name=\"X\")\n",
|
||
"y = tf.constant(housing.target.reshape(-1, 1), dtype=tf.float32, name=\"y\")\n",
|
||
"theta = tf.Variable(tf.random_uniform([n + 1, 1], -1.0, 1.0, seed=42), name=\"theta\")\n",
|
||
"y_pred = tf.matmul(X, theta, name=\"predictions\")\n",
|
||
"error = y_pred - y\n",
|
||
"mse = tf.reduce_mean(tf.square(error), name=\"mse\")"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 28,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"gradients = tf.gradients(mse, [theta])[0]"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 29,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"Epoch 0 MSE = 2.75443\n",
|
||
"Epoch 100 MSE = 0.632222\n",
|
||
"Epoch 200 MSE = 0.57278\n",
|
||
"Epoch 300 MSE = 0.558501\n",
|
||
"Epoch 400 MSE = 0.549069\n",
|
||
"Epoch 500 MSE = 0.542288\n",
|
||
"Epoch 600 MSE = 0.537379\n",
|
||
"Epoch 700 MSE = 0.533822\n",
|
||
"Epoch 800 MSE = 0.531243\n",
|
||
"Epoch 900 MSE = 0.529371\n",
|
||
"Best theta:\n",
|
||
"[[ 2.06855249e+00]\n",
|
||
" [ 7.74078071e-01]\n",
|
||
" [ 1.31192386e-01]\n",
|
||
" [ -1.17845066e-01]\n",
|
||
" [ 1.64778143e-01]\n",
|
||
" [ 7.44078017e-04]\n",
|
||
" [ -3.91945094e-02]\n",
|
||
" [ -8.61356676e-01]\n",
|
||
" [ -8.23479772e-01]]\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"training_op = tf.assign(theta, theta - learning_rate * gradients)\n",
|
||
"\n",
|
||
"init = tf.global_variables_initializer()\n",
|
||
"\n",
|
||
"with tf.Session() as sess:\n",
|
||
" sess.run(init)\n",
|
||
"\n",
|
||
" for epoch in range(n_epochs):\n",
|
||
" if epoch % 100 == 0:\n",
|
||
" print(\"Epoch\", epoch, \"MSE =\", mse.eval())\n",
|
||
" sess.run(training_op)\n",
|
||
" \n",
|
||
" best_theta = theta.eval()\n",
|
||
"\n",
|
||
"print(\"Best theta:\")\n",
|
||
"print(best_theta)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"How could you find the partial derivatives of the following function with regard to `a` and `b`?"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 30,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"def my_func(a, b):\n",
|
||
" z = 0\n",
|
||
" for i in range(100):\n",
|
||
" z = a * np.cos(z + i) + z * np.sin(b - i)\n",
|
||
" return z"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 31,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"-0.21253923284754914"
|
||
]
|
||
},
|
||
"execution_count": 31,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"my_func(0.2, 0.3)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 32,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"a = tf.Variable(0.2, name=\"a\")\n",
|
||
"b = tf.Variable(0.3, name=\"b\")\n",
|
||
"z = tf.constant(0.0, name=\"z0\")\n",
|
||
"for i in range(100):\n",
|
||
" z = a * tf.cos(z + i) + z * tf.sin(b - i)\n",
|
||
"\n",
|
||
"grads = tf.gradients(z, [a, b])\n",
|
||
"init = tf.global_variables_initializer()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Let's compute the function at $a=0.2$ and $b=0.3$, and the partial derivatives at that point with regard to $a$ and with regard to $b$:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 33,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"-0.212537\n",
|
||
"[-1.1388494, 0.19671395]\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"with tf.Session() as sess:\n",
|
||
" init.run()\n",
|
||
" print(z.eval())\n",
|
||
" print(sess.run(grads))"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"### Using a `GradientDescentOptimizer`"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 34,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"n_epochs = 1000\n",
|
||
"learning_rate = 0.01\n",
|
||
"\n",
|
||
"X = tf.constant(scaled_housing_data_plus_bias, dtype=tf.float32, name=\"X\")\n",
|
||
"y = tf.constant(housing.target.reshape(-1, 1), dtype=tf.float32, name=\"y\")\n",
|
||
"theta = tf.Variable(tf.random_uniform([n + 1, 1], -1.0, 1.0, seed=42), name=\"theta\")\n",
|
||
"y_pred = tf.matmul(X, theta, name=\"predictions\")\n",
|
||
"error = y_pred - y\n",
|
||
"mse = tf.reduce_mean(tf.square(error), name=\"mse\")"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 35,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)\n",
|
||
"training_op = optimizer.minimize(mse)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 36,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"Epoch 0 MSE = 2.75443\n",
|
||
"Epoch 100 MSE = 0.632222\n",
|
||
"Epoch 200 MSE = 0.57278\n",
|
||
"Epoch 300 MSE = 0.558501\n",
|
||
"Epoch 400 MSE = 0.549069\n",
|
||
"Epoch 500 MSE = 0.542288\n",
|
||
"Epoch 600 MSE = 0.537379\n",
|
||
"Epoch 700 MSE = 0.533822\n",
|
||
"Epoch 800 MSE = 0.531243\n",
|
||
"Epoch 900 MSE = 0.529371\n",
|
||
"Best theta:\n",
|
||
"[[ 2.06855249e+00]\n",
|
||
" [ 7.74078071e-01]\n",
|
||
" [ 1.31192386e-01]\n",
|
||
" [ -1.17845066e-01]\n",
|
||
" [ 1.64778143e-01]\n",
|
||
" [ 7.44078017e-04]\n",
|
||
" [ -3.91945094e-02]\n",
|
||
" [ -8.61356676e-01]\n",
|
||
" [ -8.23479772e-01]]\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"init = tf.global_variables_initializer()\n",
|
||
"\n",
|
||
"with tf.Session() as sess:\n",
|
||
" sess.run(init)\n",
|
||
"\n",
|
||
" for epoch in range(n_epochs):\n",
|
||
" if epoch % 100 == 0:\n",
|
||
" print(\"Epoch\", epoch, \"MSE =\", mse.eval())\n",
|
||
" sess.run(training_op)\n",
|
||
" \n",
|
||
" best_theta = theta.eval()\n",
|
||
"\n",
|
||
"print(\"Best theta:\")\n",
|
||
"print(best_theta)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"### Using a momentum optimizer"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 37,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"n_epochs = 1000\n",
|
||
"learning_rate = 0.01\n",
|
||
"\n",
|
||
"X = tf.constant(scaled_housing_data_plus_bias, dtype=tf.float32, name=\"X\")\n",
|
||
"y = tf.constant(housing.target.reshape(-1, 1), dtype=tf.float32, name=\"y\")\n",
|
||
"theta = tf.Variable(tf.random_uniform([n + 1, 1], -1.0, 1.0, seed=42), name=\"theta\")\n",
|
||
"y_pred = tf.matmul(X, theta, name=\"predictions\")\n",
|
||
"error = y_pred - y\n",
|
||
"mse = tf.reduce_mean(tf.square(error), name=\"mse\")"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 38,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"optimizer = tf.train.MomentumOptimizer(learning_rate=learning_rate,\n",
|
||
" momentum=0.9)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 39,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"training_op = optimizer.minimize(mse)\n",
|
||
"\n",
|
||
"init = tf.global_variables_initializer()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 40,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"Best theta:\n",
|
||
"[[ 2.06855798]\n",
|
||
" [ 0.82961673]\n",
|
||
" [ 0.11875112]\n",
|
||
" [-0.26552212]\n",
|
||
" [ 0.30569226]\n",
|
||
" [-0.00450316]\n",
|
||
" [-0.03932616]\n",
|
||
" [-0.89989167]\n",
|
||
" [-0.87054664]]\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"with tf.Session() as sess:\n",
|
||
" sess.run(init)\n",
|
||
"\n",
|
||
" for epoch in range(n_epochs):\n",
|
||
" sess.run(training_op)\n",
|
||
" \n",
|
||
" best_theta = theta.eval()\n",
|
||
"\n",
|
||
"print(\"Best theta:\")\n",
|
||
"print(best_theta)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"# Feeding data to the training algorithm"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"## Placeholder nodes"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 41,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"[[ 6. 7. 8.]]\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"A = tf.placeholder(tf.float32, shape=(None, 3))\n",
|
||
"B = A + 5\n",
|
||
"with tf.Session() as sess:\n",
|
||
" B_val_1 = B.eval(feed_dict={A: [[1, 2, 3]]})\n",
|
||
" B_val_2 = B.eval(feed_dict={A: [[4, 5, 6], [7, 8, 9]]})\n",
|
||
"\n",
|
||
"print(B_val_1)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 42,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"[[ 9. 10. 11.]\n",
|
||
" [ 12. 13. 14.]]\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"print(B_val_2)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"## Mini-batch Gradient Descent"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 43,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"n_epochs = 1000\n",
|
||
"learning_rate = 0.01"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 44,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"X = tf.placeholder(tf.float32, shape=(None, n + 1), name=\"X\")\n",
|
||
"y = tf.placeholder(tf.float32, shape=(None, 1), name=\"y\")"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 45,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"theta = tf.Variable(tf.random_uniform([n + 1, 1], -1.0, 1.0, seed=42), name=\"theta\")\n",
|
||
"y_pred = tf.matmul(X, theta, name=\"predictions\")\n",
|
||
"error = y_pred - y\n",
|
||
"mse = tf.reduce_mean(tf.square(error), name=\"mse\")\n",
|
||
"optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)\n",
|
||
"training_op = optimizer.minimize(mse)\n",
|
||
"\n",
|
||
"init = tf.global_variables_initializer()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 46,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"n_epochs = 10"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 47,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"batch_size = 100\n",
|
||
"n_batches = int(np.ceil(m / batch_size))"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 48,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"def fetch_batch(epoch, batch_index, batch_size):\n",
|
||
" rnd.seed(epoch * n_batches + batch_index) # not shown in the book\n",
|
||
" indices = rnd.randint(m, size=batch_size) # not shown\n",
|
||
" X_batch = scaled_housing_data_plus_bias[indices] # not shown\n",
|
||
" y_batch = housing.target.reshape(-1, 1)[indices] # not shown\n",
|
||
" return X_batch, y_batch\n",
|
||
"\n",
|
||
"with tf.Session() as sess:\n",
|
||
" sess.run(init)\n",
|
||
"\n",
|
||
" for epoch in range(n_epochs):\n",
|
||
" for batch_index in range(n_batches):\n",
|
||
" X_batch, y_batch = fetch_batch(epoch, batch_index, batch_size)\n",
|
||
" sess.run(training_op, feed_dict={X: X_batch, y: y_batch})\n",
|
||
"\n",
|
||
" best_theta = theta.eval()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 49,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"array([[ 2.07001591],\n",
|
||
" [ 0.82045609],\n",
|
||
" [ 0.1173173 ],\n",
|
||
" [-0.22739051],\n",
|
||
" [ 0.31134021],\n",
|
||
" [ 0.00353193],\n",
|
||
" [-0.01126994],\n",
|
||
" [-0.91643935],\n",
|
||
" [-0.87950081]], dtype=float32)"
|
||
]
|
||
},
|
||
"execution_count": 49,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"best_theta"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"# Saving and restoring a model"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 50,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"Epoch 0 MSE = 2.75443\n",
|
||
"Epoch 100 MSE = 0.632222\n",
|
||
"Epoch 200 MSE = 0.57278\n",
|
||
"Epoch 300 MSE = 0.558501\n",
|
||
"Epoch 400 MSE = 0.549069\n",
|
||
"Epoch 500 MSE = 0.542288\n",
|
||
"Epoch 600 MSE = 0.537379\n",
|
||
"Epoch 700 MSE = 0.533822\n",
|
||
"Epoch 800 MSE = 0.531243\n",
|
||
"Epoch 900 MSE = 0.529371\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
"tf.reset_default_graph()\n",
"\n",
"n_epochs = 1000                                                                       # not shown in the book\n",
"learning_rate = 0.01                                                                  # not shown\n",
"\n",
"X = tf.constant(scaled_housing_data_plus_bias, dtype=tf.float32, name=\"X\")            # not shown\n",
"y = tf.constant(housing.target.reshape(-1, 1), dtype=tf.float32, name=\"y\")            # not shown\n",
"theta = tf.Variable(tf.random_uniform([n + 1, 1], -1.0, 1.0, seed=42), name=\"theta\")\n",
"y_pred = tf.matmul(X, theta, name=\"predictions\")                                      # not shown\n",
"error = y_pred - y                                                                    # not shown\n",
"mse = tf.reduce_mean(tf.square(error), name=\"mse\")                                    # not shown\n",
"optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)            # not shown\n",
"training_op = optimizer.minimize(mse)                                                 # not shown\n",
"\n",
"init = tf.global_variables_initializer()\n",
"saver = tf.train.Saver()\n",
"\n",
"with tf.Session() as sess:\n",
"    sess.run(init)\n",
"\n",
"    for epoch in range(n_epochs):\n",
"        if epoch % 100 == 0:\n",
"            print(\"Epoch\", epoch, \"MSE =\", mse.eval())                                # not shown\n",
"            save_path = saver.save(sess, \"/tmp/my_model.ckpt\")\n",
"        sess.run(training_op)\n",
"    \n",
"    best_theta = theta.eval()\n",
"    save_path = saver.save(sess, \"/tmp/my_model_final.ckpt\")"
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 51,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"array([[ 2.06855249e+00],\n",
|
||
" [ 7.74078071e-01],\n",
|
||
" [ 1.31192386e-01],\n",
|
||
" [ -1.17845066e-01],\n",
|
||
" [ 1.64778143e-01],\n",
|
||
" [ 7.44078017e-04],\n",
|
||
" [ -3.91945094e-02],\n",
|
||
" [ -8.61356676e-01],\n",
|
||
" [ -8.23479772e-01]], dtype=float32)"
|
||
]
|
||
},
|
||
"execution_count": 51,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"best_theta"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 52,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"INFO:tensorflow:Restoring parameters from /tmp/my_model_final.ckpt\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"with tf.Session() as sess:\n",
|
||
" saver.restore(sess, \"/tmp/my_model_final.ckpt\")\n",
|
||
" best_theta_restored = theta.eval() # not shown in the book"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 53,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"True"
|
||
]
|
||
},
|
||
"execution_count": 53,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"np.allclose(best_theta, best_theta_restored)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
"If you want to have a saver that saves and restores `theta` with a different name, such as `\"weights\"`:"
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 54,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"saver = tf.train.Saver({\"weights\": theta})"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
"By default the saver also saves the graph structure itself in a second file with the extension `.meta`. You can use the function `tf.train.import_meta_graph()` to restore the graph structure. This function loads the graph into the default graph and returns a `Saver` that can then be used to restore the graph state (i.e., the variable values):"
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 55,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"INFO:tensorflow:Restoring parameters from /tmp/my_model_final.ckpt\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"tf.reset_default_graph() # notice that we start with an empty graph.\n",
|
||
"\n",
|
||
"saver = tf.train.import_meta_graph(\"/tmp/my_model_final.ckpt.meta\") # this loads the graph structure\n",
|
||
"theta = tf.get_default_graph().get_tensor_by_name(\"theta:0\") # not shown in the book\n",
|
||
"\n",
|
||
"with tf.Session() as sess:\n",
|
||
" saver.restore(sess, \"/tmp/my_model_final.ckpt\") # this restores the graph's state\n",
|
||
" best_theta_restored = theta.eval() # not shown in the book"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 56,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"True"
|
||
]
|
||
},
|
||
"execution_count": 56,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"np.allclose(best_theta, best_theta_restored)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
"This means that you can import a pretrained model without having to have the corresponding Python code to build the graph. This is very handy when you keep tweaking and saving your model: you can load a previously saved model without having to search for the version of the code that built it."
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
"# Visualizing the graph\n",
"## Inside Jupyter"
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 57,
|
||
"metadata": {
|
||
"collapsed": true,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"from IPython.display import clear_output, Image, display, HTML\n",
|
||
"\n",
|
||
"def strip_consts(graph_def, max_const_size=32):\n",
|
||
" \"\"\"Strip large constant values from graph_def.\"\"\"\n",
|
||
" strip_def = tf.GraphDef()\n",
|
||
" for n0 in graph_def.node:\n",
|
||
" n = strip_def.node.add() \n",
|
||
" n.MergeFrom(n0)\n",
|
||
" if n.op == 'Const':\n",
|
||
" tensor = n.attr['value'].tensor\n",
|
||
" size = len(tensor.tensor_content)\n",
|
||
" if size > max_const_size:\n",
|
||
" tensor.tensor_content = b\"<stripped %d bytes>\"%size\n",
|
||
" return strip_def\n",
|
||
"\n",
|
||
"def show_graph(graph_def, max_const_size=32):\n",
|
||
" \"\"\"Visualize TensorFlow graph.\"\"\"\n",
|
||
" if hasattr(graph_def, 'as_graph_def'):\n",
|
||
" graph_def = graph_def.as_graph_def()\n",
|
||
" strip_def = strip_consts(graph_def, max_const_size=max_const_size)\n",
|
||
" code = \"\"\"\n",
|
||
" <script>\n",
|
||
" function load() {{\n",
|
||
" document.getElementById(\"{id}\").pbtxt = {data};\n",
|
||
" }}\n",
|
||
" </script>\n",
|
||
" <link rel=\"import\" href=\"https://tensorboard.appspot.com/tf-graph-basic.build.html\" onload=load()>\n",
|
||
" <div style=\"height:600px\">\n",
|
||
" <tf-graph-basic id=\"{id}\"></tf-graph-basic>\n",
|
||
" </div>\n",
|
||
" \"\"\".format(data=repr(str(strip_def)), id='graph'+str(np.random.rand()))\n",
|
||
"\n",
|
||
" iframe = \"\"\"\n",
|
||
" <iframe seamless style=\"width:1200px;height:620px;border:0\" srcdoc=\"{}\"></iframe>\n",
|
||
" \"\"\".format(code.replace('\"', '"'))\n",
|
||
" display(HTML(iframe))"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 58,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true,
|
||
"scrolled": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/html": [
|
||
"\n",
|
||
" <iframe seamless style=\"width:1200px;height:620px;border:0\" srcdoc=\"\n",
|
||
" <script>\n",
|
||
" function load() {\n",
|
||
" document.getElementById("graph0.1784179106002547").pbtxt = 'node {\\n name: "X"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_FLOAT\\n tensor_shape {\\n dim {\\n size: 20640\\n }\\n dim {\\n size: 9\\n }\\n }\\n tensor_content: "<stripped 743040 bytes>"\\n }\\n }\\n }\\n}\\nnode {\\n name: "y"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_FLOAT\\n tensor_shape {\\n dim {\\n size: 20640\\n }\\n dim {\\n size: 1\\n }\\n }\\n tensor_content: "<stripped 82560 bytes>"\\n }\\n }\\n }\\n}\\nnode {\\n name: "random_uniform/shape"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_INT32\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_INT32\\n tensor_shape {\\n dim {\\n size: 2\\n }\\n }\\n tensor_content: "\\\\t\\\\000\\\\000\\\\000\\\\001\\\\000\\\\000\\\\000"\\n }\\n }\\n }\\n}\\nnode {\\n name: "random_uniform/min"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_FLOAT\\n tensor_shape {\\n }\\n float_val: -1.0\\n }\\n }\\n }\\n}\\nnode {\\n name: "random_uniform/max"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_FLOAT\\n tensor_shape {\\n }\\n float_val: 1.0\\n }\\n }\\n }\\n}\\nnode {\\n name: "random_uniform/RandomUniform"\\n op: "RandomUniform"\\n input: "random_uniform/shape"\\n attr {\\n key: "T"\\n value {\\n type: DT_INT32\\n }\\n }\\n attr {\\n key: "dtype"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "seed"\\n value {\\n i: 87654321\\n }\\n }\\n attr {\\n key: "seed2"\\n value {\\n i: 42\\n }\\n }\\n}\\nnode {\\n name: "random_uniform/sub"\\n op: "Sub"\\n input: "random_uniform/max"\\n input: "random_uniform/min"\\n attr {\\n key: "T"\\n 
value {\\n type: DT_FLOAT\\n }\\n }\\n}\\nnode {\\n name: "random_uniform/mul"\\n op: "Mul"\\n input: "random_uniform/RandomUniform"\\n input: "random_uniform/sub"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n}\\nnode {\\n name: "random_uniform"\\n op: "Add"\\n input: "random_uniform/mul"\\n input: "random_uniform/min"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n}\\nnode {\\n name: "theta"\\n op: "VariableV2"\\n attr {\\n key: "container"\\n value {\\n s: ""\\n }\\n }\\n attr {\\n key: "dtype"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "shape"\\n value {\\n shape {\\n dim {\\n size: 9\\n }\\n dim {\\n size: 1\\n }\\n }\\n }\\n }\\n attr {\\n key: "shared_name"\\n value {\\n s: ""\\n }\\n }\\n}\\nnode {\\n name: "theta/Assign"\\n op: "Assign"\\n input: "theta"\\n input: "random_uniform"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "_class"\\n value {\\n list {\\n s: "loc:@theta"\\n }\\n }\\n }\\n attr {\\n key: "use_locking"\\n value {\\n b: true\\n }\\n }\\n attr {\\n key: "validate_shape"\\n value {\\n b: true\\n }\\n }\\n}\\nnode {\\n name: "theta/read"\\n op: "Identity"\\n input: "theta"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "_class"\\n value {\\n list {\\n s: "loc:@theta"\\n }\\n }\\n }\\n}\\nnode {\\n name: "predictions"\\n op: "MatMul"\\n input: "X"\\n input: "theta/read"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "transpose_a"\\n value {\\n b: false\\n }\\n }\\n attr {\\n key: "transpose_b"\\n value {\\n b: false\\n }\\n }\\n}\\nnode {\\n name: "sub"\\n op: "Sub"\\n input: "predictions"\\n input: "y"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n}\\nnode {\\n name: "Square"\\n op: "Square"\\n input: "sub"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n}\\nnode {\\n name: "Const"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_INT32\\n }\\n }\\n attr 
{\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_INT32\\n tensor_shape {\\n dim {\\n size: 2\\n }\\n }\\n tensor_content: "\\\\000\\\\000\\\\000\\\\000\\\\001\\\\000\\\\000\\\\000"\\n }\\n }\\n }\\n}\\nnode {\\n name: "mse"\\n op: "Mean"\\n input: "Square"\\n input: "Const"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "Tidx"\\n value {\\n type: DT_INT32\\n }\\n }\\n attr {\\n key: "keep_dims"\\n value {\\n b: false\\n }\\n }\\n}\\nnode {\\n name: "gradients/Shape"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_INT32\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_INT32\\n tensor_shape {\\n dim {\\n }\\n }\\n }\\n }\\n }\\n}\\nnode {\\n name: "gradients/Const"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_FLOAT\\n tensor_shape {\\n }\\n float_val: 1.0\\n }\\n }\\n }\\n}\\nnode {\\n name: "gradients/Fill"\\n op: "Fill"\\n input: "gradients/Shape"\\n input: "gradients/Const"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n}\\nnode {\\n name: "gradients/mse_grad/Reshape/shape"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_INT32\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_INT32\\n tensor_shape {\\n dim {\\n size: 2\\n }\\n }\\n tensor_content: "\\\\001\\\\000\\\\000\\\\000\\\\001\\\\000\\\\000\\\\000"\\n }\\n }\\n }\\n}\\nnode {\\n name: "gradients/mse_grad/Reshape"\\n op: "Reshape"\\n input: "gradients/Fill"\\n input: "gradients/mse_grad/Reshape/shape"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "Tshape"\\n value {\\n type: DT_INT32\\n }\\n }\\n}\\nnode {\\n name: "gradients/mse_grad/Tile/multiples"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_INT32\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_INT32\\n tensor_shape {\\n dim {\\n size: 2\\n }\\n }\\n 
tensor_content: "\\\\240P\\\\000\\\\000\\\\001\\\\000\\\\000\\\\000"\\n }\\n }\\n }\\n}\\nnode {\\n name: "gradients/mse_grad/Tile"\\n op: "Tile"\\n input: "gradients/mse_grad/Reshape"\\n input: "gradients/mse_grad/Tile/multiples"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "Tmultiples"\\n value {\\n type: DT_INT32\\n }\\n }\\n}\\nnode {\\n name: "gradients/mse_grad/Shape"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_INT32\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_INT32\\n tensor_shape {\\n dim {\\n size: 2\\n }\\n }\\n tensor_content: "\\\\240P\\\\000\\\\000\\\\001\\\\000\\\\000\\\\000"\\n }\\n }\\n }\\n}\\nnode {\\n name: "gradients/mse_grad/Shape_1"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_INT32\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_INT32\\n tensor_shape {\\n dim {\\n }\\n }\\n }\\n }\\n }\\n}\\nnode {\\n name: "gradients/mse_grad/Const"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_INT32\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_INT32\\n tensor_shape {\\n dim {\\n size: 1\\n }\\n }\\n int_val: 0\\n }\\n }\\n }\\n}\\nnode {\\n name: "gradients/mse_grad/Prod"\\n op: "Prod"\\n input: "gradients/mse_grad/Shape"\\n input: "gradients/mse_grad/Const"\\n attr {\\n key: "T"\\n value {\\n type: DT_INT32\\n }\\n }\\n attr {\\n key: "Tidx"\\n value {\\n type: DT_INT32\\n }\\n }\\n attr {\\n key: "keep_dims"\\n value {\\n b: false\\n }\\n }\\n}\\nnode {\\n name: "gradients/mse_grad/Const_1"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_INT32\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_INT32\\n tensor_shape {\\n dim {\\n size: 1\\n }\\n }\\n int_val: 0\\n }\\n }\\n }\\n}\\nnode {\\n name: "gradients/mse_grad/Prod_1"\\n op: "Prod"\\n input: "gradients/mse_grad/Shape_1"\\n input: "gradients/mse_grad/Const_1"\\n attr {\\n key: "T"\\n value {\\n 
type: DT_INT32\\n }\\n }\\n attr {\\n key: "Tidx"\\n value {\\n type: DT_INT32\\n }\\n }\\n attr {\\n key: "keep_dims"\\n value {\\n b: false\\n }\\n }\\n}\\nnode {\\n name: "gradients/mse_grad/Maximum/y"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_INT32\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_INT32\\n tensor_shape {\\n }\\n int_val: 1\\n }\\n }\\n }\\n}\\nnode {\\n name: "gradients/mse_grad/Maximum"\\n op: "Maximum"\\n input: "gradients/mse_grad/Prod_1"\\n input: "gradients/mse_grad/Maximum/y"\\n attr {\\n key: "T"\\n value {\\n type: DT_INT32\\n }\\n }\\n}\\nnode {\\n name: "gradients/mse_grad/floordiv"\\n op: "FloorDiv"\\n input: "gradients/mse_grad/Prod"\\n input: "gradients/mse_grad/Maximum"\\n attr {\\n key: "T"\\n value {\\n type: DT_INT32\\n }\\n }\\n}\\nnode {\\n name: "gradients/mse_grad/Cast"\\n op: "Cast"\\n input: "gradients/mse_grad/floordiv"\\n attr {\\n key: "DstT"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "SrcT"\\n value {\\n type: DT_INT32\\n }\\n }\\n}\\nnode {\\n name: "gradients/mse_grad/truediv"\\n op: "RealDiv"\\n input: "gradients/mse_grad/Tile"\\n input: "gradients/mse_grad/Cast"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n}\\nnode {\\n name: "gradients/Square_grad/mul/x"\\n op: "Const"\\n input: "^gradients/mse_grad/truediv"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_FLOAT\\n tensor_shape {\\n }\\n float_val: 2.0\\n }\\n }\\n }\\n}\\nnode {\\n name: "gradients/Square_grad/mul"\\n op: "Mul"\\n input: "gradients/Square_grad/mul/x"\\n input: "sub"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n}\\nnode {\\n name: "gradients/Square_grad/mul_1"\\n op: "Mul"\\n input: "gradients/mse_grad/truediv"\\n input: "gradients/Square_grad/mul"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n}\\nnode {\\n name: "gradients/sub_grad/Shape"\\n op: "Const"\\n 
attr {\\n key: "dtype"\\n value {\\n type: DT_INT32\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_INT32\\n tensor_shape {\\n dim {\\n size: 2\\n }\\n }\\n tensor_content: "\\\\240P\\\\000\\\\000\\\\001\\\\000\\\\000\\\\000"\\n }\\n }\\n }\\n}\\nnode {\\n name: "gradients/sub_grad/Shape_1"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_INT32\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_INT32\\n tensor_shape {\\n dim {\\n size: 2\\n }\\n }\\n tensor_content: "\\\\240P\\\\000\\\\000\\\\001\\\\000\\\\000\\\\000"\\n }\\n }\\n }\\n}\\nnode {\\n name: "gradients/sub_grad/BroadcastGradientArgs"\\n op: "BroadcastGradientArgs"\\n input: "gradients/sub_grad/Shape"\\n input: "gradients/sub_grad/Shape_1"\\n attr {\\n key: "T"\\n value {\\n type: DT_INT32\\n }\\n }\\n}\\nnode {\\n name: "gradients/sub_grad/Sum"\\n op: "Sum"\\n input: "gradients/Square_grad/mul_1"\\n input: "gradients/sub_grad/BroadcastGradientArgs"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "Tidx"\\n value {\\n type: DT_INT32\\n }\\n }\\n attr {\\n key: "keep_dims"\\n value {\\n b: false\\n }\\n }\\n}\\nnode {\\n name: "gradients/sub_grad/Reshape"\\n op: "Reshape"\\n input: "gradients/sub_grad/Sum"\\n input: "gradients/sub_grad/Shape"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "Tshape"\\n value {\\n type: DT_INT32\\n }\\n }\\n}\\nnode {\\n name: "gradients/sub_grad/Sum_1"\\n op: "Sum"\\n input: "gradients/Square_grad/mul_1"\\n input: "gradients/sub_grad/BroadcastGradientArgs:1"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "Tidx"\\n value {\\n type: DT_INT32\\n }\\n }\\n attr {\\n key: "keep_dims"\\n value {\\n b: false\\n }\\n }\\n}\\nnode {\\n name: "gradients/sub_grad/Neg"\\n op: "Neg"\\n input: "gradients/sub_grad/Sum_1"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n}\\nnode {\\n name: "gradients/sub_grad/Reshape_1"\\n 
op: "Reshape"\\n input: "gradients/sub_grad/Neg"\\n input: "gradients/sub_grad/Shape_1"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "Tshape"\\n value {\\n type: DT_INT32\\n }\\n }\\n}\\nnode {\\n name: "gradients/sub_grad/tuple/group_deps"\\n op: "NoOp"\\n input: "^gradients/sub_grad/Reshape"\\n input: "^gradients/sub_grad/Reshape_1"\\n}\\nnode {\\n name: "gradients/sub_grad/tuple/control_dependency"\\n op: "Identity"\\n input: "gradients/sub_grad/Reshape"\\n input: "^gradients/sub_grad/tuple/group_deps"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "_class"\\n value {\\n list {\\n s: "loc:@gradients/sub_grad/Reshape"\\n }\\n }\\n }\\n}\\nnode {\\n name: "gradients/sub_grad/tuple/control_dependency_1"\\n op: "Identity"\\n input: "gradients/sub_grad/Reshape_1"\\n input: "^gradients/sub_grad/tuple/group_deps"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "_class"\\n value {\\n list {\\n s: "loc:@gradients/sub_grad/Reshape_1"\\n }\\n }\\n }\\n}\\nnode {\\n name: "gradients/predictions_grad/MatMul"\\n op: "MatMul"\\n input: "gradients/sub_grad/tuple/control_dependency"\\n input: "theta/read"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "transpose_a"\\n value {\\n b: false\\n }\\n }\\n attr {\\n key: "transpose_b"\\n value {\\n b: true\\n }\\n }\\n}\\nnode {\\n name: "gradients/predictions_grad/MatMul_1"\\n op: "MatMul"\\n input: "X"\\n input: "gradients/sub_grad/tuple/control_dependency"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "transpose_a"\\n value {\\n b: true\\n }\\n }\\n attr {\\n key: "transpose_b"\\n value {\\n b: false\\n }\\n }\\n}\\nnode {\\n name: "gradients/predictions_grad/tuple/group_deps"\\n op: "NoOp"\\n input: "^gradients/predictions_grad/MatMul"\\n input: "^gradients/predictions_grad/MatMul_1"\\n}\\nnode {\\n name: "gradients/predictions_grad/tuple/control_dependency"\\n op: 
"Identity"\\n input: "gradients/predictions_grad/MatMul"\\n input: "^gradients/predictions_grad/tuple/group_deps"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "_class"\\n value {\\n list {\\n s: "loc:@gradients/predictions_grad/MatMul"\\n }\\n }\\n }\\n}\\nnode {\\n name: "gradients/predictions_grad/tuple/control_dependency_1"\\n op: "Identity"\\n input: "gradients/predictions_grad/MatMul_1"\\n input: "^gradients/predictions_grad/tuple/group_deps"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "_class"\\n value {\\n list {\\n s: "loc:@gradients/predictions_grad/MatMul_1"\\n }\\n }\\n }\\n}\\nnode {\\n name: "GradientDescent/learning_rate"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_FLOAT\\n tensor_shape {\\n }\\n float_val: 0.009999999776482582\\n }\\n }\\n }\\n}\\nnode {\\n name: "GradientDescent/update_theta/ApplyGradientDescent"\\n op: "ApplyGradientDescent"\\n input: "theta"\\n input: "GradientDescent/learning_rate"\\n input: "gradients/predictions_grad/tuple/control_dependency_1"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "_class"\\n value {\\n list {\\n s: "loc:@theta"\\n }\\n }\\n }\\n attr {\\n key: "use_locking"\\n value {\\n b: false\\n }\\n }\\n}\\nnode {\\n name: "GradientDescent"\\n op: "NoOp"\\n input: "^GradientDescent/update_theta/ApplyGradientDescent"\\n}\\nnode {\\n name: "init"\\n op: "NoOp"\\n input: "^theta/Assign"\\n}\\nnode {\\n name: "save/Const"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_STRING\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_STRING\\n tensor_shape {\\n }\\n string_val: "model"\\n }\\n }\\n }\\n}\\nnode {\\n name: "save/SaveV2/tensor_names"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_STRING\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: 
DT_STRING\\n tensor_shape {\\n dim {\\n size: 1\\n }\\n }\\n string_val: "theta"\\n }\\n }\\n }\\n}\\nnode {\\n name: "save/SaveV2/shape_and_slices"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_STRING\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_STRING\\n tensor_shape {\\n dim {\\n size: 1\\n }\\n }\\n string_val: ""\\n }\\n }\\n }\\n}\\nnode {\\n name: "save/SaveV2"\\n op: "SaveV2"\\n input: "save/Const"\\n input: "save/SaveV2/tensor_names"\\n input: "save/SaveV2/shape_and_slices"\\n input: "theta"\\n attr {\\n key: "dtypes"\\n value {\\n list {\\n type: DT_FLOAT\\n }\\n }\\n }\\n}\\nnode {\\n name: "save/control_dependency"\\n op: "Identity"\\n input: "save/Const"\\n input: "^save/SaveV2"\\n attr {\\n key: "T"\\n value {\\n type: DT_STRING\\n }\\n }\\n attr {\\n key: "_class"\\n value {\\n list {\\n s: "loc:@save/Const"\\n }\\n }\\n }\\n}\\nnode {\\n name: "save/RestoreV2/tensor_names"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_STRING\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_STRING\\n tensor_shape {\\n dim {\\n size: 1\\n }\\n }\\n string_val: "theta"\\n }\\n }\\n }\\n}\\nnode {\\n name: "save/RestoreV2/shape_and_slices"\\n op: "Const"\\n attr {\\n key: "dtype"\\n value {\\n type: DT_STRING\\n }\\n }\\n attr {\\n key: "value"\\n value {\\n tensor {\\n dtype: DT_STRING\\n tensor_shape {\\n dim {\\n size: 1\\n }\\n }\\n string_val: ""\\n }\\n }\\n }\\n}\\nnode {\\n name: "save/RestoreV2"\\n op: "RestoreV2"\\n input: "save/Const"\\n input: "save/RestoreV2/tensor_names"\\n input: "save/RestoreV2/shape_and_slices"\\n attr {\\n key: "dtypes"\\n value {\\n list {\\n type: DT_FLOAT\\n }\\n }\\n }\\n}\\nnode {\\n name: "save/Assign"\\n op: "Assign"\\n input: "theta"\\n input: "save/RestoreV2"\\n attr {\\n key: "T"\\n value {\\n type: DT_FLOAT\\n }\\n }\\n attr {\\n key: "_class"\\n value {\\n list {\\n s: "loc:@theta"\\n }\\n }\\n }\\n attr {\\n key: "use_locking"\\n value 
{\\n b: true\\n }\\n }\\n attr {\\n key: "validate_shape"\\n value {\\n b: true\\n }\\n }\\n}\\nnode {\\n name: "save/restore_all"\\n op: "NoOp"\\n input: "^save/Assign"\\n}\\n';\n",
|
||
" }\n",
|
||
" </script>\n",
|
||
" <link rel="import" href="https://tensorboard.appspot.com/tf-graph-basic.build.html" onload=load()>\n",
|
||
" <div style="height:600px">\n",
|
||
" <tf-graph-basic id="graph0.1784179106002547"></tf-graph-basic>\n",
|
||
" </div>\n",
|
||
" \"></iframe>\n",
|
||
" "
|
||
],
|
||
"text/plain": [
|
||
"<IPython.core.display.HTML object>"
|
||
]
|
||
},
|
||
"metadata": {},
|
||
"output_type": "display_data"
|
||
}
|
||
],
|
||
"source": [
|
||
"show_graph(tf.get_default_graph())"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
"## Using TensorBoard"
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 59,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"from datetime import datetime\n",
|
||
"\n",
|
||
"now = datetime.utcnow().strftime(\"%Y%m%d%H%M%S\")\n",
|
||
"root_logdir = \"tf_logs\"\n",
|
||
"logdir = \"{}/run-{}/\".format(root_logdir, now)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 60,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"n_epochs = 1000\n",
|
||
"learning_rate = 0.01\n",
|
||
"\n",
|
||
"X = tf.placeholder(tf.float32, shape=(None, n + 1), name=\"X\")\n",
|
||
"y = tf.placeholder(tf.float32, shape=(None, 1), name=\"y\")\n",
|
||
"theta = tf.Variable(tf.random_uniform([n + 1, 1], -1.0, 1.0, seed=42), name=\"theta\")\n",
|
||
"y_pred = tf.matmul(X, theta, name=\"predictions\")\n",
|
||
"error = y_pred - y\n",
|
||
"mse = tf.reduce_mean(tf.square(error), name=\"mse\")\n",
|
||
"optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)\n",
|
||
"training_op = optimizer.minimize(mse)\n",
|
||
"\n",
|
||
"init = tf.global_variables_initializer()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 61,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"mse_summary = tf.summary.scalar('MSE', mse)\n",
|
||
"file_writer = tf.summary.FileWriter(logdir, tf.get_default_graph())"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 62,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"n_epochs = 10\n",
|
||
"batch_size = 100\n",
|
||
"n_batches = int(np.ceil(m / batch_size))"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 63,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
"with tf.Session() as sess:                                                        # not shown in the book\n",
"    sess.run(init)                                                                # not shown\n",
"\n",
"    for epoch in range(n_epochs):                                                 # not shown\n",
"        for batch_index in range(n_batches):\n",
"            X_batch, y_batch = fetch_batch(epoch, batch_index, batch_size)\n",
"            if batch_index % 10 == 0:\n",
"                summary_str = mse_summary.eval(feed_dict={X: X_batch, y: y_batch})\n",
"                step = epoch * n_batches + batch_index\n",
"                file_writer.add_summary(summary_str, step)\n",
"            sess.run(training_op, feed_dict={X: X_batch, y: y_batch})\n",
"\n",
"    best_theta = theta.eval()                                                     # not shown"
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 64,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"file_writer.close()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 65,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"array([[ 2.07001591],\n",
|
||
" [ 0.82045609],\n",
|
||
" [ 0.1173173 ],\n",
|
||
" [-0.22739051],\n",
|
||
" [ 0.31134021],\n",
|
||
" [ 0.00353193],\n",
|
||
" [-0.01126994],\n",
|
||
" [-0.91643935],\n",
|
||
" [-0.87950081]], dtype=float32)"
|
||
]
|
||
},
|
||
"execution_count": 65,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"best_theta"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
"# Name scopes"
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 66,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"now = datetime.utcnow().strftime(\"%Y%m%d%H%M%S\")\n",
|
||
"root_logdir = \"tf_logs\"\n",
|
||
"logdir = \"{}/run-{}/\".format(root_logdir, now)\n",
|
||
"\n",
|
||
"n_epochs = 1000\n",
|
||
"learning_rate = 0.01\n",
|
||
"\n",
|
||
"X = tf.placeholder(tf.float32, shape=(None, n + 1), name=\"X\")\n",
|
||
"y = tf.placeholder(tf.float32, shape=(None, 1), name=\"y\")\n",
|
||
"theta = tf.Variable(tf.random_uniform([n + 1, 1], -1.0, 1.0, seed=42), name=\"theta\")\n",
|
||
"y_pred = tf.matmul(X, theta, name=\"predictions\")"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 67,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"with tf.name_scope(\"loss\") as scope:\n",
|
||
" error = y_pred - y\n",
|
||
" mse = tf.reduce_mean(tf.square(error), name=\"mse\")"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 68,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)\n",
|
||
"training_op = optimizer.minimize(mse)\n",
|
||
"\n",
|
||
"init = tf.global_variables_initializer()\n",
|
||
"\n",
|
||
"mse_summary = tf.summary.scalar('MSE', mse)\n",
|
||
"file_writer = tf.summary.FileWriter(logdir, tf.get_default_graph())"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 69,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"Best theta:\n",
|
||
"[[ 2.07001591]\n",
|
||
" [ 0.82045609]\n",
|
||
" [ 0.1173173 ]\n",
|
||
" [-0.22739051]\n",
|
||
" [ 0.31134021]\n",
|
||
" [ 0.00353193]\n",
|
||
" [-0.01126994]\n",
|
||
" [-0.91643935]\n",
|
||
" [-0.87950081]]\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"n_epochs = 10\n",
|
||
"batch_size = 100\n",
|
||
"n_batches = int(np.ceil(m / batch_size))\n",
|
||
"\n",
|
||
"with tf.Session() as sess:\n",
|
||
" sess.run(init)\n",
|
||
"\n",
|
||
" for epoch in range(n_epochs):\n",
|
||
" for batch_index in range(n_batches):\n",
|
||
" X_batch, y_batch = fetch_batch(epoch, batch_index, batch_size)\n",
|
||
" if batch_index % 10 == 0:\n",
|
||
" summary_str = mse_summary.eval(feed_dict={X: X_batch, y: y_batch})\n",
|
||
" step = epoch * n_batches + batch_index\n",
|
||
" file_writer.add_summary(summary_str, step)\n",
|
||
" sess.run(training_op, feed_dict={X: X_batch, y: y_batch})\n",
|
||
"\n",
|
||
" best_theta = theta.eval()\n",
|
||
"\n",
|
||
"file_writer.flush()\n",
|
||
"file_writer.close()\n",
|
||
"print(\"Best theta:\")\n",
|
||
"print(best_theta)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 70,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"loss/sub\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"print(error.op.name)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 71,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"loss/mse\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"print(mse.op.name)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 72,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"a\n",
|
||
"a_1\n",
|
||
"param/a\n",
|
||
"param_1/a\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"a1 = tf.Variable(0, name=\"a\") # name == \"a\"\n",
|
||
"a2 = tf.Variable(0, name=\"a\") # name == \"a_1\"\n",
|
||
"\n",
|
||
"with tf.name_scope(\"param\"): # name == \"param\"\n",
|
||
" a3 = tf.Variable(0, name=\"a\") # name == \"param/a\"\n",
|
||
"\n",
|
||
"with tf.name_scope(\"param\"): # name == \"param_1\"\n",
|
||
" a4 = tf.Variable(0, name=\"a\") # name == \"param_1/a\"\n",
|
||
"\n",
|
||
"for node in (a1, a2, a3, a4):\n",
|
||
" print(node.op.name)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"# Modularity"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"Here is some ugly, flat code:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 73,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"n_features = 3\n",
|
||
"X = tf.placeholder(tf.float32, shape=(None, n_features), name=\"X\")\n",
|
||
"\n",
|
||
"w1 = tf.Variable(tf.random_normal((n_features, 1)), name=\"weights1\")\n",
|
||
"w2 = tf.Variable(tf.random_normal((n_features, 1)), name=\"weights2\")\n",
|
||
"b1 = tf.Variable(0.0, name=\"bias1\")\n",
|
||
"b2 = tf.Variable(0.0, name=\"bias2\")\n",
|
||
"\n",
|
||
"z1 = tf.add(tf.matmul(X, w1), b1, name=\"z1\")\n",
|
||
"z2 = tf.add(tf.matmul(X, w2), b2, name=\"z2\")\n",
|
||
"\n",
|
||
"relu1 = tf.maximum(z1, 0., name=\"relu1\")\n",
|
||
"relu2 = tf.maximum(z1, 0., name=\"relu2\") # Oops, cut&paste error! Did you spot it?\n",
|
||
"\n",
|
||
"output = tf.add(relu1, relu2, name=\"output\")"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"Much better, using a function to build the ReLUs:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 74,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"def relu(X):\n",
|
||
" w_shape = (int(X.get_shape()[1]), 1)\n",
|
||
" w = tf.Variable(tf.random_normal(w_shape), name=\"weights\")\n",
|
||
" b = tf.Variable(0.0, name=\"bias\")\n",
|
||
" z = tf.add(tf.matmul(X, w), b, name=\"z\")\n",
|
||
" return tf.maximum(z, 0., name=\"relu\")\n",
|
||
"\n",
|
||
"n_features = 3\n",
|
||
"X = tf.placeholder(tf.float32, shape=(None, n_features), name=\"X\")\n",
|
||
"relus = [relu(X) for i in range(5)]\n",
|
||
"output = tf.add_n(relus, name=\"output\")"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 75,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"file_writer = tf.summary.FileWriter(\"logs/relu1\", tf.get_default_graph())"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"Even better, using name scopes:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 76,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"def relu(X):\n",
|
||
" with tf.name_scope(\"relu\"):\n",
|
||
" w_shape = (int(X.get_shape()[1]), 1) # not shown in the book\n",
|
||
" w = tf.Variable(tf.random_normal(w_shape), name=\"weights\") # not shown\n",
|
||
" b = tf.Variable(0.0, name=\"bias\") # not shown\n",
|
||
" z = tf.add(tf.matmul(X, w), b, name=\"z\") # not shown\n",
|
||
" return tf.maximum(z, 0., name=\"max\") # not shown"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 77,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"n_features = 3\n",
|
||
"X = tf.placeholder(tf.float32, shape=(None, n_features), name=\"X\")\n",
|
||
"relus = [relu(X) for i in range(5)]\n",
|
||
"output = tf.add_n(relus, name=\"output\")\n",
|
||
"\n",
|
||
"file_writer = tf.summary.FileWriter(\"logs/relu2\", tf.get_default_graph())\n",
|
||
"file_writer.close()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"## Sharing Variables"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"Sharing a `threshold` variable the classic way, by defining it outside of the `relu()` function then passing it as a parameter:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 78,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"def relu(X, threshold):\n",
|
||
" with tf.name_scope(\"relu\"):\n",
|
||
" w_shape = (int(X.get_shape()[1]), 1) # not shown in the book\n",
|
||
" w = tf.Variable(tf.random_normal(w_shape), name=\"weights\") # not shown\n",
|
||
" b = tf.Variable(0.0, name=\"bias\") # not shown\n",
|
||
" z = tf.add(tf.matmul(X, w), b, name=\"z\") # not shown\n",
|
||
" return tf.maximum(z, threshold, name=\"max\")\n",
|
||
"\n",
|
||
"threshold = tf.Variable(0.0, name=\"threshold\")\n",
|
||
"X = tf.placeholder(tf.float32, shape=(None, n_features), name=\"X\")\n",
|
||
"relus = [relu(X, threshold) for i in range(5)]\n",
|
||
"output = tf.add_n(relus, name=\"output\")"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 79,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"def relu(X):\n",
|
||
" with tf.name_scope(\"relu\"):\n",
|
||
" if not hasattr(relu, \"threshold\"):\n",
|
||
" relu.threshold = tf.Variable(0.0, name=\"threshold\")\n",
|
||
" w_shape = int(X.get_shape()[1]), 1 # not shown in the book\n",
|
||
" w = tf.Variable(tf.random_normal(w_shape), name=\"weights\") # not shown\n",
|
||
" b = tf.Variable(0.0, name=\"bias\") # not shown\n",
|
||
" z = tf.add(tf.matmul(X, w), b, name=\"z\") # not shown\n",
|
||
" return tf.maximum(z, relu.threshold, name=\"max\")"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 80,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"X = tf.placeholder(tf.float32, shape=(None, n_features), name=\"X\")\n",
|
||
"relus = [relu(X) for i in range(5)]\n",
|
||
"output = tf.add_n(relus, name=\"output\")"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 81,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"with tf.variable_scope(\"relu\"):\n",
|
||
" threshold = tf.get_variable(\"threshold\", shape=(),\n",
|
||
" initializer=tf.constant_initializer(0.0))"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 82,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"with tf.variable_scope(\"relu\", reuse=True):\n",
|
||
" threshold = tf.get_variable(\"threshold\")"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 83,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"with tf.variable_scope(\"relu\") as scope:\n",
|
||
" scope.reuse_variables()\n",
|
||
" threshold = tf.get_variable(\"threshold\")"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 84,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"def relu(X):\n",
|
||
" with tf.variable_scope(\"relu\", reuse=True):\n",
|
||
" threshold = tf.get_variable(\"threshold\")\n",
|
||
" w_shape = int(X.get_shape()[1]), 1 # not shown\n",
|
||
" w = tf.Variable(tf.random_normal(w_shape), name=\"weights\") # not shown\n",
|
||
" b = tf.Variable(0.0, name=\"bias\") # not shown\n",
|
||
" z = tf.add(tf.matmul(X, w), b, name=\"z\") # not shown\n",
|
||
" return tf.maximum(z, threshold, name=\"max\")\n",
|
||
"\n",
|
||
"X = tf.placeholder(tf.float32, shape=(None, n_features), name=\"X\")\n",
|
||
"with tf.variable_scope(\"relu\"):\n",
|
||
" threshold = tf.get_variable(\"threshold\", shape=(),\n",
|
||
" initializer=tf.constant_initializer(0.0))\n",
|
||
"relus = [relu(X) for relu_index in range(5)]\n",
|
||
"output = tf.add_n(relus, name=\"output\")"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 85,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"file_writer = tf.summary.FileWriter(\"logs/relu6\", tf.get_default_graph())\n",
|
||
"file_writer.close()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 86,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"def relu(X):\n",
|
||
" with tf.variable_scope(\"relu\"):\n",
|
||
" threshold = tf.get_variable(\"threshold\", shape=(), initializer=tf.constant_initializer(0.0))\n",
|
||
" w_shape = (int(X.get_shape()[1]), 1)\n",
|
||
" w = tf.Variable(tf.random_normal(w_shape), name=\"weights\")\n",
|
||
" b = tf.Variable(0.0, name=\"bias\")\n",
|
||
" z = tf.add(tf.matmul(X, w), b, name=\"z\")\n",
|
||
" return tf.maximum(z, threshold, name=\"max\")\n",
|
||
"\n",
|
||
"X = tf.placeholder(tf.float32, shape=(None, n_features), name=\"X\")\n",
|
||
"with tf.variable_scope(\"\", default_name=\"\") as scope:\n",
|
||
" first_relu = relu(X) # create the shared variable\n",
|
||
" scope.reuse_variables() # then reuse it\n",
|
||
" relus = [first_relu] + [relu(X) for i in range(4)]\n",
|
||
"output = tf.add_n(relus, name=\"output\")\n",
|
||
"\n",
|
||
"file_writer = tf.summary.FileWriter(\"logs/relu8\", tf.get_default_graph())\n",
|
||
"file_writer.close()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 87,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"def relu(X):\n",
|
||
" threshold = tf.get_variable(\"threshold\", shape=(),\n",
|
||
" initializer=tf.constant_initializer(0.0))\n",
|
||
" w_shape = (int(X.get_shape()[1]), 1) # not shown in the book\n",
|
||
" w = tf.Variable(tf.random_normal(w_shape), name=\"weights\") # not shown\n",
|
||
" b = tf.Variable(0.0, name=\"bias\") # not shown\n",
|
||
" z = tf.add(tf.matmul(X, w), b, name=\"z\") # not shown\n",
|
||
" return tf.maximum(z, threshold, name=\"max\")\n",
|
||
"\n",
|
||
"X = tf.placeholder(tf.float32, shape=(None, n_features), name=\"X\")\n",
|
||
"relus = []\n",
|
||
"for relu_index in range(5):\n",
|
||
" with tf.variable_scope(\"relu\", reuse=(relu_index >= 1)) as scope:\n",
|
||
" relus.append(relu(X))\n",
|
||
"output = tf.add_n(relus, name=\"output\")"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 88,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"file_writer = tf.summary.FileWriter(\"logs/relu9\", tf.get_default_graph())\n",
|
||
"file_writer.close()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"# Extra material"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 89,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"x0: my_scope/x\n",
|
||
"x1: my_scope/x_1\n",
|
||
"x2: my_scope/x_2\n",
|
||
"x3: my_scope/x\n",
|
||
"x4: my_scope_1/x\n",
|
||
"x5: my_scope/x\n",
|
||
"True\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"with tf.variable_scope(\"my_scope\"):\n",
|
||
" x0 = tf.get_variable(\"x\", shape=(), initializer=tf.constant_initializer(0.))\n",
|
||
" x1 = tf.Variable(0., name=\"x\")\n",
|
||
" x2 = tf.Variable(0., name=\"x\")\n",
|
||
"\n",
|
||
"with tf.variable_scope(\"my_scope\", reuse=True):\n",
|
||
" x3 = tf.get_variable(\"x\")\n",
|
||
" x4 = tf.Variable(0., name=\"x\")\n",
|
||
"\n",
|
||
"with tf.variable_scope(\"\", default_name=\"\", reuse=True):\n",
|
||
" x5 = tf.get_variable(\"my_scope/x\")\n",
|
||
"\n",
|
||
"print(\"x0:\", x0.op.name)\n",
|
||
"print(\"x1:\", x1.op.name)\n",
|
||
"print(\"x2:\", x2.op.name)\n",
|
||
"print(\"x3:\", x3.op.name)\n",
|
||
"print(\"x4:\", x4.op.name)\n",
|
||
"print(\"x5:\", x5.op.name)\n",
|
||
"print(x0 is x3 and x3 is x5)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"The first `variable_scope()` block first creates the shared variable `x0`, named `my_scope/x`. For all operations other than shared variables (including non-shared variables), the variable scope acts like a regular name scope, which is why the two variables `x1` and `x2` have a name with a prefix `my_scope/`. Note however that TensorFlow makes their names unique by adding an index: `my_scope/x_1` and `my_scope/x_2`.\n",
|
||
"\n",
|
||
"The second `variable_scope()` block reuses the shared variables in scope `my_scope`, which is why `x0 is x3`. Once again, for all operations other than shared variables it acts as a name scope, and since it's a separate block from the first one, the name of the scope is made unique by TensorFlow (`my_scope_1`) and thus the variable `x4` is named `my_scope_1/x`.\n",
|
||
"\n",
|
||
"The third block shows another way to get a handle on the shared variable `my_scope/x` by creating a `variable_scope()` at the root scope (whose name is an empty string), then calling `get_variable()` with the full name of the shared variable (i.e. `\"my_scope/x\"`)."
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"## Strings"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 90,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"[b'Do' b'you' b'want' b'some' b'caf\\xc3\\xa9?']\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"text = np.array(\"Do you want some café?\".split())\n",
|
||
"text_tensor = tf.constant(text)\n",
|
||
"\n",
|
||
"with tf.Session() as sess:\n",
|
||
" print(text_tensor.eval())"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"## Implementing a Home-Made Computation Graph"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 91,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"f(x,y) = ((x) * (x)) * (y) + y + 2\n",
|
||
"f(3,4) = 42\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"class Const(object):\n",
|
||
" def __init__(self, value):\n",
|
||
" self.value = value\n",
|
||
" def evaluate(self):\n",
|
||
" return self.value\n",
|
||
" def __str__(self):\n",
|
||
" return str(self.value)\n",
|
||
"\n",
|
||
"class Var(object):\n",
|
||
" def __init__(self, init_value, name):\n",
|
||
" self.value = init_value\n",
|
||
" self.name = name\n",
|
||
" def evaluate(self):\n",
|
||
" return self.value\n",
|
||
" def __str__(self):\n",
|
||
" return self.name\n",
|
||
"\n",
|
||
"class BinaryOperator(object):\n",
|
||
" def __init__(self, a, b):\n",
|
||
" self.a = a\n",
|
||
" self.b = b\n",
|
||
"\n",
|
||
"class Add(BinaryOperator):\n",
|
||
" def evaluate(self):\n",
|
||
" return self.a.evaluate() + self.b.evaluate()\n",
|
||
" def __str__(self):\n",
|
||
" return \"{} + {}\".format(self.a, self.b)\n",
|
||
"\n",
|
||
"class Mul(BinaryOperator):\n",
|
||
" def evaluate(self):\n",
|
||
" return self.a.evaluate() * self.b.evaluate()\n",
|
||
" def __str__(self):\n",
|
||
" return \"({}) * ({})\".format(self.a, self.b)\n",
|
||
"\n",
|
||
"x = Var(3, name=\"x\")\n",
|
||
"y = Var(4, name=\"y\")\n",
|
||
"f = Add(Mul(Mul(x, x), y), Add(y, Const(2))) # f(x,y) = x²y + y + 2\n",
|
||
"print(\"f(x,y) =\", f)\n",
|
||
"print(\"f(3,4) =\", f.evaluate())"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"## Computing gradients\n",
|
||
"### Mathematical differentiation"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 92,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"df/dx(3,4) = 24\n",
|
||
"df/dy(3,4) = 10\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"df_dx = Mul(Const(2), Mul(x, y)) # df/dx = 2xy\n",
|
||
"df_dy = Add(Mul(x, x), Const(1)) # df/dy = x² + 1\n",
|
||
"print(\"df/dx(3,4) =\", df_dx.evaluate())\n",
|
||
"print(\"df/dy(3,4) =\", df_dy.evaluate())"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"### Numerical differentiation"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 93,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"df/dx(3,4) = 24.000400000048216\n",
|
||
"df/dy(3,4) = 10.000000000047748\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"def gradients(func, vars_list, eps=0.0001):\n",
|
||
" partial_derivatives = []\n",
|
||
" base_func_eval = func.evaluate()\n",
|
||
" for var in vars_list:\n",
|
||
" original_value = var.value\n",
|
||
" var.value = var.value + eps\n",
|
||
" tweaked_func_eval = func.evaluate()\n",
|
||
" var.value = original_value\n",
|
||
" derivative = (tweaked_func_eval - base_func_eval) / eps\n",
|
||
" partial_derivatives.append(derivative)\n",
|
||
" return partial_derivatives\n",
|
||
"\n",
|
||
"df_dx, df_dy = gradients(f, [x, y])\n",
|
||
"print(\"df/dx(3,4) =\", df_dx)\n",
|
||
"print(\"df/dy(3,4) =\", df_dy)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"### Symbolic differentiation"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 94,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"df/dx(3,4) = 24.0\n",
|
||
"df/dy(3,4) = 10.0\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"Const.derive = lambda self, var: Const(0)\n",
|
||
"Var.derive = lambda self, var: Const(1) if self is var else Const(0)\n",
|
||
"Add.derive = lambda self, var: Add(self.a.derive(var), self.b.derive(var))\n",
|
||
"Mul.derive = lambda self, var: Add(Mul(self.a, self.b.derive(var)), Mul(self.a.derive(var), self.b))\n",
|
||
"\n",
|
||
"x = Var(3.0, name=\"x\")\n",
|
||
"y = Var(4.0, name=\"y\")\n",
|
||
"f = Add(Mul(Mul(x, x), y), Add(y, Const(2))) # f(x,y) = x²y + y + 2\n",
|
||
"\n",
|
||
"df_dx = f.derive(x) # 2xy\n",
|
||
"df_dy = f.derive(y) # x² + 1\n",
|
||
"print(\"df/dx(3,4) =\", df_dx.evaluate())\n",
|
||
"print(\"df/dy(3,4) =\", df_dy.evaluate())"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"### Automatic differentiation (autodiff) – forward mode"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 95,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"class DualNumber(object):\n",
|
||
" def __init__(self, value=0.0, eps=0.0):\n",
|
||
" self.value = value\n",
|
||
" self.eps = eps\n",
|
||
" def __add__(self, b):\n",
|
||
" return DualNumber(self.value + self.to_dual(b).value,\n",
|
||
" self.eps + self.to_dual(b).eps)\n",
|
||
" def __radd__(self, a):\n",
|
||
" return self.to_dual(a).__add__(self)\n",
|
||
" def __mul__(self, b):\n",
|
||
" return DualNumber(self.value * self.to_dual(b).value,\n",
|
||
" self.eps * self.to_dual(b).value + self.value * self.to_dual(b).eps)\n",
|
||
" def __rmul__(self, a):\n",
|
||
" return self.to_dual(a).__mul__(self)\n",
|
||
" def __str__(self):\n",
|
||
" if self.eps:\n",
|
||
" return \"{:.1f} + {:.1f}ε\".format(self.value, self.eps)\n",
|
||
" else:\n",
|
||
" return \"{:.1f}\".format(self.value)\n",
|
||
" def __repr__(self):\n",
|
||
" return str(self)\n",
|
||
" @classmethod\n",
|
||
" def to_dual(cls, n):\n",
|
||
" if hasattr(n, \"value\"):\n",
|
||
" return n\n",
|
||
" else:\n",
|
||
" return cls(n)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"$3 + (3 + 4 \\epsilon) = 6 + 4\\epsilon$"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 96,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"6.0 + 4.0ε"
|
||
]
|
||
},
|
||
"execution_count": 96,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"3 + DualNumber(3, 4)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"$(3 + 4ε)\\times(5 + 7ε) = 3 \\times 5 + 3 \\times 7ε + 4ε \\times 5 + 4ε \\times 7ε = 15 + 21ε + 20ε + 28ε^2 = 15 + 41ε + 28 \\times 0 = 15 + 41ε$"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 97,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"15.0 + 41.0ε"
|
||
]
|
||
},
|
||
"execution_count": 97,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"DualNumber(3, 4) * DualNumber(5, 7)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 98,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"42.0"
|
||
]
|
||
},
|
||
"execution_count": 98,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"x.value = DualNumber(3.0)\n",
|
||
"y.value = DualNumber(4.0)\n",
|
||
"\n",
|
||
"f.evaluate()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 99,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"x.value = DualNumber(3.0, 1.0) # 3 + ε\n",
|
||
"y.value = DualNumber(4.0) # 4\n",
|
||
"\n",
|
||
"df_dx = f.evaluate().eps\n",
|
||
"\n",
|
||
"x.value = DualNumber(3.0) # 3\n",
|
||
"y.value = DualNumber(4.0, 1.0) # 4 + ε\n",
|
||
"\n",
|
||
"df_dy = f.evaluate().eps"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 100,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"24.0"
|
||
]
|
||
},
|
||
"execution_count": 100,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"df_dx"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 101,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"10.0"
|
||
]
|
||
},
|
||
"execution_count": 101,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"df_dy"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"### Autodiff – Reverse mode"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 102,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"f(x,y) = ((x) * (x)) * (y) + y + 2\n",
|
||
"f(3,4) = 42\n",
|
||
"df_dx = 24.0\n",
|
||
"df_dy = 10.0\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"class Const(object):\n",
|
||
" def __init__(self, value):\n",
|
||
" self.value = value\n",
|
||
" def evaluate(self):\n",
|
||
" return self.value\n",
|
||
" def backpropagate(self, gradient):\n",
|
||
" pass\n",
|
||
" def __str__(self):\n",
|
||
" return str(self.value)\n",
|
||
"\n",
|
||
"class Var(object):\n",
|
||
" def __init__(self, init_value, name):\n",
|
||
" self.value = init_value\n",
|
||
" self.name = name\n",
|
||
" self.gradient = 0\n",
|
||
" def evaluate(self):\n",
|
||
" return self.value\n",
|
||
" def backpropagate(self, gradient):\n",
|
||
" self.gradient += gradient\n",
|
||
" def __str__(self):\n",
|
||
" return self.name\n",
|
||
"\n",
|
||
"class BinaryOperator(object):\n",
|
||
" def __init__(self, a, b):\n",
|
||
" self.a = a\n",
|
||
" self.b = b\n",
|
||
"\n",
|
||
"class Add(BinaryOperator):\n",
|
||
" def evaluate(self):\n",
|
||
" self.value = self.a.evaluate() + self.b.evaluate()\n",
|
||
" return self.value\n",
|
||
" def backpropagate(self, gradient):\n",
|
||
" self.a.backpropagate(gradient)\n",
|
||
" self.b.backpropagate(gradient)\n",
|
||
" def __str__(self):\n",
|
||
" return \"{} + {}\".format(self.a, self.b)\n",
|
||
"\n",
|
||
"class Mul(BinaryOperator):\n",
|
||
" def evaluate(self):\n",
|
||
" self.value = self.a.evaluate() * self.b.evaluate()\n",
|
||
" return self.value\n",
|
||
" def backpropagate(self, gradient):\n",
|
||
" self.a.backpropagate(gradient * self.b.value)\n",
|
||
" self.b.backpropagate(gradient * self.a.value)\n",
|
||
" def __str__(self):\n",
|
||
" return \"({}) * ({})\".format(self.a, self.b)\n",
|
||
"\n",
|
||
"x = Var(3, name=\"x\")\n",
|
||
"y = Var(4, name=\"y\")\n",
|
||
"f = Add(Mul(Mul(x, x), y), Add(y, Const(2))) # f(x,y) = x²y + y + 2\n",
|
||
"\n",
|
||
"result = f.evaluate()\n",
|
||
"f.backpropagate(1.0)\n",
|
||
"\n",
|
||
"print(\"f(x,y) =\", f)\n",
|
||
"print(\"f(3,4) =\", result)\n",
|
||
"print(\"df_dx =\", x.gradient)\n",
|
||
"print(\"df_dy =\", y.gradient)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"### Autodiff – reverse mode (using TensorFlow)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 103,
|
||
"metadata": {
|
||
"collapsed": false,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"(42.0, [24.0, 10.0])"
|
||
]
|
||
},
|
||
"execution_count": 103,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"tf.reset_default_graph()\n",
|
||
"\n",
|
||
"x = tf.Variable(3., name=\"x\")\n",
|
||
"y = tf.Variable(4., name=\"y\")\n",
|
||
"f = x*x*y + y + 2\n",
|
||
"\n",
|
||
"gradients = tf.gradients(f, [x, y])\n",
|
||
"\n",
|
||
"init = tf.global_variables_initializer()\n",
|
||
"\n",
|
||
"with tf.Session() as sess:\n",
|
||
" init.run()\n",
|
||
" f_val, gradients_val = sess.run([f, gradients])\n",
|
||
"\n",
|
||
"f_val, gradients_val"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"# Exercise solutions"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"## 1. to 11."
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {
|
||
"collapsed": true,
|
||
"deletable": true,
|
||
"editable": true
|
||
},
|
||
"source": [
|
||
"See appendix A."
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"## 12. Logistic Regression with Mini-Batch Gradient Descent using TensorFlow"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"First, let's create the moons dataset using Scikit-Learn's `make_moons()` function:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 104,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"from sklearn.datasets import make_moons\n",
|
||
"\n",
|
||
"m = 1000\n",
|
||
"X_moons, y_moons = make_moons(m, noise=0.1)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Let's take a peek at the dataset:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 105,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAAEFCAYAAAAIZiutAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXd4FWXauO85KaQHQiD0YkKL/lSUIqw0FYLUUAQEVhAL\notQAKq4IrisKCPj5qbuf6wKrokICUlNsoIgISBEERBNC7z0hkHLm+f0xJXNOTiCQQBKZ+7rOlZw5\nc955Z87M+7zvUxURwcbGxsbGxoqjtDtgY2NjY1P2sIWDjY2NjU0BbOFgY2NjY1MAWzjY2NjY2BTA\nFg42NjY2NgWwhYONjY2NTQFs4WBjY2NjU4ASFQ6KojynKMomRVEuK4oy9wr7DVEUJU9RlAuKomTo\nf9uWZF9sbGxsbK4f7xJu7zDwGhAD+F9l3x9FxBYINjY2NmWQEhUOIrIUQFGU5kDNkmzbxsbGxubm\nUZo2h6aKopxQFOU3RVFeVhTFtn/Y2NjYlBFKWq1UVL4D7hCR/Yqi3A4sAnKB6aXUHxsbGxsbC6Uy\nWxeRfSKyX/9/J/B3oG9p9MXGxsbGpiCltXLwhOJxo6LYaWNtbGxsrgMR8TiuFoWSdmX1UhTFD/AC\nvBVFqaAoipeH/TorilJV/78x8DKwtLB2RaTMv6ZMmVLqfbD7affR7qfdT+NVXEparfQykAW8AAzS\n//+boii19XiGWvp+DwLbFUXJAFYCCcAbJdwXGxsbG5vrpKRdWV8FXi3k42DLfhOBiSV5bBsbGxub\nksN2Hy0h2rdvX9pdKBJ2P0uO8tBHsPtZ0pSXfhYXpSR0UzcSRVGkrPfRxsbGpqyhKApSDIN0WfJW\nsrGxKWfUq1eP/fv3l3Y3bmnq1q3Lvn37Srxde+VgY2Nz3eiz09Luxi1NYb9BcVcOts3BxsbGxqYA\ntnCwsbGxsSmALRxsbGxsbApgCwcbGxubq3Dw4EFCQkKuaF8JDg6+IYbh0sIWDjY2Nn9K6tWrR0BA\nACEhIVSvXp1hw4aRlZV1XW3Vrl2bCxcuoCiafbdDhw7Mneta7DIjI4N69eoVt9tlBls42NjYlDjp\n+9IZPHowHYZ2YPDowaTvS7/pbSiKwqpVq7hw4QJbtmxh06ZN/OMf/7jmftyq2MLBxsamREnfl07H\nkR1ZELyANfXXsCB4AR1Hdrymwb0k2gBMNVD16tV5+OGH+fXXXzl69Cg9evSgcuXKNGzYkA8//NDc\nf9OmTTRv3pzQ0FCqV6/OhAkTANi/fz8OhwNVVXn55ZdZu3YtI0eOJCQkhNGjRwPgcDjYu3cvGzZs\noHr16i4qqC+++IK77rrL7NObb75JVFQUVapUYcCAAZw7d+6azutmYAsHGxubEmXy7Mmk3ZUGvvoG\nX0i7K43Jsyff1DasHDx4kMTERJo2bcqjjz5KnTp1OHbsGPHx8bz00kusXr0agDFjxjB27FjOnz9P\nWloa/fr1M9swVEr/+Mc/aNOmDe+++y4XLlzgnXfecfm8ZcuWBAUF8e2335rf/eyzzxg8eDAA//M/\n/8Py5ctZu3YtR44coVKlSjz77LPXdV43Els42NjYlCiHLxzOH9QNfOHIhSM3tQ2A2NhYwsLCaNu2\nLR06dOCpp55i3bp1zJgxAx8fH+666y6efPJJPv74YwB8fHxITU3l9OnTBAQE0KJFiyIfy7pSGDBg\nAJ9++img2SISExN59NFHAfjggw94/fXXqV69Oj4+PrzyyiskJCSgquo1nduNxhYONjY2JUrNkJqQ\n47YxB2qE1LipbQAsW7aMM2fOkJ6ezv/+7/9y5MgRwsLCCAgIMPepW7cuhw8fBmDu3Lns2bOHxo0b\n07JlS1atWnVNxzMYOHAgX3zxBbm5uSxZsoR7772XWrW0igX79++nV69ehIWFERYWRnR0ND4+Phw/\nfvy6jnWjsIWDjY1NifJa3GtE/hKZP7jnQOQv
kbwW99pNbQMo4Hpao0YNzpw5w8WLF81tBw4coGbN\nmgBERkby6aefcvLkSZ5//nn69u3LpUuXCrRrqJAKo0mTJtStW5fExEQ+++wzBg4caH5Wp04dkpKS\nOHPmDGfOnOHs2bNcvHiR6tWrX9O53Whs4WBjY1Oi1K9Xn6/e/YpBGYPokN6BQRmD+Ordr6hfr/5N\nbcMTtWrVonXr1kyaNIns7Gy2b9/Of/7zH9MesGDBAk6dOgVAaGgoiqLg5aUVs7QKmoiICPbu3XvF\nYw0cOJB33nmHtWvX8sgjj5jbhw8fzksvvcSBAwcAOHnyJMuXLy/Wed0QSruUXRFK3YmNjU3ZpCw/\nn/Xr15dvvvmmwPbDhw9Lt27dJCwsTKKiouSDDz4wPxs8eLBUrVpVgoOD5Y477pDly5eLiMi+ffvE\n4XCI0+kUEZH169dLw4YNJSwsTMaMGSMiIg6HQ9LS0sy2Dhw4IF5eXtK9e3eX46uqKnPmzJFGjRpJ\nSEiIREVFyd/+9rfrPs/CfgN9+3WPvXZWVhsbm+vGzspa+thZWW1uOCLCjBdfLFMPe1nsk43NrYAt\nHGxMUhYv5uj77/PlkiWl3RWTstgnG5tbAVs43GIUNhMXEVLeeovZGRkkz5xZIjP14s76b0SfbGxs\nioYtHG4xCpuJpyxeTOcdO1CAmB07SmSmXtxZ/43ok42NTdGwhcMtRGEzcWN7Jz1jZUxWVrFn6sWd\n9d+IPtnY2BQdWzjcQhQ2E7duB0pkpm60CXBp82ZSFi8u8ndFhBG9exOzfXux+iQiTH/hBabbBm0b\nm2vGdmW9RRAR4lq1YvaGDSiAAHEtWzJ7/Xreiosjc8sWl6hPESHonnuYOGdOsY6VAiQDR6Oi+Pz3\n368aWQqQnJDA8oEDuRQVhZKZSe369U13vWvpU3JCAvP++leqKgrdPv6YmD59rvlcbK6M7cpa+two\nV9ZSD3K72osyHGRTnkiKj5fkgAARMF9JAQGSnJBQIu2rqirTX3hBVFU1j6WCjAVRQYYqiiTFxxep\nnbEtW2rfadBAxgQFFeijqqry5gsvyJvPPy+qqhbazpgWLWSMfvwxLVsWuq/N9WM/n6VPYb8BxQyC\n875uqWJTrtixbh2ZzZqx3n118MMPJTKjNo3PzZubx/r81Cl6//YbiqoSqygsmz+fzn37XrUdQ8UV\nm5ZGkqqSNGMGnXr3BmDmpEn8v3vv5dA773BChJTmzdn+889MfOMNl1VJyuLFRGzbxj1oKqmHtm3j\nyyVL7NWDTanQpUsXHn30Uf7617+WdleKTnEky814Yc9MioV1Rn8jj2HM9sfqM3TrNtFn72OvMnv3\n9J1YkNd9fCQ5IUGS4uNlTFCQDG3QQFsNgAyJiiqwunBfNYi9erhhlOXns27duhIRESFZWVnmtg8/\n/FDat29/Q487depU+etf/3pDj2GlsN+AYq4cbIP0n5zkhAS+nj37mgzCRUVEi2NITkgoYOi+HiO3\np+88BazMzWXV9Okkz5xJTGYmsampKEBn4LbUVDpnZrp4Mhmrhof1Noy2jNXDjcC4FkYfbErmmhSn\nDUVRcDqdvP322wW221wdWzj8iRER5r/0EtG5ucybNOmqD9i1Pogpixdz5L33WPS3vxVwOd3+ww/8\n2KwZU9u1M1/rmzVj+w8/FHrsD6ZPZ9299zI6OprHgdFAPHAnsGPTJqpu2UIK0EPvXwxwBs3g3Wn7\ndnPg37FuHZuqVOHT0FCG6q8hISEsCQ8vcPySGtTtSO6ClMQ1KW4bEydOZNasWVy4cKHAZ7/99hud\nOnWicuXKNGnShPj4ePOzM2fO0L17d0JDQ2nZsiWTJ0+mTZs25udjx46lTp06hIaG0rx5c37Q76uU\nlBSmTZvGwoULCQ4OpmnTpgB06NCBuXPnkpOTQ6VKldi1a5fZ1qlTpwgICDCzwa5cuZKmTZtSqVIl\n7r//fnbo
Xn83neIsO27GizK8bC3rJC5aJEMVpcgG4aT4eBkbHFwkI7WhAkoEWeZwSHEM3aqqyvCe\nPWWMfuzpY8ZIn+BgcVoM2j1AeoKstBxHQJJApul/r6a2Ku45X6n/7mq1W4XCns+SuCbFbaNevXry\nzTffSJ8+feTll18WEU2t1KFDB7l48aLUrl1b/vvf/4qqqrJ161YJDw+XXbt2iYhI//795dFHH5XL\nly/Lrl27pHbt2tKmTRuz7QULFsjZs2fF6XTK7NmzpVq1apKdnS0intVK7du3l//85z8iIvLEE0+Y\n/RERee+99+Thhx8WEZHNmzdL1apVZdOmTaKqqnz00UdSr149ycnJKfQ8C/sNKKZaqdQH/6t20BYO\n14WqqtIvKkqS9EE0EaRfVNRVvXveBBnTosVVbQPDe/aUpIAAmQEyUlFkVHS0TGnXTqa0ayevtG0r\nM8aOLXJfExctkliHw7QLPN2zpyT5+0uSPugb/W8N8jjIYyD9QQbqf2NBZlynUPI0+FyrncbqCVaS\nHmDlgcKez5K4JsVtwxAOv/76q1SsWFFOnTplCoeFCxdK27ZtXfYfPny4/P3vfxen0yk+Pj7yxx9/\nmJ+9/PLLLsLBnUqVKsn27dtF5OrC4euvv5bbbrvN/Owvf/mLfPLJJyIiMmLECHnllVdcvtuoUSP5\n/vvvCz32jRIOtlrpT0pyQgIBaWnE6O87AwFpaYXaHpITEtj/888cAfb//PMVbRTJCQkcX7GCmKws\nJgLviOAVHMyU1auZumYNr373XZFjEUQ01ddwVTXtAqdWrODzunX5T3CwS/+jAGnShPrt2pFdowbH\nHQ4qAZf9/Mhs25YfmzVj7ptvXpNarLCgwKKqMkTsSG53SuKalOR1vf322+nWrRtvvPGGuW3//v38\n9NNPZqnOSpUq8emnn3L8+HFOnjxJXl6eWdYToHbt2i5tzpo1i+joaCpVqkSlSpW4cOGCqRa6Gg88\n8ACXL19m06ZNHDhwgF9++YXY2FizX7NmzXLp16FDhzhy5NpqZ5cEtnD4k7J8/nx6KYqLQdZwJ3VH\nRJg/aRLVVJVqQDVVLdRG4T6YG2132rGDEb17X/HhFSmo33cXYl2ys8lQVbJyc3nC6XQ5xgAgKzeX\nKatX8/Tbb1NbhPeBatnZtBo1itajRlFjzx6Pg7r7sQsbfFRVvWLaD/d2bkR0eXmnJK5JSV/XqVOn\n8u9//9usFV2nTh3at2/vUqrzwoULvPvuu1SpUgUfHx8OHTpkfv/gwYPm/2vXrmXGjBkkJCRw9uxZ\nzp49S0hIiHlPXM3grSgK/fr149NPP+XTTz+lW7duBAYGApoQ+tvf/ubSr8zMTPr3739d510c7DiH\nPym3NWzI5sxMtrjFNdzWoEGBfZMTEpC0NN4D+gBLgP6pqdoD6haXkLJ4MeH79rEeSFQUaNKEsCpV\nOH3yJAdXrbpiLIE1FiKmTx9T0DwuYg4CXwKNgD1797KwZk0SL10izBjQgfB9+0hZvNjle/1EmDdp\nEtXDwpiTkUHczJl06t27QNzDkfffZ8SePfxT96aK2b6dmcBE8gefNydO5NLmzUD+YGQ9H/dzuNHx\nI+WRkrgmJX1dIyMj6d+/P++88w533nknXbt25YUXXuCTTz5hwIABiAi//PILwcHBNGrUiN69e5sC\nZf/+/Xz00UfUrVsXgMzMTHx8fKhcuTI5OTm8+eabZGRkmMeKiIjg66+/RkQKFRSPPvoosbGxhIeH\n8/rrr5vbn3rqKXr37s2DDz5IixYtuHjxIt999x3t2rUzBchNozg6KfcX8BywCbgMzL3KvuOAo8BZ\n4EPAp5D9CtW12ZQMw7t0kaW6ft8w+H4BMrxrV5f9CotdcDqd5vYxLVt6jFz2pN9Pio+XZ7295RWQ\nV3QbQh+93X4gj0REyNBateSVtm1d7BkjHn7YNLQb/RgCMs3Hx6N+2nrsWI
dDkuLjZcbYsTK0SRMZ\n4eUlj+v2kslt2shDoaEyBiTZQ2xGUQ2kNyO2pKxQlp9P9zKhBw8eFH9/f3nggQdEROT333+Xrl27\nSpUqVSQ8PFwefPBB+eWXX0RE5OTJk9K1a1cJDQ2VFi1ayIsvvigPPfSQiIg4nU554oknJCQkRGrU\nqCEzZ850Odbp06fl/vvvl0qVKsm9994rIiIdOnQwbQ4GUVFREh4eLrm5uS7bU1JSpHnz5lKpUiWp\nUaOG9OvXTzIzMws9z8J+A8qSQRqIBXoA711JOKB5IR4FGgOhwGpgWiH7FnpRbIqPqqoytEEDF88g\nY8Ad2qCBywBXWAqOaePHm9tXVKgg/fz8ChgPPRkXZ4wdK5PbtpWBQUHSFuRVfVA2vJD+4e0trfTB\n3MozXbrIUkVx6ccSkOH6/06QjtWryxu6kEqKj5ck/diGYd7pdMqYli3NfZ1Op4t3l3EtrIKmqAbS\nkvCAKi/cKs/nCy+8IEOHDi3tbnikXAgHs1F47SrCYQHwD8v7B4CjhexbrAtnUziG19Fyb29JsgzM\nxmuZt7fLADdj7FiXWbwx2+5XrZprJLKbx5OnFccYfcUxPDZWXvfxkYfRXFWt+3RRFGkO8khkpIuQ\nmj5mjAwMDpZXQKboq45+IM9aBMsIkL4+PtpA7XbsoYoir8fFycoKFSQJ5DmQNyZMkGe6dDHdcpc5\nHPJ4dLTpeVXUiO9bza31z/p8/vbbb6b30YYNGyQ8PFyWL19eyr3yzJ9NOGwDHrG8rww4gUoe9i3e\nlbvFuBaVRlJ8vHTx8ZGR0dHSwcdH/gpyH8ijfn7yeM2aRXJJ9biaAJnm61tgxq2CTNcH1hUVKsi0\n8eNlhI+PPOjnJ4kgS92E0xJ90H8IXFYP1pWAMVA/DfJAhQoyuU0b6RMcbAqpfpGRLvsaq4f2FSpI\nnt6+ChIbGCijmjWTN/X37oN7URMX3mpurX/W53PTpk0SFRUlgYGBUr9+fZk+fXppd6lQbpRwuCEp\nuxVFeQ2oKSLDCvk8FXhWRL7U33sDOUA9ETngtq/ciD7+WUlOSCB52DAuPfAAtzVuzPOWhHQiwsxJ\nk5iou/QZabWHNWhAv0OHUC5dYgWwPiCAnzMycDgKd2Yz2uLyZS5u3QpA2tatROqGuUDgqFtK8F1p\naYQdPkwOkB0UhDMoiM+PHWOYotBEhIOKQmbNmuQeOcJFhwPfvDzuBXYClyIjWfjHHyiKwsxx4zjw\n5Zcou3cTJsJ+wA846XBw77hx3Pnuu3TNziYJmOtw4F2xIuFnzlAZLaI6Q9+/DnA7mh50JfCholBf\nhM5oes/kgACUjz4ipk8fZo4bV2ha8wmzZzNz0iQmTJvG+NatXdKiD6henc8OHbritSzP2Cm7S58b\nlbK7tLyVMoEQy/sQtGcpw9POU6dONf9v37497du3v4FdK7+IaO6ZczIyeGDZMny//NL0qgFXTxsR\nMV0Fe6amslQEf6AbkJeVxfSJE5k0a1ahxzLa6jxvHjFvv01SfDzHBw5kCvAWmgdQiu7tM3HOHFRV\npXtAAIuAOCA8O5u7c3NNbyMHMFGE3mfPkqCqtFNVXkaLb0gCZusxGp379mXinDnaYB0ejgCZW7cy\nNyODvgEBbP3kE17Mzgb9uymqyu/nz7MAzW9bgPuBqsAB4EV9207ggghvAQOCg1l3991s3beP+7//\nnl82bWLi7NmFep4kJyRw9P33mZ6XR4yb++XAo0eZ8cILvDhz5nX8ojY2RWfNmjWsWbOm5BoszrKj\nsBdFszm8Znn/AHCkkH2vcZF162KoW1SQobhmIrXqwse0bKnZBCxqlqfR0lAYhtgYb29xOp0ej+Mp\nmvqZLl3kGS8v6VqrlukBZFVL/WPcOFNttAotHYaL/t5QN4G8jhYFbf38MZCnu3TxeM6GGmear68s\n1z2WrCqkf7jZU1aAdCPfMytJP/6rIG
+Qb2QfGxws08aPlzFBQTI8NtZFVed0OqVXy5aSl5dnXtd+\n1arJc02ayCCHQ0ZFR8ug4GCZDNIlIuJPa3uwn8/Sp7DfgLJkcwC80Fbs04CPgAqAl4f9YoAjQBOg\nEvAN8HohbZbA5SsfFMcF0jr4W9NOrKhQwUx3bfUoesPX1xx4jdTWQy3f+wJk2vjxHo+VFB8v03x9\nZaw+IFuNvrGBgZrnk0Vf73Q6pbO3tznYv+7BvvCFosiDIJNB2hRif2jpNsi6G4mng/Tw8pLn0AzV\nz4H08vKSB318pFfNmtLXz09G6ed5n36sV8h3n+0O8jBInn4exvmsAnnGzWtqWlycPAPyVPfu5nVN\nDAjQUpSgeUQl3QK2h1vp+SyrlBfhMAVQ0YzLxusVoDaayqiWZd+xwDHgHHacg4gUzwVy1cKF8qTD\n4dEl1X2loIL0CQ6WV9q2laHR0fKFRSAkWfbpUbGiR28clwpruBp9jTasA+LrcXHmYK+CdNEH5liQ\nASB9FUVifXxMb6N++kphIMhAf38ZEhoqXfz95WmHw+XaJC5aJMN1oaOCvAkF4h8MIWV1U3XP2bTc\ncv5Poq2gVurffwrkEVxde51OpykEH9ZzQpnXTBcuhfXjz0bdunUFsF+l+Kpbt67H36ZMCYcb8bpV\nhMO1uEB6WmH0btpUngZ50DLYGa9lPj7mSsF4JQYEeHTztAoWd1dWkfxVg6GmSSR/8DTaGAouq4fO\n9erJo2hJ8mLIXxUsQRukx6ANqpNBRlmS+HlyI+2nxySIiKnKejw6WoZGR0svRSkQ/2CcpzUJ4Qy0\nFcOzaILKvbhQR/1/o29d9fdL9dXDtLg4WYlr0KDxWoomUJLctv+ZVw82ZZPiCgc7fUYZwVMSuKKm\noVBVFfX33/kXmrE1ISSExRYPpSynE2fFikyNiuL0iRMc2r2bWvXqkTdvHrFuBtR2QBegQXQ0lcLD\nXdIViGg1FxRfX17MyTH7MwjXojqxaGkwYnbsIGXxYhpHRNBp3z5SAH807yD0/eLQdIzbgHuATiIM\nOHuWt3fsMD18rMWEDAPvCzNm4Hf6NLOdTsYGBrL70CEeEGGmjw+b77uPc6dPc+a336hUrx7qvHlm\nyo+f9GMfAM6jFROy9v0JYBnalGwe8DkwDLQ6EqrKsEmTOHvkCC+iGd4zgHkOB9UbNeLk7t00RDNu\nV8M1vYhI0VI/iOR7lNlFaWxKE1s4lAFENC+j2ZYkcJ7yAwGoqsrs0aNJzshgvL7P9IkTefLiRRRg\nEvDr00979I4REQY0bEhd4GhODvc2aMDCHTtYWbcuym+/UUmE34AOwNHgYKauWVMgP1HNnTupaUmI\n9ytwQFH4OCCAahcvEg6cBH4PC6P1HXdwat48um/fTgqaEMjFdTA2Eu4dR/NK6oQmAKY//zxe3t5M\nmDbNvDYC/Ab89P77JDVvbgqMS1u20NDpRIA7cnPxb96cjHXr+FhViQsOpnqDBlzMzHRx6d2/fj05\nubl8BSQAkXp/VGAXcB/wN/KF3TI076eee/eyU086OFHv+1JV5d3LlxntcNBDVWnmcLC0cWNqh4cT\ndM89jJ81i76tW5NwBe8v6zW2Cn4bm1KjOMuOm/HiFlArFTXASkQzhI5E875JCgiQxPh403hqqkYC\nAz16GrkX/3k9Ls6lJrOLLv4K+YkMe8WVoqSNnEu9mjWT+ytVki90dc6zugrJMBq/om9PQvMW6uzn\npxmlQ0NljO4tZAaV6Wqvf4B01Y/n1FVBhkHZieZplejvX+h1TIqPlyR/fxmrq8XcI8MXe1A39dXP\nuW9goDzg7S1tfH3lsZAQ6e/vr6nLfH0LtTEYxus3Jky4otPBrRZdbXNjoZhqpVIf/K/awVtAOHhK\nS+EpOtkwhBq2ASfII9WqyQq3wW2FPhBZcS/+swykrY+PtAN5wrARuA2IxgA//YUXJHHRokIjf5Pi\n42
VlhQoFhNu08ePlOX9/6YWrobtLRIQ8XrOmPBYSIkNCQ2VIaKg8FhoqD4eFybO6gdkQYv2qVZOX\n27SR3opiehX1RHO9FfKNx8Z5J6PZMhILGaiN6z20SRNZ7nCY9ocpIKNARoK08GBLsApLw3HAsNlc\nqRqe9TeLDQyUVQsXFup0cKtFV9vcWIorHG5IhHRJYkdI5/PG+PHcOXs2XdHqJivAJUXhQz8/wn19\nzf1EhEv169Osc2dTd50UH8+i/v2ZK2JG77ZDcyf7f0BqWBijzpyhp+V4yQEBbB0xguP/93+cr16d\nuX/8YX43To9+VhSFGWPH8svcuURlZJAWHExk06aoqsrPW7bQPiuLI3pf5+h/k/z9WRYTwz+XLHFR\n9RgR2yn68TsDSQEBfPHAA9RduZJmaGqopWiBMtWaNCEtNZVVejCdoNkwZgG9gC/QAt+skc4GRsTz\n2VOnQI+0FnSVGNCa/KC5PUCV6GjqdOrEhNmzXSLL+x86xI5LlzigKCi6fcE4n6B77iHP4TB/s5XA\nv6pVY8WxYy7Xz/38PV1jG5trpbgR0qW+Mrjai1tg5VAUVFUtoLrpGxwsk9u0MVcYVpWFu1vsM126\nyBfk5zZyogWCGS6pHR0OU9VjeAxNbttWukREaHmP3LyA3LOVJll8/ZMTEmRaXJy8qq9GpoH8HczU\n2EObNJERPj4e3VI9rWC6eHvLaLdtfYKDZXiXLh7zOiWjuaV2rVmzwCrMCF4z1G7Gqs1Yydzn5VUg\nxmKpwyHP6OnLrbP7pQ6Hy4rIfYViXTUY+wzR/y7TkwIaXItq0camKFDMlYNtkC4npCxezLALF1w9\na5xOlDFjCqTHSGnWzKxmNmDUKDr26kX9Bg34l58f9S9f5iH9+x2AZvr/z6gqAWizdREhLjiYViNH\n8pehQ/kF2CzCFjBXBgBBP/xAp969SXnrLWZlZTEDmJCVxbjp09m/cyf10FYL49BmwiFBQbzy7bda\n/qHcXBej+/L581FE6FmrFk8fOYKiquZ5Ds/LYxeuhuyhOTkkKQo/NmvGf3fsQDl7Fh/9OBlADbSi\nLFNWr3aZeU+fOJGIDRvMlBbu5UxnjhvHli1b2OqhSJKIq+NAD1U1va08eZlZHQWMfvdH8+TqnpvL\nsJdeIqZPHxRFsYsG2ZQ5bLVSOeFqid9mTJrE0dWrmbNxIwOiohi8fz/dc3NZBuyeMIG7WrQw1Uqx\nQDZaMQ1D1SNog7ip+gkIYH6NGnyemlpAzeOwqGiSExJgyBDIyiIZrcpTnsNBfVU11UDJwFYAX18Y\nNYp7/vkIExwTAAAgAElEQVRPrSynru7p1Ls34+67j+obN/Jd1ao0a9wYh36eB1JTuXDyJOeAWv7+\n5nl7BQfT5JFH8vMs6dfmzMmTsHs3lUQ46e2N2q2bqb5SVZU+ISEsuXiR3oGBLL5w4ZoS4iUnJKAM\nGUKMLhwAluueSXUMd9V77jEFTv+mTfHbuxdFUbick0P4pUtUBoLQPJ2WORxUWLiwQLU9G5uSoLhq\nJVs4/AlITkhg1WOPUdPp5IWcHB5XFOZZbAu9AwPxqVaNYWlpdAaWAyuAvuS7koKmo18RHU3t8HB+\n37mTsHPneNfpJA6YDYXaG7Z8/DHZZ87wtN7Gr4pCDRHiLd/p4XDgFRFBBRE+P3bMpa1O48ezbfBg\nTuTkUNXXl3s+/fS6ZsviQW/f2+FguD4AW202K4FfJ0wo4PIrUnicgScBrYqwJyuLzzduNLNjevq+\nJxvHaUVB7dKF91auvOZztbG5GrbN4RZHVVUtuZ5uP0ikYHTucgomuusE0hukr/56RFGkb2CgzBg7\nVpLi42WEj48MbdJEhkZHy/JCPHFENFtBe/24hmuotaKb6UGl129w16sb+Yis6TishYKuBU96e6Py\nW15eXpFcfq22GsOGY3hseeqTu23nSilQPBY9atFC3rxFSora3Fyw
bQ63NimLF9Nx2zYUNLXPCrTK\nSfOBRmgz933AUFx19iPQ1D3/QlcpNW9O9Q4dGK/XJHgvN5e4kBCq33cfP4eHs9mDLrxT797MmzSJ\nevr2zsB2YD2w09eXz9zUQBe/+YbLbnr10ydPov72Gw/r/eoMbN227YoR4oVh1dufPnGC07t3EwWE\n79vHiNjYAvr/Jy5edEmnLYZNISODuJkzUVXVTMV94oMPCgSmue/fsVcvl/fuQYzWKHijD1W3biV9\n82ZSmjWjc9++V1y52NjcVIojWW7GC3vlYOIeQOVpJtrN4ZAhaHEMxuzZCD7rg+bHPxAt4d0Myz7T\nfH1lpL+/a9DZVbxlkuLjpbfDIUs9eBh5StrnieljxpiV26yeSNPHjCnWdTJWI8l6m+0CAlziKoaE\nhspjISHS7+67Xc7HOPeVFSpIc71anBlb4uaN5B6XcLVrZ3hGvdK2rXSrXVsmt2kj3f38XJL6WWMo\nrjdDr42NSPFXDqU++F+1g7eQcHB3s3THkwojyd/fdE81AsGeRQvsegXkUZCRTZpIjJ+fjCC/LGYX\nh0Mmt2ljunoaA3RsYKDkobm8uqfetmIIpuloifaswkjwnLSvsHMqaRdOayS4IbSu1qa7oH0SLYrb\nGmRnbcN9f6dFiBTm2mo957HBwTItLk6WKopMR0tZnmhJhNgvKkrGXGeGXhsbkeILhz9n7cJyitXN\nUkSY8eKLhoBEJF+FkayrPD6YPp13/f1J9/LiiehohtWsSXxICEdr1kRp144D0dGEenlxyM+P6tnZ\nvAcE6McaoaoEtGzJ1DVraDVqFE/p+ZKeuHiRGcBR4Cvy3TPdMVQkzwPRwGZgtKIwOjqaqe3asbl1\na7b/8MNVz3nHunX82KwZU9u1M1/rmzUr0ncLY/n8+fRSFM1d1+Hgiejoq7ZpVfk4gcPAO8BGtESE\noOW8Sp45U/stLPsL8CzatXPJG+V27USE6S++SPLMmczKyOCH997DV4SjgJ8I80aPJmb7dgAC0tKY\no//Wxj1gY3NTKY5kuRkv/uQrB0NVZDWYxgYGyqpFi1wMo8NjYyVJzxeU6O8vsc2by3N+fvKXQmap\n1pltm9BQM7DLyCVkpKZwOp0FVFNd9LoQVwqyK2rKj5uNJ1XblVY/ns6nTVhYoSm5EwMCZHhsrEwf\nM8bcf2h0tDytKNLV318er1lThtaqZaqNrNcjKT5eRvr7yxu+vpKIFqhnrGzGkp/C3Jrjyj1YrrD+\n29i4g61WKt8YKoanunUzB6LlII9Ur24ObKsWLpRnHA5J1NU9K8n3PjIibhPdVB6GMFEpWG/Bqmrx\n5EFk5CjylEPpeosRuXOjBrZrUVN5Oh9r1boZaDUmeqNVmHu8Zk0ZGR0tXSzR3e7J8hIXLZJ+fn4y\n0t+/QLvGb/oIyDNoXmLWQkN99N/X3X5j2COK0n8bGwNbOJRjjIElD9eKYsas0pg5ttfrQvdDc/V8\nyDKztK4ExrRsKW8+/7wpTJL0zwvYAxwOs8Zzv7vvNmfAr7Rta9Y9nuE26y7pjKE3amArbEUzfcyY\nAsZ80wXYLYOq+2rhdbREf4mLFhW4BlZhlBgQII9ERua75bq1a6zelqJlkjUSCVqdCbpWrFjAdXip\nW4lSo/92BlebK2ELh3KMMbBYjZ7u1dhUtJKZTn2V4ERLTe1pJbCiQgXp5+dnprMeCzICzUNpCshI\nPWdSYeqfK826SzJjaGkMbJ6M+UYmWaPOtohIv7vvNr2a+vv7m15eKlqacGtdaE+V9LpaBPcX+qDu\nKcdSJw9V65ICAuSZrl3llbZtZVR0tIxSFPN3e7ZLFxfhZmdwtbkatnAoYxRVXeJS+lIXAP3Q6gi4\nJ35LRPOaSdRnsAUSw6GVB+3n5yfTyVc1LXU45EHdVVJFq7F8pQCzK826i6rHLwo3e2BzF0ZOp9Nc\nNZjBaIXYbKwrs8coqO4x7EDm
Ng/7vO5hNfIFSJ9ateTx2rVdVm5GWVT3/lm9lzx9bq8ebNyxhUMZ\no6jqEo+zdDQ99HNoGVP764PNZF1oJIK0Q3NP7YvmqmrUIOiFVpdBxVXVZLh0GoVypvn6XrMPfUm6\nm16Lwbik8BSP4F5/wrp6ML5jFAQyrmmB2tze3jK0SROZ0q6djIqOlr4UVOEtBmkbGiqTQQaRXzti\nMkjbgAAZHRgow2NjXaKw3etjWH9Hw67hqX6GvXqwsVJc4WBHSJcgInLFCFkr1mjeA6mpODMyEODi\npUt0zc0lETgBHAQErdZxCnA3WnK8ocAGb28q5+VxCTgL1EHL+NkZra5BJyBWURjWuDGZhw6xKCOD\nRypUYP/cufitXcuIPXtcaioURklmDPUUJXy1mtnFwfxNLCVY//eTT/jVx4eN2dmmK2qqry93rV1r\n9mHHunXsqleP3nv2oKgqK9Gu8UzgriZNqFy1KiJCtJ5ob+a4cRw8f555R47wpQjhaJHp2UD4+fO0\nBv6Cay6rpVlZ7AIOLl/OmxMmsObdd7lTd7n9xceHxdnZ5AQF4VO7Nr327EERIWbHDlbMn885X182\nZmebWXIPpqfT2NJ/G5viYifeK0GsWTutGUeLmg4hOSEBHnuM5EuXzFTX59BiE9LQBpeW6AVvFIXZ\nISF8d/48ycAiYC6YSfJWWOoYnwgKInbNGmKyssxsq0NTU1nucNDzJmcFvVJ2Wff02SWBp0yqKytU\n4FcRXszJyd/PrSCQiDCgRQsaBQRw9uRJ9u3ezbNohYaUrl35l4dkedaCSpCf5bY7cLFCBepUqEB2\nbi7hly4RDpxCq8G9Gmjn7c09eXkcjYpi6LRpZiLCcG9vNgYHs/TsWVOQGUWGOl+6ZBZkOvHBB3Se\nN6/YwkHETt/xZ8FOvFdGKExdkmiJV7ga08eMkbaVK8sSQ9WBlsSuqaJId/IL87irGp6xqDO+UBTT\nE8nQX7v3a6iimG0V5ib5Z8GTHeXxmjWll14IqLAYDWsai35RUWZiwcKumXsZVqPokPE7tsfV28lq\no3CSb88YqiguHk8P4cHG5FZkKDYw8IrR7NeC7R775wHb5lA2SFy0SJb7+Lg8xEbG0aLq1RMXLZJY\ni0urkaW0na+vTKNgptMvFEWGNmkifd1yE1mP5clesALkDetAU0iQVWmxN32vDBo1SNoPaS+DRg2S\nvel7b+rxrQJ1SFSUDFUU045T2DVLio+XZ729zbQlfd0FOciDVaua3k7uzgZJlt/GcFVW0RwNJoP0\n9/MzvZhGKopLXqzC4lKKc962gbv8U1zhYKuVSojnunblXHIylRs3Jkwv/LJu504anjvH+06nxzrG\nVkSEAQ0bMjg1le6W7SvR1EUXgKqAL5AL4O9PhbAwKtx1l6kyMrAey1pH4PTu3fiKkAPcBTxPvppi\n7p49ZUKNkL4vnY4jO5J2V5p2sjkQ+UskX737FfXr1b8pfbCqov4O3AN8g2tNC/drNnPcOC5s3sz3\n27ZRv2ZNev/+Oz30anYAScDfgWbR0Vw8f55zR45QQ4QwtNQjKy1tDwUGWN7vABp5eeG3cCHbf/jB\nVMuJCGlbt3JbRgbBwASKV3fak1rUtmGUX4qrVrJzK5UAIoLv6dN8oqp4BQczZfVqWo4cSeSZM/Rw\nOgHXvDwirnmTQDPUhqen8xEwBZiq/52LNkBUABagpeL+BKh655385+BBbmvY8Iq5iSbOmcPUNWvw\nCg7mExFO+/nRX1F4Xj+uAvRKT/eYP6k0mDx7cr5gAPCFtLvSmDx78k05vohmwO6kC1s/4F20kqpW\nI7r7NZs4Zw4BzZsTnZFBZkAAXzVuzGhFYSrab/kT0BQ4kZODz7334qXnfjoIPO3eNrAMzAp8RwFf\np5NZo0YxftYsXv3uO6asXs3xsDAG5+Xxd7TKcp7yOV3veVvvV5tbE9tbqQSweuDE7NhByuLFzB
89\nmoUiHmsMiwhH33+fL5s3p1Pv3maJz25OJw5cPVpWos0c7wGPHj5FMeJa+/dkXh5fNG7MlqpVzc9F\nyk6t4sMXDmsFKaz4wpELR27K8d29qZ4H9gKf+vuzxNcXEa02Re3ISJdrpqoqG//v/1gC9N6zh1pP\nPMEh4Ixe9U0BqgCydy870tLIURSa6cdYBGwhf6Ww0+HgXGgoo8+d40sRZqOtJBoePWrWn0hZvBjH\nqlW8ERLC+ubNzZWCqqr8+uabV/SUK8p532gvMpuyjy0ciokx47K6Sg6YNImBeinMdsCwJk2oq7s+\nBq5dy9GffmJWRgadR43Cqaoceucdajqd/ApkAquA02ieSpWBVDTVw0f+/nj7+HgcnIrav+55eawO\nCWHK6tVlQo3kTs2QmpBD/soBIAdqhNS4Kcf35LYbIULkVbyppk+caBYTeuLiRX719aV2x478lpFB\npfr1tWstwoWtW6makYFDhENADeAxXCcEyX5+LGvdmi/27qX3nj2gqvgB/wsM+OQT1OnTSXnrLd7L\nzaX32bO0GjXKLBQ0olcv6nz77TUP6iXprnwrkb4vncmzJ3P4wmFqhtTktbjXbpr684ZTHIPFzXhR\nxg3S7gZfFWSI7kVkRCUPiYpyMRAn+fvLcLS6C0Yytj7BwfJK27YytEYN6aYbpWPQgt+GoqXB6BcV\ndcWSlUXpX1kPmNqbvlciu0YKLyFMRXgJiewaWSSjdGkZsj2lx4gNDJRVCxd6rL/xCPmeZw/7+Mhf\nQ0Lkr8HB0jYoyCwGZI1Kt2ZpNcqtGobtZfp9oaqqi0ODbVC+8RTnXr0ZYBukSxd3v/39J04Qu3s3\nPdHKcKYAlYAW8fHE9OlDXKtWdNywgX+j6ZpzgJ5AUkAAyzp1oufAgShDhxKTlcUzwB404/EcoD0Q\nM24cpz78sMg+7Tc7rqAkMGZjRy4coUZIjSLNxkrTkP3G+PHcOXs2XS3bVgLvRESQcvw4cS1bMuvH\nHxnfujWdNmxgG5qaMAYt5sJnwQJthTdsmPm7JickgG4cNmJXFEAF+gQGskRfpQjQH3h80SLmv/QS\nj6em0hntfnLYBuUbyuDRg1kQvKDAKndQxiA+eeeTUuuXQXEN0rZwKGGe69oVR1ISYSLsQtMn9wJq\ndOlCj8cfh8ceY96lSwxFi2aerX9vBrBeUagYGcm81FRS0IRLKtAczSjpDWwICGBjVlaxvFL+jJTm\ng9q/aVP89u51+S1ycnI4e/kySSJmoFrT998n+dIlQBP2xuA+rmVLjuzfz8Jjx8zf9a24OPanpJC6\nezcj0CYQoN0TTnARRMuAf0dEUOXECebq9g2heJ5LNlenw9AOrKm/puD29A58O//bm98hN2xvpTJG\nvYYNCWvThgPR0TzmcKAAQwBVUdixbh2f162Lv75vZ7QBIgU4BjQXITI1FfRtc9CMmBuA42g65+pZ\nWQjX75VSHknfl87g0YPpMLQDg0cPJn1feoF9Dl847CoY4KYZshdu3cp/z59n/rlzzD93jnlnzxJx\n550k6pOamKwsti1YwOd161IZeBhX54J2mzbx/3QblfG7Tpg9m4t5eTwAfKQopgfbB8B8ReERYLS+\n7Wsg+/hx+ogwE00wKECnW+geKQ1M+5iVQuxjRbmHyxr2yuEGICLEtWrF7A0bXPzi//PbbzzaqBGP\np6ayA8hAc2U8jTb764l2r3kDw9HSLiwD3kbzYe+Kpq74FXiBW2NmWFR1UVla4ntK2ZEcEMCKDh04\num0bwZmZIMLlzEz8VJULQCvy407iWrakY1wceY8+iq+qFlwpeHnxLyBRL+0qQGs054U6wGUgSFGQ\nJk2o06lTmVUflneKem+WlsrTViuVQTwNDsscDnaNHcuhd94hPC+Ps8AfaH70Q4BYNJWBoNUuTiRf\n7WAIBYf+fgDwOZByCwQqFXXQLwvBcwZFsfMY94hkZRUY/J
MCAngvJIQVx47xFtok4nc/Pxq1aIGi\nKOxMS2PQoUPEWr7zBfCBjw+Jubl08fWlWYsWBN97L/j52XmSisD1eh0VxT5WWhOX4gqHEnVlVRSl\nElrcVkfgJPCSiHzmYb8pwN/QJjnGGHiniOwryf6UFoZb4KpTp0D3cz8twh9LltCydWsAjmzZQoPM\nTI6Rr0+OQRv4R+KqdngW+Ir8eImBDgdP6En1yrur4dUeyqLGPdSvV5+v3v3K9UF9t3TcCt1n6iJa\nMrsJb7xhvjfci43B/zMgEkgLDqZirVrU2r0bBS24DWBZTg4VRo0ipk8fulWvzhZggZ8fvjk5OH19\n8bt8meG5uSjA8JwcfrvvPu5s2ZKUYcP4snnzcn2P3GhcJhaVgRz4aeRPRZpY1K9X/6oDfGnH7lwv\nJR3n8D7agF8FzSFjlaIo20Rkt4d9PxeRx0r4+GWCiXPm5KuWDAOhCHEREUxds4aUxYvxGzSIdGAw\nroKgIjDP15fFVaoQFBrKqV278EEbQNYDZxQFadyYJm7qAmMAKiuzRPdB/+l+T/PBog9chABw1Yfy\nWuIeivKglgYpixebQY8xffq4BJwZg38y2u9/X14efz9zhnvR7AkGp0VQ588HYHRGBh2BXpcvMwzI\nyc5mDfmTjJ7Af//5T45+9x1vFyF9/K3AlSYhV4rKd7+frmeFUdqxO9dLiamVFEUJQEt5Hy0iafq2\nj4BDIvKS275TgMiiCIeypFa6lgG4ML0z//2vlpZg40ZSgXvRInAFiNL/bo6IYOXRo7wVF0fGli0c\nSk+ndn3tBty8dy++1aqxeONGlz4kJyS4uEKWJgVUPCfBe703eZ3zXFQ+d4TfwbKay6643C5L6qLr\nwWp/snoiZW7Zwh87dsDZs6j+/lTw9SUjKIjQkBB89+whp1Ej6rpFsQc2bcrRn35i9oYNvAlEA2vQ\n6nZ4iqxf4eXF/xUhr9efnavdQ0X1Orree7G82hxK0lupIZBnCAadX4DbC9m/u6IopxRF2aEoyjMl\n2I8bhjkDtHiAiIc8SSLCB9On88M999C9dm2mtG1r5jxaMW8eEdu2EQHUBl5FWz3cgWZU/DswKiPD\nTI3RetQoQs+do9WoURwPCyPw5Elq7txZoA9GkaGykA+nwExsJ+S1yoMf0YoX/Ahp9dJYv3P9VT2M\nDHXRoIxBdEjvwKCMQeVGMEDB1CrG7zp1zRoiGjZkAVD9zjuZf/Ysiw8epGJICP9UVQ6fO8cr337L\n1DVrmLpmDa9+9x133n8/nXfsQICNaJeuM5qDwjq0+8jwatoIHHI6UYFfsrJImjGj1O+L0uJq+bqK\n6nV0vXm/yus9XJLCIQg477btPBDsYd+FQBM09dPTwCuKovQvwb6UOCJCsj4AJ1kGYE8CIzkhgWrb\ntxPQogVR587RevRo8wGv17AhG8PDSVEU/NBiHWLQHu5/1ajhkjjPOujPe/FFWLaMgOxs3rl0qUAf\n3Aeg0qSAW+kltORBrdEy2LXW3jtVZ5EeSkNd9O38b3kt7jUmz55cLlwCjd/PUzI7T7+ZsS0FuO3o\nUaY//7xLezvWrePHZs3oUasWT6IJhR+Bi8Au4BG0ycZU9EkGMB3NTTpi27ZSvy9Ki6u5OT/d72mC\nVgRpqXfXACe1mb2h+ixqO1fCeg9/8s4nZV4wACWXPgOtgmWm27Y4YFkRvvsCEF/IZ1eKEL9pWOv6\nGvWGPeW/V1VVKwYD0r1CBY9FWBIXLcqvCaynUfCU0sJIfaGCPKLn/jeK+njqg6d6DqXBoFGD8lMK\nTEVogut7PdVAzeY1ryn9QFlPV+BOYalLkuLjC/xmj1SvLqNbtBCnni5FBelSoYI4nU6XNlVVlS4R\nEfIcyECQPiCPg3QG6enlJUNCQ6WPr6/0czikn4+P3O/lJSpITz8/mT5mTCldidKlwP2o3ztGihX3\neyqgeYA81OehAmlYCr
QzFqENUrV11VKpO3I1KCvpM3Sbwxngdsm3OfwXOCxuNgcP330eaCEiBepV\nKooiU6ZMMd+3b9+e9u3bl0ifi4qIMK5VK+ZY4hbGtWxJzPjxOPRUF4ZeV1VVcgcMoIded9gHEDed\n74iuXXk4OZkeqspySzlPq6ujqqp0rlWLpKNHeRYtcOo7XGsKuPfBoLR1zAV0rCloy6NzwDbMKK27\ng+7mf174H4Y8P4RzznNU9KrIf2f8l7b3t/XYblmKZSgKhbm0ngoKoodbDY6lwE69VGgu0EPftnv8\neCa99Za5n7WUbAxaHMxZ4HEguUULqnfowPO6TSwpPp6c/v3ZI0JDRaHCwoXs2Ly5zDgt3CyupPOf\nPHuyx3uKtcCDrvuCxYEiCy0P+wOUGVvYmjVrWLNmjfn+1VdfLTtxDoqifIr26D+Flr5+JdBa3LyV\nFEXpAXwvIucURWkBLAFeFJECT3hZMEgnJyTgHDyYrtnZ5rYVvr4sqVuXuX/84TJYnzt9mnmpqfnb\ngOrA0RYtmPPTTwAFBI1hqARMg/ebEyZwZPZsaqBFSA/Hg9GxQgVWPfQQVTMyylzuJKv/d/rv6ey7\nZ5+mWuqA+TAFfBVA5aDKHGx1sEgPWFlPV1BUrEJDRCvY45uRQbq3N8He3iy7fNm8N2IrVmTpmTMu\nRYV2paTQc/duc7IwDAgFqvr4cNTbm24ff0yn3r0Z1qgR/f74w1RdvlutGlEXL3L5wQf555Ilt5yA\n8BSPUNg9xWq0exVcJiBGO1+v/5rjnY6X6YlKmYpzAJ5Di3M4gVY//RkR2a0oyv1AooiE6PsNAOYq\niuILHALe8CQYygrbf/iBX3x92ZidbT60GxwOnktPd3FDfWjbNrZa9pmJVr3tEFBD1/mKCB23bfOY\nN19Eq/OQ0qwZ2z79lM+BzorCWBEznfePaPaJyn5+BFauTJMGDcpkBKzVrTR9Xzp39riTzO6ZLsa8\nrI5ZZK3NKpILIZRfl0B3rL9XckICrYcMIQaYpijcrt8/oN0bwy5c0GwRfbVF9YTZs4lr1YoK5Kdf\niUXz/PjJy4tlly4RN3MmTlUlNjXVzN81Dgg7dozqwPply1zavBUozM25sHsK65BqsSsY7bQa0Irj\nvsddG/OFtBNp/FkoUeEgImfR8sy5b/8BCLG8H1iSx73R3Hn//dz173+7zNqfy8vjy4YN2Vylirnt\n9+3bOZedzTC0XEi1gP1oJSJ7Ao7vv0dRFFPQpAUHE9m0KYBZ52F2RgYDJk3i8QsXUIARIiwCghUF\nmjQhA2iwZw88+CD/XLny5lyAYlK/Xn3uiL6Dn3x/cv3Al4IuEW4GPqtfeSih1NlThwPNDrisNF57\n9zXKIyKutTYO5eZyGNhq2ceIbzAG8pTFi+m0fbtL0sYeaFUCn9AFS8yOHXz4xhscFmES2jgXpr+O\nAS1FmDdpEjF9+txSqwdPvBb3GmuHr+VAgwOwEy2r4TGgjWUnDxOQYweOaVGLbkLl2IFjN7zPNwu7\n2E8R8FQIJdyD6sZQF4gIpzZsIDYnx5yEPJGTw25fX+5q2dIUNElOJ8vCwvin7qly17//DUBAWhox\nuiqtJ5qtYZYIcUFBhCoKc5xO4k6d0oxG5eThjqwayU85PxWcoaluO+ZACCEMHj2YtBNp/LrrVzLb\nZkJ97bPaB2rT41gPMpwZBHsFo4QrDJs6rFwWWnGvvlYfOKAonG7cmH2ZmdxTvz5hQFCDBuZ3dqxb\nx8bKlRl86JDLCmMgUMGS6G/W3r3ciXa5BS1/l4KWzHEc4J+aesutHgojJyNHW453wZx08DXadDbA\n8wQkNDRUy3Fj/c5qqFaj2k3t+43Ezq10A5g2fjxNZs/me1wNyJ29vWl8zz28vXGjua23w8HTn3/O\nl7NmMXvDBrNucGdLe0b07BZfX+5UFLpmZ5e60fla8WQUrPNzHSRXXGwOtdfXRvFRXFYH
rAZaooWP\n63rd1+JeK9fBcVC4wfpEUBB+a9d6DGgUEbpVr07E8eM49W0ZikJ1EeqhRVwbNR4WorkLdgKXGhLJ\naKuTbVFRfP777+VmgnEjGDx6MAu2LdBWCm4Tl4gvI3io1UMFJh3p+9K5c8CdZDbM1NIWqGjTbH94\nKOohImpElInKcHbivTKGqqp0DwhgZHZ2AQPyUuBXh4OX1fzpchLwXrVqjDx/ns6XLjETzbbwB6D4\n++Pj64sAFwID8crIID4j45ry9UsZSqvhySgIuGzLuJjB8mrLC64wfkSrdoRmgK4RUoMF6gJNFWDk\nqL4dBjnKjkHwehAPEdXukfDukffPeXvjaNiQylWqICKs2bmTuNOn6Qksdzh4JzQUv3PnWGGp9fAI\nUNXLi54LF5abCcaNoNWAVvx07Kd847OFkJUhdG/VvUDql8wLmSwLWqYJBl9cPJa8Er1w/sWpRXCV\n8oTFFg5lBGMQzs3J4Y45c/gdbZBX0Jb0x9CiAVOBDm3bciAtjcDDh6kMHFcUshs3JvfQISIzMkgL\nDojMXjEAACAASURBVOa2pk0J9pDF81pdVstSWo0rYQiOlRtWcr6Leywl+d4jOdDzcE/WblnLGa8z\nBZb199W4j/UJ629q30sS6+/s6fc1VhsABy1pVQwVZ1J8PB8MGMASVTUFwYCoKAbu3UtPy6RkqaKw\nrHFjomNiyqRDw83AdJSomOlx5cCPaGkMvkNzB8sDmoHfbj8u+14GL7TvZZHvoq0CueQv/UvRg6ms\neSvdsqQsXsyR995jk6pyDC2P3AEgUP88HPgnmm/v9mbNOLd5M3PBTMo3LC+PAdnZdANW5uTgM3q0\ny6BwPQXgTYNnKSVfS9+Xzrip48w0Gfc1vo+3J79dYBblonIKoHDvEV3ttNVnK2cqnnF9oH2BDnBs\ndfk1CLobqGOysgr8btbJwvnHH+f4vfeabqkiwvyXXmK4LhhAu2yV9+3jE19ftlhcZFODgrjrFq/1\nMHn2ZM2etR74FpcVAN8AdYHtaHlJjO1fwOWAy1rMThU0wbABFxdtVqLp7u4DKpb97KuFYQuHEsB4\nqDtlZnIOrR6Dglb682e0HCJJ+vsJwKx//YsJWVkuD3BsWhre+syua3Y24woZFK4FTykabtbqIX1f\nOu2GtePgpYOa0tsXlucs5+t+X/P/6vw/ompEmfpYl5w1d5O/StAftqBvgrij1h1EZkSSUV9XO63D\nYyqDDGcG6fvSy43dwYq7gbqw382432IyM1m+fLlpWE5OSOB8Who/AvOBRuSnAK+yZw+vWo6V7HSi\ntLG65Nx6HL5wWPMCaIsW0LYEbUTMAKoBm8hfmYK2/HeiTWCy9Ndm8u9V9L/d0ILoNgD3lD9XawNb\nOJQAxkO9FM2P13i4n0fLdXMXWg6lI2i1GRpkZZGMdu8JsMvPj4jsbOqS77v+kB4Xcb2DeVFmoTeS\nybMnczDvYP5sDMzYhg0/bmBDow38NPIn5r44l6/Wf6UtsRTgNrSLsgS8nF5Uq1iN2rfVJrKqlutm\n2NRhWnv6SsJ9hXG6wmk6juxYrgzTBkVdHaYsXkzM9u2kAO+rKsNeeoltGzey9YsvaCyCPzAU7V5K\ncjpZdtttZBw6xBRdZWm4T5f3WiBF4Uopts0Yh4poF+scmpDoQUFnCNCCOPviukLIwuMkBQfQAbwT\nvHktpXy6Wts2h2JiNSAahVt2ORzcrqocBHajhYqfRItkXYpmg2hJfi7/Z728OO3jQ2Prsj84mLuG\nDeP5t9/2eMyrGZmv10ZRUnQY2oE1+9Z4NPSZK4OTEPRzEJkPZuY/cEvQHlYBsnGxKbik+fa0nDce\n5ICyFalakhj3W6cNG0yHh6WKwnteXtTMy2Mu0AftMhr30rCoKHzT0/mX0+mxemBZclooSa6WKjt9\nXzp39LuDrI56IOY3FG57AC1hpPtnn6L5ERfiQBGy
IoTzP3uwod0EbJtDKSIijOjdm9jt210Kt6xU\nVXagrT5ro9WCVtBSDP0L6Objw3cREWTedhuKonDpxAkG//EH3S1tX2nZ7148xhPXY6MoSWqG1NSM\nc1eKPt1JvmAAbcCviLba+BEtt41b9PTtx24n8pdI7YFvibZ8P4dm7TfcXSm/et6rYV01GEFwPUV4\nLy+PXmgr1CdxLSDVMy2Nz0S0NBoeVpBFuZ/KI1cr4lO/Xn38nH5k/ZilSdELeF4FnEHLOe3psyoU\njHdIRPMbzoEw37Abc3I3AVs4FIOUxYtxrFrFwqgoftILsxxITSX3/Hl2Z2Xxd1UlG+2B7US+yujJ\n3Fw+P3mS1m+/TafevRnQogWbWrfmZ2BLejr3GB4oHgbyohqZS9vQ+Frca3w/7HsOfnvQ1dBnXaY7\nyX/gzqFJz3A0weAuVNDeb/htA4umL+KDRR9oeZvy0tnXdp/2kBqUw5QaV8I6s9+xbh276tWj9549\nKBbvo1po2hBj9fqxnx8B4eEEBgdzYvdu7kWze20DLcJaV1mWttPCjaQo5Tm9Hd75K4I1aEt8N/do\nQoHDeJ7onEezkyUAEWgeTH9Bs0Wsg8i7IsutDawk6zncUhgP1Xu5uVQMCWHK6tVMXbOGuYcOEf30\n07RSVVYA/wHakS8gQMuFE5CdTdKMGSQnJFBjzx7+Mno0rUaNMus/vPrddx4H+LJWu6Ew6terz3dz\nv6Nno54onynaw5OANqPSg9nMB+4cmoqoF9pqoTVaZi4PtR6OnzxO12e78nS/p/l2/rd8++m3RO6L\nzN83x3Mu/vKMtWbIxDlziI6JYdP99zPAz48paKqlWH1fQbNzDcjO5uKpUxzx92eQw8HzQHtFYaOi\nsKhePbb/8IPZdnm4n66HohTxaXV7K81TKQfN3vUDrnVHfgAaAA9rjhHW+4xE4H60THJ90e7d9mgT\nlQcAJ3xT5Rs6juxYpuuOFIZtc7hOCvNHV1WVPiEhLLl4kXHA72jJ9/qAi9poObDT15dtderweWoq\n41q0AEVhTiHBT+Bq37iWQLjSJuIvEZzodKJAym7OgLe3N3mBeQV1vSfR/MutxkHjIW6l2Sq2f77d\n1B17yrj5Z6CwoDgjHXdPEYaTr/mogGbTcqCVoF0fGMiSixddsgRjyRBcHu+nolKU8pzp+9JpPbA1\nx04f0y5abwq1H9y34z4ia0Ry5MIRft/1O4dbHNYEgTWDq5Vv0FYSrUvHBlaWyoTeMpiuqx4qfE2f\nOJEn9YfxITQvudvRXFqnoKU16K6/98rJ4XJqKoJWqcvI1lrYDO5Kro5lmfsa35fvFdKe/FlZmFY+\ntMK5Cq7qpTVoZc7OAV/9//bOPT6q6tz735U7IYFwCUEwNDEqCkpBVAKiEhCkolKpWiu0Wmzp6VtL\nC1Vrbam0nPetlwrW0/a0tNL2VKxSUBHQAJJE7vEGCngFk0O4JAEkJCGXycys94+19+y9Z/bkRkhm\nwvp9PvlkZs/ae69Zs/Z61nP7PQRKi5KLss0ZvorcO3KZNXcWQPRV2WoBUqrys/krV3Kj8ZtPsbH3\nrvj5z7nV2DT9CbWw90bl0lTFxHC3EIyAwFwENV+mYlWFy1+5kvp331X3Q7EImyan7oDWlOfMzsrm\n6kuvVjv/DNz9ChLwQM9YlbUkkQgpLPOnGTlnhwclGCStrhYXadCaQzsQLhKIf/yDv/3wh7xQXh7Y\nid0aG0uf5GTievVC9u5N9ocfchL1kJrFWj5EsbgugWZ3cOG4eLq6dkNLKCktYcL3JoTlS+r3Sj9O\n3HSi+QikNNsFzZ1aIXCN+24wXPhitCB/5Uryv/1tTp13nqNmyPwxY5g8fz5Nd93FJ1LyIGrO/Bql\nKUxDRcQlouRrDfB5SgoIQU5NDaCiho+OGUN9v36I9etpHDoUCfT45BPqL774nMqaLiktIfeOXCp7\nVirz0G24F/6p
hVgZi+8mnzNRzvxh6lC7Pvu8vQLlv4hSzUELh3YgHIVBZUoKXw2q8GWanCbfdhtj\nBw1icG0tY06fZjPqAb4MtVF+hCCyPUPYfPDOO90ixLCktISJd0+kNK5U7ahGohb8Y9BjWw/qk+pV\n2bxwoYQTgt6Psx0PKsYS7YR89nDVppgYbrVzcSUn8+zQoYhdu0gHGlARce8DL2MLX4VABv5vEhK4\nTEpubmriSVRUXX6PHqw4/3yWffaZw6Q5L6iaXHdGyFw5hvIxTMMZeZSEsrFMIPzcPGicazqlh6Py\nIq6AnNKumX86lLWTIaVEJiaysKiI9atWUT17NuMMqosn581je20tOwwqg7KSEs7PziZ161be276d\nkRUV+GJiKEf5sbahnIffB/7eowf/io8nNjWVIRdeqGi///Y3ErZs6RYhhtlZ2RQ8XxCycKdsTlFF\ngOpQ4TRuar1JP2rfkdmjnmxqe0vhi9EA03z4AfCh389GIegnJV8IgczKIlZKfKmp/KGmhunx8RwB\nvt/U5DAf3QrcN2wYQ9LTeX//ft6urOQl1IZk9qWXAjDD0Egydu9mhBCB14f27OkWcy4cTM1y446N\nyhdmzpV01IP5EohYgUyX1qbkNWAdKvcmjQA1RmBuDoTMwZkMzRjKrs93UbuhlpTUFK6pv4anfx9K\nGRMN0D6HNsKMHFm/alUgBND0NzyweDE9x47llwUFVPTpQ6+qKq6ZO5efPPUUb/35z/w3cMLvZzHK\njNQfFb3538B5I0bw96oqlh06xMKiIhYWFZFw4oTj+tEONxvwRTkXqYfTTHxzs90eRw3U86iB+wCn\nqckWgXK4+rCrgOlqm6/pQ2jpd7T7sx5A+RGekZKFwO+kJC41lXsfeYTv+nyqGFR8PA3JybwCzExJ\nYVZKCo8C/xMfzyWTJ7OwqIjvPf00c+LiSEPNNdnURO9evbjZ60UCFR4PNzU2Bl4/U1/P691kzgXD\n1BaWpy5XpqTguZIO/Xr345YxtyjtFBT30u0ok9MM49hm4BhkebPIK8lj+uHpDM0YytZPtnIi8QSN\n0xo5MfUE+yr2ddI363hos1IbYKr7TxUXM6FPH35WX89XGhoCpiMpJetnzyb9u9/l7aef5iW/n/lj\nxtD/mmsYsXgx8aiNxjRsnPrACxCSudoSO2d3QEj50HxU4twNWGr9OpRanwCcRpmj9mElyBl1IYr+\nXER2VjZfvferrP58tdr2CKN9BGRMt5Yd1/G7YwQd2D5/3WYOMk1IXwcGoSq9xSckMMrjYR0w6Cc/\nISYujqOFhdz41luBjOpfA6MTE1VdENs97K9fjY8n8V//6lZzLsS02YCT3gWUNrsmhXV/XMfsx2Zz\noPqAu6lzC6RUpfDBqx8AhGjEkZCtr30OnQjzwZV1dawFBgM/wwgRNOy2i4uLmZSYyE8bG5kKrEtK\n4o8+H2uamvgJzuI/d6BydJqGDSOzf/+AYzlaQ1bbillzZ6maDDtQXlIPcBTle+hp/P8C+qX3IzU2\nldIrStWxzcbnMYAfMuMyeXPZmwChju8CyOyhPu8q1b6lGg122IMOPti/n9TaWpAyYG7838pKZnz2\nGbd4vYFzVqPMRaZsjUEFN0xMTGR4TAyDvV4qm5oCc+8J4N2kJHr270+VlKTW1iL9fqoaGlhtmKck\nMG/MGM67/noeeuyxqJ93br4oNmHRawc5kmfGqIJSV339KhUsEYxCyB2Yy44Xdqh5nLo8NBS7ABgA\nGacz2PHvHdrn0F1hqvtP1dXxE+C/UBqm+TCadtv1wJcaGwO7vfcbGvg/qCQ4M0Ma4/+3YmJ4eehQ\nhgVRJ7eWnTNS0dpoocPVh6EPTke03a+wDQYOHMj25xW5zeT7J6udnPkwGyjzlLFg8QIASzBAoBDL\nqPJRXWrzbQs7bktRQk/Om8cGIXjx44+JHzQIDh0iBRiCGpaXUKVlQc3D/wKmJy
XxHa8XYWyyHgJe\n8Xj41/HjfO+557jxa18jf+VKfLNmOebcxPfeY/n777Ph6qujYt41BzdfFJNQ0SBbUJnOfQmYK4+U\nKDNkY32je2a0X5W+Bdh/ZL/a1JxG+SSSjXZG5FOFpyIqySC1cGglzAfcvsjfB0w//3yuuOAC9u3a\nxU9ravgG8G0sIfA+KqSwHOVjeBa1K/MlJnL5mDEMcwlD7WpepDOBY4fWD/DAzvt3uj4Yg3sNVjQD\nQRxK5KGiQG6Cq8uvDpy38fcbyZ2ZS2VCpfOmhk+hrrZOZYCZSXZGRFSNr+asfd+W0NHsuA8sXsz8\nsWP5p9/PXV4v3pQUltXWIlAbld+haH7WA3dh0LV4vbx8ySW8Z1C8SCnZv2sXow1/1pQZM/hg61be\nT0jgrcbGgOaw1+djTFNToE00aw/hqDSUowX1cJowfFiBeg9BFPK8Bj3owf4j+/nqvV/lg9IPlC3O\nnqzZiMXYGoVBEaCFQ6uxZ9s2akaP5qPdu1lhxItPA/7p9TLmBz+gYubMADXQDhTzL6gACIFiZv0J\n8DUjY3X+yJEsLCqKSF6kM0FbooUWzV/EqhmrVFUtO8yHNsGqz2BqIskyWans8Tiqb8UOiGXfoX0O\nX4SpgXQlz1JHa4H2633z5En2GKU/QWmn95vtsIj5bvF6KTQoXoQQ5K9cybh77lF+BqMvI8aPZ8Rf\n/hIIp84Hrvb7lX+sG2itASqNIA0g9ngsvjt8IWbIRcsMevhslDaxHWvTUQv1N9ZTnF6sTFM3EqKt\nsgU1PydYx7s6KKKt0MKhBQRIzxYvZv2qVVxzzz2OB/3b1dUsfewxMqTkyb59yfT5OG2c1xQbS7+q\nKoYYyUr5WBmrbV0gooVWuSWys+CHd/zQ8bzheSNs5bfU2FSnJjIYYlbH4O/pdziut6/eTt30uhAN\nJG5lHHOenXN2v3Qz6Egt0O/387e5c3nB0EKmNTby16Qkfogamg9Q5LT/BXxfCJCSJ1B5DeZ8m3zb\nbSyeO5f8IE3mvNxcyrKyeO7jj+kzdCjlhw4FNkGdXQukvWhOa51z5xxe/NGLeKd6A3NGrBX4pvjC\nmiEd9R4mGG3MpDiT6DGG8PUc7K7SKCSD1MKhBdjpjPds20bNlVfyopH4ZuYxNH7yCX/0+Zh/0UUO\nZ6PpXDwtBI9KyYe7doU8cJNvu40nH3kEpGzW8RcttMrhdmiDeg1yfXgz92Yy5IshodnTV6jkNdFf\nhGgi/r5+GIFjN1eXWOfc3RkmJW+Cl1m/nNVlDumO1AIff/BBZh496ticzPZ6efnSS+k3YAAjjOM1\n+/ezGnild296fPIJ9w0dqgIejHyboUePshG14TU3KjzwAEd37lTmqqYm7jNCZSF6fF7Naa2gqFrs\nc0QmSiebr3FO8cfFlJSWsGj+Inbev9ORJBezIQZ/ul/5KkYStugUfuf7nPdzWPT76CKDPKfzHFqK\nPbfTGec/+SQ/eeopKo38haSrrqJ3VRU9rrqKuTU1rjxHDy5ZwqOFhXzQ0MCYH/yA77g8cI8/9BCH\nnnmG9555JiynTXA/IiV6yw2L5i8i5313llS3h7dsbBkXpV1EVmEWqWtS6bmyJyP7jWRmjOLBOcWp\n0J2ZB5V9arJnDkdFndjZNItR5qc+UFZfxryF8872Vz8jtGYubl2+nHeAWamp/PLaa7klM5N3xo5l\n2I03srCoiEcLC0nOzWVVWRnPlpWRlprKH3w+eqemsrCoiAcWL2b388/zDPDX1FQeve46Fl5/PTuu\nvJI1f/tbwFzVv7SUdVlZLLz++sDfjiuvDDC5RirC5bisK17Hxnc3KkEwATVHJmBFyNnhgYpY5UAu\nO1TG8IzhDCgaQL9V/UguTsZ/h1/RK5tz7ALgDRzznQI4j/OYfsH0sJxO0YBzWnNoaTdut+/mvfce\njz3wABVr1vCS38+MP/2JladPc/uf/sTDNh
U9uPbz4w8+SEZxMcsef5zhQeYFv9/PJ8uX80J9PfOA\n1594wlV178pa0G2FmejmYEn9vbL7hjM5bXp/k4rFTAFyoaa0pnlb8SmUb8HcBQZz4phO7ZUo72wy\n7Nywk0hGa+bi3Joa5Sfw+dh19dVcuHs31/zoR4H29mtIKUPmjJSS2dXVyl/R0EDCD3/I1Ntvx+/3\nM/X885lszOPfe73MT00N+CiiBeG01qrkKpVgFPzZcEIL9Rj5CQeaDjDtgWmqGNUQQqvEmXNsC2T2\nzGTo50PZc3AP3jovSTFJZH4pk5ReKSyZvyTqhIKJczbPoaXYc7dcg7y4OB70epmGQbmNsm5Ms113\nbWIi8cuXh9B3z+jZk1XV1cTExASu//3bbuOW/PxAMtKuhASueP75kBKO3SXnwTUe3LThTsKKPb9S\nxZmH40qKezkOb5rXiiDZZJwfDFsRjYwNGZRvKz97X+4M0Na56McW2GC0B4t+286VZM9ZQEqWvPVW\n4BoT+vSh6PhxHn/wQY4sXszNWAl30Zh46ZrLYKdZ2Ulo4alLUdTw8agNSk/gapQz2SwCBGFpueNW\nxnHT1Tfx9AJVzjeSeL00ZXc70VKRk+AoE4Asr5ebjNe3oHi2dgLj4+OZ2bMnjwL/Skjg/S1beOLh\nh3nsgQcCDujZp0/ztbFjA2aD/JUrqVizhpsaGwH1UFZ4PLz+xBMO00K00nS7wc3kRAGKThSs2PN3\n4Y0db5B3bx4LFi9g2cPLmFkzk9w9uWQVZhHji7EEA6hsVzfajQTr9ZhLxhCpaOtc3IAV2DBlzx6+\nP2MG+StXBtrY6d9BzZkbdu9moO3YBmDEyZM8/tBDYU1NkW5GCoadniXttTSlWZo0KwYfUvy/4y0K\n+DFAL5QXfxIq5yYWpU2cxrmJacJ1jnn7e3l14KtMvn8yP17042Z9HtGGc1JzaM1u3HQmnzx+HD76\niM+k5D+wEoxAUSN/hCpo1hQby599PvKTk9n1/e9TsXQp73s8FBhx468b7b/6738z9Wtf466LL+bb\n+/c7mVgJ1R6ilaY7HMxopXXF66iqrlJJI3Y6brOeg6nqH1PkfEPOH8KnBz7FO9mrEkfygs4Jpvou\nQJGjJTvpNSINbZmLxk4wENhgtp8RE0N8djYvHjiAAB4H1sTHMzE3l5iYGKSUFL33Hn1SUznt8TD0\nxAkqgBXAzcnJ/FBKptbXR6W2EA7htNSswiyVaW+WAq0ErgT245w/L6ASRRJQ82szVpiqPcN6LIHK\nhgOKBigivyDkleRR8PeCs/I9m4POkG4HWhN7bqexmCIlPtTD9B4W/cUe4HRsLPk+H3f5fPiB3XV1\nvPWnP7Hq9GkmYEWzbUAVZfn63LkgJf1LS9kBrAWOAD169CAuIYGalBTibGGO0SgAmkN2VjbPPfOc\nenh3L1fZpPYKcRVY1biqgPeg9pZaPkz4UCWLFGIR9JkPfhpwBSS8mECfjD5cPuRy5BDJ3rf2Ij2S\nUcNHdfbXbDVaOxdNmDkK9vZz/H7+cOBAoM2XgUNNTSSPGcPDTz5J/sqVVM+ezYBvfIODTz/NYJTm\nK4Dv1tUF5mi0hKy2BiGRRoaJ5z/n/SfffOybjpBW1qCqctn9CV+BuHVxeKd51dycikpqM/1cftQA\nplnnCI8IG6kXjTgnNYfW7sZNLqUP6ur4ELVm2Ut9/j9gGKp+r51IbxbKhLkW2JOQQEpsLN+pr+c2\n4BUh+FdODi/s3+9aujHaH8rWoqS0hOtnX09ZVZmy9Zo7Mrv/oAin3RfUw1eEGjizmIoPJVTGAwOV\npiCbJGVjy0Jsv0BEFQJqzVy057j8dv78QPsvjh2Djz6ij5SUY3F0mZrFXQMHMuqb36R882aWFBcz\nJSaGSX4/Owlf+yFYe4iW/Bo3uJWPXbB4geLzMjUHgXJM78PKZTCQ+nIqNX1qVA3WGYTC7ofwwPTD\n09l7fG
+38Tmck8KhtQhHgtYUG0u/kyc5Cvwb6yG7OSaGC/1+lkCAZO+25GT6DBrEMpswmC0Ey2yZ\nreGc0RDdD2dLKCkt4bo7ruPQlEOWACjCEgjhavMWAjkoNc4l0oQmlJDJwMHMemv5reyr2BcxD29r\n4cboGs4cNeUnPyHm3nu5sa6OV2Ji+GNsLD+KiWFaYyO/RmmpmcDPbdd/BXjVqP0QLJhayyYbLci9\nPZfiI8Wh1Qa9OCNLTBNUXqlVXKqZQIrgzUck1DPXwqGTYa/SZVIgm3gVVfLTzI25EViEimiy+ypW\nx8TwytChNB06FFK6MThSpbs9nMHIuzePouwi64DdfxDuodyO0hbcqJQ3ooylwQ//pZC0LYmGAQ3O\nSnSerqfzbg5ukUwA37/tNr66cSNTbVUH3ei8ZwCrUPNxHoqt9Q7js8HDhtHXRSA0d+9o36Bkj8tW\nC37wvHkeJRwMTTSlKoW/LPgLv/jHLziQdUBV5rJvRIwSoWn+NKaNmdblGqgbtM+hk2Gv0lWL4lE6\nIQTliYmkNjRwEngHpVGAImssR5mcvhACLr2UPv37k5ySwq3/+78O4RLMYWNPfusutuBghMSmG/6D\nrMIsBg4ayN5Ne1WsuY30jHigGucDDlbNB3skUwIBhteGrzWEahlpkc154xbJJKWk7LXXWHfRRexM\nt1J8TTpvuz/iPpS/6xWU+VMA30Ktgbs9Hn7XTC5DNOXXtBYDhwykNKHUeTAB6AtxWw0fQwLUemr5\n9hPf5poLruGy2svY6tvKiS0nrDohY4FkmFYzzbGx6A71y01o4dBGmFw5wiirCMCxYwz49FP+gFUw\nxdy5JWKr4SAl841s1d/Onx8oKWoimHOnOz6cwXB1HJbmsPF5ZeoJKdAywTjxNdTuLUgLoB9q5RuJ\n5eSuxFnUxUxgMjSTSHUYujG6znvySZCStU1NIYlqT86bxzvp6Ww4doxjH31EMkrB+hgYiKXl3gL8\nDzCgpCTsnOpoNtnOxuatm7nnoXs46TtJn9g+/OOJf3Dd+OvIGZDDTs/OUAK+L2Lx3u11zJGGKQ1s\n2rKJnF45vPTHl5j1y1mUecuUgHhX1RFZtMyixGgLI3E0oEPNSkKIPijf1mQUecEjUsp/hWn7OGpj\nI4FlUsqfhmkXUWYlN4T4JmpqqK+tpd7v5zs4TUp2h19z/oTulPzWEkpKS/jxoh9T/HEx0iMZO3ws\nSxZamaUO01MVSl1zYV/lPZQ2sA1305K9tCjAJsjpFbk+B3tVOBNrExPZKyUPezyuoadSSkeY9OvA\nH1Bsrfaw6bWoIVofZk653TtaQl03b93MpB9NckQkxeXHsel3m8g8P9O1IBSnUNIzLehihcA1ytm8\n6+gux3nBIdLhwmejtRJcR2sOf0QV30tHPa7rhBC7pZQf2RsJIb6HqoF+uXHoDSHEASnl0g7uT6fA\nDHt98mc/Y1VhIetXrQpEOb0hBJsuvZS+hvpv1w6ao0yI9oI/bcW+in1UTKiABFjtWc3e+/cGFm2H\n6Wk37vUfXkY93Mmomg5fcWmzHQe7Zs+Kniz7f8siUjBAKKOrWYfhy82wpa5ftSoQJr0TFZ5/EQTC\npo+jojBTUMnB4eZUNNcUueeheyzBAJAA3qle7nnoHkq2lzDqvFEc3HLQMhEZ+TABB7MJDyoKbjts\nrdqqKsLZrnnwyoMOKvpw9DDritcxa+6sqDMxdZhwEEIko/xfw6SU9cA2IcSrwDeBR4Kafwt4CXCn\npwAAIABJREFUSkp51Dj3KeA7QFQKB7B4bdZfeSX5Tz7JeXV1PARgmJKCeWpa8idE88PZVrRUA8Jh\negqOIzfaMwD1gBeiBENwaGICysYCAU3i9OTTzH5sdsRqDsEOYnsdBnDfMOzZto3+48YFEub67drF\nn2wJc/OBCUKw+tJLOZ2ezo4wcyqa82tO+k66zpEqXxWAInN0o1s5iTW/
TI1iKpAMJ191v+Yb775B\nSWlJ6CbGhMHttDx1edSZmDrMrCSEGAlsk1L2tB37CXCdlHJ6UNsqYLKU8m3j/WigQErZ2+W6EW9W\nspuAZl90EReXllLZ1MRUjMLtLuq4o5B8lKjrZwshEUvmcVtmaUlpCfMWzmNN4Rr8s/yh0SYvoco8\n+lGOngqc2dce4DngS0RVtJIdbc2Wz1+5Eu65xxnRhBIq4cxJ3QGZV2U6w6MBPNBjRQ/GXDOG0k9L\n3SOW8iHhVAKeBI+SpONQpHsQSrxnnrPFMk1CKLeSw5zZyXMtkriVUlCWOztOoZhLWmp7yjgW8XCj\nVrabgKYfOECh389iLK6aYJ4aU2uYYnP2RToV99lEYMdlh0tm6d7je/Fn+NWDGszPNA71QE9EmZBm\noOwqVVgPaTJqxzgBR2ZrJEcr2fHgkiX86s03WVhUFPj71ZtvBsyawfNyz7ZtrMvK4odCsBCYi/Lj\n70FxMrWFn6slSvFIwtALhqo5ETRH6lPrKcouovSKUsRa4fw8H4gDz9c98HXU/PkINX8ARkPsuljn\nOYXquKnlNsvtBFE116BjhUMtisbKjl6AWwHf4La9jGMRj4CfwHiwghf6D/1+fmDUbbjP52Pc3LmB\nB9h+je5CptcRaK4GhImA6aknigunCKUtrESFtX5GaAjrRNRDvx3lARO0SghFI4LnJah603GpqTwj\nJQtR9aUPJSWxWwhey8pqE7Ge2/UjFb4ePuVH2I5FspeLteKkg8yRiOeFmkNmurhZWRAsP9Vu432y\nymkYsGGAk7gvDceib9LDTBszTW1Y7A7uKJtrHemQ/hSIE0LkSClNopcvo6y/wdhnfPaO8X5kmHYA\nLFy4MPB6woQJTJgwoQO623a4+QnsC70feBv4mdHezWEopeRvjz/O0HPEn9Aa2GtAHDhygPIj5aQP\nSWfB4gUBJ17A2TeSUJK910FUCmRC0K7WiF9nHOqBvlJFrdijWKKxQlcwwvmv3DYh93o8JErJ+tRU\nHli8uLnLtnj9SERJaQmln5ZaBX1MeCAwEFVAOci7peJLehflqd+OZW4Ea34Zmsf4K8aT0ivFNSIp\neNEPx+10NudaUVERRUVFHXa9jg5lfR6l3H8XRZO2FhgXJlppLirkFVSezu+klH9xuWbE+Bzc/AQf\nbN0asAO/c+AA3z90yJGFH+xP6O4Zz2cCNz5+k5ZgweIF1kN5EBV+YybDxau2DCHEn5DwQgKDMgYx\ncNBAcgblMOfOOSxdsTQi6A06CuH8V8E+ii+OHUN+9BFDpOTyNvi5osU/Fpg/WQdUWLN9A2Gy9KZh\nUbTUEbrRCPIR8DyQCT1qepD/X/k89ben2LBrAw0pDYpqPjk8BYsbt1NnzrWIos8IynM4DvxUSvmi\nEGI88JqUspet7WMoISKBv0gpfxbmmhEhHNzyDuZdfTXn5eXx0G9+A8DN553H6IqKQG3xA6mpXDBq\nFKmGw7A70hF0JJqLE180fxGX3HoJniSP4k6yUXqzFUV94JL3EPtFLHdNuatVD2Z7slu7OiO2tfkw\n7c2biaZ8G8f8MZl+fXB+/fmIVGERMZrkjkWEp2cxtc1KIBNGJo2kJrbGsXHpsaEHU4ZPceTkRBIi\nKs9BSnkSVbAx+PhWgvwRUsqHgYc78v5nE24qesbu3RzasydQltEs42gi3+dDzJ17TmU8nwnCxYmb\n9ty4xDglHCZjPdD7sASD0T6Q9zARfPt8LE9dzpbvbWm2pkN7slsjISN2/apV3NiKfJj25s1EU77N\n4erDSnPcjsW4OhrOLzufAb0H0FjUiPAI4onnkOeQauMWFv0Flp9qEzAOqgqrnBFOCVA/pZ6UmpSI\nFAwdAU2f0Uq4JSTt27WLf9fUMO+JJzji93NJM36EaKcj6AyEixMf1GsQCxYvoG5Kncp+tn8e7gHv\nj5UxbSQs/XjRj1n97GrXezdXxStc
6GFL+RmdgT3btnEwK4vnPv6YvpdcYhHpBfmv2ps301n5Nh2h\ngfWmd2gp0DdgV+0uGvs1KvNjLMRVxhG7NhZfb5/rfAv4qQqAVGU2Sh+S7srJFE3RR22FFg6tRLiE\nJFOD8MXGcs0//xn2gYmmHVhXoTkn3uyFs9Xu3Iw4Mh/o4PcY7ytRyXA252Lxx8Wu9y0pLWHDrg1O\nnhPjnOYe/pY0nc7AA4sXM3/sWP7p97smW5pob1JbZyTDdZQGJmNlKIfWVdC4tdHKUfCAd51XJbzV\nAetwmiRfQ+XJbIHE04lMvWIqSxYuYcHiBa6cTNEUfdRWaOHQDti1AImq/fwMMK8ZTeBcynhuL+xR\nSwEn3u8XObNPR2LVeUhAFWoJfsALUDkNdnjAW+91ve+CxQuUgzGM1hIOzWk6nYXuYKp01cCyDjDx\n7olkXZxFr9heCJ/gFKdctQpT63hz75vwOc6Io12Emh2nofwNE4DVKArleBSjbyOQDVneLApeLQjc\npyuij7oaup5DO+CI3sCq3bA2MZH45cuj7uGMBjh2l2b4YRUq7+FzlCkgAeWsHouiIbUVY6EAYipi\nWP74ctZuWeswX8xeOJuiPkUhkSs9NvQgf0k+S1csdTV3NBdd1Rl26GBnsR+Yet555B86RExMR6Yw\nnV00W9MjONooKDrI7TdwRBy9hHsVtxdQc+Y4yoeVbruPB3Kzc9mxcofjlK6OPmorIipa6WwgEoWD\nGSIIhBR7j9RIju4A8+H8sOxDPt73MfXUWyxywdFKY1APukQJEA8wFsR6gbxTOhbzy/pfxurBq5XQ\nMWm+/XDDgBsoaSixFp5jkLI5hcuGXUbOACtJr6sWjGDm1HyUEjX4gQf46RNPRE0FwZAotSLCRxFN\nwEFDES7CLRBx9C/gGy7Xegm4C/fw1S1KcyjZXtKxX7SToYVDFyKaaY2jHYFFwVwE3B5+afz1xYpx\nt9eoBjgGg98azAnviZDY9eEZw3l14KtWaGTQbrarS4za8xiklI7a0fc+8wwb7rsvKvJpQnb/63GW\nWDSxAZiiXpq8W+F4ucQKgewnlanIzJa3ayGguLeqUFpoNdAHZZLaBf0a+vH2C29HtGbQEiIqlPVc\ng/YjdB0CzuBwLK3pWCalQuO4BxXqaKIKeA8OTz4cMFWJjYLBPQazbMkyHv3ro9a1dxNCz9HZkUnB\nsDuL7QES91ZX8/dHHuGFKMhohlBf01sn3uK053SowD9pvTb9OuH8PrKfVL9/FYq3fAuKLMhvXOs6\nwpuvfHAi6QST758cVSyqHQ2tOWhEJQKaw8uozJpwJgjz/VrUYtDbeJ+MWih6AeehyF9sNYId5qYE\nLCd4EOzMsV0Ft0S12UKwTErWd6Im21EJgbm351J8pDg0c9kPTHIW2XEr7MNaFP+WyahqJMTFn4hn\nQNIADl99WG0einDXOlcANwPJ0cPY64ZIYmXtdogmJspzDQGyvhTUwhHMljnS1rjc+D8L+Jrx50eZ\nkK5FmRVMwQABrUDGSosQ0AyZtSNCQhndwqTvlJINdB7jr2kaWp66nKLsIpanLmfy/ZMpKW273f7C\nQReqBLQClON4HUriHQe2gGyyvsvSFUvxjvU6SfauRRExmkgDxkHfxL5sWbGFnFLjNw2XIzOAEEK9\ncxFaODSDaGKiPBcxPGM4idWJaiF5CVWswCxKuxuLbnk7aicYHM64w3g9CNdFosZXE6Bgzu2XS8qm\nlGaZY7sKe7ZtY/uVV7Lw+uuZO2wYc4VgJ/ABncf421xCYFuxaP4iMj/MVG9moDTDiajExtFQNraM\niXdPJO/ePDa+u1FpAROwyPbSUfU81qGEy0vAi3D5ly4nOyubZQ8vI2VNimrjIvD5AqVVHIsM4d9V\n0D6HMIgmJspzDQ4H5ldQWbGpqJXQjEyxcyzF4L5DjDdexxI2X8GkYAaraH2Vr4q02DSWPREZJUbt\n
vocn582jtn9/hBCcBhbSOX6wjkwIzM7KZlT2KMoGljkF+iQC5sLSuFJKs0tVGHPwb3fM+B+LEi7G\nfPhw54eUlJawdMVSam+pVX4me86M6XMwqr/F5ccx53dz2tz/7gItHMKgOyQXdVc4dqkJqEikdcAd\nhHIsLcdibQ22LTcZr4ejMmNtPoeUTSksesHSCkpKS5j92OwAv06VpyoiS4x2VXnPjk4IrPZVuwt0\niTOwYDTO3+4Yai4kEZItfST3SMAnQj/jszFYXExB1QO9U70sXbGU68Zf167vEO3QwsEFmgcpsuG6\nS03GfTEZAomHE/G97sP7Fa+1gOSjzBSbjPeXohYJH6RUpbDuj+sci344s8m8hfNI6ZXSZayskYJw\nGcRzHp7DrLmzWjU+dof2Zx9+BoMJFeh+rLwEUAt5Iuq3q0dlON9NKAcX6v0b776B9Ejr2mkEcifY\njrM4zznuc9DCwQWaBymy4dilmuGIvXDXDmJh2Ihh9Ivtx6blm5BxUgmSr+MgZ2MPcF4obYIJV4FU\nBxv2baB+Sn2XsbJGCtyoT+Y8PIfZj81uFWdSCL9SCiG0KGKdILE6kYbxDWoRN2m56402sVgaRBjO\nrYrYChiPMh/Zch9SNqVQe2VQMcoICTjoKuhQVhe0tZC7RufCsZCYSXDhCrdcobKaa2+pVceX4zQ/\nQaCoS2Z2Jm8ue9N1YXfNxA1TdD6awx87EuGyl6cfnh6ibTmKOZk4hhLc5g7fSFCMy4/DO9wL+3H+\n3q8CtxvnuuUwvIFVutNIfsvwZXDD6BuYc6dNkEVIkuOZQmdIa5yTME0Q64rXUXWTEZZk7iRN+/Ek\niNsah3eEF8pQO8wa4E6XC26C5IZkLh9yORcOupA5d85h0R8WsWXfFvwNfvom9SUmLYaj1x4NLB5J\nryfRcH2DdU8BjISMtzK49KJLGdxrcKDy3LlodnLNXq6CHlt7KG3LtginilR21+52jGOAG8keaWZm\nNJehzEfNCWujbWJVIimxKZyYcMJpNsKZpxJt3EktQQsHjXMa4Xan/V7vR6O3kVpRq7iV8gj4FNx2\n+wENZDswHGK3xuKb5nNEsYhqQW52Lp9XfK7qVddCZVKlVZjelmHLNOAYxO0IrVkdzbvRtqDV2tYx\nQ4hP84ZofbyBikADpzawjdCkxCrU+JvCxDBFFf2+iKUrloatMthdtTwtHDTOaYRjRg1kN9szqAtR\nlc2bqxtciNq5umXOFkBcvW0RC2NWogjFAVTkfp1by28ltWdqt9cm3H6bpNeTaJje4GxYhPt4r0RF\nlJnhyfZ2bueYgQbxxvEUYBTMjJnJnDvnMO3/TKM2rVb5JoZDTmn3FtSaW0njnIQ9smV4xnAuO3wZ\nFQ0VlB8sp/+g/uzYt0MRqfXBWkAEyhk9BiU0+qMWCjsjp8k/4Rb5VAfem73WZy3lT4S5zoZdG2j4\nSkO3dmKbv096Sjq+Qh8DBw0kZ1AONaNqeNXzqnNcGnEfx/7AKIhdZ2hx9vG8gNAQ1i04Aw1eA96B\n/MZ8Nu/dbPmdDAf0st9GRp5KpEILB42og1vlsCHvDEE2ScryylQ5x6EoE0MMVtSKvVDQjajkOVuF\nsIAGsdt2jgmPy7HmqtBVhf+8IaXhrBD4dRS30Zli89bNzl36FRBbGsvz858HYN/9+xw06FTiPo4J\nQDqkxaZxYssJ1daDRa2ehIpoaoSY+hj8M/3OPJebgO1wYtIJThScUOcZuTG1k2rP6RyG1kCblTSi\nDmE5/M3iPvZjG1FbINOMdAySi5LxxfloTLDqCoujAjlWQo5qw2bgFpy+hKPATJwOz+Caxaat/D3g\nQojdE4vvJst3kbg+kcZrG5t1jLYHXV14yN6PEXeNoHZSbciYzIxR9n2743f79u00TmlU4+Vm6kuG\nvmv68kW/L9Tx4ygfUiLOcbdHKtnxEoqyfTiwD4uMEcjYYAUOdE
fTnjYraZxzCEfVEMIUlgAZ8Rlk\nn5dNeWE5fTL6cLLiJNUx1Xwx7QuHcJEeSeK/E2k82Kh2/FehBEsNCI9gUNogfvvkb/nFP35hLcDJ\nkFiXSOM/GyELp4kqD3gJ4jxx+Lb4AnTRMbUxriVMzzSevjluo450uLppJ+b9D1cfpvTTUpUvYOag\n7EatMgVwYNgBAAclScY1GVSmV6pxKwJOofwMsUA+JHoTOd3jtOVf8BBawCcBZT500z76os41WV1t\nn1XEVlCRXdFtTXtnCi0cNKIO4agaHA+/ceyG0TcEdquT75+s6C/csmfrQCJVzQAP0ABcAyTD3TV3\nB64xPH84NUU1NNU2carmFI3jGxXD3aSg6yUAGdDob3R8Vn+sXiVc2XbWHVGLuCO5jcLBzZy3efZm\nRLzg4JUH1bHBqIX4UuAjHNrA3k17KSktcSzAuZfkKh9EGnA16reZbJ3TWNCowo+LUFqeQGkOwb/f\naGANTm3P1D4SCAhrwNIEc433Z0mQRjs0K6tG1CFA121jSB3yzhAy4zLDsqYuWLyAA1lG0lwVKtLI\nZG01zEOeOzyKqM2sOfwmZO7IZNH8RYGF8dWBr1I5pZKTN5/En+5XpgozO9sOU1jFBh1Ph8vOv4yZ\nNTPJK8ljZs1MNv5+I6DMZXn35jFr7qw2U10HBGZQHzoyw9dNOynzlinBYN/F56EYb4OKI9VOqg1h\naX16wdMMeWeI6vtuLMFgXmsiSpOYYFxvHJb/x45k1Fb3JWAV6nc2tTjzWrEogfESVmVArM/PZaoM\nN2jhoBF1MKka7Ats0Z+LeHPZmyGLrrlL3fvZXmUm8qFMECNQ/gIzqSqIpI3RQD3U1dWxYPEC5i2c\nF7IwcgPKyXkeyjFqE0yshcQTicrWbYcHcgbl8Nwzz1Hw94LATjVQC6FPEct3L2fYjGFMv296q4VE\nOIFZW13bLoFTUloSIqwOVx8O3bEHR2xVoRZmgSWITbgswNlZ2RT9uYgBGwYoqmy3qKUk1Dib7ydD\n7NpYx3dNeD0BrgcGEqjf4Fj8TWGdZ7Q5C6a97gbtkNbodgi2i8+5cw6T7p/kmmSV9EYSvfr1onJK\npXUBF+qFpA1JFqePHZtQ2diTUVqEmeE7HG44cQOfVHxCmbcs4HPIjLMoOsx+btyxUd3fhQKkLU5l\nu6M3NTaVtz9725HRba+g1tJ13Jzbjpra9u9vRny5UVbYc0iaSTqbNXcWy3cuV4IgBitLOhkVaODF\noQnEvxhPU/+mwLj2qOlB/TX1yvS0GUvrMPuxybjRV4BjkPJOqGmvu/kcdBKchoYNbgtbyhobt5IJ\nIys6NyOXnAE5zuinItyTsl7CQekcyKz2oTSNIBqN3LJcjtQcscwuHhi0cxBXZV5FeW05+w7tUwuU\nme0b5r7tyeKdft/00IXcSMBb/ezqZs+dNXcWy/3LQ4Td9Nrp7D2+1zG2iWsTaUxqVFrU9jDjZhzP\neT+HZQ8vc6UT2bx1MxN/ONER2UWB8f86lJDYjsWg6hKZFvid61BjegLln0hFJT/uC+1Hd6HKcIOO\nVtLQsMHNLl6bVuturvBBzgDll9jyvS3WIu7DvX06yhSVi1qszF3xTkJDWgvgkPcQh6YccvTlSO4R\nVm9Zrezfk4z2Zj6EW9JcHbyx4w3y7s1rU8jlzo93WjWUbd+h+OPiFs/df2Q/HCFEA6gYVOFgXk2N\nTSXfl2/RjgSbhYxopdjjsWQWZvKf8/4zhKV19V2rGX7+cA4dPKQEQx1WfQWzDocpjI1aDoFw4KDv\ndtmwyxhQPoD1766nsb5RaQnp6pzkDcmM+NIIcmqU8z87K1vnOLQA7XPQ6FZwtYubld7s8Ki6DeZi\nW/TnIm4tv5WMDRkklSe5O5hjUQIgn1CHZ7DPYiLUx9a7C5lTKOK4IpSAOQ28grKJ2+9rOMorplQE\n6jIPu2MYX733qy37D9yctm
7HXFBxpCLEmUwe7Nmzh9kLZwPw7MJnSe2ZiqevRwnKCaiwUfP6polp\nHPhu91GaV8p3n/quCgoIclIXnyjmcNJhy6w2Tt2Pa40xqTKuWwFsgRhfjFWoycQxKD9YTrWvmhtH\n38gNl99ARnEGGRsyuLX8Vvb+ey87Vu7guWeec6ULP5NggO4KrTlodCu4hrkOJyR8NGWTs6BPdlZ2\nwNziZpoKaAl1KLoHH8qMNByoxVUIxPni3ENuk7EWVPP6b6B23vZwTBdHecOUBlZvWc3e+/c2ayMf\nO3wsqwtWh2gzucNzA23C5SzU+Gtcv8/pfqcVy6qRF5Cekq7MaWbW+Ujje9xgjI1LtFLANGS7LhIl\neN8NPYebsHwOhkmv3lPvNBUaBIelU0tVdrwHcipy2PHvHW3zr3RjOpP2QPscNLoVwjlT22pjLikt\nYeLdEymNK1UL10jjg2Dz0RqIOxmHN9sb4kidfjjURs9rKAFzO6FCYyWWY9uHytS+y6VzhcA1zfsi\nNm/dzNTvTKVeGNpLPCSeSiSlXwqxvlguv+ByPj32KWVjywJ9y9yRqXIW6g+6EwquRC3WhnN54GsD\nKb+p3KKzkMAhYBBKO5pBKDYRmsW+3RizTYBbLa2XUGNuCwbI/Uj5io5UH6Hk05JA+Vb7dVvjqwmX\nbd8d2FrP1OegzUoa3QpuYa4bf7+RzPMzASPRrZXXKXi+gJxeOVZYpFvI6y3gTfCqxdSMw9+pFtol\nC5cE+pK6JhVWoOzoibjuzMlA2cgnoBbQnribhppoNi7frHdd/7V6JVxuBuEVNN7YyImvnKBySiWb\nDm6irKrMESIayFkYjTJzbUIJok0oU9pklBAAqIOKqgqlKZgmHj9KQIzGaWKy9T2uIs4Z8luIVbsh\n3PftTUhYqukrGtRrECd9J13HszV5C65mSJ3zAGjhoNENYdIzuOYRGLb7yfdPbtG2HCxo4r+Id1/U\n7bWODX/DqOxRZGdlB/rSS/ZSLKOTCZgvHDB9GiaqUGamApyLaQFqV74J9n22z9VG7uaUl9Ok0khs\nfSQFa7EHZ85CPJbAu9Z4H2/0qRDIB9lLKsf1ZpRQnIQSRjtRrKmFzr7nvJ/D8AuGK01hA0oTuQKL\nEdcMObV/300on0zQdebcOSfwm55KPtXuBMDOSB6MVmizkka3R0eZDrLHZbuaL0Ls6FhmD9Oe/9qb\nr3Hy5pPh8wHWoEI2040LFOEeInsB8A6OgjYpm1IYfv5wLhx0IYvmL2LGD2e4V1UzfQMmCo3/5jEz\nZyFcSOoW4/UkVNJftXH9GWHajgbehaTaJKaMmsLTC552lgM1uZdME9pU4/z1WHTqpjnPVtIzpKyo\ny3i2Nm8hUggLzwZ0KKuGRgvoKN6h38z7DXc/cjfyPBkoGMM2FAeTHR4o3lHMzut3KpZXD8R4Y6wF\nNA3l3DbDP/tCb39vPMUeq3ymD3UP03FtYhPOsplmxM/2YoqHFlPwrQLKq8pDOYauQC3ktj4GE9Fl\nxmUi3hEc9B1015BOopzNGK/7YxXWCW5bCblHc8kZGURhUnmAlKIUaq+rVYJwnPGdklHmqX1ACohT\nAnmztBbsXs4F2/Gb2sYzrS6NaWOmBcJVW4KpHTrKg7by3O6ODhMOQog+wDKU4nwMeERK+a8wbR8F\nfo6iNzPLq4yQUpZ2VH80NEyEI+prjenAjOjZf2Q/+w7tQ95pLVi8BlyMK920vEUqE9DHQDL4vX5n\nH0yKB2OXfnPNzYEd8ZHqI2w7sg3PVI+12zevfQL3xfgLda2jTUctwWBnRV2DopcwvjsFkC7SGXfx\nOCr2VFB+pJyBQwYyoOcAGnc1UuGpCNUGemDZ/s2w3u24jm2/5H7seGFHYAwdEUE5INYIZJNUUUh9\nUEImH6U9pIM8JklZk8Jlwy5T/gVjwTZ/jw/3feg05xnjOa1mWpsdyXaWWA0LHak5/BG12Kej
9inr\nhBC7pZQfhWn/gpTyWx14fw0NVyyav4id9+8MMR20xITqWNROYCWtgRVmaeY7bEft9itRyVegHKmm\nA/sYyhQzjZAdfXBfJJK0nmlUvl2pqMPNa1cY13QLjzWpqdcQ3nS1DnjbOCcVxg0bx5KFSwJstWYY\naIwnRvkEptjOLUAt5FWohTjJ+MxeQMkWljt+xPhA91x9ILdIZTrzE1oPYwyQDrW31JJTkxNYuB2/\nx3ijT7ZzO4LdVsNChwgHIUQyyvI4TEpZD2wTQrwKfBN4pCPuoaHRXmRnZbPs4WXc89A9VPmqSItN\nY9kTLZeIdCxq4UqH+rC0gALULjgNtfDZI5vSgfGQVZjFwCEDKT9YrkpnxlgLWmDhM81JlahFOtG4\n7k0o08taHD4HBzW16dx1yTNgGiosdCBwAez8YCdX3XUVJxJOQDnwufqe/r5++AzlNzDDc82scJO2\nogdW9rJdOB6ExJ6JLFm4JDBMByoPuNffqCXEREYelg8nyPTn+D0SjD5tsfkitDmoQ9FRmsPFgFdK\necB27H2Uiy0cbhFCHEfV1/qDlPJPHdQXDQ0HzNBO05lc5ali9mOzW3Q6Ouza4UqCHkctzuYCusv4\nzE2YpEP2xdmOim9hyfdGY+U7HEVF7Owi4DdIWZMC8VDbq9aZqW3WNUhzub+5qGYC70LFtApLq9mK\n2o2/i9IOeqMikfqjBIF5/imIXxXP9RdfT8HaAvw3+y3h+BowAEYPHO0Y1/KD5cr3Ejx2weNp3kNa\nbeymvxDfURowCYaVDNNmobOAjgplTUEF2NlxCkV55YYXUeVA0oE5wC+FEF/voL5oaDjQXJW05jC4\n12C1cBYB9ajFLzis9EbUbncCamdtOnmDqTCMc+yLnWkmWZ66nMqelap/u7HKjJrhoXca1xuljvVs\n6Mnwi4eT4k9RbTD6WAi8C2kNaVa95aD70xvYgxIE5njsM97vMN7PQFVam4pyPDca392B3YPUAAAO\nuklEQVQoliNOCz754hP8iX5131UojWICMBl2Hd3lCK/t3bt3KKX5OmO83PpoCGJ7PQ7QYaedjVZp\nDkKIQpQ7yy2mdBswFzXt7OiFquEUAinlx7a3O4QQv0PljL7o1n7hwoWB1xMmTGDChAmt6baGBtD+\naKU5d87hxR+9iHeqN7DDFisEab3TOBl/0qLCAPBAj/U96Cv7cvq10/T09kTulBzJPRIw/wx5Zwjl\naeVkXJMBHoj3x3N4rJGEZSff24dlEjKdyqkoh62E05NPUxxfrCqtvU5IPeXqddVOWotg85Od4RTb\nPXvipN9+DyuT23TATwBPsoeygrJA/e2AA9xA/ZR6R1W1U6dOKeGzHSu81tRSgvwGKZtSuOz8yxwE\neSba6zs6V1BUVERRUVGHXa9VwkFKmdfc54bPIVYIkWMzLX0ZK+2mxVvgDLRzwC4cNDTaivZGKy1d\nsdQSDKCiaO6UXFt+Lfsq9lmV5XyKxG/dH9c5mD5D6ivUv82mKzfBMNTOfiNq590buAS1eJtPZDin\n8qsoLWYf8FXj/Ak4tCL/NL+6djxWacw0nNXPfLYvKoz39iQ4N5+F6YCfgFrQC1DbPxfTkF3wDhwy\nkNL00pBckF7eXuQNz0OWS2p8NSqM9IXwfgMddto8gjfOv/rVr87oeh3ic5BS1gkhXgJ+LYT4LkoB\nvhVL6XVACHErsFlKWSWEuBqleTzcEX3R0AhGe3ec4TSOGl+NtUh9qWWuJonkg48+4OgVR61FfwdK\nKNSgHLObUeakEhBfCKRHui/Qt6Iyi/ug/BOnCV2c61Bxg7ZazIGEN4z3R7AE5nCUBjLYdiycA17a\nXieiBIqL4C35tITNWzezdMVSPvvkM1efw4QRE3jl2VcCh9yIAIPHVIeddh46MpT1B6g8h0qUm+4/\nzDBWIcR44DUpZS+j7V3AMiFEAoqq6zdSSv2La5wVtHfH
2ZzG0dIiFRLbPxhn/QeTidTMLjbs8Jl9\nM3niwSf45v/9Jt5eXvcFOgMV1roTq361vd27OE099igg03E8QpnIZLpUi/yXgQ+xzDzhHPDC9toP\nXE2IaYhCKL2ilEk/moR3rFeFvQa3KQAx1DIWaHbUyIOmz9DQCIOWGF6b2+GGo+xgOypsNB1X5lOz\nUlvu7bkUf1gMt4W2YTsq7+IWXEuLitUCeYfLM7MCGICDkoJqlBYyEmiCwTsG443x0uBt4FTdKSWI\njFKc1KJsAp8RcFT3S+3H+MvH886H76iaDCblhRlSa5q1xhEIlTUpPfJO5gUit7ozO2pXQdNnaGic\nJbhpHHMenhNSzcxthxvOJIUPyxzjohXU+FQMx4WDLqR4YLHa5d9ESOJc/IZ4mhKa1HEzz0BCxukM\n4nvEc8hzKFSoSJTdv5lazxdffjEFfy+gpLSEy+64jLpr66w2L6MEii2RL3FnIgC1otZJxW1+3wwU\nvUgBTp+HBwb5BgVMSWuL1ypH+Uhbm1YEDWicPWhWVg2NZhDM8Lp0xdJWhcWGC7vsUd6D+Or4FkNd\nF81fRE5pDlyO8jFsQgmAKyCnNIebxt1knZ+GWvSvgRvG3sDyJcuJy3dSY4t1Aq403rv5MvKAd637\nL1i8gLopdc42/bEEg3HsSO4RVn++OiwzKsJoP1Fd3zwewqx60ymlXRSjhFfQeGh0PrRw0NBoA1rL\n/79o/iJy3s9xxvYXqjDPgVkDSfemh9BxD3lnSCCuP0AXnjaT3MtzyfJmkZuRy8wYVZ9iycIlIdc3\n8wKuG38dm363iazCLNJeSyOrMIvnf/48OV/kWBqEy3dIqk0K3N/1e4bRdojBotEI+r4BE1YCxByP\nIfej3ECNDTdBSx5KeLnkOWh0LrRZSUOjDWhtWKy5uDuqyRmZzGVjy5h+eDoAOzfshAQYc8kYnv7z\n0w7TVEtOb9PkdeDIAcqPlJM+JJ0FixcEBETJdmedhzFXjWHB4gW8cfoNV2K9KaOmBO4fSADch+Un\naCS8k9pGoxF/LJ6mvk3OzG0PTPzyRDLSMzhcfThAZsjlQV8qwWBWrWk9s6rG2YF2SGtotAFt5f/P\nuzdP1V0OPl6S56DRcLtPS2Gd7elPa8/ZvHWzijYy8zw8ELM6hoEZAx2JfW6+BLfyqEPeGYJsko6y\npCmbUqi9staqYWGcr53QHQPtkNbQ6ES0NSy2PQl4bQnrDKEGqYMD1QfInZnL5NGTw+YKtPQdQhIA\nE8A/3c9Vh68ipSYlkNi3q8cuypLLAt8r5/0clvx+SaBv5vVrz6tl9eDVjuvVTqolZU0KtbfU6ozn\nCITWHDQ0ziKa26UDrtpBW8I6HZrJGVREC0ZrNR57FnhzyYDhrmdWzGvpfI22Q2sOGhoRjHC7dCCs\ndtAWLiiHZuIShWRGUrXVTNMW30prrh3uejkDcrQJKUKhNQcNjU5EgKL73Y1UxlYqgjybvX5mzUyA\nVmsODs1kG84a0QbM3X5r/Rgh1z1DLeRsXE+jZZyp5qBDWTU0OgkOiu4plSpD2h7Xb2gHbmGw4cI6\nAyGvNTPpV90vbO6E/d5F2UUsT13O5PsnO6i1w103ryQvEH7a3oW8o6+ncfahNQcNjU5Cs5QaE3Bo\nB6215ZsoKS3h+tnXU1Zf5uAwGvLOEIr+XMSCxQs0PcU5Bu1z0NCIEoSl1JCEROq0lX10weIFKky0\nDqtugh9GXTCK7Kzsdte00Dh3oYWDhkYnIZxTNuN0BjfUnFkN5MDin4CjbkJ1SXWz99b0FBrhoH0O\nGhqdhHC+hB3/3sFzzzx3Rvb3lkpotsWPoaEB2uegodGpaKsvoS3XbSka6GzdWyMycaY+By0cNDS6\nCfTir2GHFg4aGhoaGiHQeQ4aGhoaGh0OLRw0NDQ0NEKghYOGhoaGRgi0cNDQ0NDQCIEWDhoaXYSS\n0hJmzZ1F3r15zJo7
KyzPkYZGV0BHK2lodAE0S6nG2YaOVtLQiEKEVHCz1V7Q0IgEaOGgodEFOFx9\n2MlzBJoITyOioIWDhkYXoCUuJA2Nrob2OWhodAG0z0HjbEPTZ2hoRCk0F5LG2YQWDhoaGhoaIdDR\nShoaGhoaHQ4tHDQ0NDQ0QqCFg4aGhoZGCLRw0NDQ0NAIgRYOGhoaGhoh6BDhIIT4gRDibSFEgxBi\nWSvazxNCHBVCnBRC/FUIEd8R/dDQ0NDQ6Bh0lOZwGFgEPNtSQyHEjcBDQB6QBeQAv+qgfmhoaGho\ndAA6RDhIKV+RUr4KfNGK5t8CnpVSfiylPIUSKt/uiH50JYqKirq6C62C7mfHIRr6CLqfHY1o6eeZ\noit8DsOB923v3wcGCCH6dEFfOgzRMmF0PzsO0dBH0P3saERLP88UXSEcUoBTtvenAAGkdkFfNDQ0\nNDRc0KJwEEIUCiH8Qgify9/mdtyzFuhle98LkEBNO66loaGhoXEW0KHcSkKIRcBgKeXsZtosBz6X\nUi4w3k8EnpNSunIVCyE0sZKGhoZGO3Am3EpxHdEBIUQsEA/EAnFCiETAK6X0uTT/H+BvQojngXLg\n58Dfwl37TL6choaGhkb70FE+h18AdcBPgZnG658DCCEyhRDVQojzAaSU64EngEKgxPhb2EH90NDQ\n0NDoAEQ8ZbeGhoaGRudD02doaGhoaIQg4oRDW6g4hBD3CCG8htmqxvh/XaT102jfJZQhQog+QoiX\nhRC1QogSIcQ3mmn7qBDCEzSeWRHQr8eFEMeFEMeEEI+fjf6caT87c+xc7t2WZ6bLqGta288ufq4T\njHEpFUKcEkK8K4SY2kz7rnquW93P9o5nxAkH2kDFYWC7lLKXlDLV+N+e8Nr2IFooQ/4INADpwCzg\nv4UQlzbT/oWg8Sztyn4JIb4H3ApcDowAbhZCzDlLfWp3Pw101tgFo1VzMQKoa9rybHfVcx0HHASu\nlVL2Bn4JrBBCDAlu2MXj2ep+GmjzeEaccGgjFUeXIRooQ4QQycAM4BdSynop5TbgVeCbZ/veHdiv\nbwFPSSmPSimPAk8B90ZgP7sMbZiLXUpdEw3PtpSyTkr5ayllmfF+HSpoZrRL8y4bzzb2s12IOOHQ\nDowSQlQKIT4WQvxCCBGJ36mrKEMuRoUUHwi69/BmzrnFMOHsEUL8RwT0y23smut/R6Kt49cZY3cm\niCbqmoh4roUQGcBFwD6XjyNmPFvoJ7RjPDskz6EL8SZwmZTyf4UQw4EVQBPQqXbpVqA5ypCTnXhf\n897hqEpeBP4MVAC5wCohxEkp5Ytd2C+3sUvp4P6EQ1v62VljdyboqnnYVkTEcy2EiAOeA/4upfzU\npUlEjGcr+tmu8exUaSw6mIpDSlkqpfxf4/U+4NfA7ZHWT84SZUgr+lkL9A46rVe4+xrqcblU2AH8\njg4YTxcEj0dz/XIbu9qz0Cc3tLqfnTh2Z4KooK45W891WyCEEKgFtxH4YZhmXT6erelne8ezU4WD\nlDJPShkjpYx1+euoaIQzzqg+C/3cB3zZ9n4kUCGlPKPdRSv6+SkQK4TIsZ32ZcKrniG3oAPG0wWf\nojLpW9Mvt7Frbf/PFG3pZzDO1tidCc7KPOwkdPZYPgv0B2aEYXqAyBjP1vTTDS2OZ8TZ54UQsUKI\nJGxUHELRc7i1nSqEGGC8vgSVqf1KpPUTRRlynxDiUsMe2SxlSEdBSlkHvAT8WgiRLIS4BhX580+3\n9kKIW4UQacbrq4G5nIXxbGO//geYL4QYJIQYBMynE8aurf3srLFzQxvmYpfMw7b2syufa+OefwIu\nAW6VUnqaadrV49mqfrZ7PKWUEfUHPAr4AZ/t75fGZ5lANXC+8f5JFD9TDbDfODc20vppHPux0dcq\n4K9AfCf1sw/wMkoFLgW+bvtsPFBte/88cNzo+4fADzq7X8F9Mo49Bpww+vabTp6Pre
pnZ45da+ei\nMQ9rImEetqWfXfxcDzH6WGfcv8b4Tb8RYc91S/084/HU9BkaGhoaGiGIOLOShoaGhkbXQwsHDQ0N\nDY0QaOGgoaGhoRECLRw0NDQ0NEKghYOGhoaGRgi0cNDQ0NDQCIEWDhoaGhoaIdDCQUNDQ0MjBFo4\naGhoaGiE4P8Dj1WipvXqYbMAAAAASUVORK5CYII=\n",
|
||
"text/plain": [
|
||
"<matplotlib.figure.Figure at 0x7f2d94e9ca58>"
|
||
]
|
||
},
|
||
"metadata": {},
|
||
"output_type": "display_data"
|
||
}
|
||
],
|
||
"source": [
|
||
"plt.plot(X_moons[y_moons == 1, 0], X_moons[y_moons == 1, 1], 'go', label=\"Positive\")\n",
|
||
"plt.plot(X_moons[y_moons == 0, 0], X_moons[y_moons == 0, 1], 'r^', label=\"Negative\")\n",
|
||
"plt.legend()\n",
|
||
"plt.show()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"We must not forget to add an extra bias feature ($x_0 = 1$) to every instance. For this, we just need to add a column full of 1s on the left of the input matrix $\\mathbf{X}$:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 106,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"X_moons_with_bias = np.c_[np.ones((m, 1)), X_moons]"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Let's check:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 107,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"array([[ 1. , 1.78492715, -0.31397748],\n",
|
||
" [ 1. , -0.93489306, 0.39201334],\n",
|
||
" [ 1. , 0.67505274, -0.37301603],\n",
|
||
" [ 1. , 1.1288975 , 0.18007805],\n",
|
||
" [ 1. , -0.6003075 , 0.93864394]])"
|
||
]
|
||
},
|
||
"execution_count": 107,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"X_moons_with_bias[:5]"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
    "Looks good. Now let's reshape `y_moons` to make it a column vector (i.e. a 2D array with a single column):"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 108,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"y_moons_column_vector = y_moons.reshape(-1, 1)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Now let's split the data into a training set and a test set:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 109,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"test_ratio = 0.2\n",
|
||
"test_size = int(m * test_ratio)\n",
|
||
"X_train = X_moons_with_bias[:-test_size]\n",
|
||
"X_test = X_moons_with_bias[-test_size:]\n",
|
||
"y_train = y_moons_column_vector[:-test_size]\n",
|
||
"y_test = y_moons_column_vector[-test_size:]"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Ok, now let's create a small function to generate training batches. In this implementation we will just pick random instances from the training set for each batch. This means that a single batch may contain the same instance multiple times, and also a single epoch may not cover all the training instances (in fact it will generally cover only about two thirds of the instances). However, in practice this is not an issue and it simplifies the code:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 110,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"def random_batch(X_train, y_train, batch_size):\n",
|
||
" rnd_indices = np.random.randint(0, len(X_train), batch_size)\n",
|
||
" X_batch = X_train[rnd_indices]\n",
|
||
" y_batch = y_train[rnd_indices]\n",
|
||
" return X_batch, y_batch"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Let's look at a small batch:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 111,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"array([[ 1. , 0.9532683 , -0.68741253],\n",
|
||
" [ 1. , 1.44012105, -0.3697255 ],\n",
|
||
" [ 1. , 1.88225765, 0.24606083],\n",
|
||
" [ 1. , 0.751504 , -0.40606678],\n",
|
||
" [ 1. , 1.10919226, 0.33902847]])"
|
||
]
|
||
},
|
||
"execution_count": 111,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"X_batch, y_batch = random_batch(X_train, y_train, 5)\n",
|
||
"X_batch"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 112,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"array([[1],\n",
|
||
" [1],\n",
|
||
" [1],\n",
|
||
" [1],\n",
|
||
" [0]])"
|
||
]
|
||
},
|
||
"execution_count": 112,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"y_batch"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Great! Now that the data is ready to be fed to the model, we need to build that model. Let's start with a simple implementation, then we will add all the bells and whistles."
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"First let's reset the default graph."
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 113,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"tf.reset_default_graph()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"The _moons_ dataset has two input features, since each instance is a point on a plane (i.e., 2-Dimensional):"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 114,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"n_inputs = 2"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Now let's build the Logistic Regression model. As we saw in chapter 4, this model first computes a weighted sum of the inputs (just like the Linear Regression model), and then it applies the sigmoid function to the result, which gives us the estimated probability for the positive class:\n",
|
||
"\n",
|
||
"$\\hat{p} = h_\\mathbf{\\theta}(\\mathbf{x}) = \\sigma(\\mathbf{\\theta}^T \\cdot \\mathbf{x})$\n"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Recall that $\\mathbf{\\theta}$ is the parameter vector, containing the bias term $\\theta_0$ and the weights $\\theta_1, \\theta_2, \\dots, \\theta_n$. The input vector $\\mathbf{x}$ contains a constant term $x_0 = 1$, as well as all the input features $x_1, x_2, \\dots, x_n$.\n",
|
||
"\n",
|
||
"Since we want to be able to make predictions for multiple instances at a time, we will use an input matrix $\\mathbf{X}$ rather than a single input vector. The $i^{th}$ row will contain the transpose of the $i^{th}$ input vector $(\\mathbf{x}^{(i)})^T$. It is then possible to estimate the probability that each instance belongs to the positive class using the following equation:\n",
|
||
"\n",
|
||
"$ \\hat{\\mathbf{p}} = \\sigma(\\mathbf{X} \\cdot \\mathbf{\\theta})$\n",
|
||
"\n",
|
||
"That's all we need to build the model:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 115,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"X = tf.placeholder(tf.float32, shape=(None, n_inputs + 1), name=\"X\")\n",
|
||
"y = tf.placeholder(tf.float32, shape=(None, 1), name=\"y\")\n",
|
||
"theta = tf.Variable(tf.random_uniform([n_inputs + 1, 1], -1.0, 1.0, seed=42), name=\"theta\")\n",
|
||
"logits = tf.matmul(X, theta, name=\"logits\")\n",
|
||
"y_proba = 1 / (1 + tf.exp(-logits))"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"In fact, TensorFlow has a nice function `tf.sigmoid()` that we can use to simplify the last line of the previous code:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 116,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"y_proba = tf.sigmoid(logits)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"As we saw in chapter 4, the log loss is a good cost function to use for Logistic Regression:\n",
|
||
"\n",
|
||
    "$J(\\mathbf{\\theta}) = -\\dfrac{1}{m} \\sum\\limits_{i=1}^{m}{\\left[ y^{(i)} \\log\\left(\\hat{p}^{(i)}\\right) + (1 - y^{(i)}) \\log\\left(1 - \\hat{p}^{(i)}\\right)\\right]}$\n",
|
||
"\n",
|
||
"One option is to implement it ourselves:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 117,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
    "epsilon = 1e-7 # to avoid computing log(0), which would yield -inf\n",
|
||
"loss = -tf.reduce_mean(y * tf.log(y_proba + epsilon) + (1 - y) * tf.log(1 - y_proba + epsilon))"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"But we might as well use TensorFlow's `tf.losses.log_loss()` function:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 118,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"loss = tf.losses.log_loss(y, y_proba) # uses epsilon = 1e-7 by default"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"The rest is pretty standard: let's create the optimizer and tell it to minimize the cost function:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 119,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"learning_rate = 0.01\n",
|
||
"optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)\n",
|
||
"training_op = optimizer.minimize(loss)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"All we need now (in this minimal version) is the variable initializer:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 120,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"init = tf.global_variables_initializer()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"And we are ready to train the model and use it for predictions!"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"There's really nothing special about this code, it's virtually the same as the one we used earlier for Linear Regression:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 121,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"Epoch: 0 \tLoss: 0.883408\n",
|
||
"Epoch: 100 \tLoss: 0.33318\n",
|
||
"Epoch: 200 \tLoss: 0.290913\n",
|
||
"Epoch: 300 \tLoss: 0.272955\n",
|
||
"Epoch: 400 \tLoss: 0.263399\n",
|
||
"Epoch: 500 \tLoss: 0.257667\n",
|
||
"Epoch: 600 \tLoss: 0.253585\n",
|
||
"Epoch: 700 \tLoss: 0.251268\n",
|
||
"Epoch: 800 \tLoss: 0.249604\n",
|
||
"Epoch: 900 \tLoss: 0.248331\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"n_epochs = 1000\n",
|
||
"batch_size = 50\n",
|
||
"n_batches = int(np.ceil(m / batch_size))\n",
|
||
"\n",
|
||
"with tf.Session() as sess:\n",
|
||
" sess.run(init)\n",
|
||
"\n",
|
||
" for epoch in range(n_epochs):\n",
|
||
" for batch_index in range(n_batches):\n",
|
||
" X_batch, y_batch = random_batch(X_train, y_train, batch_size)\n",
|
||
" sess.run(training_op, feed_dict={X: X_batch, y: y_batch})\n",
|
||
" loss_val = loss.eval({X: X_test, y: y_test})\n",
|
||
" if epoch % 100 == 0:\n",
|
||
" print(\"Epoch:\", epoch, \"\\tLoss:\", loss_val)\n",
|
||
"\n",
|
||
" y_proba_val = y_proba.eval(feed_dict={X: X_test, y: y_test})"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Note: we don't use the epoch number when generating batches, so we could just have a single `for` loop rather than 2 nested `for` loops, but it's convenient to think of training time in terms of number of epochs (i.e., roughly the number of times the algorithm went through the training set)."
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"For each instance in the test set, `y_proba_val` contains the estimated probability that it belongs to the positive class, according to the model. For example, here are the first 5 estimated probabilities:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 122,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"array([[ 0.70773607],\n",
|
||
" [ 0.89086711],\n",
|
||
" [ 0.98350209],\n",
|
||
" [ 0.06995566],\n",
|
||
" [ 0.9851836 ]], dtype=float32)"
|
||
]
|
||
},
|
||
"execution_count": 122,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"y_proba_val[:5]"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
    "To classify each instance, we can go for maximum likelihood: classify as positive any instance whose estimated probability is greater than or equal to 0.5:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 123,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"array([[ True],\n",
|
||
" [ True],\n",
|
||
" [ True],\n",
|
||
" [False],\n",
|
||
" [ True]], dtype=bool)"
|
||
]
|
||
},
|
||
"execution_count": 123,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"y_pred = (y_proba_val >= 0.5)\n",
|
||
"y_pred[:5]"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Depending on the use case, you may want to choose a different threshold than 0.5: make it higher if you want high precision (but lower recall), and make it lower if you want high recall (but lower precision). See chapter 3 for more details."
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Let's compute the model's precision and recall:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 124,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"0.89473684210526316"
|
||
]
|
||
},
|
||
"execution_count": 124,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"from sklearn.metrics import precision_score, recall_score\n",
|
||
"\n",
|
||
"precision_score(y_test, y_pred)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 125,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"0.86734693877551017"
|
||
]
|
||
},
|
||
"execution_count": 125,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"recall_score(y_test, y_pred)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Let's plot these predictions to see what they look like:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 126,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAAEFCAYAAAAIZiutAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztvXl4VdXV+P/ZCQFBEggCERBIXtAKWBFBcGBIQHBAKYJV\nGRSKVjsoAg44UbG0DqBYean9VgUnwJZBhReZLBBl1iqgxao/YwIIyCBgwhhJ1u+PO3DH3OnceX2e\n5z7JPXefc9bZ99y9zl7TNiKCoiiKoriSEW8BFEVRlMRDlYOiKIrihSoHRVEUxQtVDoqiKIoXqhwU\nRVEUL1Q5KIqiKF6oclAURVG8sFQ5GGN+b4z52Bhzwhgzs4Z2I4wxp4wx5caYCvvfnlbKoiiKooRP\nLYuPtwuYBFwF1A3Qdr2IqEJQFEVJQCxVDiLyLoAx5hKghZXHVhRFUWJHPH0OnYwx+4wxXxpjHjPG\nqP9DURQlQbDarBQsHwAXiMh2Y0wHYC7wE/BMnORRFEVRXIjL07qIlInIdvv/24A/AjfGQxZFURTF\nm3jNHHxhfG40RsvGKoqihIGI+BxXg8HqUNZMY8wZQCZQyxhTxxiT6aPd1caYpvb/zwceA971d1wR\nSfjX448/HncZVE6VUeVUOR2vSLHarPQYcAwYDwyz//+oMaalPZ/hHHu7PsBnxpgKYDEwH3jKYlkU\nRVGUMLE6lPUJ4Ak/H2e7tHsAeMDKcyuKoijWoeGjFlFYWBhvEYJC5bSOZJARVE6rSRY5I8VYYZuK\nJsYYSXQZFUVREg1jDBKBQzqRopUURUky8vPz2b59e7zFSGtat25NWVmZ5cfVmYOiKGFjfzqNtxhp\njb/vINKZg/ocFEVRFC9UOSiKoiheqHJQFEVRvFDloCiKEoCdO3eSk5NTo38lOzs7Ko7heKHKQVGU\nlCQ/P5969eqRk5NDs2bNGDVqFMeOHQvrWC1btqS8vBxjbP7doqIiZs50X+yyoqKC/Pz8SMVOGFQ5\nKIpiOaVlpQwfPZyikUUMHz2c0rLSmB/DGMN7771HeXk5n376KR9//DF/+tOfQpYjXVHloCiKpZSW\nldL37r7Mzp5NcUExs7Nn0/fuviEN7lYcA3CagZo1a8Y111zDf/7zH/bs2cOAAQM466yzOO+883jl\nlVec7T/++GMuueQSGjRoQLNmzbj//vsB2L59OxkZGVRXV/PYY4+xZs0a7r77bnJychg9ejQAGRkZ\nfPvtt2zatIlmzZq5maDeeecdOnbs6JTp6aefpm3btjRp0oRbbrmFw4cPh3RdsUCVg6IoljJh6gRK\nOpZAbfuG2lDSsYQJUyfE9Biu7Ny5kyVLltCpUyeGDBlCq1at+P7775k3bx6PPPIIq1evBuDee+9l\nzJgx/Pjjj5SUlHDTTTc5j+EwKf3pT3+iR48eTJ8+nfLycqZNm+b2ebdu3ahfvz6rVq1y7vvWW28x\nfPhwAF544QUWLVrEmjVr2L17N7m5ufzud78L67qiiSoHRVEsZVf5rtODuoPasLt8d0yPATBw4EAa\nNWpEz549KSoq4te//jXr1q1j8uTJZGVl0bFjR+644w7efPNNALKysvjmm2/44YcfqFevHl27dg36\nXK4zhVtuuYU5c+YANl/EkiVLGDJkCAAvvfQSf/7zn2nWrBlZWVn84Q9/YP78+VRXV4d0bdFGlYOi\nKJbSIqcFVHpsrITmOc1jegyAhQsXcvDgQUpLS/nf//1fdu/eTaNGjahXr56zTevWrdm1axcAM2fO\n5KuvvuL888+nW7duvPfeeyGdz8HQoUN55513+Omnn3j77bfp3Lkz55xjW7Fg+/bt3HDDDTRq1IhG\njRrRvn17srKy2Lt3b1jnihaqHBRFsZRJ4ybRZmub04N7JbTZ2oZJ4ybF9BiAV+hp8+bNOXjwIEeP\nHnVu27FjBy1atACgTZs2zJkzh/379/Pggw9y
4403cvz4ca/jOkxI/mjXrh2tW7dmyZIlvPXWWwwd\nOtT5WatWrVi6dCkHDx7k4MGDHDp0iKNHj9KsWbOQri3aqHJQFMVSCvILeH/6+wyrGEZRaRHDKobx\n/vT3KcgviOkxfHHOOedw+eWX8/DDD3Py5Ek+++wzZsyY4fQHzJ49mwMHDgDQoEEDjDFkZtoWs3RV\nNHl5eXz77bc1nmvo0KFMmzaNNWvW8Mtf/tK5/a677uKRRx5hx44dAOzfv59FixZFdF1RId5L2QWx\n1J0oipKYJPLvs6CgQFauXOm1fdeuXXLddddJo0aNpG3btvLSSy85Pxs+fLg0bdpUsrOz5YILLpBF\nixaJiEhZWZlkZGRIVVWViIhs2LBBzjvvPGnUqJHce++9IiKSkZEhJSUlzmPt2LFDMjMz5frrr3c7\nf3V1tTz//PPys5/9THJycqRt27by6KOPhn2d/r4D+/awx16tyqooSthoVdb4o1VZFUVRlJihykFR\nFEXxQpWDoiiK4oUqB0VRFMULVQ6KoiiKF6oclJRGRJj80EMaUaMoIaLKQUlpli9YwJ4XX2TF22/H\nWxRFSSpUOShJh4jwzEMP8cz48TXOCESE5c8+y9SKCpZNmaKzB0UJAVUOStKxfMECvps2jU+nTatx\nRrB8wQKu/vxzDHDV55/r7EGJG9dee62z8muyoMpBSSpEhGXPPsu048dpduIESydP9jkjcMwa+tmX\nhbzq2DGdPaQZ+fn5nH322W6F82bMmEFRUVFUz/vEE09w2223uW1bsmQJt956a1TPazWqHJSExtOh\nvHzBAvpu2YIBrgbytmzxOSNwnTUAOnuIA1YEA0RyDGMMVVVV/OUvf/HargRGlYOS0Lg6lB2zhmtP\nngTgKmBvZaXP2cPn69axvksXJvbq5Xxt6NKFz9aujcNVpCdWBANEeowHHniA5557jvLycq/Pvvzy\nS/r168dZZ51Fu3btmDdvnvOzgwcPcv3119OgQQO6devGhAkT6NGjh/PzMWPG0KpVKxo0aMAll1zC\nWvt9tXz5cp588kn++c9/kp2dTadOnQAoKipi5syZVFZWkpubyxdffOE81oEDB6hXr56zGuzixYvp\n1KkTubm5dO/enc8//zysa4+YSKr2xeJFAld9VKJLdXW1jOnWTapBxnTrJkvmzpXFdeqIgPO1FOTJ\n2rVl2fz58RY3LfH3+/T87qqrq0M+dqTHyM/Pl5UrV8rgwYPlscceExGRV155RYqKiuTo0aPSsmVL\nef3116W6ulo2b94sjRs3li+++EJERG6++WYZMmSInDhxQr744gtp2bKl9OjRw3ns2bNny6FDh6Sq\nqkqmTp0qZ599tpw8eVJERCZOnCi33nqrmyyFhYUyY8YMERG5/fbbnfKIiPz1r3+Va665RkREPvnk\nE2natKl8/PHHUl1dLW+88Ybk5+dLZWWl3+v09x0QYVVWnTkoCYunQ3nx66+zoHFjRuTkMLJBA0bk\n5PBa7dp81KSJzggSDCuCAawKKHjiiSeYPn06P/zwg3Pb4sWLKSgo4LbbbsMYw0UXXcTgwYOdy3W+\n/fbb/PGPf6ROnTq0a9eOESNGuB1z6NChNGzYkIyMDMaOHcvJkyf56quvgpJnyJAhziVEAebMmcOw\nYcMAeOWVV/jNb35Dly5dMMZw6623UqdOHTZu3BjWtUeCKgfFDUmQpDHx4VCufeAAM3bu5PUff+S1\nw4cZMmMGzerU4TcvvMADzz8fV3mV0/j67kINBrDiGA46dOjAddddx1NPPeXctn37djZu3OhcqjM3\nN5c5c+awd+9e9u/fz6lTp5zLegK0bNnS7ZjPPfcc7du3Jzc3l9zcXMrLy51moUD07t2bEydO8PHH\nH7Njxw62bt3KwIEDnXI999xzbnJ999137N4d2trZVqDKQXEj3kljDuW0bP78Gh3KjsEj2ByGRFF6\n6YAVwQBW
BxRMnDiRl19+2blWdKtWrSgsLHRbqrO8vJzp06fTpEkTsrKy+O6775z779y50/n/mjVr\nmDx5MvPnz+fQoUMcOnSInJwc570VyOFtjOGmm25izpw5zJkzh+uuu44zzzwTsCmhRx991E2uI0eO\ncPPNN4d13ZFQK+ZnVBIW1wF33JQp9Bs0KOaRHQ7ltL1nTxp36cIGl/OLCPXXruWqwYN9mhyuGjw4\n4HFXXHJJje2UyPl83TqO1PDdxeoYrrRp04abb76ZadOmceGFF9K/f3/Gjx/PrFmzuOWWWxARtm7d\nSnZ2Nj/72c8YNGiQU6Fs376dN954g9atWwNw5MgRsrKyOOuss6isrOTpp5+moqLCea68vDz+9a9/\n2VZT8/P7GTJkCAMHDqRx48b8+c9/dm7/9a9/zaBBg+jTpw9du3bl6NGjfPDBB/Tq1cupQGJGJA4L\nzxfwe+Bj4AQwM0DbscAe4BDwCpDlp51fR4xiLUvnzZNl9erZHL316sXcyRusA9K1nUBI7cN1jiq+\nSeTfp+cyoTt37pS6detK7969RUTk66+/lv79+0uTJk2kcePG0qdPH9m6dauIiOzfv1/69+8vDRo0\nkK5du8pDDz0kV155pYiIVFVVye233y45OTnSvHlzmTJlitu5fvjhB+nevbvk5uZK586dRUSkqKjI\n6ZB20LZtW2ncuLH89NNPbtuXL18ul1xyieTm5krz5s3lpptukiNHjvi9Tn/fARE6pK1WDgOBAcBf\na1IO2KIQ9wDnAw2A1cCTftr67RTFOkIdcKNBsMrJtZ0zasmlfXV1tTwzfrxT9pqO69lWCY10+X2O\nHz9eRo4cGW8xfBIt5WCpz0FE3hWRRcDBAE1vA2aIyJci8iMwCfiVlbIooRGPpDFx8QNICA7IQDkM\nnrkRNR033j4WJTH56quvnPkFH330ETNmzGDQoEFxliq2xMvn0AF41+X9VqCpMSZXRA7FSaa05vN1\n66jo3JnpZWVcXFDgXLQ8XBtvMLj6AUTEr3LyPH9NkUkOZeDwm1RXV/s9br9Bg+LuY1ESk4qKCoYM\nGcKePXto2rQpDzzwANdff328xYopxteTWcQHNWYS0EJERvn5/BvgdyKywv6+FlAJ5IvIDo+2Eg0Z\nUx0RYcrDD/PAU08FPeAtmz+fZb/6FSeuvJK/vf12VAdKEWHcZZcxddMmxnXrRrNLL+Xo5s1u5xQR\n6l98cUhhqsvmz8eMGGGbIdSrx/8VFdG4osLncX9+xRVubc0bb4SlCMPp61TB8RChxA9/34F9e9g3\nZLxmDkeAHJf3OYAAFb4aT5w40fl/YWEhhYWFURQtNQglOkdEmPzww+xZvZqrjhxh0aJFNjPTjTdG\nVT7XaCPzwANc5VEDJ1ScswYXE9LyAweYuGGD16DtVE7HjiHA1mPH2DN5clizB42EUhKB4uJiiouL\nrTtgJA4Lfy9sPoSaHNKzgUku73sDu/20rdkbo3gRanTO0nnz5O66deXJrCwZY3dGjzz33Kg5aaPl\n/A7kqPbXdinImDDLcKR7JJT+PuOPv++ARHJIG2MyjTFnAJlALWNMHWNMpo+mbwC3G2PaGWNygUeB\nV62UJZ0JpeyAyOkS2F//9BNXYbPLDywpYfmCBVGXD6xzfodSbM/R9vGePZmRnc1UYAOw5cMPw74W\nrfqqpBSRaBbPF/A4UA1Uubz+ALTEZjI6x6XtGOB74DCa52AZoT6VL503z1nM7h37U7REefYwecwY\n+UPPnvJ4r17O1x969pTJY8ZYfq5ALJk7VxZlZYmAvAvy1P33Oz8LFOaaCOG/8aZ169aCzSSsrzi9\nWrdu7fO7IcKZQ1TMSla+VDmERiimFZ+Dm/2vgCysVSulq51WV1fbFKDL9Q8880ypqqoSEVtfjsnO\nDjvfQlHiSaTKQctnpBihlB3wZd7pBVwLnNu+PbmNG0c1lNUqRGzRQvc/+S
TPPvJI0FFDy+bP54aS\nErfrv/3oUSaPH8+DzzzDq6NH848awlytLvGgKIlEVEJZrURDWaPHlLFjOfLppxw6cAD++19yRfgS\n6Azs6daNqT6ifBKRZfPns3zUKJr++tfsnD6dAbNnBxVp9fv+/clYupRDIrTBphyqgU/y8rhi2DAu\nmDqVARBRmKuixItIQ1njbjYK9ELNSlHF1bS0lNM+hyVJYh5xlf/6OnWkiuB8JY79loAsczELCch7\nZ5wh19ep49eXoCU3lGSACM1KWrI7zVk2fz7HP/kEAZZjK3oFcHUE9fNjiatp7M6TJ3mf4CKtHPv9\nB1gPTARGG8Po9u158ayzuOvkSb/RVFpyQ0kHVDmkKCLBrV+w6LXXMCIMOOccemdkWBpe6k+GYGUL\n5viudZP6A8uA66ur+dudd1JdXe13X0co69FevTC9ekGvXuT26ME5ffsip06xCZvCeBwYnp3NentI\nrOOcwa4joShJSyTTjli8ULNSWPiKtPE0h7iaZG46+2yZ0KOHpeGl/qJ9AkUBhXJ8r2ghu5noHZAn\n77vPa59AJqFAEUjxLmuuKMGChrIqnrgN+s2a+Q3NjOZA5y9z2MqMYke+xJDcXBkKMgLkNpAbQH4P\n0qtBA6/jB1JMNeVgaF6DkkyoclC8cB30HYldnoNyVVVVVAc6f4rH3/ZInLyuA/o97dvLPcbIH0B+\n55GnEali0rwGJZlQ5aC44evpduCZZ8p7c+e6DcpP3ndf1AY6f0/YNSkkK0xNgZ7sI50pJVJmt6IE\nQpWD4oavp9tFIL9s1sxt0IyGj6EmGWpSSEvnzbPE1FTTk30kJqFohq5qWKwSLSJVDpohnWJ8vm4d\nFV268ObmzbSpqMAAZcDQPXvcIpF+VV6OufdeZ2KXiD3L+KmnLJHBV+bwlytXcsLH9gOvvsoAj+J1\n4SScuZ5XRNhZWso5BQVk26OMgl1MyJNoluTWct9KoqIZ0imI64I3AFOA7cZwsFEjzu3Q4fQqby4L\n6TiyjK9+9dWYDlIipxf9MYAA4yzIzva8Hkc2eKiLCbnKZ4VcsTq2omiGdJrjyyzhyzY+sl07+W1W\nVsACfLGOvomGk9fK64lmRNfSefNkad268gzIkrp11bGtWArqc0hNgikX/cz48bJk7tyAjtxAg2U8\nY/ej4eS16nqC9VME6zdwbedavmOMPT9Dw2IVK1HlkKIEUy763uxsualt26DWbPA3WKZa7H4o1xNO\nQtySevXkroED3fbx/K78Hde1nWPW4CiRPkZnD4rFqHJIQWpKIHtm/Hi3kNCRxtiK5oWyZoOf8M5U\niN0PdanQUBPi7m7fXq51Mc/5+q78Zae7JiY+PXq0jGzXThZlZIiALMzIkJHt2mlYrGIZkSoHjVZK\nQHwtPXnV4MHOyJZnTp3iKvvnN4mwArjq2DGf6w7UtCTnVYMHp9yaBMFej8jpGkn+1mvwdFSL2BzI\ni3/6ybmP53e1fMECn8d1bTd0zx6+yMqiYU4O19nrP11fXc3qnBzunzo1an2jKCERiWaJxYs0mzkE\nSiCrsie1+Vq9zdcTsiZu+SYcv4TnPq75Gc78kbZtZanHcX19p9fWqSNL6tZNmRmbkngQ4cxBQ1kT\nDM8wVLAtNrP5t7+l04sv8s7x41wL/MJln0UZGbx7/vm0bNw4YGimEl74rK99Rp17Ljd/9x1XHz9u\nawOMMoaZIm7H7XfffWSMHOn2nb4LzD7nHDq0aeN2Dv3+FKuINJRVlUOCMWXsWCo++YTNZWVcXFCA\nMYZqET756ivu3ruXZ4DmwE9nnEH7rl2dOQtnduoEZ5wR9BKZ6Yw/BVzTam++9llUqxbvnHsurZs2\nBWD7/v3c8OWXDHApFb4oK4vlffty1pEjfOuSmOhYcW7xnj36fSlRIVLloD6HBOOB559n2fz5VIwa\nxeWjR3PV4MEsmz+fy2+7jWXARcDzwK
iTJ7nsnnucy2E6kr400zYw4fhZPl+3jorOnZnuorRFhPYu\nT/pTxo7lk8aN+dR+3IP79/PDl1/S0Bguv+cerhgxwrmYEsCyioqws8EVJdrozCHBEBHGXnYZz2/a\nxNhu3Xh+wwaeHTeOL5Yv53/++1+6Ylut7V1jWH7ttfxt8WI3k4dm2kaPULLIPb+TZpdeytHNm0PO\n0FaUcNEM6RRj6bx5srhOHRGQ/6tTx+nQvLdrV7nX7sx0ODXvdQmd1AVookuoWdf6nSjxhggd0rpM\naAIhIix79lmuPXkSgP4nT7J0yhSWzZ9P3pYtXANuIalXbtniDJ10LJV5VZKs/Zxs+Aov9oeI6Hei\nJD1qVkogls2fT9Xw4fS3KweAxXXq8N6VV7J3yxayjxxxKgcRITM7mzodOzKwuDgk56oSGhJkdJOI\nrbLtzzt39opO0u9EiTXqkE4hPlu7lq21a/PRyZPOQeib2rXp2LYtf1u82Oc+U8aOZf2RIymTxJaI\nBEokdG2358UXKe3Zk6ZRTix0KCKNTlOihSqHBOLC7t3p+PLL7hEtVVWYHj387qPOzOgTTHSTw5Q0\ntaKCcQcOMDFAzsTkhx4CY3gwzMFd14FQoo2alRKIcNccUOKPax5EMDkTr956K02N4bo33wx5cHc1\nc2l0WvxJ1FmcRislMLoEZHoQaiVY18gzR8RZKPeKRkIlFlasfx4N0GilxMU59a8hssUfYjc9iH3W\n5PleSRxq8kn4ausaeXblli2sePvtoO8V0UiohMLxfUytqEi570GVQ5SI9KbxHCwiUTRKdPl83TrW\nd+nCxF69nK8NXbrw2dq1bu1EhGVTprC3spJ+9m3OcOUpU4K6V0JRREr0CSXEOemIZNoRixdJalaK\nZOrvmXDlun5DMi/EEwmpYKJbOm+ePFm7tixzqcQqIAuzsuSp2rWDuldqqrKbCn2UTCT6QlnoYj+J\nR6Q3jadiefK++9Lexpyodt1QmDxmjNzQooXc1qCBjLC/bsvJkRuzsuQZCwaYVOijZCLRF8qKVDlo\ntFIUCKfqpwMR74SrQWeeyYKjR8kguPLSqYZrn6TatUdyr7ji2ke3NGvGW999R0aGWo2jSaJHFyZU\nyW5jTC4wE+gL7AceEZG3fLR7HHgUOAHOMfBCESnz0TbplEMkN42vwWIxkAXO/Id0y7YNJUw02bBq\ngHHto4XAf++/n4emTImCxEqykGjKwaEIRgEXA+8Bl4nIfz3aPQ60EZHbgjhm0imHSPAcLHZ88w1V\nFRVUZGdzYdu2QGI9nUQbXzOpVJs9RIrf2WZ5uc4e0piEUQ7GmHrAIaC9iJTYt70BfCcij3i0VeWg\nBIVVZpdUxt9s8z86e0hrIlUOVj5WnAeccigGO1uBDn7aX2+MOWCM+dwY8xsL5YgpIuHlH4S7X7oR\nbJhoOvP5unWs69KF4dnZPA5MBD4C1rz5pt5fSvhE4s12fQHdgd0e2+4AVvloez5wNjZ/w2XAbuBm\nP8cNzUUfY8KNENHIEsVKEj1yJpH5tvRbGXbPMCkcUSjD7hkm35Z+G2+RLIEIo5WsLLx3BMjx2JYD\nVPhQSF+6vN1gjHkBuBH4p4XyRB0Rl2JrU6bQb9CgoOzg4e6nKP4IZ+lTBUrLSul7d19KOpbAWUAl\nbLx7I+9Pf5+C/IJ4ixdXrFQOXwO1jDFt5LRpqSOwLYh9hdPr2HgxceJE5/+FhYUUFhaGL6WF+MqO\nDOaHGMx+IolZzEtJTIIJTtB7ypsJUyfYFENt+4baUNKxhAlTJzBr2qy4yhYqxcXFFBcXW3fASKYd\nni9gDjAbqAdcgc1B3c5HuwFAQ/v/XYHvgOF+jmntXMsigk1088xaDXY/NTspVqP3lDeFIwqFiXi9\nikYUxVu0iCHBCu/93q4Y9tmVxG9E5L/GmO7GmHKXdrcA39i3vQY8JSJJpaaDrXHjq0ZSoP1E/Ndl\nEn
VkK2EgYqvrdHZFBUsnT9b7x06LnBZQ6bGxEprnNI+LPImEpYv9iMgh4AYf29fi4o8QkaFWnjce\nhLwAjN23EMx+NZmddJEXJRwc1WC/B/LslWD1/oFJ4yax8e6NlOSX2AzgVVD/cH3ufPHOeIsWfyKZ\ndsTiRYKalYIhnOJ7NZmdPAvyJUqBr2iQzkXkrL52rzUkQO7t2jUt+9YXH6z5QOp3qy88YjcrPYK0\n6d8m6aOWSDCzkmJHJLy6+zWZnVK6PLAH6Vyi3Opr91xD4mpOzx4UeGnuSxzpc8SnU7omSstKGThy\nIHmX5JF3RR6/uP0XlJaVRl/gGKHKIUqEW3ffX9LX1jVr0maRF4diTcUFVAIRjWv/bO1aPqlTx7mG\nxFXAJ3XqsHXNmoiPnQrsKt91WjE4qA27y3f73ae0rJReo3qx8KuF7Ou3j3399rHo7EUU3lWYMgrC\nUp+Dcppw4879hSQumz+fji+/7FPZpJrtONwQ4VQgGtd+YffuXvfOHVVVmB49IhU3JXA6pV0VRACn\n9ISpE9h5aif0Bo4B6wGBHdU7GDtxLO++9m50hY4BWrI7SUj08sA1ISHE14ukb6G9aF17Mt87scAt\nEa42UAlttrapMRGuaGQRxWXF0AnYBBTh3Lfuirpsm7ct7kl0CVN4L1qockh+ls2fz/JRo7j61VcD\nPgWnc6G9dL72eFNaVmpLiNtXwvc7vieveR5tm7dl0rhJzkHe0WZX+S7Kvi6j7FQZnAH0wGvWMaxi\nWNyT6CJVDmpWUqKKqw09mFIhnuY4EeHT0lJ6rFmT8gOklsCIHwX5BUwaN4m+d/elrKiMstplbKrc\n5CylAbiX2WgBmUsyqTpWFbK/IlnQmYMSVSJdqCeUWYeiRMLw0cOZnT3b5ywA8PlZ3Xl1Of7L4yk5\nc9BoJSVqhBvO67l/OkYtKbGnpqglf591vLAjbba2OZ1lbfdXTBo3KQYSRxdVDgmEpFhpjFDDeT2v\nP53yOmoi1e6LRKVFTgvb4sbFwGr73/22qCV/ZTbaNLc5rodVDKOotIhhFcNSpqKrmpUSiFQzoYQa\nJeN6/f0GDfKK3BnbrRvNevXiwaefTvnIJVeCuS9cnaUtclq4OVKV4Phw7Yf0ubcPp64+5Yw8qrWs\nFitfWEnLc1qGHNEUbzRaKUVwDWNMl9BNV1yvf2y3bhzPy+OGf/2Lq10idxZlZTE7M5NRs2alhPIM\nhmDui3BCMRVvavI5zJo2y6mAd5fvpnlO84RXwOpzSDL8mQjS3YTiev19Nm9m4//9H4tbt+aedu24\nEXgceL3TEXVMAAAcWklEQVSqis4nTqSV/yGY+6KmNQmCobSslOGjh1M0sojho4enTIZvqATKlC7I\nL2DWtFmsem0Vs6bNSmjFYAWqHGKMr7o5kTpukx3P669VWcnlIuyrrGTfTz9xB/AE8KvqajqSPsoz\n2PsinPIPDhyzjtnZsykuKGZ29mz63t03LRWElu92R5VDjKiuruaGSy9lyeTJXjX1w63DlCq4Xr8A\nK4AXgRMlJZzxzTdcZW/XH1gG9EsT5RnsfRHJoOY26zgMrIeS8hJ6D+2ddgpi0rhJKRt5FA6aBBcj\nnnngAZpu2sSxjAyvmvrpnvzkuP71wEcffcRvT5zAACOBr8B9cATeJz1qLgV7XzjXJPDwOUyaHnhQ\n21W+y5bUdRi3MhBllWX0vbtvWvktCvILeH/6++5+BXsfDh89PO2c/eqQjgHV1dUMzslhwdGjDAbe\nBsYCdO3K8xs3ppXjuSaWzpvH3JtvZqaIcxbxSyAbqAZOAdSty/FGjfh5mzZJUxsoFpFE4TpLnU7Y\n9cDlJGQyVzxJZme/RivFgVAKyQE8dd99XDB1KllAFafNI5tr1+biOXNS+uk3FH7bvz/XLFvGgOpq\n57Z3gRmNGtH55z9PymJxiT64OOUrL4E+3p8XlRax6rVVsRcsQQgU
wZTIaG2lOBDKUp3V1dV89Pe/\nMx64D5hq334V8EqdOmSmQc2gYBAR9u7bx7+vuIJPMzLctvdMMoXgSk2RRIkwuDhMKb2H9qasssxr\nENy2bRvDRw9PG1OKJ06zmyspUjspEKocQiTUQnLPPPAAdxw9ygpsK3BpTX3fLF+wgNZffcUVKZIA\n6CAZBpeC/AJWzVnlNcNhFezrvo/Z9WY7C9Clm4IIZq2HVE1A1GilEAk1H2HLqlXMzcnh2dq1mZmZ\nyS2ZmdyckcEvzzyTDV268NnatbERPIGxsoZSopWaSJbwSMcMYljFMPJW5MEa4FKgISHnTaQSgSKY\nUjkUWH0OIeBvMZbn1q/n2UceCdoHobgTaeVWz2MlUgmSRPc5+KJoZBHFBcXe29PU/1CTsz+RfRKa\nIR1D/MWdP/Pgg5YuCJ9OWJkAmIhVXF2fyJOlMFuyzHZiRU2Z0ZEkICY6qhxC4PN161jXpQujWrbk\n8Z49mdirF+u7dGHtrFlhD0iJZgaJNVYlAJaWlXJt/170+vfHGODKzz5LGGWdbGUXNBnMN77KjKSy\nIlWzUoh4mi10MZvIsGJ949KyUq78/ZU02fItG3bjNPn97qKLeNHj2EpwJFuRuWjg6mhuQAM279nM\nji473MyDMx+ayainRyWk2VDzHGKIiDD2sst43l45dOr69dx3+eVhLwif7pVYrWL46OG8891s3lgM\ng386vf3dWpnU/cc/01LpKpHh5Staid+1oieNm+RUpNmZ2Zgqw4/8GPfIJfU5xJDlCxbQd8sWm9li\nyxaeefDBiEwi6V6J1Sp2le+iwW6Y1hwKW59+vXZWfY0G84FWYQ2MV35KBn59Cw6z4YyJM9i2dxsL\nWyxMicglzXMIEhFh2bPP8vzJkwD0P3mSv82axbHOnZleVsbFBQUOTe2zJpJnVrXTeeriiA0mb0Lx\npkVOC4r7wB6vp7rrkjZ5Llq4PRGfBVSStjkMNeGVn/ITAfMdEj3hMVR05hAkrrMGsM0Sfnv4MPW6\ndqXt4cNcPno0E4uLeeKDD3wOSJ6lutO9EmuwBPOUqw7U0wTqr0jXfkgFgrmn3BzNh4ETwCrc7rFW\n/27ldo+lWuSSzhyC5LO1a9lauzYfnTzp9C/8f1lZVM2ezT8CZEv7yqpO90qswRDsU66/aprp9iQc\nTH8lQ8Z2NAn2nnKrdLsFW3mDY9gKFApQDZ3+p5PbPsFkUycT6pAOEteoJAdP1a7NhcbQ/+TJGiOV\nrEzySieCXbYx1coWhEtN/eVwmr7/yfvsy9wHnbFlP7u0SUbTR6gMHz2c2dWzYRu2Qd4AHWBYhvf1\nO+6v9za9x+FrD3sdyzMpMNESHrXwXozwfNIXEbZt3sxDFRWAf5+B+hbCp6anXLWde+Ovv0r2lZzu\nq3446yZxKVAv+LUfUoFvdn8Du3GuW0ElsBpKmpd4tXU4moePHs7sSm+l6zkjSLUZrM4cwsTXTMLX\nrCDYdoo3NT0JAz4/G/D9ABbOWBhTORMFf/2VvzqfsqIyr+15K/K48rIr02rGVXB5gc++yF+dT+l6\n31FFiTYjCBadOcSJYH0G6lsIn5pWOBs1cZTPp+QVm1dQWlaa0D/aaOGrv1r9uxXlJ8t9Okrbd2if\nFqYkV85udTZltcvcN9a2bfdHKDOCVDJ16sxBSWh8ZeoCtvUHapVBJnARNvt5JbAGhl2UHvZzX7j2\nV3ZmNptLN7Pz1E6/CVyp2k/+BuloFspLtBlGQmVIG2NygZlAX2A/8IiIvOWn7TPA7djcQjNFZLyf\ndqocFCe+foCsBi4GPgW6QdGh9Kwe6olzIDyG2/rQ8R60ok1NgzQQtQE80Sq0JlqG9IvYIoKbAMOB\nvxlj2nk2MsbcBQwAfg5cCFxnjLnTYlmUFMRXnD5F2Bys3YB6yRs6aDXOuPuG2PpmPbDa5mtIVcUA\nNedyRLNKruY5+MEYUw8YBLQX
kePAOmPMIuBW4BGP5rcBz4nIHvu+zwF3AC9ZJU88CXWNaSV4/EXk\n0JS0i7wJhFvcfUOgEKiEKyuuTFnFAIFzORxRSFaTankOVs4czgNOiYhrTNhWoIOPth3snwVql5R4\nZkMr1uGvRHLe0TwGfD+ACxpfwKiJo7RmEOmbOR5uGe1Ia06lWn9b5nMwxnQH5opIc5dtdwBDRaS3\nR9tT2GYYX9vftwW+EpFMH8dNKp9DdXU1Q845h3/s2aOVVqOAP3tyIpdOjifpWHo7HMewVc7kROrv\nhHFIG2MuAtaKSH2XbeOAXiLyC4+2h4ErReTf9vcXA6tFpIGP4yaVcnjqvvvoMHUqA9B8hmjh6wc4\nYeqEhHIGKvEl1EE60ZzJVpBIeQ5fA7WMMW1cTEsdsSWqe7LN/tm/7e8v8tMOgIkTJzr/LywspLCw\n0AJxrae6upqP/v53HrK/12zo6ODLZpzuNYMUd4L1KziUyOJNi6Eep8OiIenun+LiYoqLiy07ntWh\nrHOwhab+GugELAYuF5H/erS7CxiNLeQVYAXwgoi87OOYSTNzeOq++7hw6lT6u2zT2YP1eMaw33nT\nnYx4cITPvIdkfvJToovfsOhupMT9kzBmJbswrnkOB4DxIvJPuz9iiYjkuLR9GpsSEeBlEXnYzzGT\nQjmICNc1a0bnvXvJwHZRJdnZ/E+nTmSHsOSlUjNeP+j9UGtDLU5dfcor76FNWWg241TKblUC48+U\nxHrg8uT3WSWUcogGyaIctIZSbPD6QRcDl+OzVs6qOatCUgyJlN2qRJ+ikUUUFxR7bW+4pCH9u/VP\n+oeDRPI5pDVaQyk2ePkWBJ+JRwXnFYT0w061VbyU0/ibEfrLS+jfrb9+56hysAw1G8UGrx+0wZLE\nI3VopyalZaUU3lXIji47nKXd19y1huK/F9dY2FHRZUKVJMMr0agD1FpWK+LEo3ATp5TEZsykMTbF\n4DIj3NFlB2MmjYlqKY1UQH0OStLhGcN+XY/rePj5hzlcdZiGmQ15ffLr9OzeM+RjJrPPQZ3pvsm7\nIo99/fZ5ba+zoA6XXXxZSveVOqSVtMbKQT2RsltDIdkVWzTJu8SuHDwjkt4GbiGl+0qVg5LWpGJm\na6hoH/hn4MiBLPxqIfTmdKizo5r71cBh4BNoWtWUvp37Js0DQTAkWsluRYkpVpVJjrToWjxJtVLR\nVvL8xOdpWbclrMGW/7IGm4K4FJti2AT0gH399jE7ezZ97+6bVN99NFHloCQ1wTqSP1z7IQWXF9Cw\nW0MKLi/gw7UfOj9zmGVmZ8+muKA46QYJdab7pyC/gA9mfsCwi4ZRlF9E/ql8uAxbBvQWTi+ABG7h\ny4qalZQkJxh7+4drP6TPvX3csqhrLavFyhdW0rN7z6Q3y6jPIXjc+modNuXgQVFpaqwkqGYlJa1x\nDUe89PNLyV+dT5P6TZgwdYLzyX/EgyNOKwaA2nDq6lOMeHAEkPxmGQ3J9Ka0rJSBIweSd0keeVfk\n8Yvbf0FpWalbX+UdzdMZVw3ozEFJCWp6eu50cyd+vPZHr30aLmnIoU2Hkn7moLhTWlZKr1G92Hl8\np5sjutW/W1H892Kn0kz1GZfOHBSFmstf5Gbm+nxCbJhpq82cait4pTOlZaX0HtqbnXtcFAM4k99c\n/Qk646oZVQ5KSlCTaej1ya+7Z1HvBzPXkJuXy/DRwwF0kEgBHDOBsqIyyCMoU6Fj3YdVr61i1rRZ\n+p27oLWVlJSgpsXde3bvycoXVjLiwRH8cOwHjnOcUzedYnPtzWyu3MzGuzfy/vT31YSU5LjNHi2q\nuZXO6MxBSQkCmYZ6du9J6fpSBvQcwKn+p7zMT72H9k6a0FXFN26zx4uw5TW43A+t/t1KTYUhoMpB\nSQmCtR/7Mz+V1SoLK7chUPJcMifXxRIr+skt36MhthXd1tjqKA34foCbM1oJjEYrKWlFoNW/Qo
lQ\nChTtkurRMFZhVT9pf7uj0UqKEgK+zE+sxmaGCDG3oaYIqWA+V2xY2U8XNL6ApiuakrcijwHfD3BT\n1DqDCw11SCtphcP81Htob8pqlUEmbgvKh+KwDLRAkC4gFBxW9JPbrKEAqIRVK1cx5P4h5DXIY3Pp\nZnZettO54I8jCCEdZxTBojMHJe0oyC9g1ZxVtMlpY1t/2q4YQs1tCFTTKNY1j5L16diKfvI1+zjS\n5wibftjEorMX2RLijp3+TGdwgVHloKQlViRABYqQimVyXTIXD7Sin/wFGnAQmz+pM7ZCey6f6Qyu\nZtQhrSgREGiBoFgtIJTsJUAc/VSyr4Tvd3xPXvM82jZvG3R/BQo0YDVQDVxz+rNk6Ztw0cV+FMUC\ngllmM5GX4iwaWURxQbH39iSqMBpJtJGvfVmNmz8pHVZ/c0WjlRQlQoIxySS62SYV1nSIJGrJ1UyY\nszjHNmNwKAb7sc7KOUvLo4SAzhyUtKCmp36fJon9kP9pPvnn5dMipwVHyo+wsMXCqJttwp2dJEqM\nfySzK6tmP8luYrOKSGcOGsqqpDxuA6ePUEavUMrDwKdQVlRGWe0yqIQzNpwBubgPOEE4NUMZLAPJ\nWROOJ2c3/8b02Jq9IpEfoAENYCU2e4bBlntSL/TZz6Rxk9h490YvRTlpupbOCAWdOSgpT6AnSa/P\ni7E5MT2dm29jW5S+ofcxfBHq03yyP/FGIn9pWSmFdxWyo8uO0z6DVdCybks+mPlByEouVoEAiYz6\nHBQlAIFWevMKpazCd1hkE2AjtplFEOGWodrQfcp5DP614V9JkbvgJf9hYD28t+m9gLJPmDrBphiO\nYVPO64AsOK/JeWEN6lqKO3JUOSgpTyBnrWfOQ/6pfJ/tyQR6Q97avKCcmjUpJV8Ja15yHgY2wt5+\nexPSCe6Jm/yHgU3A5XD42sMBZd9VvsumGOz7UAT0gPXfrg/qepM1ATCRUeWgpDzBJFm5PmmumrPK\nf/0le5btrvJdbutU+8KfUsohx2fk05033el+3k/wWs0skTN7J42bRKt/t7LJvwXbAB+k7C1yWtiu\n12Of4/2OB7zeRI8kS1ZUOSgpT6jZ0I72+avzbQ5SR1gkhPQk708pSab4NDe9NPclNzmbVjUNajWz\nRKEgv4BOzTrBGmyZySHIPmncJM44ckZY16sFDqODKgclLQjVBu2z/lKIT/KeSmnA9wO4oPEFfPCf\nD/wOgq5y9u3cNylyF1xNOhu2bbCVqmhESLIX5BfQr1O/kPZxnHfxpsVJpUSTBVUOiuIHz8E9nCd5\nx2A/Y+IMtu3dxsIWC/mx3o9BDYL+Zh533nRnwtjXPU06+/rtsznt/wevldgCOfD/MuEvQddYcj1v\nsP2phIaGsipKkEQSqum2r8NZ67Cv1xDi6hmSeedNdzLq6VE1hsfGqsxHaVmprfR5UZl32O8abDOI\nT+CMI2fQr1M//jLhL0GVwQgmBDXc/kwntLaSosSID9d+SP/f9edIwyO2yKUO0KYsuEHIK/v3MLAF\nGh5rSP9u/SMuMOdQULHKlHaep7wE+nh/nrcij/bnto9ajoFV/ZnKJEyGtDEmF5gJ9AX2A4+IyFt+\n2j4OPAqcwJYLKcCFIlJmlTyKYiWlZaWMenoUR64/4hx066+sz8xnZwY1CDkjlxyDekPgcuhf0T+k\nBLdAC+O4OWftA2ZJVQm9h/Zm1ZxVlg2YzvOsx/26sL2/svOVUU3cs6o/Ff9Y6XN4Edtg3wQYDvzN\nGNOuhvb/EJEcEcm2/y2zUBZFsRR/i8m8NPeloPa3am2HQDkbztwKlzwD+thKgVgZ3uk8z0WE7Fuw\ngliulZGuWKIcjDH1gEHAYyJyXETWAYuAW604vqLEm0BZ1oGwYnEhCDwoOpVHiHkGoeI8T0NsYb7r\ngZWQvzo/JrZ+q/pT8Y9VZqXzgFMiUuKybSvQs4Z9rjfGHA
D2AH8Vkf9nkSyKYjleZgwIOSLGEbkU\nCYEK7DmLzlWVRDW80624nd2kc8aKM+jYoaMlxw8GK/pT8Y8lDmljTHdgrog0d9l2BzBURHr7aH8+\ntonvXuBSYAEwVkT+6aOtOqSVuJMoJbH9yeYanXTnTXcy4sERPqOIrCziV1pWyphJY1ixeQUn6p+w\nRSfVS5x+SXdi4pA2xqwGemFzHHuyDhgNNPDYngNU+DqeiHzp8naDMeYF4EbASzkATJw40fl/YWEh\nhYWFwYitKJYRr5LYgcJSfZbJfnojr09+3WfIq5VlqwvyC8g+M5sT15xwU0IO85U+1ceW4uJiiouL\nLTueVTOHetgS5js4TEvGmNeBXSLySBD7Pwh0FZEbfXymMwclLQlmtuIvtPUXu36BZAobv9yIqTRc\n2uFSnp/4fPRDSh3bk2h50lQlIUp2i8gxbNXu/2iMqWeMuQIYALzpq70xZoAxpqH9/67YZh7vWiGL\noiQLgSqJBlMzyF+Z7xXbVrDo7EXs67ePvf328p8D/4nKNfiLnir9ulQL3yU5Voay/h6oB+wDZgO/\nEZH/gs0nYYwpd2l7C/CNfdtrwFMionNQJW0IppJoMBFSPgfnT2zVTGNRiM5X9BSroexia0Nnldhj\nmXIQkUMicoOI1BeRfFfnsoisFZEcl/dDRaSxPb+hvYj81So5FCUZCGZWECinAXwPzuFWN/XE18zG\ncxvgu4JtE62MmuzoGtKKEgcCZTpDcGsh+3KUV3SqYFHloojCbn05uj8c9SEmy9hWbPNYIzr/vHzK\nCspqvB4luVDloChxIJi8iWAjpDzj/UvLStl297aIIpV8zWx2ntoJl+FztmNFHoiSWGjhPUWJA9HO\nmwi2uqk/fEYhrcaWde3ZtrSIGRNnJGweSLqSMIX3FEUJnmjnTUSaPdwip4WtfOY2bNlNBjiJ39lB\nvPJAlOihMwdFUbz4cO2H9Lm3D6euPuWcCWQszODsvLPZfelunR0kATpzUBTFcl6a+9JpxQBQG6p/\nUc0luy6hfkV9nR2kAaocFEXxwl80VTnlvDtN81XTAV1DWlEUL4LJsVBSG/U5KIriRSJXoVWCQ9eQ\nVhQlKkQaDqvEF1UOiqIoihcJUZVVURRFSS1UOSiKoiheqHJQFEVRvFDloCiKonihykFRFEXxQpWD\noiiK4oUqB0VRFMULVQ6KoiiKF6ocFEVRFC9UOSiKoiheqHJQFEVRvFDloCiKonihykFRFEXxQpWD\noiiK4oUqB0VRFMULVQ6KoiiKF6ocFEVRFC9UOSiKoiheqHJQFEVRvFDloCiKonihykFRFEXxQpWD\noiiK4oUqB0VRFMULS5SDMeb3xpiPjTEnjDEzg2g/1hizxxhzyBjzijEmywo5FEVRFGuwauawC5gE\nzAjU0BhzFfAgUATkA22AJyySQ1EURbEAS5SDiLwrIouAg0E0vw2YISJfisiP2JTKr6yQI54UFxfH\nW4SgUDmtIxlkBJXTapJFzkiJh8+hA7DV5f1WoKkxJjcOslhGstwwKqd1JIOMoHJaTbLIGSnxUA71\ngR9d3v8IGCA7DrIoiqIoPgioHIwxq40x1caYKh+vD8M45xEgx+V9DiBARRjHUhRFUaKAERHrDmbM\nJKCFiIyqoc1s4FsRmWB/3xuYJSLN/bS3TkBFUZQ0QkRMuPvWskIAY0wmkAVkArWMMXWAUyJS5aP5\nG8Crxpg5wPfAo8Cr/o4dycUpiqIo4WGVz+Ex4BgwHhhm//9RAGNMS2NMuTHmHAARWQ5MBlYDpfbX\nRIvkUBRFUSzAUrOSoiiKkhpo+QxFURTFi4RTDqGU4jDGjDDGnLKbrSrsf3smmpz29nEpGWKMyTXG\nvGOMOWKMKTXGDKmh7ePGmEqP/sxPALmeMcYcMMbsN8Y8Ew15IpUzln3n49yh/GbiVromWDnj/Luu\nbe+XMmPMj8aYT4wxV9
fQPl6/66DlDLc/E045EEIpDjvrRSRHRLLtf8MJrw2HZCkZ8iJwAmgCDAf+\nZoxpV0P7f3j0Z1k85TLG3AUMAH4OXAhcZ4y5M0oyhS2nnVj1nSdB3YsJULomlN92vH7XtYAdQA8R\naQD8AZhrjGnl2TDO/Rm0nHZC7s+EUw4hluKIG8lQMsQYUw8YBDwmIsdFZB2wCLg12ue2UK7bgOdE\nZI+I7AGeA0YmoJxxI4R7Ma6la5Lhty0ix0TkjyKy0/7+PWxBM519NI9bf4YoZ1gknHIIg07GmH3G\nmC+NMY8ZYxLxmuJVMuQ8bCHFJR7n7lDDPtfbTTifG2N+kwBy+eq7muS3klD7LxZ9FwnJVLomIX7X\nxpg84Fxgm4+PE6Y/A8gJYfSnJXkOceQD4AIR2W6M6QDMBX4CYmqXDoKaSoYciuF5Hef2V6rkn8Df\ngb3ApcACY8whEflnHOXy1Xf1LZbHH6HIGau+i4R43YehkhC/a2NMLWAW8JqIfO2jSUL0ZxByhtWf\nMdXGxuJSHCJSJiLb7f9vA/4I3JhochKlkiFByHkEaOCxW46/89qnx9+LjQ3AC1jQnz7w7I+a5PLV\nd0eiIJMvgpYzhn0XCUlRuiZav+tQMMYYbAPuSeAeP83i3p/ByBluf8ZUOYhIkYhkiEimj5dV0QgR\nZ1RHQc5tQEeX9xcBe0UkoqeLIOT8Gsg0xrRx2a0j/qeeXqfAgv70wdfYMumDkctX3wUrf6SEIqcn\n0eq7SIjKfRgjYt2XM4DGwCA/lR4gMfozGDl9EbA/E84+b4zJNMacgUspDmMrz+Gr7dXGmKb2/8/H\nlqn9bqLJia1kyO3GmHZ2e2SNJUOsQkSOAW8DfzTG1DPGXIEt8udNX+2NMQOMMQ3t/3cFRhOF/gxR\nrjeAccaY5saY5sA4YtB3ocoZq77zRQj3Ylzuw1DljOfv2n7O/wecDwwQkcoamsa7P4OSM+z+FJGE\negGPA9VAlcvrD/bPWgLlwDn291Ow1WeqAL6x75uZaHLat42xy3oYeAXIipGcucA72KbAZcDNLp91\nB8pd3s8BDthl/wL4fazl8pTJvu1p4Ae7bE/F+H4MSs5Y9l2w96L9PqxIhPswFDnj/LtuZZfxmP38\nFfbvdEiC/a4DyRlxf2r5DEVRFMWLhDMrKYqiKPFHlYOiKIrihSoHRVEUxQtVDoqiKIoXqhwURVEU\nL1Q5KIqiKF6oclAURVG8UOWgKIqieKHKQVEURfHi/wdRnN9F5H49sgAAAABJRU5ErkJggg==\n",
|
||
"text/plain": [
|
||
"<matplotlib.figure.Figure at 0x7f2d9447fa58>"
|
||
]
|
||
},
|
||
"metadata": {},
|
||
"output_type": "display_data"
|
||
}
|
||
],
|
||
"source": [
|
||
"y_pred_idx = y_pred.reshape(-1) # a 1D array rather than a column vector\n",
|
||
"plt.plot(X_test[y_pred_idx, 1], X_test[y_pred_idx, 2], 'go', label=\"Positive\")\n",
|
||
"plt.plot(X_test[~y_pred_idx, 1], X_test[~y_pred_idx, 2], 'r^', label=\"Negative\")\n",
|
||
"plt.legend()\n",
|
||
"plt.show()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Well, that looks pretty bad, doesn't it? But let's not forget that the Logistic Regression model has a linear decision boundary, so this is actually close to the best we can do with this model (unless we add more features, as we will show in a second)."
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Now let's start over, but this time we will add all the bells and whistles, as listed in the exercise:\n",
|
||
"* Define the graph within a `logistic_regression()` function that can be reused easily.\n",
|
||
"* Save checkpoints using a `Saver` at regular intervals during training, and save the final model at the end of training.\n",
|
||
"* Restore the last checkpoint upon startup if training was interrupted.\n",
|
||
"* Define the graph using nice scopes so the graph looks good in TensorBoard.\n",
|
||
"* Add summaries to visualize the learning curves in TensorBoard.\n",
|
||
"* Try tweaking some hyperparameters such as the learning rate or the mini-batch size and look at the shape of the learning curve."
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Before we start, we will add 4 more features to the inputs: ${x_1}^2$, ${x_2}^2$, ${x_1}^3$ and ${x_2}^3$. This was not part of the exercise, but it will demonstrate how adding features can improve the model. We will do this manually, but you could also add them using `sklearn.preprocessing.PolynomialFeatures`."
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 127,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"X_train_enhanced = np.c_[X_train,\n",
|
||
" np.square(X_train[:, 1]),\n",
|
||
" np.square(X_train[:, 2]),\n",
|
||
" X_train[:, 1] ** 3,\n",
|
||
" X_train[:, 2] ** 3]\n",
|
||
"X_test_enhanced = np.c_[X_test,\n",
|
||
" np.square(X_test[:, 1]),\n",
|
||
" np.square(X_test[:, 2]),\n",
|
||
" X_test[:, 1] ** 3,\n",
|
||
" X_test[:, 2] ** 3]"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"This is what the \"enhanced\" training set looks like:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 128,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"array([[ 1. , 1.78492715, -0.31397748, 3.18596495, 0.09858186,\n",
|
||
" 5.68671534, -0.03095248],\n",
|
||
" [ 1. , -0.93489306, 0.39201334, 0.87402503, 0.15367446,\n",
|
||
" -0.81711993, 0.06024244],\n",
|
||
" [ 1. , 0.67505274, -0.37301603, 0.4556962 , 0.13914096,\n",
|
||
" 0.30761897, -0.05190181],\n",
|
||
" [ 1. , 1.1288975 , 0.18007805, 1.27440957, 0.0324281 ,\n",
|
||
" 1.43867778, 0.00583959],\n",
|
||
" [ 1. , -0.6003075 , 0.93864394, 0.36036909, 0.88105245,\n",
|
||
" -0.21633227, 0.82699454]])"
|
||
]
|
||
},
|
||
"execution_count": 128,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"X_train_enhanced[:5]"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Ok, next let's reset the default graph:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 129,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"tf.reset_default_graph()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Now let's define the `logistic_regression()` function to create the graph. We will leave out the definition of the inputs `X` and the targets `y`. We could include them here, but leaving them out will make it easier to use this function in a wide range of use cases (e.g. perhaps we will want to add some preprocessing steps for the inputs before we feed them to the Logistic Regression model)."
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 130,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"def logistic_regression(X, y, initializer=None, seed=42, learning_rate=0.01):\n",
|
||
" n_inputs_including_bias = int(X.get_shape()[1])\n",
|
||
" with tf.name_scope(\"logistic_regression\"):\n",
|
||
" with tf.name_scope(\"model\"):\n",
|
||
" if initializer is None:\n",
|
||
" initializer = tf.random_uniform([n_inputs_including_bias, 1], -1.0, 1.0, seed=seed)\n",
|
||
" theta = tf.Variable(initializer, name=\"theta\")\n",
|
||
" logits = tf.matmul(X, theta, name=\"logits\")\n",
|
||
" y_proba = tf.sigmoid(logits)\n",
|
||
" with tf.name_scope(\"train\"):\n",
|
||
" loss = tf.losses.log_loss(y, y_proba, scope=\"loss\")\n",
|
||
" optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)\n",
|
||
" training_op = optimizer.minimize(loss)\n",
|
||
" loss_summary = tf.summary.scalar('log_loss', loss)\n",
|
||
" with tf.name_scope(\"init\"):\n",
|
||
" init = tf.global_variables_initializer()\n",
|
||
" with tf.name_scope(\"save\"):\n",
|
||
" saver = tf.train.Saver()\n",
|
||
" return y_proba, loss, training_op, loss_summary, init, saver"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Let's create a little function to get the name of the log directory to save the summaries for TensorBoard:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 131,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"from datetime import datetime\n",
|
||
"\n",
|
||
"def log_dir(prefix=\"\"):\n",
|
||
" now = datetime.utcnow().strftime(\"%Y%m%d%H%M%S\")\n",
|
||
" root_logdir = \"tf_logs\"\n",
|
||
" if prefix:\n",
|
||
" prefix += \"-\"\n",
|
||
" name = prefix + \"run-\" + now\n",
|
||
" return \"{}/{}/\".format(root_logdir, name)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Next, let's create the graph, using the `logistic_regression()` function. We will also create the `FileWriter` to save the summaries to the log directory for TensorBoard:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 132,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"n_inputs = 2 + 4\n",
|
||
"logdir = log_dir(\"logreg\")\n",
|
||
"\n",
|
||
"X = tf.placeholder(tf.float32, shape=(None, n_inputs + 1), name=\"X\")\n",
|
||
"y = tf.placeholder(tf.float32, shape=(None, 1), name=\"y\")\n",
|
||
"\n",
|
||
"y_proba, loss, training_op, loss_summary, init, saver = logistic_regression(X, y)\n",
|
||
"\n",
|
||
"file_writer = tf.summary.FileWriter(logdir, tf.get_default_graph())"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"At last we can train the model! We will start by checking whether a previous training session was interrupted, and if so we will load the checkpoint and continue training from the epoch number we saved. In this example we just save the epoch number to a separate file, but in chapter 11 we will see how to store the training step directly as part of the model, using a non-trainable variable called `global_step` that we pass to the optimizer's `minimize()` method.\n",
|
||
"\n",
|
||
"You can try interrupting training to verify that it does indeed restore the last checkpoint when you start it again."
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 133,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"Epoch: 0 \tLoss: 0.788962\n",
|
||
"Epoch: 500 \tLoss: 0.149561\n",
|
||
"Epoch: 1000 \tLoss: 0.103335\n",
|
||
"Epoch: 1500 \tLoss: 0.0803964\n",
|
||
"Epoch: 2000 \tLoss: 0.0666324\n",
|
||
"Epoch: 2500 \tLoss: 0.0573981\n",
|
||
"Epoch: 3000 \tLoss: 0.0507045\n",
|
||
"Epoch: 3500 \tLoss: 0.0456395\n",
|
||
"Epoch: 4000 \tLoss: 0.0417048\n",
|
||
"Epoch: 4500 \tLoss: 0.038556\n",
|
||
"Epoch: 5000 \tLoss: 0.0359166\n",
|
||
"Epoch: 5500 \tLoss: 0.0336372\n",
|
||
"Epoch: 6000 \tLoss: 0.0317378\n",
|
||
"Epoch: 6500 \tLoss: 0.0301286\n",
|
||
"Epoch: 7000 \tLoss: 0.0286658\n",
|
||
"Epoch: 7500 \tLoss: 0.0273198\n",
|
||
"Epoch: 8000 \tLoss: 0.0262269\n",
|
||
"Epoch: 8500 \tLoss: 0.0251737\n",
|
||
"Epoch: 9000 \tLoss: 0.0242857\n",
|
||
"Epoch: 9500 \tLoss: 0.023423\n",
|
||
"Epoch: 10000 \tLoss: 0.0226845\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"n_epochs = 10001\n",
|
||
"batch_size = 50\n",
|
||
"n_batches = int(np.ceil(m / batch_size))\n",
|
||
"\n",
|
||
"checkpoint_path = \"/tmp/my_logreg_model.ckpt\"\n",
|
||
"checkpoint_epoch_path = checkpoint_path + \".epoch\"\n",
|
||
"final_model_path = \"./my_logreg_model\"\n",
|
||
"\n",
|
||
"with tf.Session() as sess:\n",
|
||
" if os.path.isfile(checkpoint_epoch_path):\n",
|
||
" # if the checkpoint file exists, restore the model and load the epoch number\n",
|
||
" with open(checkpoint_epoch_path, \"rb\") as f:\n",
|
||
" start_epoch = int(f.read())\n",
|
||
" print(\"Training was interrupted. Continuing at epoch\", start_epoch)\n",
|
||
" saver.restore(sess, checkpoint_path)\n",
|
||
" else:\n",
|
||
" start_epoch = 0\n",
|
||
" sess.run(init)\n",
|
||
"\n",
|
||
" for epoch in range(start_epoch, n_epochs):\n",
|
||
" for batch_index in range(n_batches):\n",
|
||
" X_batch, y_batch = random_batch(X_train_enhanced, y_train, batch_size)\n",
|
||
" sess.run(training_op, feed_dict={X: X_batch, y: y_batch})\n",
|
||
" loss_val, summary_str = sess.run([loss, loss_summary], feed_dict={X: X_test_enhanced, y: y_test})\n",
|
||
" file_writer.add_summary(summary_str, epoch)\n",
|
||
" if epoch % 500 == 0:\n",
|
||
" print(\"Epoch:\", epoch, \"\\tLoss:\", loss_val)\n",
|
||
" saver.save(sess, checkpoint_path)\n",
|
||
" with open(checkpoint_epoch_path, \"wb\") as f:\n",
|
||
" f.write(b\"%d\" % (epoch + 1))\n",
|
||
"\n",
|
||
" saver.save(sess, final_model_path)\n",
|
||
" y_proba_val = y_proba.eval(feed_dict={X: X_test_enhanced, y: y_test})\n",
|
||
" os.remove(checkpoint_epoch_path)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Once again, we can make predictions by just classifying as positive all the instances whose estimated probability is greater than or equal to 0.5:"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 134,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"y_pred = (y_proba_val >= 0.5)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 135,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"0.98989898989898994"
|
||
]
|
||
},
|
||
"execution_count": 135,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"precision_score(y_test, y_pred)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 136,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"1.0"
|
||
]
|
||
},
|
||
"execution_count": 136,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"recall_score(y_test, y_pred)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 137,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAAEFCAYAAAAIZiutAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztvXl4VeW1+P95gYBGEghTGASTG7QCXhFBUBRIUNCKIoJV\nISgUrfZWRcABRamxtCqgULnUfmsFJ6CtBAcuMlkhylyLgKhVf0ICCMggYBJGSdbvjzNwxpxpnzHr\n8zznSc4+7957nX323mu/azQigqIoiqK4UifeAiiKoiiJhyoHRVEUxQtVDoqiKIoXqhwURVEUL1Q5\nKIqiKF6oclAURVG8UOWgKIqieGGpcjDG3GeM+cQYc8IYM7uGcSOMMaeNMeXGmAr7395WyqIoiqKE\nTz2Lt7cbmARcC5wdYOxaEVGFoCiKkoBYqhxE5F0AY8xlQBsrt60oiqLEjnj6HLoYY/YbY74yxjxp\njFH/h6IoSoJgtVkpWD4CLhKRHcaYTsBbwE/A5DjJoyiKorgQl6d1ESkTkR32/78AfgfcEg9ZFEVR\nFG/iNXPwhfG50BgtG6soihIGIuLzvhoMVoey1jXGnAXUBeoZYxoYY+r6GHedMaaF/f8LgSeBd/1t\nV0QS/vXUU0/FXQaVU2VUOVVOxytSrDYrPQkcA8YDhfb/nzDGtLXnM5xrH3c18JkxpgJYBBQDz1os\ni6IoihImVoeyPg087efjDJdxjwCPWLlvRVEUxTo0fNQi8vPz4y1CUKic1pEMMoLKaTXJImekGCts\nU9HEGCOJLqOiKEqiYYxBInBIJ1K0kqIoSUZOTg47duyItxi1mvPOO4+ysjLLt6szB0VRwsb+dBpv\nMWo1/n6DSGcO6nNQFEVRvFDloCiKonihykFRFEXxQpWDoihKAHbt2kVmZmaN/pWMjIyoOIbjhSoH\nRVFSkpycHNLT08nMzKRVq1aMGjWKY8eOhbWttm3bUl5ejjE2/25BQQGzZ7s3u6yoqCAnJydSsRMG\nVQ6KolhOaVkpw0cPp2BkAcNHD6e0rDTm2zDG8P7771NeXs6nn37KJ598wu9///uQ5aitqHJQFMVS\nSstK6Xd/P+ZmzKUkt4S5GXPpd3+/kG7uVmwDcJqBWrVqxc9//nM+//xz9u7dy8CBA2natCkXXHAB\nr7zyinP8J598wmWXXUajRo1o1aoVDz/8MAA7duygTp06VFdX8+STT7Jq1Sruv/9+MjMzGT16NAB1\n6tRh+/btbNiwgVatWrmZoN555x06d+7slOm5556jffv2NG/enNtvv50jR46E9L1igSoHRVEsZeK0\niWzrvA3q2xfUh22dtzFx2sSYbsOVXbt2sXjxYrp06cLQoUNp164d33//PfPnz2fChAmsXLkSgAcf\nfJAxY8bw448/sm3bNm699VbnNhwmpd///vf06tWLmTNnUl5ezowZM9w+79GjBw0bNmTFihXOdf/2\nt78xfPhwAF588UUWLlzIqlWr2LNnD1lZWfzmN78J63tFE1UOiqJYyu7y3Wdu6g7qw57yPTHdBsCg\nQYNo0qQJvXv3pqCggF/96lesWbOGKVOmkJaWRufOnbn77rt58803AUhLS+Pbb7/lhx9+ID09ne7d\nuwe9L9eZwu233868efMAmy9i8eLFDB06FICXX36ZP/zhD7Rq1Yq0tDR++9vfUlxcTHV1dUjfLdqo\nclAUxVLaZLaBUx4LT0HrzNYx3QbAe++9x6FDhygtLeV///d/2bNnD02aNCE9Pd055rzzzmP37t0A\nzJ49m6+//poLL7yQHj168P7774e0PwfDhg3jnXfe4aeffuLtt9+ma9eunHuurWPBjh07uPnmm2nS\npAlNmjShY8eOpKWlsW/fvrD2FS1UOSiKYimTxk0ib0vemZv7KcjbksekcZNiug3AK/S0devWHDp0\niKNHjzqX7dy5kzZt2gCQl5fHvHnzOHDgAI8+
+ii33HILx48f99quw4Tkjw4dOnDeeeexePFi/va3\nvzFs2DDnZ+3atWPJkiUcOnSIQ4cOcfjwYY4ePUqrVq1C+m7RRpWDoiiWkpuTywczP6CwopCC0gIK\nKwr5YOYH5ObkxnQbvjj33HPp2bMnjz/+OCdPnuSzzz5j1qxZTn/A3LlzOXjwIACNGjXCGEPdurZm\nlq6KJjs7m+3bt9e4r2HDhjFjxgxWrVrFL37xC+fye++9lwkTJrBz504ADhw4wMKFCyP6XlEh3q3s\ngmh1J4qiJCaJfH3m5ubKhx9+6LV89+7dcsMNN0iTJk2kffv28vLLLzs/Gz58uLRo0UIyMjLkoosu\nkoULF4qISFlZmdSpU0eqqqpERGTdunVywQUXSJMmTeTBBx8UEZE6derItm3bnNvauXOn1K1bV268\n8Ua3/VdXV8v06dPlZz/7mWRmZkr79u3liSeeCPt7+vsN7MvDvvdqVVZFUcJGq7LGH63KqiiKosQM\nVQ6KoiiKF6ocFEVRFC9UOSiKoiheqHJQFEVRvFDloKQ0IsKUxx7TiBpFCRFVDkpKs2zBAva+9BLL\n33473qIoSlKhykFJOkSEyY89xuTx42ucEYgIy55/nmkVFSydOlVnD4oSAqoclKRj2YIFfDdjBp/O\nmFHjjGDZggVct3UrBrh261adPShx4/rrr3dWfk0WVDkoSYWIsPT555lx/DitTpxgyZQpPmcEjllD\nf3tbyGuPHdPZQy0jJyeHli1buhXOmzVrFgUFBVHd79NPP82dd97ptmzx4sXccccdUd2v1ahyUBIa\nT4fysgUL6Ld5Mwa4DsjevNnnjMB11gDo7CEOWBEMEMk2jDFUVVXxxz/+0Wu5EhhVDkpC4+pQdswa\nrj95EoBrgX2nTvmcPWxds4a13bpR1KeP87WuWzc+W706Dt+idmJFMECk23jkkUd44YUXKC8v9/rs\nq6++on///jRt2pQOHTowf/5852eHDh3ixhtvpFGjRvTo0YOJEyfSq1cv5+djxoyhXbt2NGrUiMsu\nu4zV9vNq2bJlPPPMM/zjH/8gIyODLl26AFBQUMDs2bM5deoUWVlZfPnll85tHTx4kPT0dGc12EWL\nFtGlSxeysrK46qqr2Lp1a1jfPWIiqdoXixcJXPVRiS7V1dUypkcPqQYZ06OHLH7rLVnUoIEIOF9L\nQJ6pX1+WFhfHW9xaib/r0/O3q66uDnnbkW4jJydHPvzwQxkyZIg8+eSTIiLyyiuvSEFBgRw9elTa\ntm0rr7/+ulRXV8umTZukWbNm8uWXX4qIyG233SZDhw6VEydOyJdffilt27aVXr16Obc9d+5cOXz4\nsFRVVcm0adOkZcuWcvLkSRERKSoqkjvuuMNNlvz8fJk1a5aIiNx1111OeURE/vSnP8nPf/5zERHZ\nuHGjtGjRQj755BOprq6WN954Q3JycuTUqVN+v6e/34AIq7LqzEFJWDwdyotef50FzZoxIjOTkY0a\nMSIzk9fq1+dfzZvrjCDBsCIYwKqAgqeffpqZM2fyww8/OJctWrSI3Nxc7rzzTowxXHLJJQwZMsTZ\nrvPtt9/md7/7HQ0aNKBDhw6MGDHCbZvDhg2jcePG1KlTh7Fjx3Ly5Em+/vrroOQZOnSos4UowLx5\n8ygsLATglVde4de//jXdunXDGMMdd9xBgwYNWL9+fVjfPRJUOShuSIIkjYkPh3L9gweZtWsXr//4\nI68dOcLQWbNo1aABv37xRR6ZPj2u8ipn8PXbhRoMYMU2HHTq1IkbbriBZ5991rlsx44drF+/3tmq\nMysri3nz5rFv3z4OHDjA6dOnnW09Adq2beu2zRdeeIGOHTuSlZVFVlYW5eXlTrNQIPr27cuJEyf4\n5JNP2LlzJ1u2bGHQoEFOuV544QU3ub777jv27Amtd7YVqHJQ3Ih30phDOS0tLq7Roey4eQSbw5Ao\nSq82YEUw
gNUBBUVFRfz1r3919opu164d+fn5bq06y8vLmTlzJs2bNyctLY3vvvvOuf6uXbuc/69a\ntYopU6ZQXFzM4cOHOXz4MJmZmc5zK5DD2xjDrbfeyrx585g3bx433HAD55xzDmBTQk888YSbXJWV\nldx2221hfe9IqBfzPSoJi+sNd9zUqfQfPDjmkR0O5bSjd2+adevGOpf9iwgNV6/m2iFDfJocrh0y\nJOB2l192WY3jlMjZumYNlTX8drHahit5eXncdtttzJgxg4svvpgBAwYwfvx45syZw+23346IsGXL\nFjIyMvjZz37G4MGDnQplx44dvPHGG5x33nkAVFZWkpaWRtOmTTl16hTPPfccFRUVzn1lZ2fzz3/+\n09ZNzc/1M3ToUAYNGkSzZs34wx/+4Fz+q1/9isGDB3P11VfTvXt3jh49ykcffUSfPn2cCiRmROKw\n8HwB9wGfACeA2QHGjgX2AoeBV4A0P+P8OmIUa1kyf74sTU+3OXrT02Pu5A3WAek6TiCk8eE6RxXf\nJPL16dkmdNeuXXL22WdL3759RUTkm2++kQEDBkjz5s2lWbNmcvXVV8uWLVtEROTAgQMyYMAAadSo\nkXTv3l0ee+wxueaaa0REpKqqSu666y7JzMyU1q1by9SpU9329cMPP8hVV10lWVlZ0rVrVxERKSgo\ncDqkHbRv316aNWsmP/30k9vyZcuWyWWXXSZZWVnSunVrufXWW6WystLv9/T3GxChQ9pq5TAIGAj8\nqSblgC0KcS9wIdAIWAk842es34OiWEeoN9xoEKxych3njFpyGV9dXS2Tx493yl7Tdj3HKqFRW67P\n8ePHy8iRI+Mthk+ipRws9TmIyLsishA4FGDoncAsEflKRH4EJgG/tFIWJTTikTQmLn4ACcEBGSiH\nwTM3oqbtxtvHoiQmX3/9tTO/4F//+hezZs1i8ODBcZYqtsTL59AJeNfl/RaghTEmS0QOx0mmWs3W\nNWuo6NqVmWVlXJqb62xaHq6NNxhc/QAi4lc5ee6/psgkhzJw+E2qq6v9brf/4MFx97EoiUlFRQVD\nhw5l7969tGjRgkceeYQbb7wx3mLFFOPrySzijRozCWgjIqP8fP4t8BsRWW5/Xw84BeSIyE6PsRIN\nGVMdEWHq44/zyLPPBn3DW1pczNJf/pIT11zDn99+O6o3ShFh3BVXMG3DBsb16EGryy/n6KZNbvsU\nERpeemlIYapLi4sxI0bYZgjp6fxfQQHNKip8bve/r7zSbax5442wFGE4xzpVcDxEKPHD329gXx72\nCRmvmUMlkOnyPhMQoMLX4KKiIuf/+fn55OfnR1G01CCU6BwRYcrjj7N35Uquraxk4cKFNjPTLbdE\nVT7XaCPzyCNc61EDJ1ScswYXE9KygwcpWrfO66btVE7HjiHAlmPH2DtlSlizB42EUhKBkpISSkpK\nrNtgJA4Lfy9sPoSaHNJzgUku7/sCe/yMrdkbo3gRanTOkvnz5f6zz5Zn0tJkjN0ZPfL886PmpI2W\n8zuQo9rf2CUgY8Isw1HbI6H0+ow//n4DEskhbYypa4w5C6gL1DPGNDDG1PUx9A3gLmNMB2NMFvAE\n8KqVstRmQik7IHKmBPY3P/3Etdjs8oO2bWPZggVRlw+sc36HUmzPMfap3r2ZlZHBNGAdsPnjj8P+\nLlr1VUkpItEsni/gKaAaqHJ5/RZoi81kdK7L2DHA98ARNM/BMkJ9Kl8yf76zmN079qdoifLsYcqY\nMfLb3r3lqT59nK/f9u4tU8aMsXxfgVj81luyMC1NBORdkGcfftj5WaAw10QI/4035513nmAzCesr\nTq/zzjvP529DhDOHqJiVrHypcgiNUEwrPm9u9r8C8l69eild7bS6utqmAF2+/6BzzpGqqioRsR3L\nMRkZYedbKEo8iVQ5aPmMFCOUsgO+zDt9gOuB8zt2JKtZs6iGslqFiC1a6O
FnnuH5CROCjhpaWlzM\nzdu2uX3/u44eZcr48Tw6eTKvjh7N32sIc7W6xIOiJBJRCWW1Eg1ljR5Tx46l8tNPOXzwIPznP2SJ\n8BXQFdjbowfTfET5JCJLi4tZNmoULX71K3bNnMnAuXODirS6b8AA6ixZwmER8rAph2pgY3Y2VxYW\nctG0aQyEiMJcFSVeRBrKGnezUaAXalaKKq6mpSWc8TksThLziKv8NzZoIFUE5ytxrLcYZKmLWUhA\n3j/rLLmxQQO/vgQtuaEkA0RoVtKS3bWcpcXFHN+4EQGWYSt6BXBdBPXzY4mraeyekyf5gOAirRzr\nfQ6sBYqA0cYwumNHXmralHtPnvQbTaUlN5TagCqHFEUkuP4FC197DSPCwHPPpW+dOpaGl/qTIVjZ\ngtm+a92kAcBS4Mbqav58zz1UV1f7XdcRynq0Tx9Mnz7Qpw9ZvXpxbr9+yOnTbMCmMJ4ChmdksNYe\nEuvYZ7B9JBQlaYlk2hGLF2pWCgtfkTae5hBXk8ytLVvKxF69LA0v9RftEygKKJTte0UL2c1E74A8\n89BDXusEMgkFikCKd1lzRQkWNJRV8cTtpt+qld/QzGje6PxlDluZUezIlxialSXDQEaA3AlyM8h9\nIH0aNfLafiDFVFMOhuY1KMmEKgfFC9ebviOxy/OmXFVVFdUbnT/F4295JE5e1xv6Ax07ygPGyG9B\nfuORpxGpYtK8BiWZUOWguOHr6XbQOefI+2+95XZTfuahh6J2o/P3hF2TQrLC1BToyT7SmVIiZXYr\nSiBUOShu+Hq6XQjyi1at3G6a0fAx1CRDTQppyfz5lpiaanqyj8QkFM3QVQ2LVaJFpMpBM6RTjK1r\n1lDRrRtvbtpEXkUFBigDhu3d6xaJ9MvycsyDDzoTu0TsWcbPPmuJDL4yh7/68ENO+Fh+8NVXGehR\nvC6chDPX/YoIu0pLOTc3lwx7lFGwzYQ8iWZJbi33rSQqmiGdgrg2vAGYCuwwhkNNmnB+p05nury5\nNNJxZBlf9+qrMb1JiZxp+mMAAcZZkJ3t+X0c2eChNhNylc8KuWK1bUXRDOlaji+zhC/b+MgOHeR/\n0tICFuCLdfRNNJy8Vn6faEZ0LZk/X5acfbZMBll89tnq2FYsBfU5pCbBlIuePH68LH7rrYCO3EA3\ny3jG7kfDyWvV9wnWTxGs38B1nGv5jjH2/AwNi1WsRJVDihJMuegHMzLk1vbtg+rZ4O9mmWqx+6F8\nn3AS4hanp8u9gwa5reP5W/nbrus4x6zBUSJ9jM4eFItR5ZCC1JRANnn8eLeQ0JHG2IrmhdKzwU94\nZyrE7ofaKjTUhLj7O3aU613Mc75+K3/Z6a6Jic+NHi0jO3SQhXXqiIC8V6eOjOzQQcNiFcuIVDlo\ntFIC4qv15LVDhjgjWyafPs219s9vFWE5cO2xYz77DtTUkvPaIUNSridBsN9H5EyNJH/9Gjwd1SI2\nB/Kin35yruP5Wy1bsMDndl3HDdu7ly/T0micmckN9vpPN1ZXszIzk4enTYvasVGUkIhEs8TiRS2b\nOQRKIKuyJ7X56t7m6wlZE7d8E45fwnMd1/wMZ/5I+/ayxGO7vn7T6xs0kMVnn50yMzYl8SDCmYOG\nsiYYnmGoYGs2s+l//ocuL73EO8ePcz1wk8s6C+vU4d0LL6Rts2YBQzOV8MJnfa0z6vzzue2777ju\n+HHbGGCUMcwWcdtu/4ceos7IkW6/6bvA3HPPpVNents+9PdTrCLSUFZVDgnG1LFjqdi4kU1lZVya\nm4sxhmoRNn79Nffv28dkoDXw01ln0bF7d2fOwjldusBZZwXdIrM2408B19Ttzdc6C+vV453zz+e8\nFi0A2HHgADd/9RUDXUqFL0xLY1m/fjStrGS7S2Kio+Pcor179fdSokKkykF9DgnGI9Ons7S4mIpR\no+g5ejTXDhnC0uJiet55J0uBS4DpwK
iTJ7nigQec7TAdSV+aaRuYcPwsW9esoaJrV2a6KG0RoaPL\nk/7UsWPZ2KwZn9q3e+jAAX746isaG0PPBx7gyhEjnM2UAJZWVISdDa4o0UZnDgmGiDD2iiuYvmED\nY3v0YPq6dTw/bhxfLlvGf/3nP3TH1q3tXWNYdv31/HnRIjeTh2baRo9Qssg9f5NWl1/O0U2bQs7Q\nVpRw0QzpFGPJ/PmyqEEDEZD/a9DA6dB8sHt3edDuzHQ4NR90CZ3UBjTRJdSsa/1NlHhDhA5pbROa\nQIgIS59/nutPngRgwMmTLJk6laXFxWRv3szPwS0k9ZrNm52hk45WmdcmSe/nZMNXeLE/RER/EyXp\nUbNSArG0uJiq4cMZYFcOAIsaNOD9a65h3+bNZFRWOpWDiFA3I4MGnTszqKQkJOeqEhoSZHSTiK2y\n7X937eoVnaS/iRJr1CGdQny2ejVb6tfnXydPOm9C39avT+f27fnzokU+15k6dixrKytTJoktEQmU\nSOg6bu9LL1HauzctopxY6FBEGp2mRAtVDgnExVddRee//tU9oqWqCtOrl9911JkZfYKJbnKYkqZV\nVDDu4EGKAuRMTHnsMTCGR8O8uWsfCCXaqFkpgQi354ASf1zzIILJmXj1jjtoYQw3vPlmyDd3VzOX\nRqfFl9KyUiZOm8ju8t20yWzDpHGTyM3JjbdYgEYrJTTaArJ2EGolWNfIM0fEWSjnikZCJQbbS7dL\n3oA8YQJCEcIEJG9Anmwv3R5v0UREo5USGufUv4bIFn+I3fQg9lmT53slcajJJ+FrrGvk2TWbN7P8\n7beDPldEI6EShonTJrKt8zaob19QH7Z13sbEaRPjKpdVqHKIEo6LeFpFRVgXr+fNIhJFo0SXrWvW\nsLZbN4r69HG+1nXrxmerV7uNExGWTp3KvlOn6G9f5gxXnjo1qHMlFEWkRJfd5bvPKAYH9WFP+Z64\nyGM16pCOEv7KbgeDq2IZN3Uq/W6+OWB56VQmke26EHxQgGPWcCnu+Sp9P/2UL40J6lypyTnef/Bg\njWCKIW0y28Ap3BXEKWid2TpeIllLJDapWLxIQp9DpN3VPG3Kzzz0UK21MSe6XTcUpowZIze3aSN3\nNmokI+yvOzMz5Za0NJlsQSe+QM2LFGtJ9HMT7QSXeETSXc2XYhl0zjlSZcHNIxkpfKDwzMVXdOYi\nLHygMN6iWYJVnfg8O81VVVVFSWLFle2l26XwgUIpGFEghQ8UJoxiEIlcOVhqVjLGZAGzgX7AAWCC\niPzNx7ingCeAE+DM97pYRMqslCdeRNJdzZdN+a6jR/kAW8G9cMxUyczu8t3Q1GNhCtl1rerE59lp\nbsr48Tw2dWoUJFZcyc3JZc6MOfEWIypY7XN4CdsNvzlwKfC+MWaziPzHx9i/i8idFu8/IYgkJ8Hz\nZrHz22+pqqigIiODde3bA7UrAzrV7bpW5K+I2H1U9gimgcBrf/4z1ZMnU6eOxpwo4WFZEpwxJh04\nDHQUkW32ZW8A34nIBI+xTwF5wSiH2pQEp3hTWlZKv/v7nQkZPAV5W/L4YOYHCeWUjie+GhEtAj5/\n+GGdPdRiIk2Cs/Kx4gLgtEMx2NkCdPIz/kZjzEFjzFZjzK8tlCOmiISXfxDuerWN3JxcPpj5AYUV\nhRSUFlBYUaiKwYOta9awpls3hmdk8BRQBPwLWPXmm3p+KeETicPC9QVcBezxWHY3sMLH2AuBlthM\n6FcAe4Db/Gw3QrdMdAk3QkQjSxQrscqxXRtxOJXzR+QnnFM5Ekggh3QlkOmxLBOo8KGQvnJ5u84Y\n8yJwC/APC+WJOiISVv5BuOspij+scmzXNtzMlk2BU7D+/vU6O8Vah/Q3QD1jTJ6cMS11Br4IYl3h\nTF6QF0VFRc7/8/Pzyc/PD19KCwk30S2Y9US0JLMSPME4tvWc8qamEhjJFoVUUlJCSUmJdRuMZNrh\n+Q
LmAXOBdOBKbA7qDj7GDQQa2//vDnwHDPezTWvnWhYRbKKbZ0G1YNdTs5NiNXpOeZM/It89h8b+\nKhhREG/RIoYEK7x3n10x7LcriV+LyH+MMVcZY8pdxt0OfGtf9hrwrIgklZoOtsaNrxpJgdYT8V+X\nSdSRrYSBiK2uU8uKCpZMmaLnjx1nqLQrKRQqHQmW5jmIyGHgZh/LV+PijxCRYVbuNx6E3ADG7lsI\nZr2azE7a5EUJB0ddp++BbHslWD1/YNK4Say/fz3bcrbZDOBV0PBIQ+556Z54ixZ/Ipl2xOJFgpqV\ngiGcuvs1mZ1cP0vlEhqpGj0SLFb3AfHqIQHyYPfuKXv+hMpHqz6Shj0aJmyNpHAhwcxKih2R8Oru\n12R28jWjSDUc0SNzM+ZSklvC3Iy59Lu/H6VlpfEWLWZYXZ7ds4fEdZyZPSjw8lsvU3l1Zch9GUrL\nShk0chDZl2WTfWU2N911U0qdp6ocokS4dff99QbYsmpVrWjykuoNVALheKgItw+ILz5bvZqNDRo4\ne0hcC2xs0IAtq1ZFvO1UIJy+DKVlpfQZ1Yf3vn6P/f33s7//fha2XEj+vfkpoyC0n0OUCDfu3F9I\n4tLiYjr/9a8+lU0q2Y5TvdBeICLpA+KPi6+6yuvcubuqCtOrV6TipgTh1O+aOG0iu07vgr7AMWAt\nILCzeidji8by7mvvRlfoGKDKIUpYUVDNlWRNcgq1UU+qF9qrCeeswWV2aEWSZLKeO7HC6ZT2qN81\naeYkv+vsLt9ts7scAzYABTjXXb58OaVlpUmfRGdZ4b1ooYX3kpdwiubV5kJ7vgroLU1Px7zxht7E\no4zjIWbb/m18v/N7sltn0751e7eHGdcHnbJvyig7XQZnAb3wepgprCiMexJdpIX3VDkoUWP46OHM\nzZgb8oXjvFD3bOP7Pd/Tsl1L8lrkJVx7UKuZOnYslZ9+6jZLEBEaXnqp5TNRxZuaHkwAr8/qLq5L\n1U9V8AvvbRWUFrDitRUxld8TVQ5KwlIwsoCS3BLv5UFcOLV5BqHEh5oeZgCfn509/2yO/+J4Ss4c\nNFpJiRqRZJ/W9qglJfbUFLXk77POF3cmb0vemfPc4a8Y599fkSyockggJMVKY0waNynoC6e0rJTh\no4dTMLKA4aOHs23/tpDDC1OVVDsvEpU2mW1szY1LgJX2vwdsDzP+HnTyWuelbL8RNSslEEuLi1k2\nahTXvfpqyjggHf6DPeV7aJ3Z2qffwJcJqeH/NaTyxkqv6XrTxU25rtd1Ke9/cCXY80JEq65Gwser\nP+bqB6/gfvvJAAAdHklEQVTm9HWnnedhvaX1+PDFD2l7btukM3OqzyFFEBHGXXEF0zZsYFyPHkxb\nt67WXOA+bb0HoO6qulTdUOW8GFlh/3sF5JUl9oVpFaGcF6n4cBFLAgVQBPOgk0iozyHJ8GciqA2l\nMfzhZs89gm06/zlUHaqCD7BN8dcCl2Or/fBF7fE/BHteRJpZraarwJnSuTm5zJkxhxWvrWDOjDkJ\nrRisQJVDjPFVNyfcOkypgtOeewRbQlFPbElFd2JL0+wC5AONsV28Qq3wP4RyXkT6cGF1PadkRMt3\nu6PKIUZUV1dz8+WXs3jKFK+a+uHWYUoVnI7rjZzJNMX+twDY7DL4FLYDVAsu2mDPi0gfLlxnHUum\nTGFyLZ1BhBJAURvQ8hkxYvIjj9BiwwaO1anjVVO/tpc3yM3J5YOZH9BhYAdO1j/p/mF9oMr+/yls\nJqZLA5c3SAWCPS9qUiKhtq3N3ryZ77ZurZX9QhznoZtfwX6ODR89POgSMKmCOqRjQHV1NUMyM1lw\n9ChDgLeBsQDduzN9/fpa43gOxE133cTClgu9HIK8AzSGevvqcVHHi+iU2ynpLtBoRhJFklnt6vAG\n23k5HRjbowfTa1FQhD+SORlTo5XiQKgX+rMPPcRF06aRhu0heACw
FNhUvz6XzptX657Q/FFaVkr+\nvfns7LbTeSGmf5DOxe0uJq91cpfPSNRIItd6TkuxzzqARQ0akDZ3bkLJGg/CLQGTCGi0UhwIxXlX\nXV3Nv/7yFwYAy4Dr7cu1pv4ZHAlwo4pGccm5l3DT7pucCUWfv/U564rXJXV0SDR6NFiFo3/IU717\nMysjw9nzYcDJkyyZOpXJ48cnlLyxJpxeD6mC+hxCxPVCD6ac8uRHHuHuo0dZji0KU2vqu+M2bW9K\nUk3bgyUaPRqswmF2WlpcTM8RI9zOz2s2b+bNLVtY3r17wsgba4IpIR9qWfpkQc1KIeI2DQ+inPJt\nXbpw1vbt7D5xgqZVVRhsCqb67LPp2LVrra+4aeW0PREvUlebvsEWhZuISY6efgsR4dtNm+hcUcHe\nBJQ3VgTyOSSyT0J9DjHE34X+wtq1PD9hgpYtCINIKre6kqgXabL2aAj1ISiVqSkzOpF9EpEqBzUr\nhYC/kMHJjz7K/pdfrpXhf5FiVee3mqq4xvMiTcYw5Wh1pEtWHJnRvkjltraqHEJg65o1VHTrxj9K\nS2mbm4sxhmoRNs6Zw6IgfRCe1PZiaeG0aPTFt3u+hR+wTecMcAnQOP4XaTKaDCPNm0hVfJktU7mt\nrSqHEHhk+nRnSGLP0aO5dsgQlhYXc6XdkRfOBeSMfKqlsw5/iUehmIJKy0r54rsv4GrOFOmzJ8ul\nwkUaa5JxthMNXJVBIxqxae8mW5i1PXBi/f3rmf3YbNY/F/nDTSKiPocQEBHGXnEF0zdsYGyPHkxb\nu5aHevYM29lYmyuxWok/u2/D/2vIZws/i7tjUEk+vHxYH+K3V/SkcZOcDzcZdTMwVYYf+THuQRGa\n5xBDli1YQL/Nm51hfpMffTSimki1uRKrlfiLRb+o40WqGPygVVhrxsuHVQe/+Q4On8Ssoll8se8L\n3mvzHiW5JczNmEu/+/tRWlYaY+mtQZVDkIgIS59/nutP2mr/DDh5ktVz5rC6a1dubNuWp3r3pqhP\nH9Z168Znq1f7XN/1YqztlVitxG+XrhZ5cZEnGdAqrDXj9cDxEwErtqZaa1tVDkHiOmsA2yzhf44c\nIb17d9ofOULP0aMpKinh6Y8+8umE9LwYa3sl1mDxbB/q6ylMq2m6E2hWkMgZ27EgmHPK7YHjCHCC\nM82msP1t9+92budYqmVTq0M6SD5bvZot9evzr5Mnnf6F/y8tjaq5c/l7gEglX1nV6vQLjK/s6fX3\nr/fKXbDCqZ1KBApySOSM7WgT7DnlFkW3GVt5g2PYmk4JUA1d/quL2zopF7kkIgn9sokYf5bMny9L\n09NFwPl6pn59WdSggQjIkvR0WVpcHHDdmsYp7hQ+UChMQChyeU1ACh8oFBGR7aXbpfCBQskfkS+F\nDxTK9tLtcZY4/lRXV8uYHj2kGmx/q6vdPntu/Hh5sHt3qbafw77GpTKFDxQK9yHkI/Sx/73vzDnl\niuP8aty9sfs5aH8VjCjwGp83IO/MOTsByRuQF7fz0n7vDPveq2alIHEUKCvq04eiPn14qndvNjZo\n4PRB+PMZiPoWwqamabrjCXBuxtyUcP5ZRU1BDssWLOC7GTNo6WEerU3mzG/3fAufcqbbYE/gU9i2\nZ5vXWIejeUCPAUF1iHPMYAsrCp2FI+OdoR8JGsoaJsGWRUjW8gmJQE2lCQCfnw38fiDvzXovpnIm\nClJDHSfA+dkvMjLo1KWLWx2l2lLjK7dnLmUFZV7nTc7KHErX+n6wSNTSLIHQ8hlxIlifgfoWwqem\n7OlRRaN8li1Yvmk5pWWlCX3RRgt/QQ7LFizg3TlzGGT/7O6qKow9ibO20bJdS8rql7kvrG9b7o9Q\nfFqJWPwxXHTmoCQ0voqeAfQd1peyemVQF2epDE4Bq6DwkvgXPYsH/jrCHWjYkL1Ll/J2dXVCV4a1\nEn836WgWyku0GUZCVWU1xmQB
s4F+wAFggoj8zc/YycBd2M7V2SIy3s84VQ6KE18XoKNUBp8CPaDg\ncGgVXVMZEeH2Cy7gl99+y3Uuy1PZtFnTTRqI2g080Sq0JppZ6SVsEcHNsV2u7xtjNovIf1wHGWPu\nBQYC/21f9E9jzDYRedlieZQUw1eiEQXY+kxfC6RD66okDR2MAssWLKBZWRnrgMXGQIcONGnePKVN\nm4Eq9EYr7DnVKrRaphyMMenAYKCjiBwH1hhjFgJ3ABM8ht8JvCAie+3rvgDcDaSEcpBaXmk1mvi7\nAGkBpKdO0TMrcETKzTx92tlkalxGBk+tXJnS52Wgm3RNJbgjIdXyHKwMZb0AOC0irjFhW4BOPsZ2\nsn8WaFxSoqUJooe/UhnZR7MZ+P1ALmp2EaOKRvnNfK1N1NYsfH/nSKCbdDCZ0zWRapn6lvkcjDFX\nAW+JSGuXZXcDw0Skr8fY09hmGN/Y37cHvhaRuj62m1Q+h+rqaoaeey5/37s35Z1+8cCfPXn2Y7MZ\n9dyohHEGJgL+HNSpHrYajmPYKmdyTV3jYk3COKSNMZcAq0WkocuycUAfEbnJY+wR4BoR+bf9/aXA\nShFp5GO7SaUcnn3oITpNm8ZAUtvpF098XYATp01MKGegEl9CvUknmjPZChLJIf0NUM8Yk+diWuoM\nfOFj7Bf2z/5tf3+Jn3EAFBUVOf/Pz88nPz/fAnGtp7q6mn/95S88Zn9f29srRgtfNuNUcwYqkRGs\nX8GhRBZtWATpnAmLhqQ7f0pKSigpKbFse1aHss7DFpr6K6ALsAjo6SdaaTS2kFeA5cCLIvJXH9tM\nmpnDsw89xMXTpjHAZZnOHqzHM4b9nlvvYcSjI3zmPSTzk58SXfyGRfcgJc6fhDEr2YVxzXM4CIwX\nkX/Y/RGLRSTTZexz2JSIAH8Vkcf9bDMplIOIcEOrVnTdt4862L7UtowM/qtLFzJS3MYbS7wu6ANQ\nb109Tl932ivvIa8sNJtxKmW3KoHxZ0piLdAz+X1WCaUcokGyKAetoRQbvC7oEmzF03zUylkxb0VI\niiGRsluV6FMwsoCS3BKv5Y0XN2ZAjwFJ/3CQSD6HWo3WUIoNXr4FwWfl1twLckO6sAMlTinJi78Z\nob+8hAE9BuhvjioHy1CzUWzwuqANliQeqUM7NSktKyX/3nx2dtvpbO6z6t5VlPylpMbCjoq2CVWS\nDK9Eo05Qb2m9iBOPwk2cUhKbMZPG2BSDy4xwZ7edjJk0JuX6L1iN+hyUpMMzhv2GXjfw+PTHOVJ1\nhMZ1G/P6lNfpfVXvkLeZzD4Hdab7JvvKbPb33++1vMGCBlxx6RUpfazUIa3Uaqy8qSdSdmsoJLti\niybZl9mVg2dE0tvA7aT0sVLloNRqUjGzNVT0GPhn0MhBvPf1e9CXM6HOjmru1wFHgI3QoqoF/br2\nS5oHgmCIVDmoz0FJamrqMx0KkRZdiydWHYNUZHrRdNqe3RZWYct/WYVNQVyOTTFsAHrB/v77tQ+5\nB6oclKQmWEfyx6s/JrdnLo17NCa3Zy4fr/7Y+ZnDLDM3Yy4luSVJd5NQZ7p/cnNy+Wj2RxReUkhB\nTgE5p3PgCmwZ0Jux9QLxEb6sqFlJSXKCsbd/vPpjrn7warcs6npL6/Hhix/S+6reSW+WUZ9D8Lgd\nqzXYlIMHBaWp0UlQzUpKrcY1HPHyrZeTszKH5g2bM3HaROeT/4hHR5xRDAD14fR1pxnx6Agg+c0y\nGpLpTWlZKYNGDiL7smyyr8zmprtuorSs1O1YZR/N1hlXDejMQUkJanp67nJbF368/kevdRovbszh\nDYeTfuaguFNaVkqfUX3YdXyXmyO63b/bUfKXEqfSTPUZl84cFIWay19k1c3y+YTYuK6tNnOqdfCq\nzZSWldJ3WF927XVRDOBMfnP1J+iMq2ZUOSgpQU2modenvO6eRX0AzFuGrOwsho8eDqA3iRTAMR
Mo\nKyiDbIIyFTr6Pqx4bQVzZszR39wFra2kpAQ1NXfvfVVvPnzxQ0Y8OoIfjv3AcY5z+tbTbKq/iU2n\nNrH+/vV8MPMDNSElOW6zR4tqbtVmdOagpASBTEO9r+pN6dpSBvYeyOkBp73MT32H9U2a0FXFN26z\nx0uw5TW4nA/t/t1OTYUhoMpBSQmCtR/7Mz+V1SsLK7chUPJcMifXxRIrjpNbvkdjbB3dVtnqKA38\nfqCbM1oJjEYrKbWKQN2/QolQChTtkurRMFZh1XHS4+2ORispSgj4Mj+xEpsZIsTchpoipIL5XLFh\n5XG6qNlFtFjeguzl2Qz8fqCbotYZXGioQ1qpVTjMT32H9aWsXhnUxa2hfCgOy0ANgrSBUHBYcZzc\nZg25wClY8eEKhj48lOxG2Wwq3cSuK3Y5G/44ghBq44wiWHTmoNQ6cnNyWTFvBXmZebb+03bFEGpu\nQ6CaRrGueZSsT8dWHCdfs4/KqyvZ8MMGFrZcaEuIO3bmM53BBUaVg1IrsSIBKlCEVCyT65K5eKAV\nx8lfoAGHsPmTumIrtOfymc7gakYd0ooSAYEaBMWqgVCylwBxHKdt+7fx/c7vyW6dTfvW7YM+XoEC\nDVgJVAM/P/NZshybcNFmP4piAcG02UzkVpwFIwsoyS3xXp5EFUYjiTbytS4rcfMn1Ybub65otJKi\nREgwJplEN9ukQk+HSKKWXM2EmYsybTMGh2Kwb6tpZlMtjxICOnNQagU1PfX7NEkcgJxPc8i5IIc2\nmW2oLK/kvTbvRd1sE+7sJFFi/COZXVk1+0l2E5tVRDpz0FBWJeVxu3H6CGX0CqU8AnwKZQVllNUv\ng1Nw1rqzIAv3G04QTs1QbpaB5KwJx5Ozm39jZmzNXpHID9CIRvAhNnuGwZZ7kh767GfSuEmsv3+9\nl6KcNFNLZ4SCzhyUlCfQk6TX5yXYnJiezs23sTWlb+y9DV+E+jSf7E+8kchfWlZK/r357Oy284zP\nYAW0PbstH83+KGQlF6tAgERGfQ6KEoBAnd68Qimr8B0W2RxYj21mEUS4Zag2dJ9yHoN/rvtnUuQu\neMl/BFgL7294P6DsE6dNtCmGY9iU8xogDS5ofkFYN3UtxR05qhyUlCeQs9Yz5yHndI7P8dQF+kL2\n6uygnJo1KSVfCWtech4B1sO+/vsS0gnuiZv8R4ANQE84cv2RgLLvLt9tUwz2dSgAesHa7WuD+r7J\nmgCYyKhyUFKeYJKsXJ80V8xb4b/+kj3Ldnf5brc+1b7wp5QyyfQZ+XTPrfe473cjXt3MEjmzd9K4\nSbT7dzub/Jux3eCDlL1NZhvb9/VY53j/4wG/b6JHkiUrqhyUlCfUbGjH+JyVOTYHqSMsEkJ6kven\nlKSu+DQ3vfzWy25ytqhqEVQ3s0QhNyeXLq26wCpsmckhyD5p3CTOqjwrrO+rBQ6jgyoHpVYQqg3a\nZ/2lEJ/kPZXSwO8HclGzi/jo84/83gRd5ezXtV9S5C64mnTWfbHOVqqiCSHJnpuTS/8u/UNax7Hf\nRRsWJZUSTRZUOSiKHzxv7uE8yTtu9rOKZvHFvi94r817/Jj+Y1A3QX8zj3tuvSdh7OueJp39/ffb\nnPb/hVcntkAO/D9O/GPQNZZc9xvs8VRCQ0NZFSVIIgnVdFvX4ax12NdrCHH1DMm859Z7GPXcqBrD\nY2NV5qO0rNRW+rygzDvsdxW2GcRGOKvyLPp36c8fJ/4xqDIYwYSghns8axNaW0lRYsTHqz9mwG8G\nUNm40ha51AnyyoK7CXll/x4BNkPjY40Z0GNAxAXmHAoqVpnSzv2Ub4OrvT/PXp5Nx/M7Ri3HwKrj\nmcokTIa0MSYLmA30Aw4AE0Tkb37GPgU8AZzAlgspwMUiUmaVPIpiJaVlpYx6bhSVN1Y6b7oNP2zI\n7OdnB3UTckYuOW7qjYGeMKBiQEgJboEa47g5Z+03zG1V2+
g7rC8r5q2w7Ibp3M9a3L8XtvfXdL0m\nqol7Vh1PxT9W+hxewnazbw4MB/5sjOlQw/i/i0imiGTY/5ZZKIuiWIq/ZjIvv/VyUOtb1dshUM6G\nM7fCJc+Aq22lQKwM73Tu5xJC9i1YQSx7ZdRWLFEOxph0YDDwpIgcF5E1wELgDiu2ryjxJlCWdSCs\naC4EgW+KTuURYp5BqDj30xhbmO9a4EPIWZkTE1u/VcdT8Y9VZqULgNMiss1l2Ragdw3r3GiMOQjs\nBf4kIv/PIlkUxXK8zBgQckSMI3IpEgIV2HMWnavaFtXwTrfidnaTzlnLz6Jzp86WbD8YrDiein8s\ncUgbY64C3hKR1i7L7gaGiUhfH+MvxDbx3QdcDiwAxorIP3yMVYe0EncSpSS2P9lco5PuufUeRjw6\nwmcUkZVF/ErLShkzaQzLNy3nRMMTtuik9MQ5LrWdmDikjTErgT7YHMeerAFGA408lmcCFb62JyJf\nubxdZ4x5EbgF8FIOAEVFRc7/8/Pzyc/PD0ZsRbGMeJXEDhSW6rNM9nPreX3K6z5DXq0sW52bk0vG\nORmc+PkJNyXkMF/pU31sKSkpoaSkxLLtWTVzSMeWMN/JYVoyxrwO7BaRCUGs/yjQXURu8fGZzhyU\nWkkwsxV/oa037b4JqSus/2o95pTh8k6XM71oevRDSh3Lk6g9aaqSECW7ReQYtmr3vzPGpBtjrgQG\nAm/6Gm+MGWiMaWz/vzu2mce7VsiiKMlCoEqiwdQM8lfme/kXy1nYciH7++9nX/99fH7w86h8B3/R\nU6XflGrhuyTHylDW+4B0YD8wF/i1iPwHbD4JY0y5y9jbgW/ty14DnhURnYMqtYZgKokGEyHl8+a8\n0VbNNBaF6HxFT7ESyi61NnRWiT2WKQcROSwiN4tIQxHJcXUui8hqEcl0eT9MRJrZ8xs6isifrJJD\nUZKBYGYFgXIawPfNOdzqpp74mtl4LgN8V7BtrpVRkx3tIa0ocSBQpjME1wvZl6O8oksFC08tjCjs\n1pej++NRH2PSjK1jm0eP6JwLcijLLavx+yjJhSoHRYkDweRNBBsh5RnvX1pWyhf3fxFRpJKvmc2u\n07vgCnzOdqzIA1ESCy28pyhxINp5E8FWN/WHzyikldiyrj3HlhYwq2hWwuaB1FYSpvCeoijBE+28\niUizh9tktrGVz/wCW3aTAU7id3YQrzwQJXrozEFRFC8+Xv0xVz94NaevO+2cCdR5rw4ts1uy5/I9\nOjtIAnTmoCiK5bz81stnFANAfai+qZrLdl9Gw4qGOjuoBahyUBTFC3/RVOWU8+4MzVetDWgPaUVR\nvAgmx0JJbdTnoCiKF4lchVYJDu0hrShKVIg0HFaJL6ocFEVRFC8SoiqroiiKklqoclAURVG8UOWg\nKIqieKHKQVEURfFClYOiKIrihSoHRVEUxQtVDoqiKIoXqhwURVEUL1Q5KIqiKF6oclAURVG8UOWg\nKIqieKHKQVEURfFClYOiKIrihSoHRVEUxQtVDoqiKIoXqhwURVEUL1Q5KIqiKF6oclAURVG8UOWg\nKIqieKHKQVEURfFClYOiKIrihSoHRVEUxQtVDoqiKIoXligHY8x9xphPjDEnjDGzgxg/1hiz1xhz\n2BjzijEmzQo5FEVRFGuwauawG5gEzAo00BhzLfAoUADkAHnA0xbJoSiKoliAJcpBRN4VkYXAoSCG\n3wnMEpGvRORHbErll1bIEU9KSkriLUJQqJzWkQwygsppNckiZ6TEw+fQCdji8n4L0MIYkxUHWSwj\nWU4YldM6kkFGUDmtJlnkjJR4KIeGwI8u738EDJARB1kURVEUHwRUDsaYlcaYamNMlY/Xx2HssxLI\ndHmfCQhQEca2FEVRlChgRMS6jRkzCWgjIqNqGDMX2C4iE+3v+wJzRKS1n/HWCagoilKLEBET7rr1\nrBDAGFMXSAPqAvWMMQ
2A0yJS5WP4G8Crxph5wPfAE8Cr/rYdyZdTFEVRwsMqn8OTwDFgPFBo//8J\nAGNMW2NMuTHmXAARWQZMAVYCpfZXkUVyKIqiKBZgqVlJURRFSQ20fIaiKIriRcIph1BKcRhjRhhj\nTtvNVhX2v70TTU77+LiUDDHGZBlj3jHGVBpjSo0xQ2sY+5Qx5pTH8cxJALkmG2MOGmMOGGMmR0Oe\nSOWM5bHzse9Qrpm4la4JVs44X9f17celzBjzozFmozHmuhrGx+u6DlrOcI9nwikHQijFYWetiGSK\nSIb9bzjhteGQLCVDXgJOAM2B4cCfjTEdahj/d4/jWRZPuYwx9wIDgf8GLgZuMMbcEyWZwpbTTqyO\nnSdBnYsJULomlGs7Xtd1PWAn0EtEGgG/Bd4yxrTzHBjn4xm0nHZCPp4JpxxCLMURN5KhZIgxJh0Y\nDDwpIsdFZA2wELgj2vu2UK47gRdEZK+I7AVeAEYmoJxxI4RzMa6la5Lh2haRYyLyOxHZZX//Prag\nma4+hsfteIYoZ1gknHIIgy7GmP3GmK+MMU8aYxLxO8WrZMgF2EKKt3nsu1MN69xoN+FsNcb8OgHk\n8nXsapLfSkI9frE4dpGQTKVrEuK6NsZkA+cDX/j4OGGOZwA5IYzjaUmeQxz5CLhIRHYYYzoBbwE/\nATG1SwdBTSVDDsdwv459+ytV8g/gL8A+4HJggTHmsIj8I45y+Tp2DS2Wxx+hyBmrYxcJ8ToPQyUh\nrmtjTD1gDvCaiHzjY0hCHM8g5AzreMZUGxuLS3GISJmI7LD//wXwO+CWRJOTKJUMCULOSqCRx2qZ\n/vZrnx5/LzbWAS9iwfH0gefxqEkuX8euMgoy+SJoOWN47CIhKUrXROu6DgVjjMF2wz0JPOBnWNyP\nZzByhns8Y6ocRKRAROqISF0fL6uiESLOqI6CnF8AnV3eXwLsE5GIni6CkPMboK4xJs9ltc74n3p6\n7QILjqcPvsGWSR+MXL6OXbDyR0oocnoSrWMXCVE5D2NErI/lLKAZMNhPpQdIjOMZjJy+CHg8E84+\nb4ypa4w5C5dSHMZWnsPX2OuMMS3s/1+ILVP73USTE1vJkLuMMR3s9sgaS4ZYhYgcA94GfmeMSTfG\nXIkt8udNX+ONMQONMY3t/3cHRhOF4xmiXG8A44wxrY0xrYFxxODYhSpnrI6dL0I4F+NyHoYqZzyv\na/s+/x9wITBQRE7VMDTexzMoOcM+niKSUC/gKaAaqHJ5/db+WVugHDjX/n4qtvpMFcC39nXrJpqc\n9mVj7LIeAV4B0mIkZxbwDrYpcBlwm8tnVwHlLu/nAQftsn8J3BdruTxlsi97DvjBLtuzMT4fg5Iz\nlscu2HPRfh5WJMJ5GIqccb6u29llPGbff4X9Nx2aYNd1IDkjPp5aPkNRFEXxIuHMSoqiKEr8UeWg\nKIqieKHKQVEURfFClYOiKIrihSoHRVEUxQtVDoqiKIoXqhwURVEUL1Q5KIqiKF6oclAURVG8+P8B\nN6ybmWL6ekwAAAAASUVORK5CYII=\n",
|
||
"text/plain": [
|
||
"<matplotlib.figure.Figure at 0x7f2d9438f278>"
|
||
]
|
||
},
|
||
"metadata": {},
|
||
"output_type": "display_data"
|
||
}
|
||
],
|
||
"source": [
|
||
"y_pred_idx = y_pred.reshape(-1) # a 1D array rather than a column vector\n",
|
||
"plt.plot(X_test[y_pred_idx, 1], X_test[y_pred_idx, 2], 'go', label=\"Positive\")\n",
|
||
"plt.plot(X_test[~y_pred_idx, 1], X_test[~y_pred_idx, 2], 'r^', label=\"Negative\")\n",
|
||
"plt.legend()\n",
|
||
"plt.show()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Now that's much, much better! Apparently the new features really helped a lot."
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Try starting the TensorBoard server, find the latest run and look at the learning curve (i.e., how the loss evaluated on the test set evolves as a function of the epoch number):\n",
|
||
"\n",
|
||
"```\n",
|
||
"$ tensorboard --logdir=tf_logs\n",
|
||
"```"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"Now you can play around with the hyperparameters (e.g., the `batch_size` or the `learning_rate`) and run training again and again, comparing the learning curves. You can even automate this process by implementing grid search or randomized search. Below is a simple implementation of a randomized search on both the batch size and the learning rate. For the sake of simplicity, the checkpoint mechanism was removed."
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 138,
|
||
"metadata": {
|
||
"collapsed": false
|
||
},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"Iteration 0\n",
|
||
" logdir: tf_logs/logreg-run-20170528155400/\n",
|
||
" batch size: 95\n",
|
||
" learning_rate: 0.00815027148568\n",
|
||
" training: .....................\n",
|
||
" precision: 0.989795918367\n",
|
||
" recall: 0.989795918367\n",
|
||
"Iteration 1\n",
|
||
" logdir: tf_logs/logreg-run-20170528155524/\n",
|
||
" batch size: 31\n",
|
||
" learning_rate: 0.0140099212368\n",
|
||
" training: .....................\n",
|
||
" precision: 1.0\n",
|
||
" recall: 1.0\n",
|
||
"Iteration 2\n",
|
||
" logdir: tf_logs/logreg-run-20170528155909/\n",
|
||
" batch size: 82\n",
|
||
" learning_rate: 0.00587736410211\n",
|
||
" training: .....................\n",
|
||
" precision: 0.989795918367\n",
|
||
" recall: 0.989795918367\n",
|
||
"Iteration 3\n",
|
||
" logdir: tf_logs/logreg-run-20170528160108/\n",
|
||
" batch size: 31\n",
|
||
" learning_rate: 0.0553422822078\n",
|
||
" training: .....................\n",
|
||
" precision: 1.0\n",
|
||
" recall: 1.0\n",
|
||
"Iteration 4\n",
|
||
" logdir: tf_logs/logreg-run-20170528160350/\n",
|
||
" batch size: 29\n",
|
||
" learning_rate: 0.0843115287044\n",
|
||
" training: .....................\n",
|
||
" precision: 1.0\n",
|
||
" recall: 1.0\n",
|
||
"Iteration 5\n",
|
||
" logdir: tf_logs/logreg-run-20170528160541/\n",
|
||
" batch size: 51\n",
|
||
" learning_rate: 0.000123882463569\n",
|
||
" training: .....................\n",
|
||
" precision: 0.886597938144\n",
|
||
" recall: 0.877551020408\n",
|
||
"Iteration 6\n",
|
||
" logdir: tf_logs/logreg-run-20170528160647/\n",
|
||
" batch size: 78\n",
|
||
" learning_rate: 0.000232823516849\n",
|
||
" training: .....................\n",
|
||
" precision: 0.887755102041\n",
|
||
" recall: 0.887755102041\n",
|
||
"Iteration 7\n",
|
||
" logdir: tf_logs/logreg-run-20170528160734/\n",
|
||
" batch size: 67\n",
|
||
" learning_rate: 0.00023945574122\n",
|
||
" training: .....................\n",
|
||
" precision: 0.896907216495\n",
|
||
" recall: 0.887755102041\n",
|
||
"Iteration 8\n",
|
||
" logdir: tf_logs/logreg-run-20170528160822/\n",
|
||
" batch size: 90\n",
|
||
" learning_rate: 0.0231245651913\n",
|
||
" training: .....................\n",
|
||
" precision: 0.989898989899\n",
|
||
" recall: 1.0\n",
|
||
"Iteration 9\n",
|
||
" logdir: tf_logs/logreg-run-20170528160905/\n",
|
||
" batch size: 45\n",
|
||
" learning_rate: 0.000347092571646\n",
|
||
" training: .....................\n",
|
||
" precision: 0.921568627451\n",
|
||
" recall: 0.959183673469\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"from scipy.stats import reciprocal\n",
|
||
"\n",
|
||
"n_search_iterations = 10\n",
|
||
"\n",
|
||
"for search_iteration in range(n_search_iterations):\n",
|
||
" batch_size = np.random.randint(1, 100)\n",
|
||
" learning_rate = reciprocal(0.0001, 0.1).rvs()\n",
|
||
"\n",
|
||
" n_inputs = 2 + 4\n",
|
||
" logdir = log_dir(\"logreg\")\n",
|
||
" \n",
|
||
" print(\"Iteration\", search_iteration)\n",
|
||
" print(\" logdir:\", logdir)\n",
|
||
" print(\" batch size:\", batch_size)\n",
|
||
" print(\" learning_rate:\", learning_rate)\n",
|
||
" print(\" training: \", end=\"\")\n",
|
||
"\n",
|
||
" tf.reset_default_graph()\n",
|
||
"\n",
|
||
" X = tf.placeholder(tf.float32, shape=(None, n_inputs + 1), name=\"X\")\n",
|
||
" y = tf.placeholder(tf.float32, shape=(None, 1), name=\"y\")\n",
|
||
"\n",
|
||
" y_proba, loss, training_op, loss_summary, init, saver = logistic_regression(\n",
|
||
" X, y, learning_rate=learning_rate)\n",
|
||
"\n",
|
||
" file_writer = tf.summary.FileWriter(logdir, tf.get_default_graph())\n",
|
||
"\n",
|
||
" n_epochs = 10001\n",
|
||
" n_batches = int(np.ceil(m / batch_size))\n",
|
||
"\n",
|
||
" final_model_path = \"./my_logreg_model_%d\" % search_iteration\n",
|
||
"\n",
|
||
" with tf.Session() as sess:\n",
|
||
" sess.run(init)\n",
|
||
"\n",
|
||
" for epoch in range(n_epochs):\n",
|
||
" for batch_index in range(n_batches):\n",
|
||
" X_batch, y_batch = random_batch(X_train_enhanced, y_train, batch_size)\n",
|
||
" sess.run(training_op, feed_dict={X: X_batch, y: y_batch})\n",
|
||
" loss_val, summary_str = sess.run([loss, loss_summary], feed_dict={X: X_test_enhanced, y: y_test})\n",
|
||
" file_writer.add_summary(summary_str, epoch)\n",
|
||
" if epoch % 500 == 0:\n",
|
||
" print(\".\", end=\"\")\n",
|
||
"\n",
|
||
" saver.save(sess, final_model_path)\n",
|
||
"\n",
|
||
" print()\n",
|
||
" y_proba_val = y_proba.eval(feed_dict={X: X_test_enhanced, y: y_test})\n",
|
||
" y_pred = (y_proba_val >= 0.5)\n",
|
||
" \n",
|
||
" print(\" precision:\", precision_score(y_test, y_pred))\n",
|
||
" print(\" recall:\", recall_score(y_test, y_pred))"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "markdown",
|
||
"metadata": {},
|
||
"source": [
|
||
"The `reciprocal()` function from SciPy's `stats` module returns a reciprocal (log-uniform) random distribution, which is commonly used when you have no idea of the optimal scale of a hyperparameter. See the exercise solutions for chapter 2 for more details."
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": null,
|
||
"metadata": {
|
||
"collapsed": true
|
||
},
|
||
"outputs": [],
|
||
"source": []
|
||
}
|
||
],
|
||
"metadata": {
|
||
"kernelspec": {
|
||
"display_name": "Python 3",
|
||
"language": "python",
|
||
"name": "python3"
|
||
},
|
||
"language_info": {
|
||
"codemirror_mode": {
|
||
"name": "ipython",
|
||
"version": 3
|
||
},
|
||
"file_extension": ".py",
|
||
"mimetype": "text/x-python",
|
||
"name": "python",
|
||
"nbconvert_exporter": "python",
|
||
"pygments_lexer": "ipython3",
|
||
"version": "3.5.3"
|
||
},
|
||
"nav_menu": {
|
||
"height": "603px",
|
||
"width": "616px"
|
||
},
|
||
"toc": {
|
||
"navigate_menu": true,
|
||
"number_sections": true,
|
||
"sideBar": true,
|
||
"threshold": 6,
|
||
"toc_cell": false,
|
||
"toc_section_display": "block",
|
||
"toc_window_display": true
|
||
}
|
||
},
|
||
"nbformat": 4,
|
||
"nbformat_minor": 0
|
||
}
|