diff --git a/talks/tensorflow/.gitkeep b/talks/tensorflow/.gitkeep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/talks/tensorflow/demos/.gitkeep b/talks/tensorflow/demos/.gitkeep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/talks/tensorflow/demos/convolutional_network_demo.ipynb b/talks/tensorflow/demos/convolutional_network_demo.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..2f837cf1e99a02f0731a2c8e7b6cd82e55e2388e
--- /dev/null
+++ b/talks/tensorflow/demos/convolutional_network_demo.ipynb
@@ -0,0 +1,435 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Convolutional Neural Network Example\n",
+    "\n",
+    "Build a convolutional neural network with TensorFlow.\n",
+    "\n",
+    "This example is using TensorFlow layers API, see 'convolutional_network_raw' example\n",
+    "for a raw TensorFlow implementation with variables.\n",
+    "\n",
+    "- Author: Aymeric Damien\n",
+    "- Project: https://github.com/aymericdamien/TensorFlow-Examples/"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## MNIST Dataset Overview\n",
+    "\n",
+    "This example is using MNIST handwritten digits. The dataset contains 60,000 examples for training and 10,000 examples for testing. The digits have been size-normalized and centered in a fixed-size image (28x28 pixels) with values from 0 to 1. For simplicity, each image has been flattened and converted to a 1-D numpy array of 784 features (28*28).\n",
+    "\n",
+    "![MNIST Dataset](http://neuralnetworksanddeeplearning.com/images/mnist_100_digits.png)\n",
+    "\n",
+    "More info: http://yann.lecun.com/exdb/mnist/\n",
+    "\n",
+    "## CNN Overview\n",
+    "\n",
+    "![CNN](http://personal.ie.cuhk.edu.hk/~ccloy/project_target_code/images/fig3.png)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Extracting /tmp/data/train-images-idx3-ubyte.gz\n",
+      "Extracting /tmp/data/train-labels-idx1-ubyte.gz\n",
+      "Extracting /tmp/data/t10k-images-idx3-ubyte.gz\n",
+      "Extracting /tmp/data/t10k-labels-idx1-ubyte.gz\n"
+     ]
+    }
+   ],
+   "source": [
+    "from __future__ import division, print_function, absolute_import\n",
+    "\n",
+    "# Import MNIST data\n",
+    "from tensorflow.examples.tutorials.mnist import input_data\n",
+    "mnist = input_data.read_data_sets(\"/tmp/data/\", one_hot=False)\n",
+    "\n",
+    "import tensorflow as tf\n",
+    "import matplotlib.pyplot as plt\n",
+    "import numpy as np"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Training Parameters\n",
+    "learning_rate = 0.001\n",
+    "num_steps = 2000\n",
+    "batch_size = 128\n",
+    "\n",
+    "# Network Parameters\n",
+    "num_input = 784 # MNIST data input (img shape: 28*28)\n",
+    "num_classes = 10 # MNIST total classes (0-9 digits)\n",
+    "dropout = 0.25 # Dropout, probability to drop a unit"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Create the neural network\n",
+    "def conv_net(x_dict, n_classes, dropout, reuse, is_training):\n",
+    "    \n",
+    "    # Define a scope for reusing the variables\n",
+    "    with tf.variable_scope('ConvNet', reuse=reuse):\n",
+    "        # TF Estimator input is a dict, in case of multiple inputs\n",
+    "        x = x_dict['images']\n",
+    "\n",
+    "        # MNIST data input is a 1-D vector of 784 features (28*28 pixels)\n",
+    "        # Reshape to match picture format [Height x Width x Channel]\n",
+    "        # Tensor input become 4-D: [Batch Size, Height, Width, Channel]\n",
+    "        x = tf.reshape(x, shape=[-1, 28, 28, 1])\n",
+    "\n",
+    "        # Convolution Layer with 32 filters and a kernel size of 5\n",
+    "        conv1 = tf.layers.conv2d(x, 32, 5, activation=tf.nn.relu)\n",
+    "        # Max Pooling (down-sampling) with strides of 2 and kernel size of 2\n",
+    "        conv1 = tf.layers.max_pooling2d(conv1, 2, 2)\n",
+    "\n",
+    "        # Convolution Layer with 64 filters and a kernel size of 3\n",
+    "        conv2 = tf.layers.conv2d(conv1, 64, 3, activation=tf.nn.relu)\n",
+    "        # Max Pooling (down-sampling) with strides of 2 and kernel size of 2\n",
+    "        conv2 = tf.layers.max_pooling2d(conv2, 2, 2)\n",
+    "\n",
+    "        # Flatten the data to a 1-D vector for the fully connected layer\n",
+    "        fc1 = tf.contrib.layers.flatten(conv2)\n",
+    "\n",
+    "        # Fully connected layer (in tf contrib folder for now)\n",
+    "        fc1 = tf.layers.dense(fc1, 1024)\n",
+    "        # Apply Dropout (if is_training is False, dropout is not applied)\n",
+    "        fc1 = tf.layers.dropout(fc1, rate=dropout, training=is_training)\n",
+    "\n",
+    "        # Output layer, class prediction\n",
+    "        out = tf.layers.dense(fc1, n_classes)\n",
+    "\n",
+    "    return out"
+   ]
+  },
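+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A quick sanity check on the architecture above (a sketch, assuming the default 'valid' padding of `tf.layers.conv2d`): the shapes flow 28x28x1 -> conv 5x5/32 -> 24x24x32 -> pool 2 -> 12x12x32 -> conv 3x3/64 -> 10x10x64 -> pool 2 -> 5x5x64 -> flatten -> 1600 features. The cell below builds `conv_net` in a throwaway graph and confirms the output shape."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Sanity-check sketch: build conv_net in a separate graph and print the\n",
+    "# logits shape. A separate graph avoids polluting the default graph; the\n",
+    "# Estimator below constructs its own graph anyway.\n",
+    "with tf.Graph().as_default():\n",
+    "    images = tf.placeholder(tf.float32, [None, num_input])\n",
+    "    logits = conv_net({'images': images}, num_classes, dropout,\n",
+    "                      reuse=False, is_training=False)\n",
+    "    print(logits.shape)  # expected: (?, 10)"
+   ]
+  },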
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Define the model function (following TF Estimator Template)\n",
+    "def model_fn(features, labels, mode):\n",
+    "    \n",
+    "    # Build the neural network\n",
+    "    # Because Dropout have different behavior at training and prediction time, we\n",
+    "    # need to create 2 distinct computation graphs that still share the same weights.\n",
+    "    logits_train = conv_net(features, num_classes, dropout, reuse=False, is_training=True)\n",
+    "    logits_test = conv_net(features, num_classes, dropout, reuse=True, is_training=False)\n",
+    "    \n",
+    "    # Predictions\n",
+    "    pred_classes = tf.argmax(logits_test, axis=1)\n",
+    "    pred_probas = tf.nn.softmax(logits_test)\n",
+    "    \n",
+    "    # If prediction mode, early return\n",
+    "    if mode == tf.estimator.ModeKeys.PREDICT:\n",
+    "        return tf.estimator.EstimatorSpec(mode, predictions=pred_classes) \n",
+    "        \n",
+    "    # Define loss and optimizer\n",
+    "    loss_op = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(\n",
+    "        logits=logits_train, labels=tf.cast(labels, dtype=tf.int32)))\n",
+    "    optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)\n",
+    "    train_op = optimizer.minimize(loss_op, global_step=tf.train.get_global_step())\n",
+    "    \n",
+    "    # Evaluate the accuracy of the model\n",
+    "    acc_op = tf.metrics.accuracy(labels=labels, predictions=pred_classes)\n",
+    "    \n",
+    "    # TF Estimators requires to return a EstimatorSpec, that specify\n",
+    "    # the different ops for training, evaluating, ...\n",
+    "    estim_specs = tf.estimator.EstimatorSpec(\n",
+    "      mode=mode,\n",
+    "      predictions=pred_classes,\n",
+    "      loss=loss_op,\n",
+    "      train_op=train_op,\n",
+    "      eval_metric_ops={'accuracy': acc_op})\n",
+    "\n",
+    "    return estim_specs"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "WARNING:tensorflow:Using temporary folder as model directory: /var/folders/c3/rhg93t4n0cjbgvkb9tp99_h40000gq/T/tmpqhkrjnut\n",
+      "INFO:tensorflow:Using default config.\n",
+      "INFO:tensorflow:Using config: {}\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Build the Estimator (i.e. class for training and evaluating network)\n",
+    "model = tf.estimator.Estimator(model_fn)"
+   ]
+  },
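+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As the warning above shows, without an explicit `model_dir` the checkpoints go to a temporary folder and are lost between sessions. A possible variant (the path is purely illustrative) pins them to a fixed location:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Optional: keep checkpoints in a known directory so training can be\n",
+    "# resumed later (the path below is illustrative).\n",
+    "# model = tf.estimator.Estimator(model_fn, model_dir='/tmp/conv_mnist_model')"
+   ]
+  },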
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "INFO:tensorflow:Create CheckpointSaverHook.\n",
+      "INFO:tensorflow:Saving checkpoints for 1 into /var/folders/c3/rhg93t4n0cjbgvkb9tp99_h40000gq/T/tmpqhkrjnut/model.ckpt.\n",
+      "INFO:tensorflow:step = 1, loss = 2.32753\n",
+      "INFO:tensorflow:global_step/sec: 12.1422\n",
+      "INFO:tensorflow:step = 101, loss = 0.127041 (8.237 sec)\n",
+      "INFO:tensorflow:global_step/sec: 11.4857\n",
+      "INFO:tensorflow:step = 201, loss = 0.0393114 (8.707 sec)\n",
+      "INFO:tensorflow:global_step/sec: 8.48714\n",
+      "INFO:tensorflow:step = 301, loss = 0.0843611 (11.783 sec)\n",
+      "INFO:tensorflow:global_step/sec: 7.15251\n",
+      "INFO:tensorflow:step = 401, loss = 0.0262136 (13.981 sec)\n",
+      "INFO:tensorflow:global_step/sec: 6.4515\n",
+      "INFO:tensorflow:step = 501, loss = 0.0497303 (15.500 sec)\n",
+      "INFO:tensorflow:global_step/sec: 6.70897\n",
+      "INFO:tensorflow:step = 601, loss = 0.0142919 (14.905 sec)\n",
+      "INFO:tensorflow:global_step/sec: 6.6801\n",
+      "INFO:tensorflow:step = 701, loss = 0.042797 (14.970 sec)\n",
+      "INFO:tensorflow:global_step/sec: 6.33292\n",
+      "INFO:tensorflow:step = 801, loss = 0.0159051 (15.790 sec)\n",
+      "INFO:tensorflow:global_step/sec: 6.51359\n",
+      "INFO:tensorflow:step = 901, loss = 0.0560886 (15.352 sec)\n",
+      "INFO:tensorflow:global_step/sec: 7.00529\n",
+      "INFO:tensorflow:step = 1001, loss = 0.0389398 (14.275 sec)\n",
+      "INFO:tensorflow:global_step/sec: 11.4372\n",
+      "INFO:tensorflow:step = 1101, loss = 0.000826523 (8.743 sec)\n",
+      "INFO:tensorflow:global_step/sec: 11.8734\n",
+      "INFO:tensorflow:step = 1201, loss = 0.0288895 (8.422 sec)\n",
+      "INFO:tensorflow:global_step/sec: 11.4033\n",
+      "INFO:tensorflow:step = 1301, loss = 0.0109369 (8.769 sec)\n",
+      "INFO:tensorflow:global_step/sec: 12.1424\n",
+      "INFO:tensorflow:step = 1401, loss = 0.016869 (8.236 sec)\n",
+      "INFO:tensorflow:global_step/sec: 12.2184\n",
+      "INFO:tensorflow:step = 1501, loss = 0.025425 (8.184 sec)\n",
+      "INFO:tensorflow:global_step/sec: 12.9909\n",
+      "INFO:tensorflow:step = 1601, loss = 0.00609953 (7.698 sec)\n",
+      "INFO:tensorflow:global_step/sec: 12.3233\n",
+      "INFO:tensorflow:step = 1701, loss = 0.0149776 (8.114 sec)\n",
+      "INFO:tensorflow:global_step/sec: 12.452\n",
+      "INFO:tensorflow:step = 1801, loss = 0.0356573 (8.031 sec)\n",
+      "INFO:tensorflow:global_step/sec: 12.6611\n",
+      "INFO:tensorflow:step = 1901, loss = 0.0126303 (7.898 sec)\n",
+      "INFO:tensorflow:Saving checkpoints for 2000 into /var/folders/c3/rhg93t4n0cjbgvkb9tp99_h40000gq/T/tmpqhkrjnut/model.ckpt.\n",
+      "INFO:tensorflow:Loss for final step: 0.0101263.\n"
+     ]
+    },
+    {
+     "data": {
+      "text/plain": [
+       "<tensorflow.python.estimator.estimator.Estimator at 0x124517438>"
+      ]
+     },
+     "execution_count": 6,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "# Define the input function for training\n",
+    "input_fn = tf.estimator.inputs.numpy_input_fn(\n",
+    "    x={'images': mnist.train.images}, y=mnist.train.labels,\n",
+    "    batch_size=batch_size, num_epochs=None, shuffle=True)\n",
+    "# Train the Model\n",
+    "model.train(input_fn, steps=num_steps)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "INFO:tensorflow:Starting evaluation at 2018-02-22-18:15:46\n",
+      "INFO:tensorflow:Restoring parameters from /var/folders/c3/rhg93t4n0cjbgvkb9tp99_h40000gq/T/tmpqhkrjnut/model.ckpt-2000\n",
+      "INFO:tensorflow:Finished evaluation at 2018-02-22-18:15:48\n",
+      "INFO:tensorflow:Saving dict for global step 2000: accuracy = 0.9878, global_step = 2000, loss = 0.0447589\n",
+      "WARNING:tensorflow:Skipping summary for global_step, must be a float or np.float32.\n"
+     ]
+    },
+    {
+     "data": {
+      "text/plain": [
+       "{'accuracy': 0.9878, 'global_step': 2000, 'loss': 0.044758931}"
+      ]
+     },
+     "execution_count": 7,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "# Evaluate the Model\n",
+    "# Define the input function for evaluating\n",
+    "input_fn = tf.estimator.inputs.numpy_input_fn(\n",
+    "    x={'images': mnist.test.images}, y=mnist.test.labels,\n",
+    "    batch_size=batch_size, shuffle=False)\n",
+    "# Use the Estimator 'evaluate' method\n",
+    "model.evaluate(input_fn)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "INFO:tensorflow:Restoring parameters from /var/folders/c3/rhg93t4n0cjbgvkb9tp99_h40000gq/T/tmpqhkrjnut/model.ckpt-2000\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAADQNJREFUeJzt3W+MVfWdx/HPZylNjPQBWLHEgnQb\n3bgaAzoaE3AzamxYbYKN1NQHGzbZMH2AZps0ZA1PypMmjemfrU9IpikpJtSWhFbRGBeDGylRGwej\nBYpQICzMgkAzJgUT0yDfPphDO8W5v3u5/84dv+9XQube8z1/vrnhM+ecOefcnyNCAPL5h7obAFAP\nwg8kRfiBpAg/kBThB5Ii/EBShB9IivADSRF+IKnP9HNjtrmdEOixiHAr83W057e9wvZB24dtP9nJ\nugD0l9u9t9/2LEmHJD0gaVzSW5Iei4jfF5Zhzw/0WD/2/HdJOhwRRyPiz5J+IWllB+sD0EedhP96\nSSemvB+vpv0d2yO2x2yPdbAtAF3WyR/8pju0+MRhfUSMShqVOOwHBkkne/5xSQunvP+ipJOdtQOg\nXzoJ/1uSbrT9JduflfQNSdu70xaAXmv7sD8iLth+XNL/SJolaVNE7O9aZwB6qu1LfW1tjHN+oOf6\ncpMPgJmL8ANJEX4gKcIPJEX4gaQIP5AU4QeSIvxAUoQfSIrwA0kRfiApwg8kRfiBpAg/kBThB5Ii\n/EBShB9IivADSRF+ICnCDyRF+IGkCD+QFOEHkiL8QFKEH0iK8ANJEX4gKcIPJEX4gaTaHqJbkmwf\nk3RO0seSLkTEUDeaAtB7HYW/cm9E/LEL6wHQRxz2A0l1Gv6QtMP2Htsj3WgIQH90eti/LCJO2p4v\n6RXb70XErqkzVL8U+MUADBhHRHdWZG+QdD4ivl+YpzsbA9BQRLiV+do+7Ld9te3PXXot6SuS9rW7\nPgD91clh/3WSfm370np+HhEvd6UrAD3XtcP+ljbGYT/Qcz0/7AcwsxF+ICnCDyRF+IGkCD+QFOEH\nkurGU30prFq1qmFtzZo1xWVPnjxZrH/00UfF+pYtW4r1999/v2Ht8OHDxWWRF3t+ICnCDyRF+IGk\nCD+QFOEHkiL8QFKEH0iKR3pbdPTo0Ya1xYsX96+RaZw7d65hbf/+/X3sZLCMj483rD311FPFZcfG\nxrrdTt/wSC+AIsIPJEX4gaQIP5AU4QeSIvxAUoQfSIrn+VtUemb/tttuKy574MCBYv3mm28u1m+/\n/fZifXh4uGHt7rvvLi574sSJYn3hwoXFeicuXLhQrJ89e7ZYX7BgQdvbPn78eLE+k6/zt4o9P5AU\n4QeSIvxAUoQfSIrwA0kRfiApwg8k1fR5ftubJH1V0pmIuLWaNk/SLyUtlnRM0qMR8UHTjc3g5/kH\n2dy5cxvWlixZUlx2z549xfqdd97ZVk+taDZewaFDh4r1ZvdPzJs3r2Ft7dq1xWU3btxYrA+ybj7P\n/zNJKy6b9qSknRFxo6Sd1XsAM0jT8EfELkkTl01eKWlz9XqzpIe73BeAHmv3nP+6iDglSdXP+d1r\nCUA/9PzeftsjkkZ6vR0AV6bdPf9p2wskqfp5ptGMETEaEUMRMdTmtgD0QLvh3y5pdfV6taTnu9MO\ngH5pGn7bz0p6Q9I/2R63/R+SvifpAdt/kPRA9R7ADML39mNgPfLII8X61q1bi/V9+/Y1rN17773F\nZScmLr/ANXPwvf0Aigg/kBThB5Ii/EBShB9IivADSXGpD7WZP7/8SMjevXs7Wn7VqlUNa9u2bSsu\nO5NxqQ9AEeEHkiL8QFKEH0iK8ANJEX4gKcIPJMUQ3ahNs6/Pvvbaa4v1Dz4of1v8wYMHr7inTNjz\nA0kRfiApwg8kRfiBpAg/kBThB5Ii/EBSPM+Pnlq2bFnD2quvvlpcdvbs2cX68PBwsb5r165i/dOK\n5/kBFBF+ICnCDyRF+IGkCD+QFOEHkiL8QFJNn+e3vUnSVyWdiYhbq2kbJK2RdLaabX1EvNSrJjFz\nPfjggw1rza7j79y5s1h/44032uoJk1rZ8/9M0opppv8oIpZU/wg+MMM0DX9E7JI00YdeAPRRJ+f8\nj9v+ne1Ntud2rSMAfdFu+DdK+rKkJZJOSfpBoxltj9gesz3W5rYA9EBb4Y+I0xHxcURclPQTSXcV\n5h2NiKGIGGq3SQDd11b4bS+Y8vZrkvZ1px0A/dLKpb5nJQ1L+rztcUnfkTRse4mkkHRM0jd72COA\nHuB5fnTkqquuKtZ3797dsHbLLbcUl73vvvuK9ddff71Yz4rn+QEUEX4gKcIPJEX4gaQIP5AU4QeS\nYohudGTdunXF+tKlSxvWXn755eKyXMrrLfb8QFKEH0iK8ANJEX4gKcIPJEX4gaQIP5AUj/Si6KGH\nHirWn3vuuWL9ww8/bFhbsWK6L4X+mzfffLNYx/R4pBdAEeEHkiL8QFKEH0iK8ANJEX4gKcIPJMXz\n/Mldc801xfrTTz9drM+aNatYf+mlxgM4cx2/Xuz5gaQIP5AU4QeSIvxAUoQfSIrwA0kRfiCpps/z\n214o6RlJX5B0UdJoRPzY9jxJv5S0WNIxSY9GxAdN1sXz/H3W7Dp8s2vtd9xxR7F+5MiRYr30zH6z\nZdGebj7Pf0HStyPiZkl3S1pr+58lPSlpZ0TcKGln9R7ADNE0/BFxKiLerl6fk3RA0vWSVkraXM22\nWdLDvWoSQPdd0Tm/7cWSlkr6raTrIuKUNPkLQtL8bjcHoHdavrff9hxJ2yR9KyL+ZLd0WiHbI5JG\n2msPQK+0tOe3PVuTwd8SEb+qJp+2vaCqL5B0ZrplI2I0IoYiYqgbDQPojqbh9+Qu/qeSDkTED6eU\ntktaXb1eLen57rcHoFdaudS3XNJvJO3V5KU+SVqvyfP+rZIWSTou6esRMdFkXVzq67ObbrqpWH/v\nvfc6Wv/KlSuL9RdeeKGj9ePKtXqpr+k5f0TsltRoZfdfSVMABgd3+AFJEX4gKcIPJEX4gaQIP5AU\n4QeS4qu7PwVuuOGGhrUdO3Z0tO5169YV6y+++GJH60d92PMDSRF+ICnCDyRF+IGkCD+QFOEHkiL8\nQFJc5/8UGBlp/C1pixYt6mjdr732WrHe7PsgMLjY8wNJEX4gKcIPJEX4gaQIP5AU4QeSIvxAUlzn\nnwGWL19erD/xxBN96gSfJuz5gaQIP5AU4QeSIvxAUoQfSIrwA0kRfiCpptf5bS+U9IykL0i6KGk0\nIn5se4OkNZLOVrOuj4iXetVoZvfcc0+xPmfOnLbXfeTIkWL9/Pnzba8bg62Vm3wuSPp2RLxt+3OS\n9th+par9KCK+37v2APRK0/BHxClJp6rX52wfkHR9rxsD0FtXdM5ve7GkpZJ+W0163PbvbG+yPbfB\nMiO2x2yPddQpgK5qOfy250ja
JulbEfEnSRslfVnSEk0eGfxguuUiYjQihiJiqAv9AuiSlsJve7Ym\ng78lIn4lSRFxOiI+joiLkn4i6a7etQmg25qG37Yl/VTSgYj44ZTpC6bM9jVJ+7rfHoBeaeWv/csk\n/Zukvbbfqaatl/SY7SWSQtIxSd/sSYfoyLvvvlus33///cX6xMREN9vBAGnlr/27JXmaEtf0gRmM\nO/yApAg/kBThB5Ii/EBShB9IivADSbmfQyzbZjxnoMciYrpL85/Anh9IivADSRF+ICnCDyRF+IGk\nCD+QFOEHkur3EN1/lPR/U95/vpo2iAa1t0HtS6K3dnWztxtanbGvN/l8YuP22KB+t9+g9jaofUn0\n1q66euOwH0iK8ANJ1R3+0Zq3XzKovQ1qXxK9tauW3mo95wdQn7r3/ABqUkv4ba+wfdD2YdtP1tFD\nI7aP2d5r+526hxirhkE7Y3vflGnzbL9i+w/Vz2mHSauptw22/7/67N6x/WBNvS20/b+2D9jeb/s/\nq+m1fnaFvmr53Pp+2G97lqRDkh6QNC7pLUmPRcTv+9pIA7aPSRqKiNqvCdv+F0nnJT0TEbdW056S\nNBER36t+cc6NiP8akN42SDpf98jN1YAyC6aOLC3pYUn/rho/u0Jfj6qGz62OPf9dkg5HxNGI+LOk\nX0haWUMfAy8idkm6fNSMlZI2V683a/I/T9816G0gRMSpiHi7en1O0qWRpWv97Ap91aKO8F8v6cSU\n9+MarCG/Q9IO23tsj9TdzDSuq4ZNvzR8+vya+7lc05Gb++mykaUH5rNrZ8Trbqsj/NN9xdAgXXJY\nFhG3S/pXSWurw1u0pqWRm/tlmpGlB0K7I153Wx3hH5e0cMr7L0o6WUMf04qIk9XPM5J+rcEbffj0\npUFSq59nau7nrwZp5ObpRpbWAHx2gzTidR3hf0vSjba/ZPuzkr4haXsNfXyC7aurP8TI9tWSvqLB\nG314u6TV1evVkp6vsZe/MygjNzcaWVo1f3aDNuJ1LTf5VJcy/lvSLEmbIuK7fW9iGrb/UZN7e2ny\nicef19mb7WclDWvyqa/Tkr4j6TlJWyUtknRc0tcjou9/eGvQ27AmD13/OnLzpXPsPve2XNJvJO2V\ndLGavF6T59e1fXaFvh5TDZ8bd/gBSXGHH5AU4QeSIvxAUoQfSIrwA0kRfiApwg8kRfiBpP4CIJjq\nosJxHysAAAAASUVORK5CYII=\n",
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x11be4d6d8>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model prediction: 7\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAADYNJREFUeJzt3X+oXPWZx/HPZ20CYouaFLMXYzc1\n6rIqauUqiy2LSzW6S0wMWE3wjyy77O0fFbYYfxGECEuwLNvu7l+BFC9NtLVpuDHGWjYtsmoWTPAq\nGk2TtkauaTbX3A0pNkGkJnn2j3uy3MY7ZyYzZ+bMzfN+QZiZ88w552HI555z5pw5X0eEAOTzJ3U3\nAKAehB9IivADSRF+ICnCDyRF+IGkCD+QFOEHkiL8QFKf6+XKbHM5IdBlEeFW3tfRlt/2nbZ/Zfs9\n2491siwAveV2r+23fZ6kX0u6XdJBSa9LWhERvyyZhy0/0GW92PLfLOm9iHg/Iv4g6ceSlnawPAA9\n1En4L5X02ymvDxbT/ojtIdujtkc7WBeAinXyhd90uxaf2a2PiPWS1kvs9gP9pJMt/0FJl015PV/S\noc7aAdArnYT/dUlX2v6y7dmSlkvaVk1bALqt7d3+iDhh+wFJ2yWdJ2k4IvZU1hmArmr7VF9bK+OY\nH+i6nlzkA2DmIvxAUoQfSIrwA0kRfiApwg8kRfiBpAg/kBThB5Ii/EBShB9IivADSRF+IKme3rob\n7XnooYdK6+eff37D2nXXXVc67z333NNWT6etW7eutP7aa681rD399NMdrRudYcsPJEX4gaQIP5AU\n4QeSIvxAUoQfSIrwA0lx994+sGnTptJ6p+fi67R///6Gtdtuu6103gMHDlTdTgrcvRdAKcIPJEX4\ngaQIP5AU4QeSIvxAUoQfSKqj3/PbHpN0TNJJSSciYrCKps41dZ7H37dvX2l9+/btpfXLL7+8tH7X\nXXeV1hcuXNiwdv/995fO++STT5bW0Zkqbubx1xFxpILlAOghdvuBpDoNf0j6ue03bA9V0RCA3uh0\nt/+rEXHI9iWSfmF7X0S8OvUNxR8F/jAAfaajLX9EHCoeJyQ9J+nmad6zPiIG+TIQ6C9th9/2Bba/\ncPq5pEWS3q2qMQDd1clu/zxJz9k+vZwfRcR/VtIVgK5rO/wR8b6k6yvsZcYaHCw/olm2bFlHy9+z\nZ09pfcmSJQ1rR46Un4U9fvx4aX327Nml9Z07d5bWr7++8X+RuXPnls6L7uJUH5AU4QeSIvxAUoQf\nSIrwA0kRfiAphuiuwMDAQGm9uBaioWan8u64447S+vj4eGm9E6tWrSqtX3311W0v+8UXX2x7XnSO\nLT+QFOEHkiL8QFKEH0iK8ANJEX4gKcIPJMV5/gq88MILpfUrrriitH7s2LHS+tGjR8+6p6osX768\ntD5r1qwedYKqseUHkiL8QFKEH0iK8ANJEX4gKcIPJEX4gaQ4z98DH3zwQd0tNPTwww+X1q+66qqO\nlr9r1662aug+tvxAUoQfSIrwA0kRfiApwg8kRfiBpAg/kJQjovwN9rCkxZImIuLaYtocSZskLZA0\nJuneiPhd05XZ5StD5RYvXlxa37x5c2m92RDdExMTpfWy+wG88sorpfOiPRFRPlBEoZUt/w8k3XnG\ntMckvRQRV0p6qXgNYAZpGv6IeFXSmbeSWSppQ/F8g6S7K+4LQJe1e8w/LyLGJal4vKS6lgD0Qtev\n7bc9JGmo2+sBcHba3fIftj0gScVjw299ImJ9RAxGxGCb6wLQBe2Gf5uklcXzlZKer6YdAL3SNPy2\nn5X0mqQ/t33Q9j9I+o6k223/RtLtxWsAM0jTY/6IWNGg9PWKe0EXDA6WH201O4/fzKZNm0rrnMvv\nX1zhByRF+IGkCD+QFOEHkiL8QFKEH0iKW3efA7Zu3dqwtmjRoo6WvXHjxtL6448/3tHyUR+2/EBS\nhB9IivADSRF+ICnCDyRF+IGkCD+QVNNbd1e6Mm7d3ZaBgYHS+ttvv92wNnfu3NJ5jxw5Ulq/5ZZb\nSuv79+8vraP3qrx1N4BzEOEHkiL8QFKEH0iK8ANJEX4gKcIPJMXv+WeAkZGR0nqzc/llnnnmmdI6\n5/HPXWz5gaQIP5AU4QeSIvxAUoQfSIrwA0kRfiCppuf5bQ9LWixpIiKuLaY9IekfJf1v8bbVEfGz\nbjV5rluyZElp/cYbb2x72S+//HJpfc2aNW0vGzNbK1v+H0i6c5rp/xYRNxT/CD4wwzQNf0S8Kulo\nD3oB0EOdHPM/YHu37WHbF1fWEYCeaDf86yQtlHSDpHFJ3230RttDtkdtj7a5LgBd0Fb4I+JwRJyM\niFOSvi/p5pL3ro+IwYgYbLdJANVrK/y2p95Odpmkd6tpB0CvtHKq71lJt0r6ou2DktZIutX2DZJC\n0pikb3axRwBd0DT8EbFimslPdaGXc1az39uvXr26tD5r1qy21/3WW2+V1o8fP972sjGzcYUfkBTh\nB5Ii/EBShB9IivADSRF+IClu3d0Dq1atKq3fdNNNHS1/69atDWv8ZBeNsOUHkiL8QFKEH0iK8ANJ\nEX4gKcIPJEX4gaQcEb1bmd27lfWRTz75pLTeyU92JWn+/PkNa+Pj4x0tGzNPRLiV97HlB5Ii/EBS\nhB9IivADSRF+ICnCDyRF+IGk+D3/OWDOnDkNa59++mkPO/msjz76qGGtWW/Nrn+48MIL2+pJki66\n6KLS+oMPPtj2sltx8uTJhrVHH320dN6PP/64kh7Y8gNJEX4gKcIPJEX4gaQIP5AU4QeSIvxAUk3P\n89u+TNJGSX8q6ZSk9RHxH7bnSNokaYGkMUn3RsTvutcqGtm9e3fdLTS0efPmhrVm9xqYN29eaf2+\n++5rq6d+9+GHH5bW165dW8l6Wtnyn5C0KiL+QtJfSvqW7aslPSbppYi4UtJLxWsAM0TT8EfEeES8\nWTw/JmmvpEslLZW0oXjbBkl3d6tJANU7q2N+2wskfUXSLknzImJcmvwDIemSqpsD0D0tX9tv+/OS\nRiR9OyJ+b7d0mzDZHpI01F57ALqlpS2/7VmaDP4PI2JLMfmw7YGiPiBpYrp5I2J9RAxGxGAVDQOo\nRtPwe3IT/5SkvRHxvSmlbZJWFs9XSnq++vYAdEvTW3fb/pqkHZLe0eSpPklarcnj/p9I+pKkA5K+\nERFHmywr5a27t2zZUlpfunRpjzrJ5cSJEw1rp06dalhrxbZt20rro6OjbS97x44dpfWdO3eW1lu9\ndXfTY/6I+G9JjRb29VZWAqD/cIUfkBThB5Ii/EBShB9IivADSRF+ICmG6O4DjzzySGm90yG8y1xz\nzTWl9W7+bHZ4eLi0PjY21tHyR0ZGGtb27dvX0bL7GUN0AyhF+IGkCD+QFOEHkiL8QFKEH0iK8ANJ\ncZ4fOMdwnh9AKcIPJEX4gaQI
P5AU4QeSIvxAUoQfSIrwA0kRfiApwg8kRfiBpAg/kBThB5Ii/EBS\nhB9Iqmn4bV9m+79s77W9x/Y/FdOfsP0/tt8q/v1t99sFUJWmN/OwPSBpICLetP0FSW9IulvSvZKO\nR8S/trwybuYBdF2rN/P4XAsLGpc0Xjw/ZnuvpEs7aw9A3c7qmN/2AklfkbSrmPSA7d22h21f3GCe\nIdujtkc76hRApVq+h5/tz0t6RdLaiNhie56kI5JC0j9r8tDg75ssg91+oMta3e1vKfy2Z0n6qaTt\nEfG9aeoLJP00Iq5tshzCD3RZZTfwtG1JT0naOzX4xReBpy2T9O7ZNgmgPq182/81STskvSPpVDF5\ntaQVkm7Q5G7/mKRvFl8Oli2LLT/QZZXu9leF8APdx337AZQi/EBShB9IivADSRF+ICnCDyRF+IGk\nCD+QFOEHkiL8QFKEH0iK8ANJEX4gKcIPJNX0Bp4VOyLpgymvv1hM60f92lu/9iXRW7uq7O3PWn1j\nT3/P/5mV26MRMVhbAyX6tbd+7Uuit3bV1Ru7/UBShB9Iqu7wr695/WX6tbd+7Uuit3bV0lutx/wA\n6lP3lh9ATWoJv+07bf/K9nu2H6ujh0Zsj9l+pxh5uNYhxoph0CZsvztl2hzbv7D9m+Jx2mHSauqt\nL0ZuLhlZutbPrt9GvO75br/t8yT9WtLtkg5Kel3Sioj4ZU8bacD2mKTBiKj9nLDtv5J0XNLG06Mh\n2f4XSUcj4jvFH86LI+LRPuntCZ3lyM1d6q3RyNJ/pxo/uypHvK5CHVv+myW9FxHvR8QfJP1Y0tIa\n+uh7EfGqpKNnTF4qaUPxfIMm//P0XIPe+kJEjEfEm8XzY5JOjyxd62dX0lct6gj/pZJ+O+X1QfXX\nkN8h6ee237A9VHcz05h3emSk4vGSmvs5U9ORm3vpjJGl++aza2fE66rVEf7pRhPpp1MOX42IGyX9\njaRvFbu3aM06SQs1OYzbuKTv1tlMMbL0iKRvR8Tv6+xlqmn6quVzqyP8ByVdNuX1fEmHauhjWhFx\nqHickPScJg9T+snh04OkFo8TNffz/yLicEScjIhTkr6vGj+7YmTpEUk/jIgtxeTaP7vp+qrrc6sj\n/K9LutL2l23PlrRc0rYa+vgM2xcUX8TI9gWSFqn/Rh/eJmll8XylpOdr7OWP9MvIzY1GllbNn12/\njXhdy0U+xamMf5d0nqThiFjb8yamYftyTW7tpclfPP6ozt5sPyvpVk3+6uuwpDWStkr6iaQvSTog\n6RsR0fMv3hr0dqvOcuTmLvXWaGTpXarxs6tyxOtK+uEKPyAnrvADkiL8QFKEH0iK8ANJEX4gKcIP\nJEX4gaQIP5DU/wG6SwYLYCwMKQAAAABJRU5ErkJggg==\n",
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x11c3670b8>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model prediction: 2\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAADCFJREFUeJzt3WGoXPWZx/Hvs1n7wrQvDDUarGu6\nRVdLxGS5iBBZXarFFSHmRaUKS2RL0xcNWNgXK76psBREtt1dfFFIaWgqrbVEs2pdbYsspguLGjVU\n21grcre9a8hVFGoVKSbPvrgn5VbvnLmZOTNnkuf7gTAz55kz52HI7/7PzDlz/pGZSKrnz/puQFI/\nDL9UlOGXijL8UlGGXyrK8EtFGX6pKMMvFWX4paL+fJobiwhPJ5QmLDNjNc8ba+SPiOsi4lcR8UpE\n3D7Oa0marhj13P6IWAO8DFwLLADPADdn5i9b1nHklyZsGiP/5cArmflqZv4B+AGwbYzXkzRF44T/\nPOC3yx4vNMv+RETsjIiDEXFwjG1J6tg4X/ittGvxod36zNwN7AZ3+6VZMs7IvwCcv+zxJ4DXxmtH\n0rSME/5ngAsj4pMR8RHg88DD3bQladJG3u3PzPcjYhfwY2ANsCczf9FZZ5ImauRDfSNtzM/80sRN\n5SQfSacuwy8VZfilogy/VJThl4oy/FJRhl8qyvBLRRl+qSjDLxVl+KWiDL9UlOGXijL8UlGGXyrK\n8EtFGX6pKMMvFWX4paIMv1SU4ZeKmuoU3arnoosuGlh76aWXWte97bbbWuv33HPPSD1piSO/VJTh\nl4oy/FJRhl8qyvBLRRl+qSjDLxU11nH+iJgH3gaOAe9n5lwXTen0sWXLloG148ePt667sLDQdTta\npouTfP42M9/o4HUkTZG7/VJR44Y/gZ9ExLMRsbOLhiRNx7i7/Vsz87WIWA/8NCJeyswDy5/Q/FHw\nD4M0Y8Ya+TPzteZ2EdgPXL7Cc3Zn5pxfBkqzZeTwR8TaiPjYifvAZ4EXu2pM0mSNs9t/DrA/Ik68\nzvcz8/FOupI0cSOHPzNfBS7rsBedhjZv3jyw9s4777Suu3///q7b0TIe6pOKMvxSUYZfKsrwS0UZ\nfqkowy8V5aW7NZZNmza11nft2jWwdu+993bdjk6CI79UlOGXijL8UlGGXyrK8EtFGX6pKMMvFeVx\nfo3l4osvbq2vXbt2YO3+++/vuh2dBEd+qSjDLxVl+KWiDL9UlOGXijL8UlGGXyoqMnN6G4uY3sY0\nFU8//XRr/eyzzx5YG3YtgGGX9tbKMjNW8zxHfqkowy8VZfilogy/VJThl4oy/FJRhl8qaujv+SNi\nD3ADsJiZm5pl64D7gY3APHBTZr41uTbVl40bN7bW5+bmWusvv/zywJrH8fu1mpH/O8B1H1h2O/BE\nZl4IPNE8lnQKGRr+zDwAvPmBxduAvc39vcCNHfclacJG/cx/TmYeAWhu13fXkqRpmPg1/CJiJ7Bz\n0tuRdHJGHfmPRsQGgOZ2cdATM3N3Zs5lZvs3Q5KmatTwPwzsaO7vAB7qph1J0zI0/BFxH/A/wF9F\nxEJEfAG4C7g2In4NXNs8lnQKGfqZPzNvHlD6TMe9aAZdddVVY63/+uuvd9SJuuYZflJRhl8qyvBL\nRRl+qSjDLxVl+KWinKJbrS699NKx1r/77rs76kRdc+SXijL8UlGGXyrK8EtFGX6pKMMvFWX4paKc\noru4K664orX+6KOPttbn5+db61u3bh1Ye++991rX1WicoltSK8MvFWX4paIMv1SU4ZeKMvxSUYZf\nKsrf8xd3zTXXtNbXrVvXWn/88cdb6x7Ln12O/FJRhl8qyvBLRRl+qSjDLxVl+KWiDL9U1NDj/BGx\nB7gBWMzMTc2yO4EvAifmX74jM/9zUk1qci677LLW+rDrPezbt6/LdjRFqxn5vwNct8Lyf83Mzc0/\ngy+dYoaGPzMPAG9OoRdJUzTOZ/5dEfHziNgTEWd11pGkqRg1/N8EPgVsBo4AXx/0xIjYGREHI+Lg\niNuSNAEjhT8zj2bmscw8DnwLuLzlubszcy4z50ZtUlL3Rgp/RGxY9nA78GI37UialtUc6rsPuBr4\neEQsAF8Fro6IzUAC88CXJtijpAnwuv2nuXPPPbe1fujQodb6W2+91Vq/5JJLTronTZbX7ZfUyvBL\nRRl+qSjDLxVl+KWiDL9UlJfuPs3deuutrfX169e31h977LEOu9EsceSXijL8UlGGXyrK8EtFGX6p\nKMMvFWX4paI8zn+au+CCC8Zaf9hPenXqcuSXijL8UlGGXyrK8EtFGX6pKMMvFWX4paI8zn+au+GG\nG8Za/5FHHumoE80aR36pKMMvFWX4paIMv1SU4ZeKMvxSUYZfKmrocf6IOB/4LnAucBzYnZn/HhHr\ngPuBjcA8cFNm+uPvHlx55ZUDa8Om6FZdqxn53wf+MTMvAa4AvhwRnwZuB57IzAuBJ5rHkk4RQ8Of\nmUcy87nm/tvAYeA8YBuwt3naXuDGSTUpqXsn9Zk/IjYCW4CngHMy8wgs/YEA2ud9kjRTVn1uf0R8\nFHgA+Epm/i4iVrveTmDnaO1JmpRVjfwRcQZLwf9eZj7YLD4aERua+gZgcaV1M3N3Zs5l5lwXDUvq\nxtDwx9IQ/23gcGZ+Y1npYWBHc38H8FD37UmalNXs9m8F/h54ISIONcvuAO4CfhgRXwB+A3xuMi1q\nmO3btw+srVmzpnXd559/vrV+4MCBkXrS7Bsa/sz8b2DQB/zPdNuOpGnxDD+pKMMvFWX4paIMv1SU\n4ZeKMvxSUV66+xRw5plnttavv/76kV973759rfVjx46N/NqabY78UlGGXyrK8EtFGX6pKMMvFWX4\npaIMv1RUZOb0NhYxvY2dRs4444zW+pNPPjmwtri44gWW/uiWW25prb/77rutdc2ezFzVNfYc+aWi\nDL9UlOGXijL8UlGGXyrK8EtFGX6pKI/zS6cZj/NLamX4paIMv1SU4ZeKMvxSUYZfKsrwS0UNDX9E\nnB8R/xURhyPiFxFxW7P8zoj4v4g41Pwb/eLxkqZu6Ek+EbEB2JCZz0XEx4BngRuBm4DfZ+a/rHpj\nnuQjTdxqT/IZOmNPZh4BjjT3346Iw8B547UnqW8n9Zk/IjYCW4CnmkW7IuLnEbEnIs4asM7OiDgY\nEQfH6lRSp1Z9bn9EfBR4EvhaZj4YEecAbwAJ/DNLHw3+YchruNsvTdhqd/tXFf6IOAP4EfDjzPzG\nCvWNwI8yc9OQ1zH80oR19sOeiAjg28Dh5cFvvgg8YTvw4sk2Kak/q/m2/0rgZ8ALwPFm8R3AzcBm\nlnb754EvNV8Otr2WI780YZ3u9nfF8EuT5+/5JbUy/FJRhl8qyvBLRRl+qSjDLxVl+KWiDL9UlOGX\nijL8UlGGXyrK8EtFGX6pKMMv
FTX0Ap4dewP432WPP94sm0Wz2tus9gX2Nqoue7tgtU+c6u/5P7Tx\niIOZOddbAy1mtbdZ7QvsbVR99eZuv1SU4ZeK6jv8u3vefptZ7W1W+wJ7G1UvvfX6mV9Sf/oe+SX1\npJfwR8R1EfGriHglIm7vo4dBImI+Il5oZh7udYqxZhq0xYh4cdmydRHx04j4dXO74jRpPfU2EzM3\nt8ws3et7N2szXk99tz8i1gAvA9cCC8AzwM2Z+cupNjJARMwDc5nZ+zHhiPgb4PfAd0/MhhQRdwNv\nZuZdzR/OszLzn2aktzs5yZmbJ9TboJmlb6XH967LGa+70MfIfznwSma+mpl/AH4AbOuhj5mXmQeA\nNz+weBuwt7m/l6X/PFM3oLeZkJlHMvO55v7bwImZpXt971r66kUf4T8P+O2yxwvM1pTfCfwkIp6N\niJ19N7OCc07MjNTcru+5nw8aOnPzNH1gZumZee9GmfG6a32Ef6XZRGbpkMPWzPxr4O+ALze7t1qd\nbwKfYmkatyPA1/tspplZ+gHgK5n5uz57WW6Fvnp53/oI/wJw/rLHnwBe66GPFWXma83tIrCfpY8p\ns+ToiUlSm9vFnvv5o8w8mpnHMvM48C16fO+amaUfAL6XmQ82i3t/71bqq6/3rY/wPwNcGBGfjIiP\nAJ8HHu6hjw+JiLXNFzFExFrgs8ze7MMPAzua+zuAh3rs5U/MyszNg2aWpuf3btZmvO7lJJ/mUMa/\nAWuAPZn5tak3sYKI+EuWRntY+sXj9/vsLSLuA65m6VdfR4GvAv8B/BD4C+A3wOcyc+pfvA3o7WpO\ncubmCfU2aGbpp+jxvetyxutO+vEMP6kmz/CTijL8UlGGXyrK8EtFGX6pKMMvFWX4paIMv1TU/wNP\nnZK3k8+kHgAAAABJRU5ErkJggg==\n",
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x11c2e1be0>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model prediction: 1\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAADbVJREFUeJzt3W2IXPUVx/HfSWzfpH2hZE3jU9I2\nEitCTVljoRKtxZKUStIX0YhIiqUbJRoLfVFJwEaKINqmLRgSthi6BbUK0bqE0KaINBWCuJFaNVtb\nlTVNs2yMEWsI0picvti7siY7/zuZuU+b8/2AzMOZuXO8+tt7Z/733r+5uwDEM6PuBgDUg/ADQRF+\nICjCDwRF+IGgCD8QFOEHgiL8QFCEHwjqnCo/zMw4nBAombtbO6/rastvZkvN7A0ze9PM7u1mWQCq\nZZ0e229mMyX9U9INkg5IeknSLe6+L/EetvxAyarY8i+W9Ka7v+3u/5P0e0nLu1gegAp1E/4LJf17\n0uMD2XOfYmZ9ZjZkZkNdfBaAgnXzg99Uuxan7da7e7+kfondfqBJutnyH5B08aTHF0k62F07AKrS\nTfhfknSpmX3RzD4raZWkwWLaAlC2jnf73f1jM7tL0p8kzZS0zd1fL6wzAKXqeKivow/jOz9QukoO\n8gEwfRF+ICjCDwRF+IGgCD8QFOEHgiL8QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeCIvxAUIQf\nCIrwA0ERfiAowg8ERfiBoAg/EFSlU3SjerNmzUrWH3744WR9zZo1yfrevXuT9ZUrV7asvfPOO8n3\nolxs+YGgCD8QFOEHgiL8QFCEHwiK8ANBEX4gqK5m6TWzEUkfSjoh6WN37815PbP0VmzBggXJ+vDw\ncFfLnzEjvf1Yt25dy9rmzZu7+mxMrd1Zeos4yOeb7n64gOUAqBC7/UBQ3YbfJe0ys71m1ldEQwCq\n0e1u/zfc/aCZnS/pz2b2D3ffPfkF2R8F/jAADdPVlt/dD2a3hyQ9I2nxFK/pd/fevB8DAVSr4/Cb\n2Swz+/zEfUnflvRaUY0BKFc3u/1zJD1jZhPLedzd/1hIVwBK13H43f1tSV8tsBd0qKenp2VtYGCg\nwk4wnTDUBwRF+IGgCD8QFOEHgiL8QFCEHwiKS3dPA6nTYiVpxYoVLWuLF5920GWllixZ0rKWdzrw\nK6+8kqzv3r07WUcaW34gKMIPBEX4gaAIPxAU4QeCIvxAUIQfCKqrS3ef8Ydx6e6OnDhxIlk/efJk\nRZ2cLm+svpve8qbwvvnmm5P1vOnDz1btXrqbLT8QFOEHgiL8QFCEHwiK8ANBEX4gKMIPBMU4fwPs\n3LkzWV+2bFmyXuc4/3vvvZesHz16tGVt3rx5RbfzKTNnzix1+U3FOD+AJMIPBEX4gaAIPxAU4QeC\nIvxAUIQfCCr3uv1mtk3SdyUdcvcrsufOk/SkpPmSRiTd5O7vl9fm9Hbttdcm6wsXLkzW88bxyxzn\n37p1a7K+a9euZP2DDz5oWbv++uuT792wYUOynufOO+9sWduyZUtXyz4btLPl/62kpac8d6+k59z9\nUknPZY8BTCO54Xf33ZKOnPL0ckkD2f0BSa2njAHQSJ1+55/j7qOSlN2eX1xLAKpQ+lx9ZtYnqa/s\nzwFwZjrd8o+Z2VxJym4PtXqhu/e7e6+793b4WQBK0Gn4ByWtzu6vlvRsMe0AqEpu+M3sCUl7JC00\nswNm9gNJD0q6wcz+JemG7DGAaYTz+Qswf/78ZH3Pnj3J+uzZs5P1bq6Nn3ft++3btyfr999/f7J+\n7NixZD0l73z+vPXW09OTrH/00Ucta/fdd1/yvY888kiyfvz48WS9TpzPDyCJ8ANBEX4gKMIPBEX4\ngaAIPxAUQ30FWLBgQbI+PDzc1fLzhvqef/75lrVVq1Yl33v48OGOeqrC3Xffnaxv2rQpWU+tt7zT\noC+77LJk/a233krW68RQH4Akwg8ERfiBoAg/EBThB4Ii/EBQhB8IqvTLeKF7Q0NDyfrtt9/estbk\ncfw8g4ODyfqtt96arF911VVFtnPWYcsPBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0Exzl+BvPPx81x9\n9dUFdTK9mKVPS89br92s940bNybrt912W8fLbgq2/EBQhB8IivADQRF+ICjCDwRF+IGgCD8QVO44\nv5ltk/RdSYfc/YrsuY2Sfijp3exl6919Z1lNNt0dd9yRrOddIx5Tu/HGG5P1RYsWJeup9Z733yRv\nnP9s0M6W/7eSlk7x/C/d/crsn7DBB6ar3PC7+25JRyroBUCFuvnOf5eZ/d3MtpnZuYV1BKASnYZ/\ni6QvS7pS0qikX7R6oZn1mdmQmaUvRAegUh2F393H3P2Eu5+U9BtJixOv7Xf3Xnfv7bRJAMXrKPxm\nNnfSw+9Jeq2YdgBUpZ2hvickXSdptpkdkPRTSdeZ2ZWSXNKIpDUl9gigBLnhd/dbpnj60RJ6mbby\nxqMj6+npaVm7/PLLk+9dv3590e184t13303Wjx8/XtpnNwVH+AFBEX4gKMIPBEX4gaAIPxAU4QeC\n4tLdKNWGDRta1tauXVvqZ4+MjLSsrV69Ovne/fv3F9xN87DlB4Ii/EBQhB8IivADQRF+ICjCDwRF\n+IGgGOdHV3buTF+4eeHChRV1crp9+/a1rL3wwgsVdtJMbPmBoAg/EBThB4Ii/EBQhB8IivADQRF+\nICjG+QtgZsn6jBnd/Y1dtmxZx+/t7+9P1i+44IKOly3l/7vVOT05l1RPY8sPBEX4gaAIPxAU4QeC\nIvxAUIQfCIrwA0HljvOb2cWSfifpC5JOSup391+b2XmSnpQ0X9KIpJvc/f3yWm2uLVu2JOsPPfRQ\nV8vfsWNHst7NWHrZ4/BlLn/r1q2lLTuCdrb8H0v6sbt/RdLXJa01s8sl3SvpOXe/VNJz2WMA00Ru\n+N191N1fzu5/KGlY0oWSlksayF42IGlFWU0CKN4Zfec3s/mSFkl6UdIcdx+Vxv9ASDq/6OYAlKft\nY/vN7HOStkv6kbv/N+949knv65PU11l7AMrS1pbfzD6j8eA/5u5PZ0+PmdncrD5X0qGp3uvu/e7e\n6+69RTQMoBi54bfxTfyjkobdfdOk0qCkialOV0t6tvj2AJTF3D39ArNrJP1V0qsaH+qTpPUa/97/\nlKRLJO2XtNLdj+QsK/1h09S8efOS9T179iTrPT09yXqTT5vN621sbKxlbXh4OPnevr70t8XR0dFk\n/dixY8n62crd2/pOnvud391fkNRqYd86k6YANAdH+AFBEX4gKMIPBEX4gaAIPxAU4QeCyh3nL/TD\nztJx/jxLlixJ1lesSJ8Tdc899yTrTR7nX7duXcva5s2bi24Han+cny0/EBThB4Ii/EBQhB8IivAD\nQRF+ICjCDwTFOP80sHTp0mQ9
dd573jTVg4ODyXreFN95l3Pbt29fy9r+/fuT70VnGOcHkET4gaAI\nPxAU4QeCIvxAUIQfCIrwA0Exzg+cZRjnB5BE+IGgCD8QFOEHgiL8QFCEHwiK8ANB5YbfzC42s+fN\nbNjMXjeze7LnN5rZf8zsb9k/3ym/XQBFyT3Ix8zmSprr7i+b2ecl7ZW0QtJNko66+8/b/jAO8gFK\n1+5BPue0saBRSaPZ/Q/NbFjShd21B6BuZ/Sd38zmS1ok6cXsqbvM7O9mts3Mzm3xnj4zGzKzoa46\nBVCoto/tN7PPSfqLpAfc/WkzmyPpsCSX9DONfzW4PWcZ7PYDJWt3t7+t8JvZZyTtkPQnd980RX2+\npB3ufkXOcgg/ULLCTuyx8cuzPippeHLwsx8CJ3xP0mtn2iSA+rTza/81kv4q6VVJE3NBr5d0i6Qr\nNb7bPyJpTfbjYGpZbPmBkhW6218Uwg+Uj/P5ASQRfiAowg8ERfiBoAg/EBThB4Ii/EBQhB8IivAD\nQRF+ICjCDwRF+IGgCD8QFOEHgsq9gGfBDkt6Z9Lj2dlzTdTU3pral0RvnSqyt3ntvrDS8/lP+3Cz\nIXfvra2BhKb21tS+JHrrVF29sdsPBEX4gaDqDn9/zZ+f0tTemtqXRG+dqqW3Wr/zA6hP3Vt+ADWp\nJfxmttTM3jCzN83s3jp6aMXMRszs1Wzm4VqnGMumQTtkZq9Neu48M/uzmf0ru51ymrSaemvEzM2J\nmaVrXXdNm/G68t1+M5sp6Z+SbpB0QNJLkm5x932VNtKCmY1I6nX32seEzWyJpKOSfjcxG5KZPSTp\niLs/mP3hPNfdf9KQ3jbqDGduLqm3VjNLf181rrsiZ7wuQh1b/sWS3nT3t939f5J+L2l5DX00nrvv\nlnTklKeXSxrI7g9o/H+eyrXorRHcfdTdX87ufyhpYmbpWtddoq9a1BH+CyX9e9LjA2rWlN8uaZeZ\n7TWzvrqbmcKciZmRstvza+7nVLkzN1fplJmlG7PuOpnxumh1hH+q2USaNOTwDXf/mqRlktZmu7do\nzxZJX9b4NG6jkn5RZzPZzNLbJf3I3f9bZy+TTdFXLeutjvAfkHTxpMcXSTpYQx9TcveD2e0hSc9o\n/GtKk4xNTJKa3R6quZ9PuPuYu59w95OSfqMa1102s/R2SY+5+9PZ07Wvu6n6qmu91RH+lyRdamZf\nNLPPSlolabCGPk5jZrOyH2JkZrMkfVvNm314UNLq7P5qSc/W2MunNGXm5lYzS6vmdde0Ga9rOcgn\nG8r4laSZkra5+wOVNzEFM/uSxrf20vgZj4/X2ZuZPSHpOo2f9TUm6aeS/iDpKUmXSNovaaW7V/7D\nW4vertMZztxcUm+tZpZ+UTWuuyJnvC6kH47wA2LiCD8gKMIPBEX4gaAIPxAU4QeCIvxAUIQfCIrw\nA0H9HwAENgeMtPBpAAAAAElFTkSuQmCC\n",
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x11bd952e8>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Model prediction: 0\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Predict single images\n",
+    "n_images = 4\n",
+    "# Get images from test set\n",
+    "test_images = mnist.test.images[:n_images]\n",
+    "# Prepare the input data\n",
+    "input_fn = tf.estimator.inputs.numpy_input_fn(\n",
+    "    x={'images': test_images}, shuffle=False)\n",
+    "# Use the model to predict the images class\n",
+    "preds = list(model.predict(input_fn))\n",
+    "\n",
+    "# Display\n",
+    "for i in range(n_images):\n",
+    "    plt.imshow(np.reshape(test_images[i], [28, 28]), cmap='gray')\n",
+    "    plt.show()\n",
+    "    print(\"Model prediction:\", preds[i])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "anaconda-cloud": {},
+  "kernelspec": {
+   "display_name": "Python [conda env:tensorflow]",
+   "language": "python",
+   "name": "conda-env-tensorflow-py"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.5.2"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/talks/tensorflow/demos/labels_1024.tsv b/talks/tensorflow/demos/labels_1024.tsv
new file mode 100644
index 0000000000000000000000000000000000000000..e4674dd220334d8124621c536677ef86b1796741
--- /dev/null
+++ b/talks/tensorflow/demos/labels_1024.tsv
@@ -0,0 +1,1024 @@
+7
+2
+1
+0
+4
+1
+4
+9
+5
+9
+0
+6
+9
+0
+1
+5
+9
+7
+3
+4
+9
+6
+6
+5
+4
+0
+7
+4
+0
+1
+3
+1
+3
+4
+7
+2
+7
+1
+2
+1
+1
+7
+4
+2
+3
+5
+1
+2
+4
+4
+6
+3
+5
+5
+6
+0
+4
+1
+9
+5
+7
+8
+9
+3
+7
+4
+6
+4
+3
+0
+7
+0
+2
+9
+1
+7
+3
+2
+9
+7
+7
+6
+2
+7
+8
+4
+7
+3
+6
+1
+3
+6
+9
+3
+1
+4
+1
+7
+6
+9
+6
+0
+5
+4
+9
+9
+2
+1
+9
+4
+8
+7
+3
+9
+7
+4
+4
+4
+9
+2
+5
+4
+7
+6
+7
+9
+0
+5
+8
+5
+6
+6
+5
+7
+8
+1
+0
+1
+6
+4
+6
+7
+3
+1
+7
+1
+8
+2
+0
+2
+9
+9
+5
+5
+1
+5
+6
+0
+3
+4
+4
+6
+5
+4
+6
+5
+4
+5
+1
+4
+4
+7
+2
+3
+2
+7
+1
+8
+1
+8
+1
+8
+5
+0
+8
+9
+2
+5
+0
+1
+1
+1
+0
+9
+0
+3
+1
+6
+4
+2
+3
+6
+1
+1
+1
+3
+9
+5
+2
+9
+4
+5
+9
+3
+9
+0
+3
+6
+5
+5
+7
+2
+2
+7
+1
+2
+8
+4
+1
+7
+3
+3
+8
+8
+7
+9
+2
+2
+4
+1
+5
+9
+8
+7
+2
+3
+0
+4
+4
+2
+4
+1
+9
+5
+7
+7
+2
+8
+2
+6
+8
+5
+7
+7
+9
+1
+8
+1
+8
+0
+3
+0
+1
+9
+9
+4
+1
+8
+2
+1
+2
+9
+7
+5
+9
+2
+6
+4
+1
+5
+8
+2
+9
+2
+0
+4
+0
+0
+2
+8
+4
+7
+1
+2
+4
+0
+2
+7
+4
+3
+3
+0
+0
+3
+1
+9
+6
+5
+2
+5
+9
+2
+9
+3
+0
+4
+2
+0
+7
+1
+1
+2
+1
+5
+3
+3
+9
+7
+8
+6
+5
+6
+1
+3
+8
+1
+0
+5
+1
+3
+1
+5
+5
+6
+1
+8
+5
+1
+7
+9
+4
+6
+2
+2
+5
+0
+6
+5
+6
+3
+7
+2
+0
+8
+8
+5
+4
+1
+1
+4
+0
+3
+3
+7
+6
+1
+6
+2
+1
+9
+2
+8
+6
+1
+9
+5
+2
+5
+4
+4
+2
+8
+3
+8
+2
+4
+5
+0
+3
+1
+7
+7
+5
+7
+9
+7
+1
+9
+2
+1
+4
+2
+9
+2
+0
+4
+9
+1
+4
+8
+1
+8
+4
+5
+9
+8
+8
+3
+7
+6
+0
+0
+3
+0
+2
+6
+6
+4
+9
+3
+3
+3
+2
+3
+9
+1
+2
+6
+8
+0
+5
+6
+6
+6
+3
+8
+8
+2
+7
+5
+8
+9
+6
+1
+8
+4
+1
+2
+5
+9
+1
+9
+7
+5
+4
+0
+8
+9
+9
+1
+0
+5
+2
+3
+7
+8
+9
+4
+0
+6
+3
+9
+5
+2
+1
+3
+1
+3
+6
+5
+7
+4
+2
+2
+6
+3
+2
+6
+5
+4
+8
+9
+7
+1
+3
+0
+3
+8
+3
+1
+9
+3
+4
+4
+6
+4
+2
+1
+8
+2
+5
+4
+8
+8
+4
+0
+0
+2
+3
+2
+7
+7
+0
+8
+7
+4
+4
+7
+9
+6
+9
+0
+9
+8
+0
+4
+6
+0
+6
+3
+5
+4
+8
+3
+3
+9
+3
+3
+3
+7
+8
+0
+8
+2
+1
+7
+0
+6
+5
+4
+3
+8
+0
+9
+6
+3
+8
+0
+9
+9
+6
+8
+6
+8
+5
+7
+8
+6
+0
+2
+4
+0
+2
+2
+3
+1
+9
+7
+5
+1
+0
+8
+4
+6
+2
+6
+7
+9
+3
+2
+9
+8
+2
+2
+9
+2
+7
+3
+5
+9
+1
+8
+0
+2
+0
+5
+2
+1
+3
+7
+6
+7
+1
+2
+5
+8
+0
+3
+7
+2
+4
+0
+9
+1
+8
+6
+7
+7
+4
+3
+4
+9
+1
+9
+5
+1
+7
+3
+9
+7
+6
+9
+1
+3
+7
+8
+3
+3
+6
+7
+2
+8
+5
+8
+5
+1
+1
+4
+4
+3
+1
+0
+7
+7
+0
+7
+9
+4
+4
+8
+5
+5
+4
+0
+8
+2
+1
+0
+8
+4
+5
+0
+4
+0
+6
+1
+7
+3
+2
+6
+7
+2
+6
+9
+3
+1
+4
+6
+2
+5
+4
+2
+0
+6
+2
+1
+7
+3
+4
+1
+0
+5
+4
+3
+1
+1
+7
+4
+9
+9
+4
+8
+4
+0
+2
+4
+5
+1
+1
+6
+4
+7
+1
+9
+4
+2
+4
+1
+5
+5
+3
+8
+3
+1
+4
+5
+6
+8
+9
+4
+1
+5
+3
+8
+0
+3
+2
+5
+1
+2
+8
+3
+4
+4
+0
+8
+8
+3
+3
+1
+7
+3
+5
+9
+6
+3
+2
+6
+1
+3
+6
+0
+7
+2
+1
+7
+1
+4
+2
+4
+2
+1
+7
+9
+6
+1
+1
+2
+4
+8
+1
+7
+7
+4
+8
+0
+7
+3
+1
+3
+1
+0
+7
+7
+0
+3
+5
+5
+2
+7
+6
+6
+9
+2
+8
+3
+5
+2
+2
+5
+6
+0
+8
+2
+9
+2
+8
+8
+8
+8
+7
+4
+9
+3
+0
+6
+6
+3
+2
+1
+3
+2
+2
+9
+3
+0
+0
+5
+7
+8
+1
+4
+4
+6
+0
+2
+9
+1
+4
+7
+4
+7
+3
+9
+8
+8
+4
+7
+1
+2
+1
+2
+2
+3
+2
+3
+2
+3
+9
+1
+7
+4
+0
+3
+5
+5
+8
+6
+3
+2
+6
+7
+6
+6
+3
+2
+7
+8
+1
+1
+7
+5
+6
+4
+9
+5
+1
+3
+3
+4
+7
+8
+9
+1
+1
+6
+9
+1
+4
+4
+5
+4
+0
+6
+2
+2
+3
+1
+5
+1
+2
+0
+3
+8
+1
+2
+6
+7
+1
+6
+2
+3
+9
+0
+1
+2
+2
+0
+8
+9
+9
+0
+2
+5
+1
+9
+7
+8
+1
+0
+4
+1
+7
+9
+6
+4
+2
+6
+8
+1
+3
+7
+5
+4
diff --git a/talks/tensorflow/demos/sprite_1024.png b/talks/tensorflow/demos/sprite_1024.png
new file mode 100644
index 0000000000000000000000000000000000000000..634a1583a60644e8da719047eb7816323c56f6eb
Binary files /dev/null and b/talks/tensorflow/demos/sprite_1024.png differ
diff --git a/talks/tensorflow/demos/tensorboard_demo.ipynb b/talks/tensorflow/demos/tensorboard_demo.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..5b4dff4367ce962134572b0cad979968287d372a
--- /dev/null
+++ b/talks/tensorflow/demos/tensorboard_demo.ipynb
@@ -0,0 +1,223 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Extracting /tmp/TensorBoard_demo/data/train-images-idx3-ubyte.gz\n",
+      "Extracting /tmp/TensorBoard_demo/data/train-labels-idx1-ubyte.gz\n",
+      "Extracting /tmp/TensorBoard_demo/data/t10k-images-idx3-ubyte.gz\n",
+      "Extracting /tmp/TensorBoard_demo/data/t10k-labels-idx1-ubyte.gz\n"
+     ]
+    }
+   ],
+   "source": [
+    "import os\n",
+    "import os.path\n",
+    "import shutil\n",
+    "import tensorflow as tf\n",
+    "\n",
+    "LOGDIR = \"/tmp/TensorBoard_demo/\"\n",
+    "LABELS = os.path.join(os.getcwd(), \"labels_1024.tsv\")\n",
+    "SPRITES = os.path.join(os.getcwd(), \"sprite_1024.png\")\n",
+    "### MNIST EMBEDDINGS ###\n",
+    "mnist = tf.contrib.learn.datasets.mnist.read_data_sets(train_dir=LOGDIR + \"data\", one_hot=True)\n",
+    "### Get a sprite and labels file for the embedding projector ###\n",
+    "\n",
+    "if not (os.path.isfile(LABELS) and os.path.isfile(SPRITES)):\n",
+    "  print(\"Necessary data files were not found: LABELS and SPRITES\")\n",
+    "  exit(1)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "def conv_layer(input, size_in, size_out, name=\"conv\"):\n",
+    "  with tf.name_scope(name):\n",
+    "    w = tf.Variable(tf.truncated_normal([5, 5, size_in, size_out], stddev=0.1), name=\"W\")\n",
+    "    b = tf.Variable(tf.constant(0.1, shape=[size_out]), name=\"B\")\n",
+    "    conv = tf.nn.conv2d(input, w, strides=[1, 1, 1, 1], padding=\"SAME\")\n",
+    "    act = tf.nn.relu(conv + b)\n",
+    "    tf.summary.histogram(\"weights\", w)\n",
+    "    tf.summary.histogram(\"biases\", b)\n",
+    "    tf.summary.histogram(\"activations\", act)\n",
+    "    return tf.nn.max_pool(act, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding=\"SAME\")\n",
+    "\n",
+    "\n",
+    "def fc_layer(input, size_in, size_out, name=\"fc\"):\n",
+    "  with tf.name_scope(name):\n",
+    "    w = tf.Variable(tf.truncated_normal([size_in, size_out], stddev=0.1), name=\"W\")\n",
+    "    b = tf.Variable(tf.constant(0.1, shape=[size_out]), name=\"B\")\n",
+    "    act = tf.matmul(input, w) + b\n",
+    "    tf.summary.histogram(\"weights\", w)\n",
+    "    tf.summary.histogram(\"biases\", b)\n",
+    "    tf.summary.histogram(\"activations\", act)\n",
+    "    return act"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "def mnist_model(learning_rate):\n",
+    "  tf.reset_default_graph()\n",
+    "  sess = tf.Session()\n",
+    "\n",
+    "  # Setup placeholders, and reshape the data\n",
+    "  x = tf.placeholder(tf.float32, shape=[None, 784], name=\"x\")\n",
+    "  x_image = tf.reshape(x, [-1, 28, 28, 1])\n",
+    "  tf.summary.image('input', x_image, 3)\n",
+    "  y = tf.placeholder(tf.float32, shape=[None, 10], name=\"labels\")\n",
+    "\n",
+    "  conv_out = conv_layer(x_image, 1, 16, \"conv\")\n",
+    "\n",
+    "  flattened = tf.reshape(conv_out, [-1, 7 * 7 * 64])\n",
+    "\n",
+    "  embedding_input = flattened\n",
+    "  embedding_size = 7*7*64\n",
+    "  logits = fc_layer(flattened, 7*7*64, 10, \"fc\")\n",
+    "\n",
+    "  with tf.name_scope(\"xent\"):\n",
+    "    xent = tf.reduce_mean(\n",
+    "        tf.nn.softmax_cross_entropy_with_logits(\n",
+    "            logits=logits, labels=y), name=\"xent\")\n",
+    "    tf.summary.scalar(\"xent\", xent)\n",
+    "\n",
+    "  with tf.name_scope(\"train\"):\n",
+    "    train_step = tf.train.AdamOptimizer(learning_rate).minimize(xent)\n",
+    "\n",
+    "  with tf.name_scope(\"accuracy\"):\n",
+    "    correct_prediction = tf.equal(tf.argmax(logits, 1), tf.argmax(y, 1))\n",
+    "    accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\n",
+    "    tf.summary.scalar(\"accuracy\", accuracy)\n",
+    "\n",
+    "  summ = tf.summary.merge_all()\n",
+    "\n",
+    "\n",
+    "  embedding = tf.Variable(tf.zeros([1024, embedding_size]), name=\"test_embedding\")\n",
+    "  assignment = embedding.assign(embedding_input)\n",
+    "  saver = tf.train.Saver()\n",
+    "\n",
+    "  sess.run(tf.global_variables_initializer())\n",
+    "  writer = tf.summary.FileWriter(LOGDIR)\n",
+    "  writer.add_graph(sess.graph)\n",
+    "\n",
+    "  config = tf.contrib.tensorboard.plugins.projector.ProjectorConfig()\n",
+    "  embedding_config = config.embeddings.add()\n",
+    "  embedding_config.tensor_name = embedding.name\n",
+    "  embedding_config.sprite.image_path = SPRITES\n",
+    "  embedding_config.metadata_path = LABELS\n",
+    "  # Specify the width and height of a single thumbnail.\n",
+    "  embedding_config.sprite.single_image_dim.extend([28, 28])\n",
+    "  tf.contrib.tensorboard.plugins.projector.visualize_embeddings(writer, config)\n",
+    "\n",
+    "  for i in range(1,2001):\n",
+    "    batch = mnist.train.next_batch(100)\n",
+    "    if i % 5 == 0:\n",
+    "      [train_accuracy, s] = sess.run([accuracy, summ], feed_dict={x: batch[0], y: batch[1]})\n",
+    "      writer.add_summary(s, i)\n",
+    "    if i % 500 == 0:\n",
+    "      sess.run(assignment, feed_dict={x: mnist.test.images[:1024], y: mnist.test.labels[:1024]})\n",
+    "      saver.save(sess, os.path.join(LOGDIR, \"model.ckpt\"), i)\n",
+    "    sess.run(train_step, feed_dict={x: batch[0], y: batch[1]})"
+   ]
+  },
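+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "One caveat (a suggestion, not something the demo does): every call to `mnist_model` writes its events into the same `LOGDIR`, which is why re-running produces the \"more than one graph event per run\" warnings visible in the TensorBoard output below. To compare runs, give each one its own subdirectory, e.g.:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Sketch: a run-specific writer path keeps TensorBoard runs separate.\n",
+    "# Inside mnist_model, the FileWriter line would become something like:\n",
+    "#   writer = tf.summary.FileWriter(os.path.join(LOGDIR, 'lr_%.0E' % learning_rate))\n",
+    "# TensorBoard then lists each subdirectory as its own run."
+   ]
+  },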
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Done training!\n",
+      "Run `tensorboard --logdir=/tmp/TensorBoard_demo/` to see the results.\n",
+      "Running on mac? If you want to get rid of the dialogue asking to give network permissions to TensorBoard, you can provide this flag: --host=localhost\n"
+     ]
+    }
+   ],
+   "source": [
+    "learning_rate = 1E-3\n",
+    "\n",
+    "mnist_model(learning_rate)\n",
+    "print('Done training!')\n",
+    "print('Run `tensorboard --logdir=%s` to see the results.' % LOGDIR)\n",
+    "print('Running on mac? If you want to get rid of the dialogue asking to give '\n",
+    "        'network permissions to TensorBoard, you can provide this flag: '\n",
+    "        '--host=localhost')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Starting TensorBoard b'47' at http://0.0.0.0:6006\n",
+      "(Press CTRL+C to quit)\n",
+      "WARNING:tensorflow:Found more than one graph event per run, or there was a metagraph containing a graph_def, as well as one or more graph events.  Overwriting the graph with the newest event.\n",
+      "WARNING:tensorflow:Found more than one graph event per run, or there was a metagraph containing a graph_def, as well as one or more graph events.  Overwriting the graph with the newest event.\n",
+      "WARNING:tensorflow:Found more than one graph event per run, or there was a metagraph containing a graph_def, as well as one or more graph events.  Overwriting the graph with the newest event.\n",
+      "WARNING:tensorflow:path ../external/data/plugin/text/runs not found, sending 404\n",
+      "WARNING:tensorflow:path ../external/data/plugin/text/runs not found, sending 404\n",
+      "WARNING:tensorflow:path ../external/data/plugin/text/runs not found, sending 404\n",
+      "WARNING:tensorflow:path ../external/data/plugin/text/runs not found, sending 404\n",
+      "^C\n"
+     ]
+    }
+   ],
+   "source": [
+    "!tensorboard --logdir=/tmp/TensorBoard_demo/"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "anaconda-cloud": {},
+  "kernelspec": {
+   "display_name": "Python [conda env:tensorflow]",
+   "language": "python",
+   "name": "conda-env-tensorflow-py"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.5.2"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/talks/tensorflow/img/.gitkeep b/talks/tensorflow/img/.gitkeep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/talks/tensorflow/img/bigger_boat.jpg b/talks/tensorflow/img/bigger_boat.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..f83fdb4d9217c34ef1a9cb120636174fbcabf445
Binary files /dev/null and b/talks/tensorflow/img/bigger_boat.jpg differ
diff --git a/talks/tensorflow/img/cg/.gitkeep b/talks/tensorflow/img/cg/.gitkeep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/talks/tensorflow/img/cg/ad.png b/talks/tensorflow/img/cg/ad.png
new file mode 100644
index 0000000000000000000000000000000000000000..4b5174698d71e04ce31c9e866dc3640ec7bd4bd3
Binary files /dev/null and b/talks/tensorflow/img/cg/ad.png differ
diff --git a/talks/tensorflow/img/cg/cg1.png b/talks/tensorflow/img/cg/cg1.png
new file mode 100644
index 0000000000000000000000000000000000000000..af7f929f45e9967d6e7d239ce9da3790270ce225
Binary files /dev/null and b/talks/tensorflow/img/cg/cg1.png differ
diff --git a/talks/tensorflow/img/copyright_infringement.png b/talks/tensorflow/img/copyright_infringement.png
new file mode 100644
index 0000000000000000000000000000000000000000..b255f20306c756ed3fb3edbac3e9ca5139eabaa6
Binary files /dev/null and b/talks/tensorflow/img/copyright_infringement.png differ
diff --git a/talks/tensorflow/img/demos/.gitkeep b/talks/tensorflow/img/demos/.gitkeep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/talks/tensorflow/img/demos/nothing_here b/talks/tensorflow/img/demos/nothing_here
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/talks/tensorflow/img/fchollet_popularity_2017.jpg b/talks/tensorflow/img/fchollet_popularity_2017.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..201cdafa370b4021df8faa6ec55d0fb234f9550f
Binary files /dev/null and b/talks/tensorflow/img/fchollet_popularity_2017.jpg differ
diff --git a/talks/tensorflow/img/how_big_is_your_data.png b/talks/tensorflow/img/how_big_is_your_data.png
new file mode 100644
index 0000000000000000000000000000000000000000..d7eba3ac055846b57ffdb259e2f2f728c6b80364
Binary files /dev/null and b/talks/tensorflow/img/how_big_is_your_data.png differ
diff --git a/talks/tensorflow/img/model_zoo.png b/talks/tensorflow/img/model_zoo.png
new file mode 100644
index 0000000000000000000000000000000000000000..9b5a766b9ed9ea8afbea4b2152f9765e9246d3c3
Binary files /dev/null and b/talks/tensorflow/img/model_zoo.png differ
diff --git a/talks/tensorflow/img/multiple_devices.png b/talks/tensorflow/img/multiple_devices.png
new file mode 100644
index 0000000000000000000000000000000000000000..5fb5234aaaae6bdab21da52fcdf377fcdabc0d75
Binary files /dev/null and b/talks/tensorflow/img/multiple_devices.png differ
diff --git a/talks/tensorflow/img/so_hot_right_now.jpg b/talks/tensorflow/img/so_hot_right_now.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..cbc620632dd5de7932e2e97fa09533864842d8ce
Binary files /dev/null and b/talks/tensorflow/img/so_hot_right_now.jpg differ
diff --git a/talks/tensorflow/img/what_leo_needs.png b/talks/tensorflow/img/what_leo_needs.png
new file mode 100644
index 0000000000000000000000000000000000000000..39194ab096b3866a70187dd44ef0670b051706ea
Binary files /dev/null and b/talks/tensorflow/img/what_leo_needs.png differ
diff --git a/talks/tensorflow/oiwi_pytreat_tensorflow_notes.ipynb b/talks/tensorflow/oiwi_pytreat_tensorflow_notes.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..a25ecec16b2dc5011406c2b56d5a79a00411bb8d
--- /dev/null
+++ b/talks/tensorflow/oiwi_pytreat_tensorflow_notes.ipynb
@@ -0,0 +1,476 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "<img src=\"img/copyright_infringement.png\" width=\"500\">\n",
+    "See [https://www.tensorflow.org/](https://www.tensorflow.org/)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Project Overview\n",
+    "* **[TensorFlow](https://www.tensorflow.org/)**<sup>TM</sup> is a software library for numerical computation on computation graphs.\n",
+    "* It was open-sourced in 2015 and is currently very popular for Deep Learning.\n",
+    "* Backed by \n",
+    "**<span style=\"color:blue\">G</span><span style=\"color:red\">o</span><span style=\"color:yellow\">o</span><span style=\"color:blue\">g</span><span style=\"color:green\">l</span><span style=\"color:red\">e</span>**."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "<div class=\"myimages\">\n",
+    "    <div style=\"float:left;margin-right:5px;\">\n",
+    "        <img class=\"middle-img\" src=\"img/so_hot_right_now.jpg\"/ width=\"300\"/>\n",
+    "    </div>\n",
+    "    <div style=\"float:left;margin-right:5px;\">\n",
+    "        <img src=\"img/fchollet_popularity_2017.jpg\" width=\"400\">\n",
+    "    </div>\n",
+    "</div>\n",
+    "\n",
+    "<p>[https://imgflip.com/memegenerator/](https://imgflip.com/memegenerator/48288540/So-hot-right-now)</p>"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Another view: [http://nicodjimenez.github.io/2017/10/08/tensorflow.html](http://nicodjimenez.github.io/2017/10/08/tensorflow.html)."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Motivating Problem\n",
+    "#### Given:\n",
+    "$\n",
+    "\\mathcal{L}(w) = w^2 - 10 w + 25 \n",
+    "$\n",
+    "#### Find:\n",
+    "$\n",
+    "\\underset{w}{\\mathrm{argmin}} \\, \\mathcal{L}(w)\n",
+    "$\n",
+    "#### Hint:\n",
+    "Answer = 5"
+   ]
+  },
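+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A minimal sketch of how this minimization looks in TensorFlow (the walkthrough that follows may differ): define $w$ as a variable, build the loss as a graph, and let an optimizer drive $w$ toward 5."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import tensorflow as tf\n",
+    "\n",
+    "w = tf.Variable(0.0, name='w')\n",
+    "loss = w**2 - 10.0*w + 25.0  # L(w) = (w - 5)^2\n",
+    "train = tf.train.GradientDescentOptimizer(0.01).minimize(loss)\n",
+    "\n",
+    "with tf.Session() as sess:\n",
+    "    sess.run(tf.global_variables_initializer())\n",
+    "    for _ in range(1000):\n",
+    "        sess.run(train)\n",
+    "    print(sess.run(w))  # ~5.0"
+   ]
+  },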
+  {
+   "cell_type": "code",
+   "execution_count": 14,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYgAAAEKCAYAAAAIO8L1AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJzt3Xl4VPXd/vH3JztZIEASdghLEFBA\nIOAKbtUqatEqiFbElbpWrT7Wrdo+tj7WVn8VtVrUKlqrQgVBRavFBVBUAsq+JCxK2BL2QEjI8v39\nMQdN6QAJZObMJPfruuaamTNnMjcnw9yZs3yPOecQERHZX4zfAUREJDKpIEREJCgVhIiIBKWCEBGR\noFQQIiISlApCRESCUkGIiEhQKggREQlKBSEiIkHF+R3gSGRkZLjs7Gy/Y4iIRJW5c+duds5lHmq+\nqC6I7Oxs8vLy/I4hIhJVzOzb2synVUwiIhKUCkJERIJSQYiISFAqCBERCUoFISIiQakgREQkKBWE\niIgE1SgLYt32Pfz27cVUVFX7HUVEJGI1yoJYvG4HL362hudnrvY7iohIxGqUBXHW0a05q1crnpi+\ngu+2lPodR0QkIjXKggD47bCjiYuJ4f4pi3DO+R1HRCTiNNqCaNOsCXee1Z0ZK4qZOn+933FERCJO\noy0IgFEnZNO3QzoPvbOE7aV7/Y4jIhJRGnVBxMYY/3dhb7aVVvDIe8v8jiMiElEadUEA9GrblGtP\n7szrc9by1eqtfscREYkYjb4gAG79UQ7tmzfhnkkLKK+s8juOiEhEUEEAyQlx/O6CY1hZvJtnP1nl\ndxwRkYiggvCcelQW5/dty9MfF7CyeJffcUREfKeCqOGB83qRFB/DfZMX6tgIEWn0VBA1ZKYlcs/Q\nnnyxaisT5xb6HUdExFcqiP1cktuBgdnNeXjaUrbsKvc7joiIb1QQ+4mJMf7vp73ZXV7J795d6ncc\nERHfqCCC6JaVxg2ndGXy1+uYmV/sdxwREV+oIA7gxtO60SUjhfvfWkRZhY6NEJHGRwVxAEnxsfzu\nwmP4dkspY6fn+x1HRCTsVBAHcWLXDC4e0J5xM1axbONOv+OIiISVCuIQ7h3ak7SkOO6dtJDqah0b\nISKNR8gKwsw6mNnHZrbUzBab2a3e9BZm9qGZ5XvXzb3pZmZjzazAzBaYWf9QZauLFikJ3H9uL+Z9\nt51Xv/rO7zgiImETym8QlcAdzrmewPHATWbWC7gbmO6cywGme/cBzgFyvMsY4JkQZquTn/Zvx0nd\nWvLoe8vYtLPM7zgiImERsoJwzm1wzs3zbpcAS4F2wDBgvDfbeOAC7/Yw4GUX8AWQbmZtQpWvLsyM\n313Qm/Kqan779mK/44iIhEVYtkGYWTbQD/gSaOWc2wCBEgGyvNnaAWtrPK3Qm7b/zxpjZnlmlldc\nHL5jFDpnpPCL07sxbeFGpi/dFLbXFRHxS8gLwsxSgTeB25xzB9sVyIJM+6+tws65cc65XOdcbmZm\nZn3FrJUxQ7rSvVUqD0xZzO7yyrC+tohIuIW0IMwsnkA5vOqcm+RN3rRv1ZF3XeRNLwQ61Hh6e2B9\nKPPVVUJcDA9f2Jt12/fw+Icr/I4jIhJSodyLyYAXgKXOucdrPDQVGO3dHg1MqTH9Cm9vpuOBHftW\nRUWS3OwWXHZcR178bDXfrN3udxwRkZAJ5TeIk4BRwOlm9o13GQo8ApxpZvnAmd59gGnAKqAAeA64\nMYTZjsjd5/SgVdMk7pw4X8NwiEiDFReqH+ycm0Xw7QoAZwSZ3wE3hSpPfWqaFM8jF/Vh9N++4vEP\nV3Dv0J5+RxIRqXc6kvowndI9k0sHdeS5mavIW7PV7zgiIvVOBXEE7ju3J+3Sm3DnxPmU7tVeTSLS\nsKggjkBqYhyPXtyHNVtKefT95X7HERGpVyqII3Ri1wyuPDGblz5fw+crN/sdR0Sk3qgg6sFdZx9F\ndstk7vrnAnbpADoRaSBUEPUgOSGOPw3vy7rte3h4ms5jLSINgwqinuRmt+C6wV34x5ffMWOFzmMt\nItFPBVGPfnlmd7plpfKrNxewY0+F33FERI6ICqIeJcXH8tjwvhSVlPPQO0v8jiMickRUEPWsb4d0\nrj+lC/+cW6hhwUUkqqkgQuAXZ+TQo3Uad09ayLbde/2OIyJyWFQQIZAYF8tjI/qybfdeHpyqM9CJ\nSHRSQYTI0W2bccvpOUydv573FkbcqOUiIoekggihG0/rSu92zbj/rUVs2VXudxwRkTpRQYRQfGwM\nj43oS0lZJfe/tYjAiOYiItFBBRFi3VulcfuZ3Xlv0Uamzo+oM6iKiByUCiIMxgzpQr+O6TwwZTFF\nO8v8jiMiUisqiDCIjTH+NLwvZRVV3DNpoVY1iUhUUEGESdfMVO46uwfTlxXxz7mFfscRETkkFUQY\nXXViNoOyW/C/by9h/fY9fscRETkoFUQYxcQYfxzeh8pqx6/eXKBVTSIS0VQQYdapZQr3Du3BzPzN\njP98jd9xREQOSAXhg8uP78QZPbJ4eNoyFq/f4XccEZGgVBA+MDP+OLwv6cnx3PLa15Tu1WlKRSTy\nqCB80iIlgT+PPJbVm3fzGw3oJyIRSAXhoxO7ZnDTqd2YkFeoo6xFJOKoIHx2249y6N8xnXsnLeS7\nLaV+xxER+Z4KwmdxsTE8MbIfZnDL619TUVXtdyQREUAFERE6tEjmDxf1Yf7a7Tz2wQq/44iIACqI\niDG0dxsuHdSRZz9dycz8Yr/jiIioICLJA+f1IicrldvfmE9xiU4wJCL+UkFEkCYJsTx5WT9Kyiq4\nc+J8qqs1FIeI+EcFEWF6tG7K/ef14tMVxbwwa7XfcUSkEVNBRKDLj+vIj49uxaP/WsaCwu1+xxGR\nRipkBWFmfzOzIjNbVGPab8xsnZl9412G1njsHjMrMLPlZvbjUOWKBmbGHy7qQ2ZqIre89jW7yjUU\nh4iEXyi/QbwEnB1k+v9zzh3rXaYBmFkvYCRwtPecv5hZbAizRbz05AT+PLIfa7eW8uu3Fh36CSIi\n9SxkBeGcmwFsreXsw4DXnXPlzrnVQAEwKFTZosWgzi249YzuTP56HW/qLHQiEmZ+bIO42cwWeKug\nmnvT2gFra8xT6E1r9G4+vRuDOrfg11MWsap4l99xRKQRCXdBPAN0BY4FNgCPedMtyLxB9/E0szFm\nlmdmecXFDf+AstgY44mRx5IQF8Mtr31NeWWV35FEpJEIa0E45zY556qcc9XAc/ywGqkQ6FBj1vZA\n0OFNnXPjnHO5zrnczMzM0AaOEG2aNeHRi/qweP1OHn1/ud9xRKSRCGtBmFmbGncvBPZtfZ0KjDSz\nRDPrDOQAX4UzW6Q76+jWjD6hEy/MWs3Hy4r8jiMijUAod3N9DZgNHGVmhWZ2DfComS00swXAacDt\nAM65xcAEYAnwPnCTc07rUvZz
z9Ce9Gidxh0T57NpZ5nfcUSkgTPnonc4h9zcXJeXl+d3jLAqKCrh\n/Cc/o1/HdF655jhiY4JtvhEROTAzm+ucyz3UfDqSOsp0y0rjNz/pxecrt/DEvzU0uIiEjgoiCo3I\n7cDwAe0Z+1EBHyze6HccEWmgVBBRyMx46IJj6NO+Gb+cMJ+VOj5CREJABRGlkuJjeebyASTExfDz\nV+ZqvCYRqXcqiCjWLr0JT13Wj9Wbd3PnhPlE8w4HIhJ5VBBR7sSuGdxzTg/eX7yRv3yy0u84ItKA\nqCAagGtO7sz5fdvypw+W8+mKhj/8iIiEhwqiAQicP6I3R7VK4xevfc3araV+RxKRBkAF0UAkJ8Tx\n11EDcM4x5pW57NmrA9FF5MioIBqQTi1TGHtpP5Zt3Mk9kxZoo7WIHBEVRANz6lFZ3HFmd976Zj0v\nfrbG7zgiEsVUEA3Qjad246xerfj9tKV8sWqL33FEJEqpIBqgmBjjsRF9yW6ZzM3/mMeGHXv8jiQi\nUajWBWFmWWZ2oZndZGZXm9kgM1PBRKi0pHj+OiqXsopqrv/7PJ2JTkTq7JAf8GZ2mpn9C3gXOAdo\nA/QC7gcWmtlvzaxpaGPK4eiWlcqfhvdl/trtPDhlsd9xRCTKxNVinqHAdc657/Z/wMzigPOAM4E3\n6zmb1IOzj2nNTad15emPV9KnfTqXHdfR70giEiUOWRDOuf85yGOVwFv1mkjq3S/PPIqF63by4NRF\n9GiTRv+Ozf2OJCJRoC7bIFaa2atmdr2Z9QplKKlfsTHG2JHH0qZZE274+1yKSnS6UhE5tLpsZO4F\n/BVoCfzJzFaZ2eTQxJL6lp6cwLOXD2DHngpufvVrKqqq/Y4kIhGuLgVRBVR419XAJqAoFKEkNHq1\nbcofLurDV2u28vt3l/odR0QiXG02Uu+zE1gIPA4855zTEVhRaNix7VhQuIMXZq2mR+s0Rg7SRmsR\nCa4u3yAuBWYANwKve7u3nhGaWBJK95zTg1O6Z3LfW4s0PLiIHFCtC8I5N8Xbo+nnwDTgSuCdEOWS\nEIqLjeHpn/Wne6s0bnp1HkvW7/Q7kohEoLrsxfSmma0EngBSgCsA7S8ZpVIT43jxyoGkJsZx9Utz\n2LhDezaJyH+qyyqmR4DuzrkfO+d+55z71DmnT5Uo1rpZEi9eNZBd5ZVc9dIcdpVX+h1JRCJIbYba\nOBnAOTfHOfdfA/qYWVMzOyYU4ST0erZpyl9+1p8Vm0q46dV52v1VRL5Xm28QF5nZ52b2gJmd6w3S\nN8QbsO8VAtshmoQ4p4TQkO6Z/P6CY/h0RTEPTFmkEw2JCFC7oTZuN7PmwMXAcAKD9e0BlgJ/dc7N\nCm1ECYeRgzqydlspT3+8ko4tUrjh1K5+RxIRn9XqOAjn3DbgOe8iDdSdZx1F4bY9/OH9ZbRr3oSf\n9G3rdyQR8VFd9mK61dveYGb2vJnNM7OzQhlOwsvMePTiPgzKbsGdE+YzZ81WvyOJiI/qshfT1c65\nncBZQBZwFYE9m6QBSYyLZdwVA2jfognXvZzHquJdfkcSEZ/UpSDMux4KvOicm19jmjQg6ckJvHTl\nIGLNuOqlOWzZVe53JBHxQV0KYq6ZfUCgIP5lZmkEBu2TBqhjy2SeH53Lxh1lXPtyHmUVOmWpSGNT\nl4K4BrgbGOicKwXiCaxmkgaqX8fmPDHyWL5Zu53b3/iG6mrt/irSmNSlIE4AljvntpvZ5QTOSb0j\nNLEkUpx9TBvuG9qT9xZt5JH3l/kdR0TCqC4F8QxQamZ9gbuAb4GXDzSzmf3NzIrMbFGNaS3M7EMz\ny/eum3vTzczGmlmBmS0ws/6H+e+RELjm5M6MPqET42as4pXZa/yOIyJhUpeCqHSBQ2yHAU84554A\n0g4y/0vA2ftNuxuY7pzLAaZ79wHOAXK8yxgCZSQRwsx44Pyj+VHPLB6cupjpSzf5HUlEwqAuBVFi\nZvcAo4B3zSyWwHaIoJxzM4D9d6QfBoz3bo8HLqgx/WUX8AWQbmZt6pBNQiw2xhh7aT+ObtuMm//x\nNQsLtXZRpKGrS0FcApQTOB5iI9AO+GMdX6+Vc24DgHed5U1vB6ytMV+hN00iSHJCHC9cmUuLlASu\nHj+HtVtL/Y4kIiFUlxMGbQReBZqZ2XlAmXPugNsg6ijY8RRBd5kxszFmlmdmecXFOhtauGWlJfHS\nVQPZW1nNZc9/wYYde/yOJCIhUpehNkYAXxEYsG8E8KWZXVzH19u0b9WRd13kTS8EOtSYrz2wPtgP\ncM6Nc87lOudyMzMz6/jyUh9yWqXxyjWD2L67gp899yXFJTqQTqQhqssqpvsIHAMx2jl3BTAI+HUd\nX28qMNq7PRqYUmP6Fd7eTMcDO/atipLI1Kd9Oi9eNZANO8oY9cKXbNu91+9IIlLP6lIQMc65ohr3\ntxzs+Wb2GjAbOMrMCs3sGgJjN51pZvnAmfwwltM0YBVQQGDE2BvrkEt8kpvdgudH57Jq826u+NtX\n7Cyr8DuSiNQjq+3JYczsj0Af4DVv0iXAAufcr0KU7ZByc3NdXl6eXy8vno+XFTHmlTz6tE/n5asH\nkZJYq1HkRcQnZjbXOZd7qPnqspH6f4BxBEqiLzDOz3KQyHFajyzGjuzH199t4zqN2yTSYNRlFRPO\nuTedc790zt3unJscqlASfc7p3YbHRvRl9qotXP/3uZRXqiREot0hC8LMSsxsZ5BLiZntDEdIiQ4X\n9mvPwxf25pPlxdz62jdUVmmwX5FodsiCcM6lOeeaBrmkOeeahiOkRI9LB3XkwfN78f7ijdwxcT5V\nGgFWJGppa6LUu6tO6syeiioefX85TeJjefjC3sTE6NxSItFGBSEhceOp3SjbW8XYjwpIio/lwfN7\nYaaSEIkmKggJmdvP7E7p3iqen7WapPhYfnX2USoJkSiigpCQMTPuO7cneyqqePbTlSQnxPKLM3L8\njiUitaSCkJAyMx4adgxlFdU8/uEKmsTHct2QLn7HEpFaUEFIyMXEGI9e3Ieyyip+P20pSfExjDoh\n2+9YInIIKggJi9gY48+XHEt5RTW/nrKYpPhYhud2OPQTRcQ3dTqSWuRIxMfG8NRl/Rick8Gv3lzA\nxLy1h36SiPhGBSFhlRQfy7hRuZzULYP/+ecCXpi12u9IInIAKggJuyYJsTw/OpdzjmnNQ+8s4fEP\nllPbUYVFJHxUEOKLxLhYnry0HyNy2zP2owJ++/YSqjUsh0hE0UZq8U1cbAx/uKgPzZrE89zM1ezY\nU8GjF/chPlZ/t4hEAhWE+MrMuHdoT5o1iedPH6ygpKySpy7rR1J8rN/RRBo9/akmvjMzbj49h4eG\nHc30ZZu48sWvKNHpS0V8p4KQiDHqhGz+fMmxzFmzjZ89/yVbd+/1O5JIo6aCkIgy7Nh2jBs1gOUb\nSxjx19ls2LHH70gijZYKQiLOGT1bMf7qQWzcUcbFz8xmzebdfkcSaZRUEBKRju/SkteuO
549FVVc\n/OxslqzX2W1Fwk0FIRGrd/tmTPj5CcTHGiPHzWbut1v9jiTSqKggJKJ1y0pl4vUn0DI1kcuf/4pP\nVxT7HUmk0VBBSMRr3zyZCT8/gc4ZKVw7fg7vLtjgdySRRkEFIVEhMy2R18YcT9/26dzy2jxe/+o7\nvyOJNHgqCIkazZrE88o1xzE4J5O7Jy3k8Q9XaPwmkRBSQUhUaZIQy3NX5DJ8QHvGTs/nxlfnsbu8\n0u9YIg2SCkKiTkJcDI9e3If7z+3JB0s2ctEzn1O4rdTvWCINjgpCopKZce3gLrx41SDWbd/DsKc+\nY84a7QYrUp9UEBLVTumeyVs3nUSzJvFc9twX2ngtUo9UEBL1umamMvnGkzi+S0vunrSQ30xdTGVV\ntd+xRKKeCkIahGbJ8bx45UCuObkzL32+hitfnMP2Uo0GK3IkVBDSYMTFxvDr83rx6EV9+HL1Fi54\n+jMKikr8jiUStVQQ0uCMGNiB1647nl3llVz49Od8vKzI70giUcmXgjCzNWa20My+MbM8b1oLM/vQ\nzPK96+Z+ZJOGITe7BVNuPpkOLZK5evwcxs1YiXM6qE6kLvz8BnGac+5Y51yud/9uYLpzLgeY7t0X\nOWzt0pvwzxtO4JxjWvPwtGXcMXE+ZRVVfscSiRqRtIppGDDeuz0euMDHLNJAJCfE8fRl/bn9R92Z\nNG8dlz73BUU7y/yOJRIV/CoIB3xgZnPNbIw3rZVzbgOAd53lUzZpYMyMW3+UwzM/68+yDSX85KnP\nWFi4w+9YIhHPr4I4yTnXHzgHuMnMhtT2iWY2xszyzCyvuFjnBpDaO6d3G9684URiY4yLn/2c17/6\nTtslRA7Cl4Jwzq33rouAycAgYJOZtQHwroPueuKcG+ecy3XO5WZmZoYrsjQQvdo2ZcrNJ5Gb3Zy7\nJy3khr/PY9tuHS8hEkzYC8LMUswsbd9t4CxgETAVGO3NNhqYEu5s0jhkpCbyytXHce/QHkxftomz\nn5jBZwWb/Y4lEnH8+AbRCphlZvOBr4B3nXPvA48AZ5pZPnCmd18kJGJijDFDujL5xpNISYzj8he+\n5P+mLWVvpYboENnHonkdbG5ursvLy/M7hkS5PXureOjdJfzjy+84um1TnhjZj25ZqX7HEgkZM5tb\n4xCDA4qk3VxFfNEkIZaHL+zNuFEDWL99D+c9OZNXv/xWG7Cl0VNBiHjOOro17982hIHZLbhv8iLG\nvDKXrdqALY2YCkKkhlZNkxh/1SDuP7cnny4v5sd/nsHMfO1OLY2TCkJkPzExgbPVTb7pRJo1iWfU\nC1/xu3eWUF6pYTqkcVFBiBzA0W2b8fbNJzPq+E48P2s1Fzz9OfmbNHy4NB4qCJGDaJIQy0MXHMML\no3PZtLOM856cxSuz12gDtjQKKgiRWjijZyvev20wx3Vpya+nLOba8Xka9E8aPBWESC1lpSXx0pUD\neeC8Xsws2Mzpj33KC7NW6/zX0mCpIETqICbGuPrkzvzrtiH079Sch95ZwnlPzmLOmq1+RxOpdyoI\nkcPQOSOF8VcN5NnLB7BzTwXDn53NLyd8Q3FJud/RROqNCkLkMJkZZx/Tmn/fcQo3ntqVt+ev5/TH\nPmH852u02kkaBBWEyBFKTojjrrN78P5tQ+jbPp0Hpy7mJ099xtxvt/kdTeSIqCBE6knXzFReuWYQ\nT1/Wn62793LRM59z1z/ns2WXVjtJdFJBiNQjM+PcPm2Yfscp/HxIFybNW8fpj33K37/4lqpqHTsh\n0UUFIRICKYlx3DO0J+/dOpiebdK4/61FXPiXz5i/drvf0URqTQUhEkI5rdJ47brjeWLksWzcUcYF\nf/mMeycv1GlOJSqoIERCzMwYdmw7pt9xClef1Jk35qzl9Mc+4bkZqyjdW+l3PJED0hnlRMJs2cad\n/O6dpcwq2EzLlASuG9KFUcd3IiUxzu9o0kjU9oxyKggRn+St2crYjwqYsaKY5snxXDu4C6NPzCZV\nRSEhpoIQiRLzvtvGk9Pz+Xh5MenJ8VxzUmdGn5RN06R4v6NJA6WCEIkyCwq3M3Z6Pv9eWkTTpDiu\nPrkzV53YmWbJKgqpXyoIkSi1aN0Oxk7P54Mlm0hLjOPKk7K55uTOpCcn+B1NGggVhEiUW7J+J099\nnM+0hRtJSYhl9InZXDu4Cy1SVBRyZFQQIg3E8o0lPPlRPu8u3ECT+FhGndCJ6wZ3ISM10e9o4hPn\nHJXVjvjYwztSQQUh0sDkbyrhqY8LeHv+ehLjYjm/bxsuGdiB/h2bY2Z+x5MwKC4pZ/LXhUzIK2Tk\nwA5cO7jLYf2c2haE9qcTiRI5rdJ4YmQ/fnFGDuM+XcXbC9YzIa+QLpkpjMjtwE/7tyMrLcnvmFLP\nKquq+WR5MW/kreXjZUVUVjsGdGpOxxbJIX9tfYMQiVK7yyt5d+EGJsxZS96324iNMU47KosRue05\nrUfWYa9+kMhQULSLiXPXMmneOopLyslITeSiAe0YPqAD3bJSj+hnaxWTSCOysngXE/MKeXNe4Q8f\nJv3bMTz3yD9MJHx2lVcybcEGJuT9UPqn98hiRG4HTj0qs95KXwUh0gjtWx0xIW8tH3mrI/p3TOeS\ngR04t09bHaUdgZxzzP12G2/MWcu7CzdQureKrt5qwwtDtNpQBSHSyO3boPnGnLWsLN5Nk/hYzu3T\nhhG5HRiYrQ3bfivaWcab89YxMW8tqzbvJiUhlvP6tGXEwA7075ge0t+PCkJEgMBfqF+v3c6EOWt5\ne/56du+tonNGCmcd3YohOZkM6NScpPhYv2M2eM45Vm/ezYwVxXyyopiZ+ZupqnYMzG7OiNwODO3d\nJmwDNqogROS/lO6tZNrCjUyaV8icNVupqHIkxcdwXOeWDOmeyZCcDLplperbRT3ZUVrBZys3MzO/\nmBkrNrNu+x4AOrZIZmjvNozIbU+XzPBvI1JBiMhB7S6v5ItVW5iZv5kZ+cWsKt4NQOumSQzOyWBw\n90xO7pahI7froKKqmvlrtzMjfzMzVhSzoHA71Q7SEuM4sVtLBudkMjgng04tU3zNqYIQkTop3FbK\nrPzNzMzfzKyCzezYU4EZ9G7XLFAYOZn079ichDjtPlvTt1t2MyN/MzNXFDN75RZKyiuJMejbIZ3B\nOYFvZcd2SCcugnY7VkGIyGGrqnYsKNzOzPzA6pF5322nqtqRnBDLCV1aMjgng97t0+mWlUqzJo1n\ntNmKqmq+3VLK8o0lzF4VKNNvt5QC0C69yfer6U7smhHRo/BGbUGY2dnAE0As8Lxz7pEDzauCEAmP\nnWUVfLFyCzPyi//jQxGgVdNEcrLS6JaVSvdWaeS0SiUnKzWqR5/dW1nNmi27yd+0i/yiku+vV2/e\nTUVV4DMzJSGWE7r+sNqoc0ZK1Gy7icqCMLNYYAVwJlAIzAEudc4tCTa/CkLEH4XbAn9F5xftIn/T\nLgqKArdL91Z9P09GaiI5WamBwmiVFridlUrL
IxlkcNafoV1/6Dzkh2mrZ8C6eXDybXX+cWUVVaze\nvJv8ol0UbCphhVcEa7aUUlUd+Gw0C2xUzsn6ofy6ZaXSo3XTqF3dFq1jMQ0CCpxzqwDM7HVgGBC0\nIETEH+2bJ9O+eTJn9Gz1/bTqasf6HXu8D1vvL++iXUyet46S8srv52uRkkC3zFQy0hJITYwjNTGe\n1KQ40hLjSE2KC0zb735aYjwpibHEtesPE6+E4S8FSmL1DNzEKym74AV27iyjpKySXeWV7CqrZFd5\nxX73Kykpr2R3eSU791SwZksp327ZjdcDxMYYnVomk5OVyjnHtCGnVaAIumamNtrdgCOtINoBa2vc\nLwSO8ymLiNRBTIx9XxynHZX1/XTnHJt2lrNiU6AwCopKWFkUWH3z/Yf33kpqszKjSXwspyTcwiPj\nf8ak2B9zQdX73Lz3F3z+t1Jg+kGfmxQfQ2piPGle6fRsk8b5fdt+/y2nc0YKiXGNswgOJNIKItgK\nvP9425jZGGAMQMeOHcORSUSOgJnRulkSrZslMaR7ZtB5qqsdpRVVQf/yL6nxDWBXeSUlZe2YvW4V\nV295mU9bX8WALsM4pea3jSTvW4l3Oy0pjpTEOA1eeBgirSAKgQ417rcH1tecwTk3DhgHgW0Q4Ysm\nIqESE2Pe6qY44BBjD62eAQXvwpC7OCXvBU7J+el/bpOQehNplToHyDGzzmaWAIwEpvqcSUQixeoZ\nP2yDOP2+wPXEKwPTpd5FVEFW9uy1AAAGfklEQVQ45yqBm4F/AUuBCc65xf6mEpGIsW7eDxuoIXA9\n/KXAdKl3EbWba11pN1cRkbqr7W6uEfUNQkREIocKQkREglJBiIhIUCoIEREJSgUhIiJBRfVeTGZW\nDHx7mE/PADbXY5z6Fun5IPIzKt+RUb4jE8n5Ojnngh/WXkNUF8SRMLO82uzm5ZdIzweRn1H5jozy\nHZlIz1cbWsUkIiJBqSBERCSoxlwQ4/wOcAiRng8iP6PyHRnlOzKRnu+QGu02CBERObjG/A1CREQO\nokEXhJkNN7PFZlZtZrn7PXaPmRWY2XIz+/EBnt/ZzL40s3wze8MbgjxUWd8ws2+8yxoz++YA860x\ns4XefGEbqdDMfmNm62pkHHqA+c72lmmBmd0dxnx/NLNlZrbAzCabWfoB5gvr8jvU8jCzRO93X+C9\n17JDnanGa3cws4/NbKn3/+TWIPOcamY7avzeHwhXvhoZDvo7s4Cx3jJcYGb9w5jtqBrL5hsz22lm\nt+03j+/L8LA55xrsBegJHAV8AuTWmN4LmA8kAp2BlUBskOdPAEZ6t58FbghT7seABw7w2Bogw4dl\n+RvgzkPME+styy5AgreMe4Up31lAnHf7D8Af/F5+tVkewI3As97tkcAbYfydtgH6e7fTgBVB8p0K\nvBPu91tdfmfAUOA9AmekPB740qecscBGAscYRNQyPNxLg/4G4Zxb6pxbHuShYcDrzrly59xqoAAY\nVHMGMzPgdOCf3qTxwAWhzFvjdUcAr4X6tUJgEFDgnFvlnNsLvE5gWYecc+4DFzifCMAXBM5G6Lfa\nLI9hBN5bEHivneG9B0LOObfBOTfPu11C4Bws7cLx2vVsGPCyC/gCSDezNj7kOANY6Zw73IN3I06D\nLoiDaAesrXG/kP/+j9ES2F7jQyfYPKEwGNjknMs/wOMO+MDM5nrn5w6nm72v8H8zs+ZBHq/Ncg2H\nqwn8RRlMOJdfbZbH9/N477UdBN57YeWt2uoHfBnk4RPMbL6ZvWdmR4c1WMChfmeR8r4byYH/sPN7\nGR6WSDsndZ2Z2b+B1kEeus85N+VATwsybf/duWozT53UMuulHPzbw0nOufVmlgV8aGbLnHP1cr7F\ng+UDngEeIrAMHiKwGuzq/X9EkOfW225ytVl+ZnYfUAm8eoAfE7LlF4Qv77O6MrNU4E3gNufczv0e\nnkdglckub7vTW0BOOPNx6N9ZJCzDBOAnwD1BHo6EZXhYor4gnHM/OoynFQIdatxvD6zfb57NBL6q\nxnl/2QWbp04OldXM4oCfAgMO8jPWe9dFZjaZwGqMevmAq+2yNLPngHeCPFSb5XrYarH8RgPnAWc4\nb+VvkJ8RsuUXRG2Wx755Cr3ffzNga4jy/BcziydQDq865ybt/3jNwnDOTTOzv5hZhnMubGMM1eJ3\nFtL3XS2dA8xzzm3a/4FIWIaHq7GuYpoKjPT2IOlMoM2/qjmD9wHzMXCxN2k0cKBvJPXlR8Ay51xh\nsAfNLMXM0vbdJrBhdlGIM+177ZrrdC88wOvOAXIssPdXAoGv3FPDlO9s4FfAT5xzpQeYJ9zLrzbL\nYyqB9xYE3msfHajc6pu3reMFYKlz7vEDzNN63zYRMxtE4DNjSzjyea9Zm9/ZVOAKb2+m44EdzrkN\n4croOeA3f7+X4RHxeyt5KC8EPsgKgXJgE/CvGo/dR2APk+XAOTWmTwPaere7ECiOAmAikBjivC8B\n1+83rS0wrUae+d5lMYFVK+Falq8AC4EFBP5Dttk/n3d/KIG9YVaGOV8BgfXQ33iXZ/fP58fyC7Y8\ngP8lUGQASd57q8B7r3UJ4zI7mcCqmAU1lttQ4Pp970PgZm9ZzSew8f/EcOU72O9sv4wGPO0t44XU\n2GMxTBmTCXzgN6sxLWKW4ZFcdCS1iIgE1VhXMYmIyCGoIEREJCgVhIiIBKWCEBGRoFQQIiISlApC\nRESCUkGIiEhQKgiRemJmd5nZL7zb/8/MPvJun2Fmf/c3nUjdqSBE6s8MAqPxAuQCqd5YRycDM31L\nJXKYVBAi9WcuMMAbO6gcmE2gKAajgpAoFPWjuYpECudchZmtAa4CPicwxtFpQFcCJ+MRiSr6BiFS\nv2YAd3rXMwkM2vaN06BnEoVUECL1ayaBcz3PdoFzA5Sh1UsSpTSaq4iIBKVvECIiEpQKQkREglJB\niIhIUCoIEREJSgUhIiJBqSBERCQoFYSIiASlghARkaD+PxO0rDje4tEgAAAAAElFTkSuQmCC\n",
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x1118aea90>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "# Let's graph this function.\n",
+    "import matplotlib.pyplot as plt\n",
+    "%matplotlib inline\n",
+    "def loss(ws):\n",
+    "    return [w**2 - 10*w + 25 for w in ws]\n",
+    "w = list(range(-10, 10))\n",
+    "plt.plot(w, loss(w)); plt.plot([5], loss([5]), 'x')  # minimum at w = 5\n",
+    "plt.xlabel('w'); plt.ylabel('loss(w)')\n",
+    "plt.show(); del w, loss"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Computational Graphs\n",
+    "Artificial neural networks are graphs. So is our loss function $\\mathcal{L}(w)$.\n",
+    "<img src=\"img/cg/cg1.png\" width=\"400\">"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 15,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "import tensorflow as tf"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 16,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Set up the variable, the loss, and the training op\n",
+    "w = tf.Variable(0, dtype=tf.float32)\n",
+    "loss = w**2 - 10 * w + 25\n",
+    "# Equivalently:\n",
+    "# loss = tf.add(tf.add(w**2, tf.multiply(-10., w)), 25)\n",
+    "train = tf.train.RMSPropOptimizer(0.01).minimize(loss)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 17,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Idiomatic: run the session, initialise variables\n",
+    "init = tf.global_variables_initializer()\n",
+    "session = tf.Session()\n",
+    "session.run(init)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 18,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "0.0\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Check initial value of w\n",
+    "print(session.run(w))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 19,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "0.0302891\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Take one gradient step (check updated w)\n",
+    "session.run(train)\n",
+    "print(session.run(w))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 20,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "5.0\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Take 1000 gradient steps (relish result)\n",
+    "for _ in range(1000): session.run(train)\n",
+    "print(session.run(w))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 21,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Close the door behind you\n",
+    "session.close()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Key Concepts (Interim Summary)\n",
+    "* Computation Graph\n",
+    "* Variables\n",
+    "* Sessions\n",
+    "* Modularity (try swapping the optimizer, e.g. s/RMSPropOptimizer/GradientDescentOptimizer/; a sketch follows below)"
+   ]
+  },
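+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "To see the modularity point in action, here is a minimal sketch (an illustrative addition, not part of the original demo) that rebuilds the same toy graph with `GradientDescentOptimizer` swapped in for `RMSPropOptimizer`; only the optimizer line changes."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Same graph as before, different optimizer: only one line changes.\n",
+    "w = tf.Variable(0, dtype=tf.float32)\n",
+    "loss = w**2 - 10 * w + 25\n",
+    "train = tf.train.GradientDescentOptimizer(0.01).minimize(loss)\n",
+    "with tf.Session() as session:\n",
+    "    session.run(tf.global_variables_initializer())\n",
+    "    for _ in range(1000): session.run(train)\n",
+    "    print(session.run(w))  # should again approach 5.0"
+   ]
+  },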
+  {
+   "cell_type": "code",
+   "execution_count": 26,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Passing numpy arrays into the graph as data\n",
+    "import numpy as np\n",
+    "data = np.array([[1.], [-10.], [25.]])\n",
+    "w = tf.Variable(0, dtype=tf.float32)\n",
+    "x = tf.placeholder(tf.float32, [3, 1])  # fed at run time via feed_dict\n",
+    "loss = x[0][0]*w**2 + x[1][0]*w + x[2][0]\n",
+    "train = tf.train.RMSPropOptimizer(0.01).minimize(loss)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 27,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "5.0\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Initialise variables + run session + train\n",
+    "init = tf.global_variables_initializer()\n",
+    "with tf.Session() as session:\n",
+    "    session.run(init)\n",
+    "    for _ in range(1000): session.run(train, feed_dict={x:data})\n",
+    "    print(session.run(w))"
+   ]
+  },
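+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Because the coefficients now arrive through the placeholder, the same graph can minimise a different parabola just by feeding different data. A minimal sketch (an illustrative addition, not part of the original demo): feeding the coefficients of $w^2 - 6w + 9 = (w-3)^2$ should drive $w$ towards 3."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Same graph, new coefficients: w**2 - 6w + 9 = (w - 3)**2\n",
+    "new_data = np.array([[1.], [-6.], [9.]])\n",
+    "with tf.Session() as session:\n",
+    "    session.run(init)\n",
+    "    for _ in range(1000): session.run(train, feed_dict={x: new_data})\n",
+    "    print(session.run(w))  # should be close to 3.0"
+   ]
+  },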
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "collapsed": true
+   },
+   "source": [
+    "### Bonus: Interactive Sessions\n",
+    "If you are working in a Jupyter notebook, you can use an interactive session."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 28,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "3"
+      ]
+     },
+     "execution_count": 28,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "sess = tf.InteractiveSession()\n",
+    "a = tf.constant(1)\n",
+    "b = tf.constant(2)\n",
+    "c = a + b\n",
+    "# instead of sess.run(c)\n",
+    "c.eval()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 29,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "1"
+      ]
+     },
+     "execution_count": 29,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "a.eval()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 30,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Close the session when you are done.\n",
+    "sess.close()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "<img src=\"img/what_leo_needs.png\" width=\"500\">\n",
+    "\n",
+    "### Models in the wild\n",
+    "* CNNs.\n",
+    "* RNNs.\n",
+    "* VAEs.\n",
+    "* GANs.\n",
+    "\n",
+    "(*And many other fine acronyms!*)\n",
+    "\n",
+    "Demo 1 [here](demos/convolutional_network_demo.ipynb).\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### TensorBoard\n",
+    "\n",
+    "* TensorBoard can be used to visualise the network graph, training accuracy, loss, inputs, and the model's internal representations, including feature embeddings.\n",
+    "* These visualisations are useful for interpreting and understanding the model.\n",
+    "\n",
+    "Demo 2 [here](demos/tensorboard_demo.ipynb)."
+   ]
+  },
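+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a taste of the summary API, here is a minimal, self-contained sketch (an illustrative addition, not part of the demos) that rebuilds the toy loss from earlier, logs it as a scalar summary at every step, and writes the graph and values to `/tmp/tf_logs`, an arbitrary log directory, for TensorBoard to read."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Log loss(w) while training, then inspect with:\n",
+    "#   tensorboard --logdir /tmp/tf_logs\n",
+    "w = tf.Variable(0, dtype=tf.float32)\n",
+    "loss = w**2 - 10 * w + 25\n",
+    "train = tf.train.RMSPropOptimizer(0.01).minimize(loss)\n",
+    "tf.summary.scalar('loss', loss)\n",
+    "merged = tf.summary.merge_all()\n",
+    "with tf.Session() as session:\n",
+    "    writer = tf.summary.FileWriter('/tmp/tf_logs', session.graph)\n",
+    "    session.run(tf.global_variables_initializer())\n",
+    "    for step in range(100):\n",
+    "        summary, _ = session.run([merged, train])\n",
+    "        writer.add_summary(summary, step)\n",
+    "    writer.close()"
+   ]
+  },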
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Discussion: Big Data, Big Models, Big Computing..."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "<div class=\"myimages\">\n",
+    "    <div style=\"float:left;margin-right:5px;\">\n",
+    "        <img class=\"middle-img\" src=\"img/how_big_is_your_data.png\" width=\"400\"/>\n",
+    "    </div>\n",
+    "    <div style=\"float:left;margin-right:5px;\">\n",
+    "        <img src=\"img/model_zoo.png\" width=\"300\">\n",
+    "    </div>\n",
+    "</div>"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "<img class=\"middle-img\" src=\"img/multiple_devices.png\" width=\"400\"/>\n",
+    "\n",
+    "Demo: multi-GPU computing (*internet/time permitting*)\n",
+    "\n",
+    "**Warning**: the following won't run unless you are on a system with at least 2 GPUs (plus a CPU)."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Multi-GPU computing\n",
+    "# (numpy and tensorflow were already imported above)\n",
+    "import datetime\n",
+    "\n",
+    "# Example: compute A^n + B^n on 2 GPUs\n",
+    "\n",
+    "# Log which device each op is placed on\n",
+    "log_device_placement = True\n",
+    "\n",
+    "# Number of matrix multiplications to perform\n",
+    "n = 10\n",
+    "\n",
+    "# Create random large matrices\n",
+    "A = np.random.rand(10000, 10000).astype('float32')\n",
+    "B = np.random.rand(10000, 10000).astype('float32')\n",
+    "\n",
+    "# Store the partial results here\n",
+    "g = []\n",
+    "\n",
+    "# Define matrix power recursively\n",
+    "def matpow(M, n):\n",
+    "    if n < 1:  # base case\n",
+    "        return M\n",
+    "    else:\n",
+    "        return tf.matmul(M, matpow(M, n-1))\n",
+    "\n",
+    "# GPU:0 computes A^n and stores the result in g\n",
+    "with tf.device('/gpu:0'):\n",
+    "    a = tf.constant(A)\n",
+    "    g.append(matpow(a, n))\n",
+    "\n",
+    "# GPU:1 computes B^n and stores the result in g\n",
+    "with tf.device('/gpu:1'):\n",
+    "    b = tf.constant(B)\n",
+    "    g.append(matpow(b, n))\n",
+    "\n",
+    "# The CPU adds the partial results: A^n + B^n\n",
+    "with tf.device('/cpu:0'):\n",
+    "    s = tf.add_n(g)\n",
+    "\n",
+    "t1 = datetime.datetime.now()\n",
+    "with tf.Session(config=tf.ConfigProto(log_device_placement=log_device_placement)) as sess:\n",
+    "    sess.run(s)\n",
+    "t2 = datetime.datetime.now()\n",
+    "print('Elapsed:', t2 - t1)"
+   ]
+  }
+ ],
+ "metadata": {
+  "anaconda-cloud": {},
+  "kernelspec": {
+   "display_name": "Python [conda env:tensorflow]",
+   "language": "python",
+   "name": "conda-env-tensorflow-py"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.5.2"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}