GeronBook/Ch12/exercises.ipynb

{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"**EXERCISE 12**\n",
"\n",
"create custom layer with layer normalization"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"import tensorflow as tf\n",
"import os\n",
"import keras"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"class MyLayerNormalization(keras.layers.Layer):\n",
" \n",
" def __init__(self, eps=tf.keras.backend.epsilon(), **kwargs):\n",
" super(MyLayerNormalization, self).__init__( **kwargs)\n",
" self.eps = eps\n",
" \n",
" def build(self, batch_input_shape):\n",
" self.alpha = self.add_weight(name=\"alphas\", shape=batch_input_shape[-1:], initializer=\"ones\", dtype=tf.float32)\n",
" self.beta = self.add_weight(name=\"betas\", shape=batch_input_shape[-1:], initializer=\"zeros\", dtype=tf.float32)\n",
" super(MyLayerNormalization, self).build(batch_input_shape)\n",
" \n",
" def call(self, X, training=None):\n",
" mean, var = tf.nn.moments(X, axes=0, keepdims=True)\n",
" normalized = (X - mean) / tf.math.sqrt((var + self.eps))\n",
" normalized_inputs = tf.multiply(self.alpha, normalized) + self.beta\n",
" return normalized\n",
" "
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"WARNING:tensorflow:From C:\\Users\\TSB\\Miniconda3\\envs\\geron_env\\lib\\site-packages\\keras\\backend\\tensorflow_backend.py:74: The name tf.get_default_graph is deprecated. Please use tf.compat.v1.get_default_graph instead.\n",
"\n",
"WARNING:tensorflow:From C:\\Users\\TSB\\Miniconda3\\envs\\geron_env\\lib\\site-packages\\tensorflow_core\\python\\ops\\resource_variable_ops.py:1630: calling BaseResourceVariable.__init__ (from tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"If using Keras pass *_constraint arguments to layers.\n"
]
}
],
"source": [
"(X_train, y_train), _ = keras.datasets.boston_housing.load_data()\n",
"X_train = tf.convert_to_tensor(X_train, tf.float32)\n",
"\n",
"\n",
"mylayer = MyLayerNormalization(eps=0.001)\n",
"my_results = mylayer(X_train)\n",
"\n",
"layer = tf.keras.layers.LayerNormalization()\n",
"results = layer(X_train, training=True)"
]
},
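{
"cell_type": "markdown",
"metadata": {},
"source": [
"As a quick sanity check (an extra step, not required by the exercise): both layers use the same epsilon (0.001) and their scale/offset weights are still at their initial values (ones and zeros), so the custom layer's output should be nearly identical to that of the built-in `LayerNormalization` layer, and the mean absolute difference below should be very close to 0."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Mean absolute difference between the custom layer's output and the built-in one's;\n",
"# a value near 0 suggests the custom implementation matches LayerNormalization\n",
"tf.reduce_mean(tf.abs(results - my_results))"
]
},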
{
"cell_type": "markdown",
"metadata": {},
"source": [
"**EXERCISE 13**\n",
"\n",
"Train a model using a custom training loop to tackle fashion MNIST dataset. Display epoch, iteration, mean training loss, and mean accuracy over each epoch. Then try using a different optimizer with a different learning rate for the upper layer and lower layers."
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"from keras.datasets import fashion_mnist\n",
"(X_train, y_train), (X_test, y_test) = fashion_mnist.load_data()"
]
},
{
"cell_type": "code",
"execution_count": 35,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(60000,)"
]
},
"execution_count": 35,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"X_train.shape\n",
"y_train.shape"
]
},
{
"cell_type": "code",
"execution_count": 44,
"metadata": {},
"outputs": [],
"source": [
"from tensorflow.keras.models import Sequential\n",
"from tensorflow.keras.layers import Flatten, Dense, BatchNormalization\n",
"\n",
"# Create Batch Normalization model\n",
"model = Sequential()\n",
"model.add(Flatten(input_shape=[28, 28]))\n",
"for layer in range(20):\n",
" model.add(Dense(100, activation='elu', \n",
" kernel_initializer='he_normal'))\n",
" model.add(BatchNormalization())\n",
"model.add(Dense(10, activation='softmax'))"
]
},
{
"cell_type": "code",
"execution_count": 45,
"metadata": {},
"outputs": [],
"source": [
"# Randomly sample batches\n",
"def random_batch(X, y, batch_size=32):\n",
" idx = np.random.randint(len(X), size = batch_size)\n",
" return X[idx], y[idx]"
]
},
{
"cell_type": "code",
"execution_count": 54,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'\\nTODO finish creating custom training loop\\n'"
]
},
"execution_count": 54,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"\"\"\"\n",
"TODO finish creating custom training loop\n",
"\"\"\""
]
},
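{
"cell_type": "markdown",
"metadata": {},
"source": [
"Below is a sketch of one possible way to finish the TODO above: a custom training loop over Fashion MNIST that prints the epoch, iteration (step), mean training loss, and mean accuracy over each epoch. The hyperparameters (5 epochs, batch size 32, Nadam with a 0.001 learning rate) are arbitrary assumptions rather than values given by the exercise, and the loop reuses the `model` and `random_batch` defined earlier, rescaling the raw pixel values on the fly."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch of a custom training loop (hyperparameters are arbitrary assumptions)\n",
"n_epochs = 5\n",
"batch_size = 32\n",
"n_steps = len(X_train) // batch_size\n",
"\n",
"optimizer = tf.keras.optimizers.Nadam(learning_rate=0.001)\n",
"loss_fn = tf.keras.losses.sparse_categorical_crossentropy\n",
"mean_loss = tf.keras.metrics.Mean()\n",
"acc_metric = tf.keras.metrics.SparseCategoricalAccuracy()\n",
"\n",
"for epoch in range(1, n_epochs + 1):\n",
"    for step in range(1, n_steps + 1):\n",
"        X_batch, y_batch = random_batch(X_train, y_train, batch_size)\n",
"        # The raw pixel values were never rescaled, so do it here\n",
"        X_batch = tf.cast(X_batch, tf.float32) / 255.0\n",
"        with tf.GradientTape() as tape:\n",
"            y_pred = model(X_batch, training=True)\n",
"            loss = tf.reduce_mean(loss_fn(y_batch, y_pred))\n",
"        gradients = tape.gradient(loss, model.trainable_variables)\n",
"        optimizer.apply_gradients(zip(gradients, model.trainable_variables))\n",
"        # Track the running mean loss and mean accuracy over the current epoch\n",
"        mean_loss(loss)\n",
"        acc_metric(y_batch, y_pred)\n",
"        print(\"\\rEpoch {}/{} - step {}/{} - mean loss: {:.4f} - mean accuracy: {:.4f}\".format(\n",
"            epoch, n_epochs, step, n_steps, mean_loss.result(), acc_metric.result()), end=\"\")\n",
"    print()\n",
"    # Reset the metrics at the end of each epoch\n",
"    mean_loss.reset_states()\n",
"    acc_metric.reset_states()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"For the second part of the exercise (a different optimizer with a different learning rate for the upper and lower layers), one possible sketch is below. It reuses the hyperparameters, loss, and metrics from the previous cell, splits `model.layers` in half (the split point, the optimizers, and their learning rates are arbitrary choices), and uses a persistent `GradientTape` so that each optimizer can compute gradients for its own subset of variables."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: one optimizer for the lower half of the layers, another for the upper half\n",
"# (the split point and the learning rates are arbitrary assumptions)\n",
"lower_layers = model.layers[:len(model.layers) // 2]\n",
"upper_layers = model.layers[len(model.layers) // 2:]\n",
"lower_vars = [var for layer in lower_layers for var in layer.trainable_variables]\n",
"upper_vars = [var for layer in upper_layers for var in layer.trainable_variables]\n",
"\n",
"lower_optimizer = tf.keras.optimizers.SGD(learning_rate=1e-4)\n",
"upper_optimizer = tf.keras.optimizers.Nadam(learning_rate=1e-3)\n",
"\n",
"for epoch in range(1, n_epochs + 1):\n",
"    for step in range(1, n_steps + 1):\n",
"        X_batch, y_batch = random_batch(X_train, y_train, batch_size)\n",
"        X_batch = tf.cast(X_batch, tf.float32) / 255.0\n",
"        # A persistent tape is needed because gradient() is called once per optimizer\n",
"        with tf.GradientTape(persistent=True) as tape:\n",
"            y_pred = model(X_batch, training=True)\n",
"            loss = tf.reduce_mean(loss_fn(y_batch, y_pred))\n",
"        for variables, opt in ((lower_vars, lower_optimizer), (upper_vars, upper_optimizer)):\n",
"            gradients = tape.gradient(loss, variables)\n",
"            opt.apply_gradients(zip(gradients, variables))\n",
"        del tape\n",
"        mean_loss(loss)\n",
"        acc_metric(y_batch, y_pred)\n",
"        print(\"\\rEpoch {}/{} - step {}/{} - mean loss: {:.4f} - mean accuracy: {:.4f}\".format(\n",
"            epoch, n_epochs, step, n_steps, mean_loss.result(), acc_metric.result()), end=\"\")\n",
"    print()\n",
"    mean_loss.reset_states()\n",
"    acc_metric.reset_states()"
]
},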
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.6"
}
},
"nbformat": 4,
"nbformat_minor": 2
}