diff --git a/08_introduction_to_nlp_in_tensorflow.ipynb b/08_introduction_to_nlp_in_tensorflow.ipynb
index 897d9f6..f2f3000 100644
--- a/08_introduction_to_nlp_in_tensorflow.ipynb
+++ b/08_introduction_to_nlp_in_tensorflow.ipynb
@@ -3121,19 +3121,20 @@
     "# Set random seed and create embedding layer (new embedding layer for each model)\n",
     "tf.random.set_seed(42)\n",
     "from tensorflow.keras import layers\n",
-    "model_3_embedding = layers.Embedding(input_dim=max_vocab_length,\n",
-    "                                     output_dim=128,\n",
-    "                                     embeddings_initializer=\"uniform\",\n",
+    "model_3_embedding = layers.Embedding(\n",
+    "                                     input_dim=max_vocab_length,\n",
+    "                                     output_dim=128,\n",
+    "                                     embeddings_initializer=\"uniform\",\n",
     "                                     input_length=max_length,\n",
-    "                                     name=\"embedding_3\")\n",
+    "                                     name=\"embedding_3\")\n",
     "\n",
     "# Build an RNN using the GRU cell\n",
-    "inputs = layers.Input(shape=(1,), dtype=\"string\")\n",
+    "inputs = layers.Input(shape=(1,), dtype=\"string\") # Adjust input shape for sequences\n",
     "x = text_vectorizer(inputs)\n",
     "x = model_3_embedding(x)\n",
-    "# x = layers.GRU(64, return_sequences=True) # stacking recurrent cells requires return_sequences=True\n",
-    "x = layers.GRU(64)(x) \n",
-    "# x = layers.Dense(64, activation=\"relu\")(x) # optional dense layer after GRU cell\n",
+    "# x = layers.GRU(64, return_sequences=True)(x) # Stacking recurrent cells requires return_sequences=True\n",
+    "x = layers.GRU(64)(x) # Last GRU without return_sequences\n",
+    "# x = layers.Dense(64, activation=\"relu\")(x) # optional dense layer after GRU cell\n",
     "outputs = layers.Dense(1, activation=\"sigmoid\")(x)\n",
     "model_3 = tf.keras.Model(inputs, outputs, name=\"model_3_GRU\")"
    ]
@@ -7154,4 +7155,4 @@
 },
 "nbformat": 4,
 "nbformat_minor": 0
-}
\ No newline at end of file
+}