From 8e7d8490058f432259b1e0b645c4ec91ef03fdb2 Mon Sep 17 00:00:00 2001
From: Francois Chollet
Date: Sun, 11 Feb 2024 15:39:01 -0800
Subject: [PATCH] Fix dtype in keras nlp guide

---
 .../keras_nlp/transformer_pretraining.ipynb | 26 +++++++++----------
 guides/keras_nlp/transformer_pretraining.py |  4 +--
 .../md/keras_nlp/transformer_pretraining.md |  2 +-
 scripts/autogen.py                          |  2 +-
 4 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/guides/ipynb/keras_nlp/transformer_pretraining.ipynb b/guides/ipynb/keras_nlp/transformer_pretraining.ipynb
index 0eb94525fa..f93e18ac80 100644
--- a/guides/ipynb/keras_nlp/transformer_pretraining.ipynb
+++ b/guides/ipynb/keras_nlp/transformer_pretraining.ipynb
@@ -46,7 +46,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 0,
+   "execution_count": null,
    "metadata": {
     "colab_type": "code"
    },
@@ -58,7 +58,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 0,
+   "execution_count": null,
    "metadata": {
     "colab_type": "code"
    },
@@ -95,7 +95,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 0,
+   "execution_count": null,
    "metadata": {
     "colab_type": "code"
    },
@@ -132,7 +132,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 0,
+   "execution_count": null,
    "metadata": {
     "colab_type": "code"
    },
@@ -174,7 +174,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 0,
+   "execution_count": null,
    "metadata": {
     "colab_type": "code"
    },
@@ -233,7 +233,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 0,
+   "execution_count": null,
    "metadata": {
     "colab_type": "code"
    },
@@ -338,7 +338,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 0,
+   "execution_count": null,
    "metadata": {
     "colab_type": "code"
    },
@@ -435,7 +435,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 0,
+   "execution_count": null,
    "metadata": {
     "colab_type": "code"
    },
@@ -492,7 +492,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 0,
+   "execution_count": null,
    "metadata": {
     "colab_type": "code"
    },
@@ -565,7 +565,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 0,
+   "execution_count": null,
    "metadata": {
     "colab_type": "code"
    },
@@ -603,7 +603,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 0,
+   "execution_count": null,
    "metadata": {
     "colab_type": "code"
    },
@@ -613,7 +613,7 @@
     "encoder_model = keras.models.load_model(\"encoder_model.keras\", compile=False)\n",
     "\n",
     "# Take as input the tokenized input.\n",
-    "inputs = keras.Input(shape=(SEQ_LENGTH,), dtype=tf.int32)\n",
+    "inputs = keras.Input(shape=(SEQ_LENGTH,), dtype=\"int32\")\n",
     "\n",
     "# Encode and pool the tokens.\n",
     "encoded_tokens = encoder_model(inputs)\n",
@@ -687,4 +687,4 @@
  },
  "nbformat": 4,
  "nbformat_minor": 0
-}
\ No newline at end of file
+}
diff --git a/guides/keras_nlp/transformer_pretraining.py b/guides/keras_nlp/transformer_pretraining.py
index 404fb750bb..623d39dd33 100644
--- a/guides/keras_nlp/transformer_pretraining.py
+++ b/guides/keras_nlp/transformer_pretraining.py
@@ -306,7 +306,7 @@ def preprocess(inputs):
 the context in which it appeared.
 """
 
-inputs = keras.Input(shape=(SEQ_LENGTH,), dtype=tf.int32)
+inputs = keras.Input(shape=(SEQ_LENGTH,), dtype="int32")
 
 # Embed our tokens with a positional embedding.
 embedding_layer = keras_nlp.layers.TokenAndPositionEmbedding(
@@ -430,7 +430,7 @@ def preprocess(sentences, labels):
 encoder_model = keras.models.load_model("encoder_model.keras", compile=False)
 
 # Take as input the tokenized input.
-inputs = keras.Input(shape=(SEQ_LENGTH,), dtype=tf.int32)
+inputs = keras.Input(shape=(SEQ_LENGTH,), dtype="int32")
 
 # Encode and pool the tokens.
 encoded_tokens = encoder_model(inputs)
diff --git a/guides/md/keras_nlp/transformer_pretraining.md b/guides/md/keras_nlp/transformer_pretraining.md
index 823057d2da..378eedf1e0 100644
--- a/guides/md/keras_nlp/transformer_pretraining.md
+++ b/guides/md/keras_nlp/transformer_pretraining.md
@@ -372,7 +372,7 @@ the context in which it appeared.
 
 
 ```python
-inputs = keras.Input(shape=(SEQ_LENGTH,), dtype=tf.int32)
+inputs = keras.Input(shape=(SEQ_LENGTH,), dtype="int32")
 
 # Embed our tokens with a positional embedding.
 embedding_layer = keras_nlp.layers.TokenAndPositionEmbedding(
diff --git a/scripts/autogen.py b/scripts/autogen.py
index 1ca39e34e1..0742bfbeb8 100644
--- a/scripts/autogen.py
+++ b/scripts/autogen.py
@@ -46,7 +46,7 @@
 PROJECT_URL = {
     "keras": f"{KERAS_TEAM_GH}/keras/tree/v3.0.4/",
     "keras_tuner": f"{KERAS_TEAM_GH}/keras-tuner/tree/v1.4.6/",
-    "keras_cv": f"{KERAS_TEAM_GH}/keras-cv/tree/v0.8.1/",
+    "keras_cv": f"{KERAS_TEAM_GH}/keras-cv/tree/v0.8.2/",
     "keras_nlp": f"{KERAS_TEAM_GH}/keras-nlp/tree/v0.7.0/",
     "tf_keras": f"{KERAS_TEAM_GH}/tf-keras/tree/v2.15.0/",
 }
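
Note (not part of the patch above): the substantive change in the guide is swapping the `tf.int32` dtype object for the string `"int32"` in `keras.Input`. A minimal sketch of the fixed pattern, assuming Keras 3 with any backend installed and an illustrative `SEQ_LENGTH` value (the guide defines its own):

```python
import keras

SEQ_LENGTH = 128  # placeholder value for illustration

# A string dtype keeps the input spec backend-agnostic (TensorFlow, JAX, or PyTorch),
# whereas `dtype=tf.int32` would pull in TensorFlow even when another backend is active.
inputs = keras.Input(shape=(SEQ_LENGTH,), dtype="int32")

print(inputs.shape, inputs.dtype)  # (None, 128) int32
```

Dtype strings are also the convention Keras 3 uses in its own API surface, which is presumably why the guide drops the `tf.*` dtype object here.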