From 29795d53cf1a5f0674013a5b584517ae9d626e98 Mon Sep 17 00:00:00 2001
From: Pietro Monticone <38562595+pitmonticone@users.noreply.github.com>
Date: Tue, 2 Jan 2024 01:46:22 +0100
Subject: [PATCH 1/5] Update text_generation_wikipedia_rnn.ipynb

---
 .../text_generation_wikipedia_rnn.ipynb | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/experiments/text_generation_wikipedia_rnn/text_generation_wikipedia_rnn.ipynb b/experiments/text_generation_wikipedia_rnn/text_generation_wikipedia_rnn.ipynb
index a10b935..71d7a0d 100644
--- a/experiments/text_generation_wikipedia_rnn/text_generation_wikipedia_rnn.ipynb
+++ b/experiments/text_generation_wikipedia_rnn/text_generation_wikipedia_rnn.ipynb
@@ -1055,7 +1055,7 @@
     }
    ],
    "source": [
-    "# Map character indices to characters from vacabulary.\n",
+    "# Map character indices to characters from vocabulary.\n",
     "index2char = np.array(vocab)\n",
     "\n",
     "print(index2char)"
@@ -1365,7 +1365,7 @@
     "id": "BDYHEJ0pY1ai"
    },
    "source": [
-    "Each index of these vectors are processed as one time step. For the input at time step 0, the model receives the index for \"F\" and trys to predict the index for \"i\" as the next character. At the next timestep, it does the same thing but the RNN considers the previous step context in addition to the current input character."
+    "Each index of these vectors is processed as one time step. For the input at time step 0, the model receives the index for \"F\" and tries to predict the index for \"i\" as the next character. At the next timestep, it does the same thing but the RNN considers the previous step context in addition to the current input character."
    ]
   },
  {
@@ -1644,7 +1644,7 @@
     }
    ],
    "source": [
-    "# Let's do a quick detour and see how Embeding layer works.\n",
+    "# Let's do a quick detour and see how Embedding layer works.\n",
     "# It takes several char indices sequences (batch) as an input.\n",
     "# It encodes every character of every sequence to a vector of tmp_embeding_size length.\n",
     "tmp_vocab_size = 10\n",
@@ -1935,7 +1935,7 @@
     "name": "stdout",
     "output_type": "stream",
     "text": [
-     "Prediction for the 1st letter of the batch 1st sequense:\n",
+     "Prediction for the 1st letter of the batch 1st sequence:\n",
      "tf.Tensor(\n",
      "[-2.96991039e-03 2.02196068e-04 5.34047745e-03 -2.94846855e-03\n",
      " -3.64167639e-03 -2.63241702e-04 -8.80502281e-04 7.99844624e-04\n",
@@ -2097,7 +2097,7 @@
     }
    ],
    "source": [
-    "print('Prediction for the 1st letter of the batch 1st sequense:')\n",
+    "print('Prediction for the 1st letter of the batch 1st sequence:')\n",
     "print(example_batch_predictions[0, 0])"
    ]
   },

From 732652553030dbe123a84d3ba25a9050b008ff0a Mon Sep 17 00:00:00 2001
From: Pietro Monticone <38562595+pitmonticone@users.noreply.github.com>
Date: Tue, 2 Jan 2024 01:49:55 +0100
Subject: [PATCH 2/5] Update recipes_generation.en.md

---
 assets/recipes_generation.en.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/assets/recipes_generation.en.md b/assets/recipes_generation.en.md
index 0e701e3..c0b23f8 100644
--- a/assets/recipes_generation.en.md
+++ b/assets/recipes_generation.en.md
@@ -1289,14 +1289,14 @@ _➔ output:_
 To get actual predictions from the model we need to sample from the output distribution, to get actual character indices. This distribution is defined by the logits over the character vocabulary.
 
 ```python
-print('Prediction for the 1st letter of the batch 1st sequense:')
+print('Prediction for the 1st letter of the batch 1st sequence:')
 print(example_batch_predictions[0, 0])
 ```
 
 _➔ output:_
 
 > ```text
-> Prediction for the 1st letter of the batch 1st sequense:
+> Prediction for the 1st letter of the batch 1st sequence:
 > tf.Tensor(
 > [-9.0643829e-03 -1.9503604e-03 9.3381782e-04 3.7442446e-03
 > -2.0541784e-03 -7.4054599e-03 -7.1884273e-03 2.6014952e-03
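The paragraph patched above in 2/5 describes drawing actual character indices from the distribution defined by the model's output logits. Below is a minimal sketch of that sampling step, assuming TensorFlow 2; `vocab_size` and the random logits are placeholders standing in for the notebooks' real `example_batch_predictions`:

```python
import tensorflow as tf

# Placeholder for example_batch_predictions: unnormalized logits of shape
# (batch_size, sequence_length, vocab_size). The sizes are illustrative only.
vocab_size = 65
example_logits = tf.random.normal((1, 100, vocab_size))

# tf.random.categorical expects 2-D logits of shape (rows, classes), so take
# the first sequence of the batch and draw one character index per time step.
sampled = tf.random.categorical(example_logits[0], num_samples=1)  # (100, 1)
sampled_indices = tf.squeeze(sampled, axis=-1).numpy()             # (100,)

print(sampled_indices[:10])
```

Sampling, rather than taking the argmax of the logits, keeps generation from locking onto the single most likely character at every step.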
From f2a7310be85a43b1c31d8f8444ab6b35eeb2383c Mon Sep 17 00:00:00 2001
From: Pietro Monticone <38562595+pitmonticone@users.noreply.github.com>
Date: Tue, 2 Jan 2024 01:50:00 +0100
Subject: [PATCH 3/5] Update recipes_generation.ru.md

---
 assets/recipes_generation.ru.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/assets/recipes_generation.ru.md b/assets/recipes_generation.ru.md
index 1a271a1..03af509 100644
--- a/assets/recipes_generation.ru.md
+++ b/assets/recipes_generation.ru.md
@@ -1301,14 +1301,14 @@ _➔ вывод:_
 Для того, чтобы выбрать символ, который по мнению модели должен идти следующим нам необходимо сделать sampling по вероятностям появления каждого символа.
 
 ```python
-print('Prediction for the 1st letter of the batch 1st sequense:')
+print('Prediction for the 1st letter of the batch 1st sequence:')
 print(example_batch_predictions[0, 0])
 ```
 
 _➔ вывод:_
 
 > ```text
-> Prediction for the 1st letter of the batch 1st sequense:
+> Prediction for the 1st letter of the batch 1st sequence:
 > tf.Tensor(
 > [-9.0643829e-03 -1.9503604e-03 9.3381782e-04 3.7442446e-03
 > -2.0541784e-03 -7.4054599e-03 -7.1884273e-03 2.6014952e-03

From 3294cde982775d117f26a0f6a0b46c30ca175b6c Mon Sep 17 00:00:00 2001
From: Pietro Monticone <38562595+pitmonticone@users.noreply.github.com>
Date: Tue, 2 Jan 2024 01:50:04 +0100
Subject: [PATCH 4/5] Update recipe_generation_rnn.ipynb

---
 .../recipe_generation_rnn/recipe_generation_rnn.ipynb | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/experiments/recipe_generation_rnn/recipe_generation_rnn.ipynb b/experiments/recipe_generation_rnn/recipe_generation_rnn.ipynb
index 554cbed..f38bea0 100644
--- a/experiments/recipe_generation_rnn/recipe_generation_rnn.ipynb
+++ b/experiments/recipe_generation_rnn/recipe_generation_rnn.ipynb
@@ -2603,7 +2603,7 @@
     }
    ],
    "source": [
-    "# Let's do a quick detour and see how Embeding layer works.\n",
+    "# Let's do a quick detour and see how Embedding layer works.\n",
     "# It takes several char indices sequences (batch) as an input.\n",
     "# It encodes every character of every sequence to a vector of tmp_embeding_size length.\n",
     "tmp_vocab_size = 10\n",
@@ -2894,7 +2894,7 @@
     "name": "stdout",
     "output_type": "stream",
     "text": [
-     "Prediction for the 1st letter of the batch 1st sequense:\n",
+     "Prediction for the 1st letter of the batch 1st sequence:\n",
      "tf.Tensor(\n",
      "[-9.0643829e-03 -1.9503604e-03 9.3381782e-04 3.7442446e-03\n",
      " -2.0541784e-03 -7.4054599e-03 -7.1884273e-03 2.6014952e-03\n",
@@ -2944,7 +2944,7 @@
     }
    ],
    "source": [
-    "print('Prediction for the 1st letter of the batch 1st sequense:')\n",
+    "print('Prediction for the 1st letter of the batch 1st sequence:')\n",
     "print(example_batch_predictions[0, 0])"
    ]
   },
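The Embedding-layer detour touched by patches 1/5 and 4/5 is easy to reproduce in isolation: the layer maps each character index of each sequence in a batch to a dense vector. A minimal sketch, assuming TensorFlow 2; it uses the corrected spelling `tmp_embedding_size` (the notebooks' own variable is named `tmp_embeding_size`), and the value 5 is illustrative:

```python
import numpy as np
import tensorflow as tf

tmp_vocab_size = 10      # number of distinct character indices, as in the notebooks
tmp_embedding_size = 5   # length of the vector each index is encoded to

embedding = tf.keras.layers.Embedding(tmp_vocab_size, tmp_embedding_size)

# A batch of two character-index sequences, four characters each.
tmp_batch = np.array([[1, 2, 3, 4],
                      [5, 6, 7, 8]])

tmp_output = embedding(tmp_batch)
print(tmp_output.shape)  # (2, 4, 5): batch, sequence length, embedding size
```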
From deae679777ae2ed5a8901d0b0c7516427662b11c Mon Sep 17 00:00:00 2001
From: Pietro Monticone <38562595+pitmonticone@users.noreply.github.com>
Date: Tue, 2 Jan 2024 01:50:08 +0100
Subject: [PATCH 5/5] Update text_generation_shakespeare_rnn.ipynb

---
 .../text_generation_shakespeare_rnn.ipynb | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/experiments/text_generation_shakespeare_rnn/text_generation_shakespeare_rnn.ipynb b/experiments/text_generation_shakespeare_rnn/text_generation_shakespeare_rnn.ipynb
index 56d84bc..44139e0 100644
--- a/experiments/text_generation_shakespeare_rnn/text_generation_shakespeare_rnn.ipynb
+++ b/experiments/text_generation_shakespeare_rnn/text_generation_shakespeare_rnn.ipynb
@@ -1417,7 +1417,7 @@
     "name": "stdout",
     "output_type": "stream",
     "text": [
-     "Prediction for the 1st letter of the batch 1st sequense:\n",
+     "Prediction for the 1st letter of the batch 1st sequence:\n",
      "tf.Tensor(\n",
      "[-5.5658985e-03 -5.6167855e-03 2.3333444e-03 -5.4010577e-03\n",
      " -1.2658490e-03 -2.0685978e-03 -1.7119508e-03 -1.9059415e-03\n",
@@ -1440,7 +1440,7 @@
     }
    ],
    "source": [
-    "print('Prediction for the 1st letter of the batch 1st sequense:')\n",
+    "print('Prediction for the 1st letter of the batch 1st sequence:')\n",
     "print(example_batch_predictions[0, 0])"
    ]
   },
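The time-step sentence corrected in patch 1/5 ("the model receives the index for \"F\" and tries to predict the index for \"i\"") comes down to input and target being the same index sequence shifted by one step. A short illustration in plain NumPy; the five-character `text` and tiny vocabulary here are hypothetical stand-ins for the notebooks' full corpora:

```python
import numpy as np

text = 'First'
vocab = sorted(set(text))        # ['F', 'i', 'r', 's', 't']
char2index = {char: index for index, char in enumerate(vocab)}
index2char = np.array(vocab)     # mirrors the notebooks' index2char mapping

indices = np.array([char2index[char] for char in text])
input_seq, target_seq = indices[:-1], indices[1:]  # target = input shifted by one

for step in range(len(input_seq)):
    print(f'step {step}: input "{index2char[input_seq[step]]}" '
          f'-> expected "{index2char[target_seq[step]]}"')
# step 0: input "F" -> expected "i", matching the corrected sentence in patch 1/5.
```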