Skip to content

Commit 34aa7cf

Browse files
jatinpapreja and kartikay-bagla
authored and committed
Added windows compatibility in ch3 nb10
1 parent c78b4d3 commit 34aa7cf

File tree

1 file changed

+21
-83
lines changed

1 file changed

+21
-83
lines changed

Ch3/10_Visualizing_Embeddings_using_Tensorboard.ipynb

Lines changed: 21 additions & 83 deletions
Original file line number | Diff line number | Diff line change
@@ -9,72 +9,17 @@
99
},
1010
{
1111
"cell_type": "code",
12-
"execution_count": 3,
12+
"execution_count": 1,
1313
"metadata": {},
14-
"outputs": [
15-
{
16-
"name": "stdout",
17-
"output_type": "stream",
18-
"text": [
19-
"Collecting tensorflow==1.14.0\n",
20-
" Downloading tensorflow-1.14.0-cp36-cp36m-manylinux1_x86_64.whl (109.2 MB)\n",
21-
"\u001b[K |████████████████████████████████| 109.2 MB 4.3 kB/s eta 0:00:01\n",
22-
"\u001b[?25hCollecting google-pasta>=0.1.6\n",
23-
" Using cached google_pasta-0.2.0-py3-none-any.whl (57 kB)\n",
24-
"Collecting grpcio>=1.8.6\n",
25-
" Using cached grpcio-1.28.1-cp36-cp36m-manylinux2010_x86_64.whl (2.8 MB)\n",
26-
"Collecting keras-applications>=1.0.6\n",
27-
" Using cached Keras_Applications-1.0.8-py3-none-any.whl (50 kB)\n",
28-
"Collecting astor>=0.6.0\n",
29-
" Using cached astor-0.8.1-py2.py3-none-any.whl (27 kB)\n",
30-
"Collecting protobuf>=3.6.1\n",
31-
" Using cached protobuf-3.11.3-cp36-cp36m-manylinux1_x86_64.whl (1.3 MB)\n",
32-
"Collecting tensorflow-estimator<1.15.0rc0,>=1.14.0rc0\n",
33-
" Downloading tensorflow_estimator-1.14.0-py2.py3-none-any.whl (488 kB)\n",
34-
"\u001b[K |████████████████████████████████| 488 kB 3.1 MB/s eta 0:00:01\n",
35-
"\u001b[?25hCollecting gast>=0.2.0\n",
36-
" Downloading gast-0.3.3-py2.py3-none-any.whl (9.7 kB)\n",
37-
"Processing /home/etherealenvy/.cache/pip/wheels/93/2a/eb/e58dbcbc963549ee4f065ff80a59f274cc7210b6eab962acdc/termcolor-1.1.0-py3-none-any.whl\n",
38-
"Collecting keras-preprocessing>=1.0.5\n",
39-
" Downloading Keras_Preprocessing-1.1.1-py2.py3-none-any.whl (42 kB)\n",
40-
"\u001b[K |████████████████████████████████| 42 kB 125 kB/s eta 0:00:011\n",
41-
"\u001b[?25hRequirement already satisfied: wheel>=0.26 in /home/etherealenvy/miniconda3/envs/practicalnlp/lib/python3.6/site-packages (from tensorflow==1.14.0) (0.34.2)\n",
42-
"Processing /home/etherealenvy/.cache/pip/wheels/c3/af/84/3962a6af7b4ab336e951b7877dcfb758cf94548bb1771e0679/absl_py-0.9.0-py3-none-any.whl\n",
43-
"Requirement already satisfied: numpy<2.0,>=1.14.5 in /home/etherealenvy/miniconda3/envs/practicalnlp/lib/python3.6/site-packages (from tensorflow==1.14.0) (1.18.4)\n",
44-
"Collecting tensorboard<1.15.0,>=1.14.0\n",
45-
" Downloading tensorboard-1.14.0-py3-none-any.whl (3.1 MB)\n",
46-
"\u001b[K |████████████████████████████████| 3.1 MB 13.4 MB/s eta 0:00:01\n",
47-
"\u001b[?25hRequirement already satisfied: six>=1.10.0 in /home/etherealenvy/miniconda3/envs/practicalnlp/lib/python3.6/site-packages (from tensorflow==1.14.0) (1.14.0)\n",
48-
"Processing /home/etherealenvy/.cache/pip/wheels/32/42/7f/23cae9ff6ef66798d00dc5d659088e57dbba01566f6c60db63/wrapt-1.12.1-cp36-cp36m-linux_x86_64.whl\n",
49-
"Collecting h5py\n",
50-
" Using cached h5py-2.10.0-cp36-cp36m-manylinux1_x86_64.whl (2.9 MB)\n",
51-
"Requirement already satisfied: setuptools in /home/etherealenvy/.local/lib/python3.6/site-packages (from protobuf>=3.6.1->tensorflow==1.14.0) (46.1.3)\n",
52-
"Requirement already satisfied: werkzeug>=0.11.15 in /home/etherealenvy/.local/lib/python3.6/site-packages (from tensorboard<1.15.0,>=1.14.0->tensorflow==1.14.0) (1.0.1)\n",
53-
"Collecting markdown>=2.6.8\n",
54-
" Downloading Markdown-3.2.2-py3-none-any.whl (88 kB)\n",
55-
"\u001b[K |████████████████████████████████| 88 kB 711 kB/s eta 0:00:011\n",
56-
"\u001b[?25hRequirement already satisfied: importlib-metadata; python_version < \"3.8\" in /home/etherealenvy/miniconda3/envs/practicalnlp/lib/python3.6/site-packages (from markdown>=2.6.8->tensorboard<1.15.0,>=1.14.0->tensorflow==1.14.0) (1.5.0)\n",
57-
"Requirement already satisfied: zipp>=0.5 in /home/etherealenvy/miniconda3/envs/practicalnlp/lib/python3.6/site-packages (from importlib-metadata; python_version < \"3.8\"->markdown>=2.6.8->tensorboard<1.15.0,>=1.14.0->tensorflow==1.14.0) (3.1.0)\n",
58-
"Installing collected packages: google-pasta, grpcio, h5py, keras-applications, astor, protobuf, tensorflow-estimator, gast, termcolor, keras-preprocessing, absl-py, markdown, tensorboard, wrapt, tensorflow\n",
59-
"Successfully installed absl-py-0.9.0 astor-0.8.1 gast-0.3.3 google-pasta-0.2.0 grpcio-1.28.1 h5py-2.10.0 keras-applications-1.0.8 keras-preprocessing-1.1.1 markdown-3.2.2 protobuf-3.11.3 tensorboard-1.14.0 tensorflow-1.14.0 tensorflow-estimator-1.14.0 termcolor-1.1.0 wrapt-1.12.1\n"
60-
]
61-
}
62-
],
14+
"outputs": [],
6315
"source": [
6416
"#installing the required libraries\n",
6517
"!pip install tensorflow==1.14.0"
6618
]
6719
},
6820
{
6921
"cell_type": "code",
70-
"execution_count": 1,
71-
"metadata": {},
72-
"outputs": [],
73-
"source": []
74-
},
75-
{
76-
"cell_type": "code",
77-
"execution_count": 4,
22+
"execution_count": 2,
7823
"metadata": {},
7924
"outputs": [],
8025
"source": [
@@ -93,28 +38,28 @@
9338
},
9439
{
9540
"cell_type": "code",
96-
"execution_count": 11,
41+
"execution_count": 3,
9742
"metadata": {},
9843
"outputs": [],
9944
"source": [
10045
"#Loading the model\n",
101-
"filename = \"File-Path\"\n",
102-
"model = KeyedVectors.load_word2vec_format(filename, binary=True)\n"
46+
"cwd=os.getcwd() \n",
47+
"model = KeyedVectors.load_word2vec_format(cwd+'\\Models\\word2vec_cbow.bin', binary=True)"
10348
]
10449
},
10550
{
10651
"cell_type": "code",
107-
"execution_count": 7,
52+
"execution_count": 4,
10853
"metadata": {},
10954
"outputs": [],
11055
"source": [
11156
"#get the model's vocabulary size\n",
112-
"max_size = len(model.wv.vocab)-1\n"
57+
"max_size = len(model.wv.vocab)-1"
11358
]
11459
},
11560
{
11661
"cell_type": "code",
117-
"execution_count": 13,
62+
"execution_count": 5,
11863
"metadata": {},
11964
"outputs": [],
12065
"source": [
@@ -124,7 +69,7 @@
12469
},
12570
{
12671
"cell_type": "code",
127-
"execution_count": 16,
72+
"execution_count": 6,
12873
"metadata": {},
12974
"outputs": [],
13075
"source": [
@@ -133,7 +78,7 @@
13378
"if not os.path.exists('projections'):\n",
13479
" os.makedirs('projections')\n",
13580
" \n",
136-
"with open(\"projections/metadata.tsv\", 'w+') as file_metadata:\n",
81+
"with open(\"projections/metadata.tsv\", 'w+',encoding=\"utf-8\") as file_metadata: #changed added encoding=\"utf-8\"\n",
13782
" \n",
13883
" for i, word in enumerate(model.wv.index2word[:max_size]):\n",
13984
" \n",
@@ -146,7 +91,7 @@
14691
},
14792
{
14893
"cell_type": "code",
149-
"execution_count": 17,
94+
"execution_count": 7,
15095
"metadata": {},
15196
"outputs": [],
15297
"source": [
@@ -156,7 +101,7 @@
156101
},
157102
{
158103
"cell_type": "code",
159-
"execution_count": 18,
104+
"execution_count": 8,
160105
"metadata": {},
161106
"outputs": [],
162107
"source": [
@@ -167,7 +112,7 @@
167112
},
168113
{
169114
"cell_type": "code",
170-
"execution_count": 19,
115+
"execution_count": 9,
171116
"metadata": {},
172117
"outputs": [],
173118
"source": [
@@ -177,7 +122,7 @@
177122
},
178123
{
179124
"cell_type": "code",
180-
"execution_count": 20,
125+
"execution_count": 10,
181126
"metadata": {},
182127
"outputs": [],
183128
"source": [
@@ -187,7 +132,7 @@
187132
},
188133
{
189134
"cell_type": "code",
190-
"execution_count": 21,
135+
"execution_count": 11,
191136
"metadata": {},
192137
"outputs": [],
193138
"source": [
@@ -197,7 +142,7 @@
197142
},
198143
{
199144
"cell_type": "code",
200-
"execution_count": 23,
145+
"execution_count": 12,
201146
"metadata": {},
202147
"outputs": [],
203148
"source": [
@@ -208,7 +153,7 @@
208153
},
209154
{
210155
"cell_type": "code",
211-
"execution_count": 24,
156+
"execution_count": 13,
212157
"metadata": {},
213158
"outputs": [],
214159
"source": [
@@ -219,7 +164,7 @@
219164
},
220165
{
221166
"cell_type": "code",
222-
"execution_count": 25,
167+
"execution_count": 14,
223168
"metadata": {},
224169
"outputs": [
225170
{
@@ -228,7 +173,7 @@
228173
"'projections/model.ckpt-161017'"
229174
]
230175
},
231-
"execution_count": 25,
176+
"execution_count": 14,
232177
"metadata": {},
233178
"output_type": "execute_result"
234179
}
@@ -262,13 +207,6 @@
262207
"Output when we search for a specific word in this case \"human\" and isolate only those points\n",
263208
"![TensorBoard-4](Images/TensorBoard-4.png)"
264209
]
265-
},
266-
{
267-
"cell_type": "code",
268-
"execution_count": null,
269-
"metadata": {},
270-
"outputs": [],
271-
"source": []
272210
}
273211
],
274212
"metadata": {
@@ -287,7 +225,7 @@
287225
"name": "python",
288226
"nbconvert_exporter": "python",
289227
"pygments_lexer": "ipython3",
290-
"version": "3.6.10"
228+
"version": "3.7.0"
291229
}
292230
},
293231
"nbformat": 4,

0 commit comments

Comments (0)