Skip to content

Commit 6bf1481

Browse files
committed
📝 reformat
Signed-off-by: Christoph Görn <[email protected]>
1 parent 5ee5359 commit 6bf1481

File tree

5 files changed

+1
-14
lines changed

5 files changed

+1
-14
lines changed

notebooks/neural-magic-torch-mnist/download_dataset.ipynb

-2
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,6 @@
102102
")\n",
103103
"\n",
104104
"if use_ceph:\n",
105-
"\n",
106105
" s3_endpoint_url = os.environ[\"OBJECT_STORAGE_ENDPOINT_URL\"]\n",
107106
" s3_access_key = os.environ[\"AWS_ACCESS_KEY_ID\"]\n",
108107
" s3_secret_key = os.environ[\"AWS_SECRET_ACCESS_KEY\"]\n",
@@ -118,7 +117,6 @@
118117
"\n",
119118
" # Store MNIST data for next step.\n",
120119
" for path in destination_path.iterdir():\n",
121-
"\n",
122120
" file_downloaded_path = destination_path.joinpath(path.name)\n",
123121
"\n",
124122
" key = f\"{project_name}/data/raw/{data_name}\"\n",

notebooks/neural-magic-torch-mnist/sparsify.ipynb

+1-3
Original file line numberDiff line numberDiff line change
@@ -248,15 +248,13 @@
248248
"outputs": [],
249249
"source": [
250250
"def train_model(model, loaders, criterion, optimizer, num_epochs=1):\n",
251-
"\n",
252251
" model.train()\n",
253252
"\n",
254253
" # Train the model\n",
255254
" total_step = len(loaders[\"train\"])\n",
256255
"\n",
257256
" for epoch in range(num_epochs):\n",
258257
" for i, (images, labels) in enumerate(loaders[\"train\"]):\n",
259-
"\n",
260258
" # gives batch data, normalize x when iterate train_loader\n",
261259
" b_x = Variable(images) # batch x\n",
262260
" b_y = Variable(labels) # batch y\n",
@@ -412,7 +410,7 @@
412410
"from sparseml.pytorch.utils import get_prunable_layers, tensor_sparsity\n",
413411
"\n",
414412
"# print sparsities of each layer\n",
415-
"for (name, layer) in get_prunable_layers(cnn):\n",
413+
"for name, layer in get_prunable_layers(cnn):\n",
416414
" print(\"{}.weight: {:.4f}\".format(name, tensor_sparsity(layer.weight).item()))"
417415
]
418416
},

notebooks/neural-magic-torch-mnist/training.ipynb

-2
Original file line numberDiff line numberDiff line change
@@ -391,15 +391,13 @@
391391
"\n",
392392
"\n",
393393
"def train(num_epochs, cnn, loaders, loss_func, optimizer):\n",
394-
"\n",
395394
" cnn.train()\n",
396395
"\n",
397396
" # Train the model\n",
398397
" total_step = len(loaders[\"train\"])\n",
399398
"\n",
400399
" for epoch in range(num_epochs):\n",
401400
" for i, (images, labels) in enumerate(loaders[\"train\"]):\n",
402-
"\n",
403401
" # gives batch data, normalize x when iterate train_loader\n",
404402
" b_x = Variable(images) # batch x\n",
405403
" b_y = Variable(labels) # batch y\n",

notebooks/tf-mnist/download_dataset.ipynb

-4
Original file line numberDiff line numberDiff line change
@@ -181,15 +181,13 @@
181181
" destination_path = Path(str(os.environ.get(\"DATASET\", \"data/raw\")))\n",
182182
"\n",
183183
"else:\n",
184-
"\n",
185184
" # Set path where to store\n",
186185
" directory_path = Path.cwd().parents[0]\n",
187186
" destination_path = directory_path.joinpath(\n",
188187
" str(os.environ.get(\"DATASET\", \"data/raw\"))\n",
189188
" )\n",
190189
"\n",
191190
" if use_ceph:\n",
192-
"\n",
193191
" s3_endpoint_url = os.environ[\"OBJECT_STORAGE_ENDPOINT_URL\"]\n",
194192
" s3_access_key = os.environ[\"AWS_ACCESS_KEY_ID\"]\n",
195193
" s3_secret_key = os.environ[\"AWS_SECRET_ACCESS_KEY\"]\n",
@@ -206,7 +204,6 @@
206204
"\n",
207205
"# Store MNIST data for next step.\n",
208206
"for data_name, data_file in dataset.items():\n",
209-
"\n",
210207
" if not os.path.exists(destination_path):\n",
211208
" destination_path.mkdir(parents=True, exist_ok=True)\n",
212209
"\n",
@@ -218,7 +215,6 @@
218215
" s3.upload_file(Bucket=s3_bucket, Key=key, Filename=str(file_downloaded_path))\n",
219216
"\n",
220217
" else:\n",
221-
"\n",
222218
" if not _is_file_downloaded(file_downloaded_path):\n",
223219
" output = open(file_downloaded_path, \"wb\")\n",
224220
" pickle.dump(data_file, output)\n",

notebooks/tf-mnist/training.ipynb

-3
Original file line numberDiff line numberDiff line change
@@ -117,20 +117,17 @@
117117
"project_name = os.environ.get(\"PROJECT_NAME\", \"elyra-aidevsecops-tutorial\")\n",
118118
"\n",
119119
"if automation:\n",
120-
"\n",
121120
" # Get file path relative to notebook\n",
122121
" raw_dataset_path = str(os.environ.get(\"DATASET\", \"../data/raw\"))\n",
123122
"\n",
124123
"else:\n",
125-
"\n",
126124
" # Download and store in project's data directory\n",
127125
" directory_path = Path.cwd().parents[0]\n",
128126
" raw_dataset_path = directory_path.joinpath(\n",
129127
" str(os.environ.get(\"DATASET\", \"../data/raw/mnist_datasets_tf/\"))\n",
130128
" )\n",
131129
"\n",
132130
"if use_ceph:\n",
133-
"\n",
134131
" # Download files from S3\n",
135132
" s3_endpoint_url = os.environ[\"OBJECT_STORAGE_ENDPOINT_URL\"]\n",
136133
" s3_access_key = os.environ[\"AWS_ACCESS_KEY_ID\"]\n",

0 commit comments

Comments (0)