
Commit

finished assignment2
brightredchilli committed Apr 3, 2017
1 parent 653389d commit e5342af
Showing 6 changed files with 17 additions and 12 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -1,6 +1,8 @@
*.swp
*.pyc
*.jpg
.env/*
**/.ipynb_checkpoints/*
**/.ipynb
**/datasets/*

7 changes: 4 additions & 3 deletions assignment2/ConvolutionalNetworks.ipynb
@@ -573,10 +573,10 @@
"model = ThreeLayerConvNet(weight_scale=1e-2)\n",
"\n",
"solver = Solver(model, small_data,\n",
" num_epochs=20, batch_size=50,\n",
" num_epochs=10, batch_size=50,\n",
" update_rule='adam',\n",
" optim_config={\n",
" 'learning_rate': 1e-3,\n",
" 'learning_rate': 9e-4,\n",
" },\n",
" verbose=True, print_every=1)\n",
"solver.train()"
@@ -842,7 +842,8 @@
},
"outputs": [],
"source": [
"# Train a really good model on CIFAR-10"
"# Train a really good model on CIFAR-10\n",
"from cs231n.classifiers.conv import *\n"
]
},
{
2 changes: 2 additions & 0 deletions assignment2/cs231n/classifiers/conv.py
@@ -0,0 +1,2 @@

class ConvNet(object):
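
The new conv.py only contributes this empty ConvNet stub, which the CIFAR-10 cell in the notebook imports. As a rough, hypothetical sketch (the constructor arguments and the loss(X, y) interface below are modeled on the assignment's ThreeLayerConvNet and are assumptions, not part of this commit), a class that plugs into the course Solver would look something like:

import numpy as np

class ConvNet(object):
    """Skeleton for a CIFAR-10 convolutional net; the layer stack is left open."""

    def __init__(self, input_dim=(3, 32, 32), num_classes=10,
                 weight_scale=1e-2, reg=0.0, dtype=np.float32):
        # Solver looks for learnable parameters in this dict, keyed by name.
        self.params = {}
        self.reg = reg
        self.dtype = dtype
        # TODO: initialize weights/biases here, e.g. self.params['W1'] = ...

    def loss(self, X, y=None):
        # Return class scores for X; when y is given, return (loss, grads),
        # where grads has the same keys as self.params.
        scores = None
        # TODO: forward pass producing scores of shape (N, num_classes)
        if y is None:
            return scores
        loss, grads = 0.0, {}
        # TODO: softmax loss plus L2 regularization, and per-parameter gradients
        return loss, grads

Any model exposing params and loss in this form can be handed to Solver the same way ThreeLayerConvNet is in the notebook cell above.
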
18 changes: 9 additions & 9 deletions assignment2/cs231n/layers.py
@@ -649,19 +649,14 @@ def spatial_batchnorm_forward(x, gamma, beta, bn_param):
# be very short; ours is less than five lines. #
#############################################################################
N, C, H, W = x.shape
tmp = x.transpose(1,0,2,3).mean(-1).mean(-1).mean(-1)
print("tmp.shape {}".format(tmp.shape))
tmp = x.transpose(0,2,3,1).reshape(-1, C)
out, cache = batchnorm_forward(tmp, gamma, beta, bn_param)
tmp = np.ones_like(tmp)
for i in arange(tmp.size):
tmp[i] *= out[i]

tmp = tmp.reshape(N, C, H, W)
out = out.reshape(N, H, W, C).transpose(0,3,1,2)
#############################################################################
# END OF YOUR CODE #
#############################################################################

return tmp, cache
return out, cache


def spatial_batchnorm_backward(dout, cache):
@@ -679,14 +674,19 @@ def spatial_batchnorm_backward(dout, cache):
"""
dx, dgamma, dbeta = None, None, None


#############################################################################
# TODO: Implement the backward pass for spatial batch normalization. #
# #
# HINT: You can implement spatial batch normalization using the vanilla #
# version of batch normalization defined above. Your implementation should #
# be very short; ours is less than five lines. #
#############################################################################
pass

N, C, H, W = dout.shape
dout = dout.transpose(0,2,3,1).reshape(-1, C)
dx, dgamma, dbeta = batchnorm_backward(dout, cache)
dx = dx.reshape(N, H, W, C).transpose(0, 3, 1, 2)
#############################################################################
# END OF YOUR CODE #
#############################################################################
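
The layers.py change implements spatial batch normalization by folding the spatial positions into the batch dimension and reusing the vanilla batchnorm_forward / batchnorm_backward. A minimal self-contained sketch of the same reshape trick, with a simple per-channel normalization standing in for the course's batchnorm_forward (whose cache and learnable gamma/beta are omitted here), is:

import numpy as np

def spatial_norm_demo(x, eps=1e-5):
    # NCHW -> (N*H*W, C): each row is one spatial position, columns are channels.
    N, C, H, W = x.shape
    flat = x.transpose(0, 2, 3, 1).reshape(-1, C)
    # Stand-in for batchnorm_forward: zero-mean, unit-variance per channel.
    normed = (flat - flat.mean(axis=0)) / np.sqrt(flat.var(axis=0) + eps)
    # Undo the reshape/transpose to recover the NCHW layout.
    return normed.reshape(N, H, W, C).transpose(0, 3, 1, 2)

x = np.random.randn(2, 3, 4, 5)
out = spatial_norm_demo(x)
assert out.shape == x.shape
# Each channel is now normalized over all N*H*W positions.
print(out.mean(axis=(0, 2, 3)), out.var(axis=(0, 2, 3)))

The backward pass in the diff mirrors this round trip: dout is flattened to (N*H*W, C) with the same transpose/reshape, passed to batchnorm_backward together with the cached forward values, and dx is reshaped back to (N, C, H, W).
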
Binary file removed assignment2/kitten.jpg
Binary file removed assignment2/puppy.jpg
