Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add logging through W&B #187

Open
This pull request wants to merge 2 commits into the base branch `master` from the contributor's branch.
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
6 changes: 6 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,8 @@ Implementation of _Auxiliary Classifier Generative Adversarial Network_.

Paper: https://arxiv.org/abs/1610.09585

[Trained Model](https://app.wandb.ai/borisd13/Keras-GAN_AC-GAN/runs/tz46c8ow?workspace=user-borisd13)

#### Example
```
$ cd acgan/
Expand Down Expand Up @@ -79,6 +81,8 @@ Implementation of _Bidirectional Generative Adversarial Network_.

Paper: https://arxiv.org/abs/1605.09782

[Trained Model](https://app.wandb.ai/borisd13/Keras-BiGAN/runs/2b88b5vv?workspace=user-borisd13)

#### Example
```
$ cd bigan/
Expand All @@ -92,6 +96,8 @@ Implementation of _Boundary-Seeking Generative Adversarial Networks_.

Paper: https://arxiv.org/abs/1702.08431

[Trained Model](https://app.wandb.ai/borisd13/Keras-BGAN/runs/360cv3g4?workspace=user-borisd13)

#### Example
```
$ cd bgan/
Expand Down
18 changes: 18 additions & 0 deletions acgan/acgan.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from __future__ import print_function, division

import wandb
from keras.datasets import mnist
from keras.layers import Input, Dense, Reshape, Flatten, Dropout, multiply
from keras.layers import BatchNormalization, Activation, Embedding, ZeroPadding2D
Expand All @@ -21,6 +22,16 @@ def __init__(self):
self.img_shape = (self.img_rows, self.img_cols, self.channels)
self.num_classes = 10
self.latent_dim = 100

# Log project run
wandb.init(anonymous='allow',
project="Keras-GAN_AC-GAN",
config={"img_rows": self.img_rows,
"img_cols": self.img_cols,
"channels": self.channels,
"img_shape": self.img_shape,
"num_classes": self.num_classes,
"latent_dim": self.latent_dim})

optimizer = Adam(0.0002, 0.5)
losses = ['binary_crossentropy', 'sparse_categorical_crossentropy']
Expand Down Expand Up @@ -117,6 +128,9 @@ def build_discriminator(self):
return Model(img, [validity, label])

def train(self, epochs, batch_size=128, sample_interval=50):

# add extra parameters to log
wandb.config.update({"epochs": epochs, "batch_size": batch_size})

# Load the dataset
(X_train, y_train), (_, _) = mnist.load_data()
Expand Down Expand Up @@ -167,6 +181,9 @@ def train(self, epochs, batch_size=128, sample_interval=50):

# Plot the progress
print ("%d [D loss: %f, acc.: %.2f%%, op_acc: %.2f%%] [G loss: %f]" % (epoch, d_loss[0], 100*d_loss[3], 100*d_loss[4], g_loss[0]))

# Log progress
wandb.log({'D loss': d_loss[0], 'acc': d_loss[3], 'op_acc': d_loss[4], 'G loss': g_loss[0]}, step=epoch)

# If at save interval => save generated image samples
if epoch % sample_interval == 0:
Expand All @@ -188,6 +205,7 @@ def sample_images(self, epoch):
axs[i,j].imshow(gen_imgs[cnt,:,:,0], cmap='gray')
axs[i,j].axis('off')
cnt += 1
wandb.log({"images": fig}, step=epoch)
fig.savefig("images/%d.png" % epoch)
plt.close()

Expand Down
14 changes: 14 additions & 0 deletions bgan/bgan.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from __future__ import print_function, division

import wandb
from keras.datasets import mnist
from keras.layers import Input, Dense, Reshape, Flatten, Dropout
from keras.layers import BatchNormalization, Activation, ZeroPadding2D
Expand All @@ -24,6 +25,15 @@ def __init__(self):
self.img_shape = (self.img_rows, self.img_cols, self.channels)
self.latent_dim = 100

# Log project run
wandb.init(anonymous='allow',
project="Keras-BGAN",
config={"img_rows": self.img_rows,
"img_cols": self.img_cols,
"channels": self.channels,
"img_shape": self.img_shape,
"latent_dim": self.latent_dim})

optimizer = Adam(0.0002, 0.5)

# Build and compile the discriminator
Expand Down Expand Up @@ -139,6 +149,9 @@ def train(self, epochs, batch_size=128, sample_interval=50):

# Plot the progress
print ("%d [D loss: %f, acc.: %.2f%%] [G loss: %f]" % (epoch, d_loss[0], 100*d_loss[1], g_loss))

# Log progress
wandb.log({'D loss': d_loss[0], 'acc': d_loss[1], 'G loss': g_loss}, step=epoch)

# If at save interval => save generated image samples
if epoch % sample_interval == 0:
Expand All @@ -158,6 +171,7 @@ def sample_images(self, epoch):
axs[i,j].imshow(gen_imgs[cnt, :,:,0], cmap='gray')
axs[i,j].axis('off')
cnt += 1
wandb.log({"images": fig}, step=epoch)
fig.savefig("images/mnist_%d.png" % epoch)
plt.close()

Expand Down
17 changes: 17 additions & 0 deletions bigan/bigan.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from __future__ import print_function, division

import wandb
from keras.datasets import mnist
from keras.layers import Input, Dense, Reshape, Flatten, Dropout, multiply, GaussianNoise
from keras.layers import BatchNormalization, Activation, Embedding, ZeroPadding2D
Expand All @@ -23,6 +24,15 @@ def __init__(self):
self.channels = 1
self.img_shape = (self.img_rows, self.img_cols, self.channels)
self.latent_dim = 100

# Log project run
wandb.init(anonymous='allow',
project="Keras-BiGAN",
config={"img_rows": self.img_rows,
"img_cols": self.img_cols,
"channels": self.channels,
"img_shape": self.img_shape,
"latent_dim": self.latent_dim})

optimizer = Adam(0.0002, 0.5)

Expand Down Expand Up @@ -119,6 +129,9 @@ def build_discriminator(self):

def train(self, epochs, batch_size=128, sample_interval=50):

# add extra parameters to log
wandb.config.update({"epochs": epochs, "batch_size": batch_size})

# Load the dataset
(X_train, _), (_, _) = mnist.load_data()

Expand Down Expand Up @@ -160,6 +173,9 @@ def train(self, epochs, batch_size=128, sample_interval=50):

# Plot the progress
print ("%d [D loss: %f, acc: %.2f%%] [G loss: %f]" % (epoch, d_loss[0], 100*d_loss[1], g_loss[0]))

# Log progress
wandb.log({'D loss': d_loss[0], 'acc': d_loss[1], 'G loss': g_loss[0]}, step=epoch)

# If at save interval => save generated image samples
if epoch % sample_interval == 0:
Expand All @@ -179,6 +195,7 @@ def sample_interval(self, epoch):
axs[i,j].imshow(gen_imgs[cnt, :,:,0], cmap='gray')
axs[i,j].axis('off')
cnt += 1
wandb.log({"images": fig}, step=epoch)
fig.savefig("images/mnist_%d.png" % epoch)
plt.close()

Expand Down
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ matplotlib
numpy
scipy
pillow
wandb
#urllib
#skimage
scikit-image
Expand Down