@yaroslav-bulatov, thanks for the pointer.
(Note: the original text here was garbled; reconstructed from context.) The problem was that a plain FIFOQueue, combined with shuffle_batch, never let me fully drain the pipeline between datasets — stale filenames from one directory leaked into the next! Switching to a RandomShuffleQueue with min_after_dequeue=0 lets the queue be emptied completely, so each directory can be processed in isolation while still getting shuffled patches.
Below is the working code. Hope it helps someone!
import os
import tensorflow as tf
import numpy as np
from itertools import cycle
# Directory where the learned average images are written.
output_dir = '/my/output/dir'

# Each sublist is one dataset directory; the training loop below learns one
# average image per dataset.
_root = '/path/to/datasets'
my_dirs = [
    ['%s/blacksquares/black_square_%dx%d.png' % (_root, s, s)
     for s in (100, 200, 300)],
    ['%s/whitesquares/white_square_%dx%d.png' % (_root, s, s)
     for s in (100, 200, 300, 400)],
    ['%s/mixedsquares/%s_square_200x200.png' % (_root, colour)
     for colour in ('black', 'white')],
]

# Every training example is a 100x100 single-channel crop.
patch_size = (100, 100, 1)
batch_size = 20
queue_capacity = 1000
# Stage 1: a FIFO queue of filename strings, fed manually via a placeholder
# (no queue runners), so we control exactly which files are in flight.
filename_queue = tf.FIFOQueue(
capacity=queue_capacity,
dtypes=tf.string,
shapes=[[]]
)
filenames_placeholder = tf.placeholder(dtype='string', shape=(None))
filenames_enqueue_op = filename_queue.enqueue_many(filenames_placeholder)
# Stage 2: read one file per sess.run, decode it, and turn it into a
# normalized float patch.
image_reader = tf.WholeFileReader()
key, file = image_reader.read(filename_queue)
uint8image = tf.image.decode_png(file)
cropped_image = tf.random_crop(uint8image, patch_size) # take a random 100x100 crop
float_image = tf.div(tf.cast(cropped_image, tf.float32), 255) # put pixels in the [0,1] range
# Stage 3: a shuffling queue of ready patches. min_after_dequeue=0 is the key
# choice: it allows the queue to be drained completely between datasets.
images_queue = tf.RandomShuffleQueue(
capacity=queue_capacity,
min_after_dequeue=0, # allow queue to become completely empty (as we need to empty it)
dtypes=tf.float32,
shapes=patch_size
)
images_enqueue_op = images_queue.enqueue(float_image)
# Model: a single trainable image; minimizing L2 loss against batches drives
# it toward the per-dataset mean patch.
# NOTE(review): `input` shadows the Python builtin; renaming would require
# touching the training loop below as well.
input = tf.placeholder(shape=(None,) + patch_size, dtype=tf.float32)
avg_image = tf.Variable(np.random.normal(loc=0.5, scale=0.5, size=patch_size).astype(np.float32))
loss = tf.nn.l2_loss(tf.sub(avg_image, input))
train_op = tf.train.AdamOptimizer(2.).minimize(loss)
# Train one average image per dataset directory, writing each result to disk.
# Queues are filled and fully drained by hand between datasets so patches
# from one directory never bleed into the next.
sess = tf.InteractiveSession()
sess.run(tf.initialize_all_variables())
for dir_index, image_paths in enumerate(my_dirs):
    image_paths_cycle = cycle(image_paths)
    # Re-initialize so avg_image restarts from random noise for each dataset.
    sess.run(tf.initialize_all_variables())
    num_epochs = 1000
    for i in range(num_epochs):
        # Top up the filename queue to capacity, cycling over this
        # directory's files. (Renamed from `image_paths`, which shadowed
        # the enumerate loop variable above.)
        size = sess.run(filename_queue.size())
        pending_paths = []
        while size < queue_capacity:
            pending_paths.append(next(image_paths_cycle))
            size += 1
        sess.run(filenames_enqueue_op, feed_dict={filenames_placeholder: pending_paths})
        # Fill the shuffle queue; each run reads, decodes and crops one file.
        size = sess.run(images_queue.size())
        while size < queue_capacity:
            sess.run([images_enqueue_op])
            size += 1
        batch = images_queue.dequeue_many(batch_size).eval()
        _, result, loss_i = sess.run([train_op, avg_image, loss], feed_dict={input: batch})
        print('Iteration {:d}. Loss: {:.2f}'.format(i, loss_i))
        if loss_i < 0.05:
            break
    # Drain both queues completely before the next dataset.
    size = sess.run(filename_queue.size())
    sess.run(filename_queue.dequeue_many(size))
    size = sess.run(filename_queue.size())
    assert size == 0
    size = sess.run(images_queue.size())
    sess.run(images_queue.dequeue_many(size))
    # BUG FIX: this previously re-read filename_queue.size(), so the assert
    # checked the wrong (already-verified) queue instead of images_queue.
    size = sess.run(images_queue.size())
    assert size == 0
    # Write the learned average image as a PNG.
    # NOTE(review): the output filename has no '.png' extension — presumably
    # intentional, but confirm.
    result_image = np.clip(result * 255, 0, 255).astype(np.uint8)
    with open(os.path.join(output_dir, 'result_' + str(dir_index)), 'wb') as result_file:
        result_file.write(tf.image.encode_png(result_image).eval())
print('Happy days!')
exit(0)
As expected (note: original text garbled; reconstructed), result_0 comes out as a black square, result_1 as a white square, and result_2 as a gray square — the average of the black and white images in the mixed directory.