- Download the coco2017 and wikiart datasets
- Generate the tfrecords for training and validation.
python3 -m adain.dataset_utils.create_tfrecords --image_paths_pattern coco/train2017/* --prefix coco-train --output_dir tfrecords
python3 -m adain.dataset_utils.create_tfrecords --image_paths_pattern coco/val2017/* --prefix coco-val --output_dir tfrecords
python3 -m adain.dataset_utils.create_tfrecords --image_paths_pattern wikiart/train/* --prefix wikiart-train --output_dir tfrecords
- Start training with:
python3 -m adain.main --config_path configs/coco-wikiart.json
- To export the saved_model, use: python3 -m adain.export --config_path configs/coco-wikiart.json
# Example: run inference with the exported SavedModel on one
# content/style image pair and visualize the result.
content_paths = glob('assets/images/content/*')
style_paths = glob('assets/images/style/*')

# Load the exported model and grab its default serving signature.
model = tf.saved_model.load('export')
stylize = model.signatures['serving_default']

content_image = read_image(content_paths[3])
style_image = read_image(style_paths[16])

# alpha and resize are forwarded to the serving signature
# (presumably stylization strength and an output-resize toggle — confirm
# against the export code).
stylized_image = stylize(
    content_images=content_image,
    style_images=style_image,
    alpha=tf.constant(1.0),
    resize=tf.constant(True),
)['stylized_images'][0]

# Compose content / style / stylized images side by side and display.
result = prepare_visualization_image(
    content_image[0], style_image[0], stylized_image, figsize=(20, 5))
imshow(result, figsize=(20, 10))
@article{DBLP:journals/corr/HuangB17,
author = {Xun Huang and
Serge J. Belongie},
title = {Arbitrary Style Transfer in Real-time with Adaptive Instance Normalization},
journal = {CoRR},
volume = {abs/1703.06868},
year = {2017},
url = {http://arxiv.org/abs/1703.06868},
archivePrefix = {arXiv},
eprint = {1703.06868},
timestamp = {Mon, 13 Aug 2018 16:46:12 +0200},
biburl = {https://dblp.org/rec/journals/corr/HuangB17.bib},
bibsource = {dblp computer science bibliography, https://dblp.org}
}