diff --git a/README.md b/README.md
index a58cd8bf..36153746 100644
--- a/README.md
+++ b/README.md
@@ -86,3 +86,7 @@ dictionary with the following keys:
 and images without valid values for `rhip, rshoulder, lhip, lshoulder` are
 ignored.
+
+## Contributions
+
+- A Google Colab demo by Marcos Carbonell can be found here: [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/drive/1nKtymlEhcE4aZs-EOTBaqPPIAKX5RQB8#scrollTo=m0TFJfYd6pOT)
diff --git a/demo/.gitignore b/demo/.gitignore
new file mode 100644
index 00000000..a62418ae
--- /dev/null
+++ b/demo/.gitignore
@@ -0,0 +1,9 @@
+datasets/
+__pycache__/
+log/
+data/
+*.tar.gz
+*.zip
+checkpoints/*
+
+
diff --git a/demo/README.md b/demo/README.md
new file mode 100644
index 00000000..848e8225
--- /dev/null
+++ b/demo/README.md
@@ -0,0 +1,26 @@
+# vunet-demo
+
+- This repository was built as a demo for the [VUnet Paper](https://arxiv.org/pdf/1804.04694.pdf) by Esser et al.
+- The Google Colab demo can be found here: [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/drive/1nKtymlEhcE4aZs-EOTBaqPPIAKX5RQB8#scrollTo=m0TFJfYd6pOT)
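+
+## Quick start
+
+A minimal local sketch, not part of the original demo: it assumes an
+`index.p` produced by the main repository's preprocessing, with image
+paths resolvable relative to it (the `datasets/index.p` path below is
+hypothetical).
+
+```python
+from get_batches import get_batches
+from batches_pg2 import plot_batch
+
+batches = get_batches(
+    shape = (16, 128, 128, 3),        # (batch, height, width, channels)
+    index_path = "datasets/index.p",  # hypothetical path
+    train = True,
+    mask = False)
+# default return_keys: imgs, stickmen, and their normalized crops
+X, C, XN, CN = next(batches)
+plot_batch(X, "imgs.png")
+plot_batch(C, "stickmen.png")
+```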
diff --git a/demo/arial.ttf b/demo/arial.ttf
new file mode 100644
index 00000000..886789b8
Binary files /dev/null and b/demo/arial.ttf differ
diff --git a/demo/batches_pg2.py b/demo/batches_pg2.py
new file mode 100644
index 00000000..f9396dde
--- /dev/null
+++ b/demo/batches_pg2.py
@@ -0,0 +1,411 @@
+from PIL import Image, ImageDraw, ImageFont
+import numpy as np
+import os
+import cv2
+import math
+
+def load_img(path, target_size):
+    """Load image. target_size is specified as (height, width, channels)
+    where channels == 1 means grayscale. uint8 image returned."""
+    img = Image.open(path)
+    grayscale = target_size[2] == 1
+    if grayscale:
+        if img.mode != 'L':
+            img = img.convert('L')
+    else:
+        if img.mode != 'RGB':
+            img = img.convert('RGB')
+    wh_tuple = (target_size[1], target_size[0])
+    if img.size != wh_tuple:
+        img = img.resize(wh_tuple, resample = Image.BILINEAR)
+
+    x = np.asarray(img, dtype = "uint8")
+    if len(x.shape) == 2:
+        x = np.expand_dims(x, -1)
+
+    return x
+
+
+def save_image(X, name, out_dir = "."):
+    """Save image as png under out_dir."""
+    fname = os.path.join(out_dir, name + ".png")
+    Image.fromarray(X).save(fname)
+
+
+def preprocess(x):
+    """From uint8 image to [-1,1]."""
+    return np.cast[np.float32](x / 127.5 - 1.0)
+
+
+def preprocess_mask(x):
+    """From uint8 mask to [0,1]."""
+    mask = np.cast[np.float32](x / 255.0)
+    if mask.shape[-1] == 3:
+        mask = np.amax(mask, axis = -1, keepdims = True)
+    return mask
+
+
+def postprocess(x):
+    """[-1,1] to uint8."""
+    x = (x + 1.0) / 2.0
+    x = np.clip(255 * x, 0, 255)
+    x = np.cast[np.uint8](x)
+    return x
+
+def tile(X, rows, cols):
+    """Tile images for display."""
+    tiling = np.zeros((rows * X.shape[1], cols * X.shape[2], X.shape[3]), dtype = X.dtype)
+    for i in range(rows):
+        for j in range(cols):
+            idx = i * cols + j
+            if idx < X.shape[0]:
+                img = X[idx,...]
+                tiling[
+                        i*X.shape[1]:(i+1)*X.shape[1],
+                        j*X.shape[2]:(j+1)*X.shape[2],
+                        :] = img
+    return tiling
+
+
+def plot_batch(X, out_path):
+    """Save batch of images tiled."""
+    n_channels = X.shape[3]
+    if n_channels > 3:
+        X = X[:,:,:,np.random.choice(n_channels, size = 3)]
+    X = postprocess(X)
+    rc = math.sqrt(X.shape[0])
+    rows = cols = math.ceil(rc)
+    canvas = tile(X, rows, cols)
+    canvas = np.squeeze(canvas)
+    Image.fromarray(canvas).save(out_path)
+
+
+def make_joint_img(img_shape, jo, joints):
+    # three channels: left, right, center
+    scale_factor = img_shape[1] / 128
+    thickness = int(3 * scale_factor)
+    imgs = list()
+    for i in range(3):
+        imgs.append(np.zeros(img_shape[:2], dtype = "uint8"))
+
+    assert("cnose" in jo)
+    # MSCOCO
+    body = ["lhip", "lshoulder", "rshoulder", "rhip"]
+    body_pts = np.array([[joints[jo.index(part),:] for part in body]])
+    if np.min(body_pts) >= 0:
+        body_pts = np.int_(body_pts)
+        cv2.fillPoly(imgs[2], body_pts, 255)
+
+    right_lines = [
+            ("rankle", "rknee"),
+            ("rknee", "rhip"),
+            ("rhip", "rshoulder"),
+            ("rshoulder", "relbow"),
+            ("relbow", "rwrist")]
+    for line in right_lines:
+        l = [jo.index(line[0]), jo.index(line[1])]
+        if np.min(joints[l]) >= 0:
+            a = tuple(np.int_(joints[l[0]]))
+            b = tuple(np.int_(joints[l[1]]))
+            cv2.line(imgs[0], a, b, color = 255, thickness = thickness)
+
+    left_lines = [
+            ("lankle", "lknee"),
+            ("lknee", "lhip"),
+            ("lhip", "lshoulder"),
+            ("lshoulder", "lelbow"),
+            ("lelbow", "lwrist")]
+    for line in left_lines:
+        l = [jo.index(line[0]), jo.index(line[1])]
+        if np.min(joints[l]) >= 0:
+            a = tuple(np.int_(joints[l[0]]))
+            b = tuple(np.int_(joints[l[1]]))
+            cv2.line(imgs[1], a, b, color = 255, thickness = thickness)
+
+    rs = joints[jo.index("rshoulder")]
+    ls = joints[jo.index("lshoulder")]
+    cn = joints[jo.index("cnose")]
+    neck = 0.5*(rs+ls)
+    a = tuple(np.int_(neck))
+    b = tuple(np.int_(cn))
+    if np.min(a) >= 0 and np.min(b) >= 0:
+        cv2.line(imgs[0], a, b, color = 127, thickness = thickness)
+        cv2.line(imgs[1], a, b, color = 127, thickness = thickness)
+
+    cn = tuple(np.int_(cn))
+    leye = tuple(np.int_(joints[jo.index("leye")]))
+    reye = tuple(np.int_(joints[jo.index("reye")]))
+    if np.min(reye) >= 0 and np.min(leye) >= 0 and np.min(cn) >= 0:
+        cv2.line(imgs[0], cn, reye, color = 255, thickness = thickness)
+        cv2.line(imgs[1], cn, leye, color = 255, thickness = thickness)
+
+    img = np.stack(imgs, axis = -1)
+    if img_shape[-1] == 1:
+        img = np.mean(img, axis = -1)[:,:,None]
+    return img
+
+
+def valid_joints(*joints):
+    j = np.stack(joints)
+    return (j >= 0).all()
+
+
+def zoom(img, factor, center = None):
+    shape = img.shape[:2]
+    if center is None or not valid_joints(center):
+        center = np.array(shape) / 2
+    e1 = np.array([1,0])
+    e2 = np.array([0,1])
+
+    dst_center = np.array(center)
+    dst_e1 = e1 * factor
+    dst_e2 = e2 * factor
+
+    src = np.float32([center, center+e1, center+e2])
+    dst = np.float32([dst_center, dst_center+dst_e1, dst_center+dst_e2])
+    M = cv2.getAffineTransform(src, dst)
+
+    # dsize expects (width, height)
+    return cv2.warpAffine(img, M, (shape[1], shape[0]), flags = cv2.INTER_AREA, borderMode = cv2.BORDER_REPLICATE)
+
+
+def get_crop(bpart, joints, jo, wh, o_w, o_h, ar = 1.0):
+    bpart_indices = [jo.index(b) for b in bpart]
+    part_src = np.float32(joints[bpart_indices])
+
+    # fallbacks
+    if not valid_joints(part_src):
+        if bpart[0] == "lhip" and bpart[1] == "lknee":
+            bpart = ["lhip"]
+            bpart_indices = [jo.index(b) for b in bpart]
+            part_src = np.float32(joints[bpart_indices])
+        elif bpart[0] == "rhip" and bpart[1] == "rknee":
+            bpart = ["rhip"]
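+            # hip-only fallback (added comment): with the knee missing we keep
+            # just the hip; the single-point branch below extends the crop
+            # straight down to the bottom of the image.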
bpart_indices = [jo.index(b) for b in bpart] + part_src = np.float32(joints[bpart_indices]) + elif bpart[0] == "lshoulder" and bpart[1] == "rshoulder" and bpart[2] == "cnose": + bpart = ["lshoulder", "rshoulder", "rshoulder"] + bpart_indices = [jo.index(b) for b in bpart] + part_src = np.float32(joints[bpart_indices]) + + + if not valid_joints(part_src): + return None + + if part_src.shape[0] == 1: + # leg fallback + a = part_src[0] + b = np.float32([a[0],o_h - 1]) + part_src = np.float32([a,b]) + + if part_src.shape[0] == 4: + pass + elif part_src.shape[0] == 3: + # lshoulder, rshoulder, cnose + if bpart == ["lshoulder", "rshoulder", "rshoulder"]: + segment = part_src[1] - part_src[0] + normal = np.array([-segment[1],segment[0]]) + if normal[1] > 0.0: + normal = -normal + + a = part_src[0] + normal + b = part_src[0] + c = part_src[1] + d = part_src[1] + normal + part_src = np.float32([a,b,c,d]) + else: + assert bpart == ["lshoulder", "rshoulder", "cnose"] + neck = 0.5*(part_src[0] + part_src[1]) + neck_to_nose = part_src[2] - neck + part_src = np.float32([neck + 2*neck_to_nose, neck]) + + # segment box + segment = part_src[1] - part_src[0] + normal = np.array([-segment[1],segment[0]]) + alpha = 1.0 / 2.0 + a = part_src[0] + alpha*normal + b = part_src[0] - alpha*normal + c = part_src[1] - alpha*normal + d = part_src[1] + alpha*normal + #part_src = np.float32([a,b,c,d]) + part_src = np.float32([b,c,d,a]) + else: + assert part_src.shape[0] == 2 + + segment = part_src[1] - part_src[0] + normal = np.array([-segment[1],segment[0]]) + alpha = ar / 2.0 + a = part_src[0] + alpha*normal + b = part_src[0] - alpha*normal + c = part_src[1] - alpha*normal + d = part_src[1] + alpha*normal + part_src = np.float32([a,b,c,d]) + + dst = np.float32([[0.0,0.0],[0.0,1.0],[1.0,1.0],[1.0,0.0]]) + part_dst = np.float32(wh * dst) + + M = cv2.getPerspectiveTransform(part_src, part_dst) + return M + + +def normalize(imgs, coords, stickmen, jo): + + out_imgs = list() + out_stickmen = list() + + bs = len(imgs) + for i in range(bs): + img = imgs[i] + joints = coords[i] + stickman = stickmen[i] + + h,w = img.shape[:2] + o_h = h + o_w = w + h = h // 4 + w = w // 4 + wh = np.array([w,h]) + wh = np.expand_dims(wh, 0) + + bparts = [ + ["lshoulder","lhip","rhip","rshoulder"], + ["lshoulder", "rshoulder", "cnose"], + ["lshoulder","lelbow"], + ["lelbow", "lwrist"], + ["rshoulder","relbow"], + ["relbow", "rwrist"], + ["lhip", "lknee"], + ["rhip", "rknee"]] + ar = 0.5 + + part_imgs = list() + part_stickmen = list() + for bpart in bparts: + part_img = np.zeros((h,w,3)) + part_stickman = np.zeros((h,w,3)) + M = get_crop(bpart, joints, jo, wh, o_w, o_h, ar) + + if M is not None: + part_img = cv2.warpPerspective(img, M, (h,w), borderMode = cv2.BORDER_REPLICATE) + part_stickman = cv2.warpPerspective(stickman, M, (h,w), borderMode = cv2.BORDER_REPLICATE) + + part_imgs.append(part_img) + part_stickmen.append(part_stickman) + img = np.concatenate(part_imgs, axis = 2) + stickman = np.concatenate(part_stickmen, axis = 2) + + """ + bpart = ["lshoulder","lhip","rhip","rshoulder"] + dst = np.float32([[0.0,0.0],[0.0,1.0],[1.0,1.0],[1.0,0.0]]) + bpart_indices = [jo.index(b) for b in bpart] + part_src = np.float32(joints[bpart_indices]) + part_dst = np.float32(wh * dst) + + M = cv2.getPerspectiveTransform(part_src, part_dst) + img = cv2.warpPerspective(img, M, (h,w), borderMode = cv2.BORDER_REPLICATE) + stickman = cv2.warpPerspective(stickman, M, (h,w), borderMode = cv2.BORDER_REPLICATE) + """ + + """ + # center of possible rescaling + c = 
joints[jo.index("cneck")]
+
+        # find valid body part for scale estimation
+        a = joints[jo.index("lshoulder")]
+        b = joints[jo.index("lhip")]
+        target_length = 33.0
+        if not valid_joints(a,b):
+            a = joints[jo.index("rshoulder")]
+            b = joints[jo.index("rhip")]
+            target_length = 33.0
+        if not valid_joints(a,b):
+            a = joints[jo.index("rshoulder")]
+            b = joints[jo.index("relbow")]
+            target_length = 33.0 / 2
+        if not valid_joints(a,b):
+            a = joints[jo.index("lshoulder")]
+            b = joints[jo.index("lelbow")]
+            target_length = 33.0 / 2
+        if not valid_joints(a,b):
+            a = joints[jo.index("lwrist")]
+            b = joints[jo.index("lelbow")]
+            target_length = 33.0 / 2
+        if not valid_joints(a,b):
+            a = joints[jo.index("rwrist")]
+            b = joints[jo.index("relbow")]
+            target_length = 33.0 / 2
+
+        if valid_joints(a,b):
+            body_length = np.linalg.norm(b - a)
+            factor = target_length / body_length
+            img = zoom(img, factor, center = c)
+            stickman = zoom(stickman, factor, center = c)
+        else:
+            factor = 0.25
+            img = zoom(img, factor, center = c)
+            stickman = zoom(stickman, factor, center = c)
+        """
+
+        out_imgs.append(img)
+        out_stickmen.append(stickman)
+    out_imgs = np.stack(out_imgs)
+    out_stickmen = np.stack(out_stickmen)
+    return out_imgs, out_stickmen
+
+
+def make_mask_img(img_shape, jo, joints):
+    scale_factor = img_shape[1] / 128
+    masks = 3*[None]
+    for i in range(3):
+        masks[i] = np.zeros(img_shape[:2], dtype = "uint8")
+
+    body = ["lhip", "lshoulder", "rshoulder", "rhip"]
+    body_pts = np.array([[joints[jo.index(part),:] for part in body]], dtype = np.int32)
+    cv2.fillPoly(masks[1], body_pts, 255)
+
+    head = ["lshoulder", "chead", "rshoulder"]
+    head_pts = np.array([[joints[jo.index(part),:] for part in head]], dtype = np.int32)
+    cv2.fillPoly(masks[2], head_pts, 255)
+
+    thickness = int(15 * scale_factor)
+    lines = [[
+        ("rankle", "rknee"),
+        ("rknee", "rhip"),
+        ("rhip", "lhip"),
+        ("lhip", "lknee"),
+        ("lknee", "lankle") ], [
+        ("rhip", "rshoulder"),
+        ("rshoulder", "relbow"),
+        ("relbow", "rwrist"),
+        ("rhip", "lhip"),
+        ("rshoulder", "lshoulder"),
+        ("lhip", "lshoulder"),
+        ("lshoulder", "lelbow"),
+        ("lelbow", "lwrist")], [
+        ("rshoulder", "chead"),
+        ("rshoulder", "lshoulder"),
+        ("lshoulder", "chead")]]
+    for i in range(len(lines)):
+        for j in range(len(lines[i])):
+            line = [jo.index(lines[i][j][0]), jo.index(lines[i][j][1])]
+            a = tuple(np.int_(joints[line[0]]))
+            b = tuple(np.int_(joints[line[1]]))
+            cv2.line(masks[i], a, b, color = 255, thickness = thickness)
+
+    for i in range(3):
+        r = int(11 * scale_factor)
+        if r % 2 == 0:
+            r = r + 1
+        masks[i] = cv2.GaussianBlur(masks[i], (r,r), 0)
+        maxmask = np.max(masks[i])
+        if maxmask > 0:
+            masks[i] = masks[i] / maxmask
+    mask = np.stack(masks, axis = -1)
+    mask = np.uint8(255 * mask)
+
+    return mask
+
+
diff --git a/demo/buffered_wrapper.py b/demo/buffered_wrapper.py
new file mode 100644
index 00000000..c97d91fb
--- /dev/null
+++ b/demo/buffered_wrapper.py
@@ -0,0 +1,31 @@
+
+from multiprocessing.pool import ThreadPool
+
+class BufferedWrapper(object):
+    """Fetch next batch asynchronously to avoid bottleneck during GPU
+    training."""
+    def __init__(self, gen):
+        self.gen = gen
+        self.n = gen.n
+        self.pool = ThreadPool(1)
+        self._async_next()
+
+
+    def _async_next(self):
+        self.buffer_ = self.pool.apply_async(next, (self.gen,))
+
+
+    def __next__(self):
+        result = self.buffer_.get()
+        self._async_next()
+        return result
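+
+# Usage sketch (added comment; not part of the original file): any object
+# exposing `__next__` and an `n` attribute can be wrapped, e.g. an IndexFlow:
+#
+#   from index_flow import IndexFlow
+#   flow = IndexFlow((16, 128, 128, 3), "datasets/index.p", train = True)
+#   batches = BufferedWrapper(flow)
+#   X, C = next(batches)  # imgs and stickmen; the next batch prefetches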
diff --git a/demo/config.py b/demo/config.py
new file mode 100644
index 00000000..fde61de2
--- /dev/null
+++ b/demo/config.py
@@ -0,0 +1,9 @@
+import os
+import tensorflow as tf
+
+N_BOXES = 8
+default_log_dir = os.path.join(os.getcwd(), "log")
+config = tf.ConfigProto()
+config.gpu_options.allow_growth = False
+session = tf.Session(config = config)
+
diff --git a/demo/custom_vgg19.py b/demo/custom_vgg19.py
new file mode 100644
index 00000000..ec94a994
--- /dev/null
+++ b/demo/custom_vgg19.py
@@ -0,0 +1,243 @@
+# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+# pylint: disable=invalid-name
+"""VGG19 model for Keras.
+
+# Reference
+
+- [Very Deep Convolutional Networks for Large-Scale Image
+Recognition](https://arxiv.org/abs/1409.1556)
+
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import warnings
+
+from tensorflow.contrib.keras.python.keras import backend as K
+from tensorflow.contrib.keras.python.keras.applications.imagenet_utils import _obtain_input_shape
+from tensorflow.contrib.keras.python.keras.applications.imagenet_utils import decode_predictions  # pylint: disable=unused-import
+from tensorflow.contrib.keras.python.keras.applications.imagenet_utils import preprocess_input  # pylint: disable=unused-import
+from tensorflow.contrib.keras.python.keras.engine.topology import get_source_inputs
+from tensorflow.contrib.keras.python.keras.layers import Conv2D
+from tensorflow.contrib.keras.python.keras.layers import Dense
+from tensorflow.contrib.keras.python.keras.layers import Flatten
+from tensorflow.contrib.keras.python.keras.layers import GlobalAveragePooling2D
+from tensorflow.contrib.keras.python.keras.layers import GlobalMaxPooling2D
+from tensorflow.contrib.keras.python.keras.layers import Input
+from tensorflow.contrib.keras.python.keras.layers import AveragePooling2D as MaxPooling2D
+from tensorflow.contrib.keras.python.keras.models import Model
+from tensorflow.contrib.keras.python.keras.utils import layer_utils
+from tensorflow.contrib.keras.python.keras.utils.data_utils import get_file
+
+
+WEIGHTS_PATH = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg19_weights_tf_dim_ordering_tf_kernels.h5'
+WEIGHTS_PATH_NO_TOP = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg19_weights_tf_dim_ordering_tf_kernels_notop.h5'
+
+
+def VGG19(include_top=True,
+          weights='imagenet',
+          input_tensor=None,
+          input_shape=None,
+          pooling=None,
+          classes=1000):
+  """Instantiates the VGG19 architecture.
+
+  Optionally loads weights pre-trained
+  on ImageNet. Note that when using TensorFlow,
+  for best performance you should set
+  `image_data_format="channels_last"` in your Keras config
+  at ~/.keras/keras.json.
+
+  The model and the weights are compatible with both
+  TensorFlow and Theano. The data format
+  convention used by the model is the one
+  specified in your Keras config file.
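+
+  Note (added): this copy of the file imports `AveragePooling2D` under the
+  name `MaxPooling2D`, so every pooling layer below is actually average
+  pooling -- a common substitution when the network feeds perceptual and
+  Gram-matrix losses rather than a classifier.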
+
+  Arguments:
+      include_top: whether to include the 3 fully-connected
+          layers at the top of the network.
+      weights: one of `None` (random initialization)
+          or "imagenet" (pre-training on ImageNet).
+      input_tensor: optional Keras tensor (i.e. output of `layers.Input()`)
+          to use as image input for the model.
+      input_shape: optional shape tuple, only to be specified
+          if `include_top` is False (otherwise the input shape
+          has to be `(224, 224, 3)` (with `channels_last` data format)
+          or `(3, 224, 224)` (with `channels_first` data format)).
+          It should have exactly 3 input channels,
+          and width and height should be no smaller than 48.
+          E.g. `(200, 200, 3)` would be one valid value.
+      pooling: Optional pooling mode for feature extraction
+          when `include_top` is `False`.
+          - `None` means that the output of the model will be
+              the 4D tensor output of the
+              last convolutional layer.
+          - `avg` means that global average pooling
+              will be applied to the output of the
+              last convolutional layer, and thus
+              the output of the model will be a 2D tensor.
+          - `max` means that global max pooling will
+              be applied.
+      classes: optional number of classes to classify images
+          into, only to be specified if `include_top` is True, and
+          if no `weights` argument is specified.
+
+  Returns:
+      A Keras model instance.
+
+  Raises:
+      ValueError: in case of invalid argument for `weights`,
+          or invalid input shape.
+  """
+  if weights not in {'imagenet', None}:
+    raise ValueError('The `weights` argument should be either '
+                     '`None` (random initialization) or `imagenet` '
+                     '(pre-training on ImageNet).')
+
+  if weights == 'imagenet' and include_top and classes != 1000:
+    raise ValueError('If using `weights` as imagenet with `include_top`'
+                     ' as true, `classes` should be 1000')
+  # Determine proper input shape
+  input_shape = _obtain_input_shape(
+      input_shape,
+      default_size=224,
+      min_size=48,
+      data_format=K.image_data_format(),
+      include_top=include_top)
+
+  if input_tensor is None:
+    img_input = Input(shape=input_shape)
+  else:
+    img_input = Input(tensor=input_tensor, shape=input_shape)
+
+  # Block 1
+  x = Conv2D(
+      64, (3, 3), activation='relu', padding='same',
+      name='block1_conv1')(img_input)
+  x = Conv2D(
+      64, (3, 3), activation='relu', padding='same', name='block1_conv2')(x)
+  x = MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x)
+
+  # Block 2
+  x = Conv2D(
+      128, (3, 3), activation='relu', padding='same', name='block2_conv1')(x)
+  x = Conv2D(
+      128, (3, 3), activation='relu', padding='same', name='block2_conv2')(x)
+  x = MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x)
+
+  # Block 3
+  x = Conv2D(
+      256, (3, 3), activation='relu', padding='same', name='block3_conv1')(x)
+  x = Conv2D(
+      256, (3, 3), activation='relu', padding='same', name='block3_conv2')(x)
+  x = Conv2D(
+      256, (3, 3), activation='relu', padding='same', name='block3_conv3')(x)
+  x = Conv2D(
+      256, (3, 3), activation='relu', padding='same', name='block3_conv4')(x)
+  x = MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool')(x)
+
+  # Block 4
+  x = Conv2D(
+      512, (3, 3), activation='relu', padding='same', name='block4_conv1')(x)
+  x = Conv2D(
+      512, (3, 3), activation='relu', padding='same', name='block4_conv2')(x)
+  x = Conv2D(
+      512, (3, 3), activation='relu', padding='same', name='block4_conv3')(x)
+  x = Conv2D(
+      512, (3, 3), activation='relu', padding='same', name='block4_conv4')(x)
+  x = MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool')(x)
+
+  # Block 5
+  x = Conv2D(
+      512, (3, 3), activation='relu', padding='same', name='block5_conv1')(x)
+  x = Conv2D(
+      512, (3, 3), activation='relu', padding='same', name='block5_conv2')(x)
+  x = Conv2D(
+      512, (3, 3), activation='relu', padding='same', name='block5_conv3')(x)
+  x = Conv2D(
+      512, (3, 3), activation='relu', padding='same', name='block5_conv4')(x)
+  x = MaxPooling2D((2, 2), strides=(2, 2), name='block5_pool')(x)
+
+  if include_top:
+    # Classification block
+    x = Flatten(name='flatten')(x)
+    x = Dense(4096, activation='relu', name='fc1')(x)
+    x = Dense(4096, activation='relu', name='fc2')(x)
+    x = Dense(classes, activation='softmax', name='predictions')(x)
+  else:
+    if pooling == 'avg':
+      x = GlobalAveragePooling2D()(x)
+    elif pooling == 'max':
+      x = GlobalMaxPooling2D()(x)
+
+  # Ensure that the model takes into account
+  # any potential predecessors of `input_tensor`.
+  if input_tensor is not None:
+    inputs = get_source_inputs(input_tensor)
+  else:
+    inputs = img_input
+  # Create model.
+  model = Model(inputs, x, name='vgg19')
+
+  # load weights
+  if weights == 'imagenet':
+    if include_top:
+      weights_path = get_file(
+          'vgg19_weights_tf_dim_ordering_tf_kernels.h5',
+          WEIGHTS_PATH,
+          cache_subdir='models')
+    else:
+      weights_path = get_file(
+          'vgg19_weights_tf_dim_ordering_tf_kernels_notop.h5',
+          WEIGHTS_PATH_NO_TOP,
+          cache_subdir='models')
+    model.load_weights(weights_path)
+    if K.backend() == 'theano':
+      layer_utils.convert_all_kernels_in_model(model)
+
+    if K.image_data_format() == 'channels_first':
+      if include_top:
+        maxpool = model.get_layer(name='block5_pool')
+        shape = maxpool.output_shape[1:]
+        dense = model.get_layer(name='fc1')
+        layer_utils.convert_dense_weights_data_format(dense, shape,
+                                                      'channels_first')
+
+      if K.backend() == 'tensorflow':
+        warnings.warn('You are using the TensorFlow backend, yet you '
+                      'are using the Theano '
+                      'image data format convention '
+                      '(`image_data_format="channels_first"`). '
+                      'For best performance, set '
+                      '`image_data_format="channels_last"` in '
+                      'your Keras config '
+                      'at ~/.keras/keras.json.')
+  return model
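+
+
+# Usage sketch (added; hypothetical, not part of the upstream file): build a
+# feature extractor over a few named blocks of this average-pooling variant.
+# Weights are fetched from WEIGHTS_PATH_NO_TOP on first use.
+#
+#   base = VGG19(include_top=False, weights='imagenet')
+#   feats = Model(base.input, [base.get_layer(n).output
+#                              for n in ('block1_conv2', 'block3_conv2')])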
Available layers: {}".format( + k, self.layer_names)) + features = [self.base_model.get_layer(k).output for k in feature_layers] + self.model = Model( + inputs = self.base_model.input, + outputs = features) + if feature_weights is None: + feature_weights = len(feature_layers) * [1.0] + gram_weights = len(feature_layers) * [0.1] + self.feature_weights = feature_weights + self.gram_weights = gram_weights + assert len(self.feature_weights) == len(features) + + self.variables = self.base_model.weights + + + def extract_features(self, x): + """x should be rgb in [-1,1].""" + x = preprocess_input(x) + features = self.model.predict(x) + return features + + + def make_feature_ops(self, x): + """x should be rgb tensor in [-1,1].""" + x = preprocess_input(x) + features = self.model(x) + return features + + + def grams(self, fs): + gs = list() + for f in fs: + bs, h, w, c = f.shape.as_list() + f = tf.reshape(f, [bs, h*w, c]) + ft = tf.transpose(f, [0,2,1]) + g = tf.matmul(ft, f) + g = g / (4.0*h*w) + gs.append(g) + return gs + + + def make_loss_op(self, x, y): + """x, y should be rgb tensors in [-1,1].""" + x = preprocess_input(x) + x_features = self.model(x) + + y = preprocess_input(y) + y_features = self.model(y) + + x_grams = self.grams(x_features) + y_grams = self.grams(y_features) + + losses = [ + tf.reduce_mean(tf.abs(xf - yf)) for xf, yf in zip( + x_features, y_features)] + gram_losses = [ + tf.reduce_mean(tf.abs(xg - yg)) for xg, yg in zip( + x_grams, y_grams)] + + for i in range(len(losses)): + losses[i] = self.feature_weights[i] * losses[i] + gram_losses[i] = self.gram_weights[i] * gram_losses[i] + loss = tf.add_n(losses) + tf.add_n(gram_losses) + + self.losses = losses + self.gram_losses = gram_losses + + return loss + + +class PixelFeatures(object): + def __init__(self, session): + self.variables = [] + + + def make_loss_op(self, x, y): + """x, y should be rgb tensors in [-1,1].""" + x_features = [x] + y_features = [y] + + losses = [ + tf.reduce_mean( + tf.reduce_sum( + tf.abs(xf - yf), + axis = [1,2,3])) + for xf, yf in zip(x_features, y_features)] + + self.feature_weights = 11*[1000.0/(128*128*3)] + for i in range(len(losses)): + losses[i] = self.feature_weights[i] * losses[i] + + loss = tf.add_n(losses) + + self.losses = losses + + return loss + + +class JigsawFeatures(object): + def __init__(self, session): + self.cfn = models.make_model( + "cfn", models.cfn_features, + n_scales = 5, + max_filters = 256) + + x_init = tf.placeholder( + tf.float32, + shape = [64,128,128,3]) + _ = self.cfn(x_init, init = True, dropout_p = 0.5) + + self.variables = [v for v in tf.trainable_variables() if + v.name.startswith("cfn")] + self.saver = tf.train.Saver(self.variables) + restore_path = "../jigsaw/log/2017-08-16T14:27:04/checkpoints/model.ckpt-100000" + self.saver.restore(session, restore_path) + + self.kwargs = {"init": False, "dropout_p": 0.0} + + + def make_loss_op(self, x, y): + """x, y should be rgb tensors in [-1,1].""" + x_features = [x] + y_features = [y] + x_features += self.cfn(x, **self.kwargs) + y_features += self.cfn(y, **self.kwargs) + + losses = [ + tf.reduce_mean( + tf.reduce_sum( + tf.abs(xf - yf), + axis = [1,2,3])) + for xf, yf in zip(x_features, y_features)] + + self.feature_weights = 11*[1.0/(128*128*3)] + for i in range(len(losses)): + losses[i] = self.feature_weights[i] * losses[i] + + loss = tf.add_n(losses) + + self.losses = losses + + return loss + + +class AttrFeatures(object): + def __init__(self, session): + self.fnet = models.make_model( + "encoder", 
+                models.feature_encoder,
+                n_scales = 5,
+                max_filters = 512)
+
+        x_init = tf.placeholder(
+                tf.float32,
+                shape = [64,128,128,3])
+        _ = self.fnet(x_init, init = True, dropout_p = 0.5)
+
+        self.variables = [v for v in tf.trainable_variables() if
+                v.name.startswith("encoder")]
+        self.saver = tf.train.Saver(self.variables)
+        restore_path = "log/2017-08-13T21:33:12/checkpoints/model.ckpt-100000"
+        self.saver.restore(session, restore_path)
+
+        self.kwargs = {"init": False, "dropout_p": 0.0}
+
+
+    def make_loss_op(self, x, y):
+        """x, y should be rgb tensors in [-1,1]."""
+        x_features = [x]
+        y_features = [y]
+        x_features += self.fnet(x, **self.kwargs)
+        y_features += self.fnet(y, **self.kwargs)
+
+        losses = [
+                tf.reduce_mean(
+                    tf.reduce_sum(
+                        tf.abs(xf - yf),
+                        axis = [1,2,3]))
+                for xf, yf in zip(x_features, y_features)]
+
+        self.feature_weights = 11*[1.0/(128*128*3)]
+        for i in range(len(losses)):
+            losses[i] = self.feature_weights[i] * losses[i]
+
+        loss = tf.add_n(losses)
+
+        self.losses = losses
+
+        return loss
+
+
+if __name__ == "__main__":
+    import sys
+    from tensorflow.contrib.keras.api.keras.preprocessing import (
+            image)
+
+    s = tf.Session()
+
+    img_path = sys.argv[1]
+    img = image.load_img(img_path, target_size=(128, 128))
+    x = image.img_to_array(img)
+    x = np.expand_dims(x, axis=0)
+    x = x / 255.0 * 2.0 - 1.0
+    print(x.shape, np.min(x), np.max(x))
+    x = tf.constant(x)
+
+    feature_layers = [
+        "input_1", "block1_conv1", "block1_conv2", "block1_pool", "block2_conv2",
+        "block3_conv2", "block4_conv2", "block5_conv2"]
+    vgg19 = VGG19Features(s, feature_layers)
+    fmaps = vgg19.make_feature_ops(x)
+
+    for i in range(len(fmaps)):
+        print(i)
+        f = fmaps[i].eval(session=s)
+        print(f.shape)
diff --git a/demo/events.out.tfevents.1509984274.hcigpu02 b/demo/events.out.tfevents.1509984274.hcigpu02
new file mode 100644
index 00000000..23a3425a
Binary files /dev/null and b/demo/events.out.tfevents.1509984274.hcigpu02 differ
diff --git a/demo/get_batches.py b/demo/get_batches.py
new file mode 100644
index 00000000..c94a2526
--- /dev/null
+++ b/demo/get_batches.py
@@ -0,0 +1,53 @@
+
+import sys
+from batches_pg2 import plot_batch
+from index_flow import IndexFlow
+from buffered_wrapper import BufferedWrapper
+
+def get_batches(
+        shape,
+        index_path,
+        train,
+        mask,
+        fill_batches = True,
+        shuffle = True,
+        return_keys = ["imgs", "joints", "norm_imgs", "norm_joints"]):
+    """Buffered IndexFlow."""
+    flow = IndexFlow(shape, index_path, train, mask, fill_batches, shuffle, return_keys)
+    return BufferedWrapper(flow)
+
+
+if __name__ == "__main__":
+    if not len(sys.argv) == 2:
+        print("Usage: {} <path to index.p>".format(sys.argv[0]))
+        exit(1)
+
+    batches = get_batches(
+            shape = (16, 128, 128, 3),
+            index_path = sys.argv[1],
+            train = True,
+            mask = False,
+            shuffle = True)
+    # default return_keys yields four arrays
+    X, C, XN, CN = next(batches)
+    plot_batch(X, "unmasked.png")
+    plot_batch(C, "joints.png")
+
+    """
+    batches = get_batches(
+        shape = (16, 128, 128, 3),
+        index_path = sys.argv[1],
+        train = True,
+        mask = True)
+    X, C = next(batches)
+    plot_batch(X, "masked.png")
+
+    batches = get_batches(
+        shape = (16, 32, 32, 3),
+        index_path = sys.argv[1],
+        train = True,
+        mask = True)
+    X, C = next(batches)
+    plot_batch(X, "masked32.png")
+    plot_batch(C, "joints32.png")
+    """
diff --git a/demo/index_flow.py b/demo/index_flow.py
new file mode 100644
index 00000000..ad480490
--- /dev/null
+++ b/demo/index_flow.py
@@ -0,0 +1,138 @@
+import numpy as np
+import pickle
+import os
+
+from batches_pg2 import (
+    valid_joints,
+    load_img,
+    preprocess,
+
preprocess_mask, + make_joint_img, + normalize, + make_mask_img +) + +class IndexFlow(object): + """Batches from index file.""" + + def __init__( + self, + shape, + index_path, + train, + mask = True, + fill_batches = True, + shuffle = True, + return_keys = ["imgs", "joints"]): + self.shape = shape + self.batch_size = self.shape[0] + self.img_shape = self.shape[1:] + with open(index_path, "rb") as f: + self.index = pickle.load(f) + self.basepath = os.path.dirname(index_path) + self.train = train + self.mask = mask + self.fill_batches = fill_batches + self.shuffle_ = shuffle + self.return_keys = return_keys + + self.jo = self.index["joint_order"] + # rescale joint coordinates to image shape + h,w = self.img_shape[:2] + wh = np.array([[[w,h]]]) + self.index["joints"] = self.index["joints"] * wh + + self.indices = np.array( + [i for i in range(len(self.index["train"])) + if self._filter(i)]) + + self.n = self.indices.shape[0] + self.shuffle() + + + def _filter(self, i): + good = True + good = good and (self.index["train"][i] == self.train) + joints = self.index["joints"][i] + required_joints = ["lshoulder","rshoulder","lhip","rhip"] + joint_indices = [self.jo.index(b) for b in required_joints] + joints = np.float32(joints[joint_indices]) + good = good and valid_joints(joints) + return good + + + def __next__(self): + batch = dict() + + # get indices for batch + batch_start, batch_end = self.batch_start, self.batch_start + self.batch_size + batch_indices = self.indices[batch_start:batch_end] + if self.fill_batches and batch_indices.shape[0] != self.batch_size: + n_missing = self.batch_size - batch_indices.shape[0] + batch_indices = np.concatenate([batch_indices, self.indices[:n_missing]], axis = 0) + assert(batch_indices.shape[0] == self.batch_size) + batch_indices = np.array(batch_indices) + batch["indices"] = batch_indices + + # prepare next batch + if batch_end >= self.n: + self.shuffle() + else: + self.batch_start = batch_end + + # prepare batch data + # load images + batch["imgs"] = list() + for i in batch_indices: + fname = self.index["imgs"][i] + # traintest = "train" if self.train else "test" + # path = os.path.join(self.basepath, "..", "original", "filted_up_{}".format(traintest), fname) + path = os.path.join(self.basepath, fname) + batch["imgs"].append(load_img(path, target_size = self.img_shape)) + batch["imgs"] = np.stack(batch["imgs"]) + batch["imgs"] = preprocess(batch["imgs"]) + + # load joint coordinates + batch["joints_coordinates"] = [self.index["joints"][i] for i in batch_indices] + + # generate stickmen images from coordinates + batch["joints"] = list() + for joints in batch["joints_coordinates"]: + img = make_joint_img(self.img_shape, self.jo, joints) + batch["joints"].append(img) + batch["joints"] = np.stack(batch["joints"]) + batch["joints"] = preprocess(batch["joints"]) + + if False and self.mask: + if "masks" in self.index: + batch_masks = list() + for i in batch_indices: + fname = self.index["masks"][i] + path = os.path.join(self.basepath, fname) + batch_masks.append(load_img(path, target_size = self.img_shape)) + else: + # generate mask based on joint coordinates + batch_masks = list() + for joints in batch["joints_coordinates"]: + mask = make_mask_img(self.img_shape, self.jo, joints) + batch_masks.append(mask) + batch["masks"] = np.stack(batch_masks) + batch["masks"] = preprocess_mask(batch["masks"]) + # apply mask to images + batch["imgs"] = batch["imgs"] * batch["masks"] + + + imgs, joints = normalize(batch["imgs"], batch["joints_coordinates"], batch["joints"], 
self.jo) + batch["norm_imgs"] = imgs + batch["norm_joints"] = joints + + batch_list = [batch[k] for k in self.return_keys] + return batch_list + + + def shuffle(self): + self.batch_start = 0 + if self.shuffle_: + np.random.shuffle(self.indices) + + diff --git a/demo/log.txt b/demo/log.txt new file mode 100644 index 00000000..97ec97c4 --- /dev/null +++ b/demo/log.txt @@ -0,0 +1,7307 @@ +INFO:__main__:Namespace(batch_size=8, checkpoint='log/2017-10-25T16:31:50/checkpoints/model.ckpt-96000', ckpt_freq=1000, data_index='/export/home/pesser/groupstorage/data/PG2/processed00/index.p', drop_prob=0.1, init_batches=4, log_dir='/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log', log_freq=250, lr=0.001, lr_decay_begin=1000, lr_decay_end=100000, mask=True, mode='train', spatial_size=256, test_freq=1000) +INFO:__main__:Number of training samples: 29365 +INFO:__main__:Number of validation samples: 7336 +INFO:__main__:Latent shape: [8, 4, 4, 128] +INFO:__main__:Latent shape: [8, 8, 8, 128] +INFO:__main__:Defined graph +INFO:__main__:Restored model from log/2017-10-25T16:31:50/checkpoints/model.ckpt-96000 +INFO:__main__:global_step: 1 +INFO:__main__:kl_loss: 60.20652770996094 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 2023.456787109375 +INFO:__main__:loss: 2023.456787109375 +INFO:__main__:vgg_gram_loss_0: 12.149802207946777 +INFO:__main__:vgg_gram_loss_1: 61.064796447753906 +INFO:__main__:vgg_gram_loss_2: 59.43148422241211 +INFO:__main__:vgg_gram_loss_3: 35.10826873779297 +INFO:__main__:vgg_gram_loss_4: 100.04723358154297 +INFO:__main__:vgg_gram_loss_5: 0.1606256514787674 +INFO:__main__:vgg_loss_0: 10.296666145324707 +INFO:__main__:vgg_loss_1: 22.996049880981445 +INFO:__main__:vgg_loss_2: 32.64899444580078 +INFO:__main__:vgg_loss_3: 24.495223999023438 +INFO:__main__:vgg_loss_4: 44.76327133178711 +INFO:__main__:vgg_loss_5: 1.5289274454116821 +INFO:__main__:validation_loss: 3355.6640625 +INFO:__main__:global_step: 251 +INFO:__main__:kl_loss: 173.14788818359375 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1666.423583984375 +INFO:__main__:loss: 1666.4237060546875 +INFO:__main__:vgg_gram_loss_0: 20.077381134033203 +INFO:__main__:vgg_gram_loss_1: 32.40482711791992 +INFO:__main__:vgg_gram_loss_2: 27.869312286376953 +INFO:__main__:vgg_gram_loss_3: 20.718557357788086 +INFO:__main__:vgg_gram_loss_4: 78.97044372558594 +INFO:__main__:vgg_gram_loss_5: 0.16276900470256805 +INFO:__main__:vgg_loss_0: 13.030125617980957 +INFO:__main__:vgg_loss_1: 25.78565788269043 +INFO:__main__:vgg_loss_2: 35.54685974121094 +INFO:__main__:vgg_loss_3: 27.092498779296875 +INFO:__main__:vgg_loss_4: 49.93857955932617 +INFO:__main__:vgg_loss_5: 1.687692642211914 +INFO:__main__:validation_loss: 1993.6649169921875 +INFO:__main__:global_step: 501 +INFO:__main__:kl_loss: 265.14068603515625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 2187.02294921875 +INFO:__main__:loss: 2187.023193359375 +INFO:__main__:vgg_gram_loss_0: 10.400809288024902 +INFO:__main__:vgg_gram_loss_1: 39.22432327270508 +INFO:__main__:vgg_gram_loss_2: 39.60877227783203 +INFO:__main__:vgg_gram_loss_3: 32.36015319824219 +INFO:__main__:vgg_gram_loss_4: 115.88433837890625 +INFO:__main__:vgg_gram_loss_5: 0.19620363414287567 +INFO:__main__:vgg_loss_0: 15.687895774841309 +INFO:__main__:vgg_loss_1: 35.55717849731445 +INFO:__main__:vgg_loss_2: 48.08674240112305 +INFO:__main__:vgg_loss_3: 35.310001373291016 +INFO:__main__:vgg_loss_4: 62.99497604370117 
+INFO:__main__:vgg_loss_5: 2.0931894779205322 +INFO:__main__:validation_loss: 2363.730224609375 +INFO:__main__:global_step: 751 +INFO:__main__:kl_loss: 340.3248291015625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1956.13134765625 +INFO:__main__:loss: 1956.1317138671875 +INFO:__main__:vgg_gram_loss_0: 18.76248550415039 +INFO:__main__:vgg_gram_loss_1: 41.23588180541992 +INFO:__main__:vgg_gram_loss_2: 38.775596618652344 +INFO:__main__:vgg_gram_loss_3: 26.62955665588379 +INFO:__main__:vgg_gram_loss_4: 94.28778076171875 +INFO:__main__:vgg_gram_loss_5: 0.16194427013397217 +INFO:__main__:vgg_loss_0: 14.774397850036621 +INFO:__main__:vgg_loss_1: 30.749303817749023 +INFO:__main__:vgg_loss_2: 41.24767303466797 +INFO:__main__:vgg_loss_3: 30.020872116088867 +INFO:__main__:vgg_loss_4: 52.83884811401367 +INFO:__main__:vgg_loss_5: 1.7419308423995972 +INFO:__main__:validation_loss: 2045.3822021484375 +INFO:__main__:Saved model to /net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 1001 +INFO:__main__:kl_loss: 364.91424560546875 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1847.637939453125 +INFO:__main__:loss: 1847.6383056640625 +INFO:__main__:vgg_gram_loss_0: 13.954872131347656 +INFO:__main__:vgg_gram_loss_1: 35.536136627197266 +INFO:__main__:vgg_gram_loss_2: 31.152179718017578 +INFO:__main__:vgg_gram_loss_3: 25.605749130249023 +INFO:__main__:vgg_gram_loss_4: 96.90245056152344 +INFO:__main__:vgg_gram_loss_5: 0.18925856053829193 +INFO:__main__:vgg_loss_0: 13.134907722473145 +INFO:__main__:vgg_loss_1: 30.432283401489258 +INFO:__main__:vgg_loss_2: 39.65081024169922 +INFO:__main__:vgg_loss_3: 28.924427032470703 +INFO:__main__:vgg_loss_4: 52.24839401245117 +INFO:__main__:vgg_loss_5: 1.7961026430130005 +INFO:__main__:validation_loss: 2097.765869140625 +INFO:__main__:global_step: 1251 +INFO:__main__:kl_loss: 398.1080627441406 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1941.419677734375 +INFO:__main__:loss: 1941.4200439453125 +INFO:__main__:vgg_gram_loss_0: 18.83348274230957 +INFO:__main__:vgg_gram_loss_1: 33.11129379272461 +INFO:__main__:vgg_gram_loss_2: 29.547454833984375 +INFO:__main__:vgg_gram_loss_3: 22.545629501342773 +INFO:__main__:vgg_gram_loss_4: 94.59038543701172 +INFO:__main__:vgg_gram_loss_5: 0.17973926663398743 +INFO:__main__:vgg_loss_0: 15.769280433654785 +INFO:__main__:vgg_loss_1: 32.75959777832031 +INFO:__main__:vgg_loss_2: 45.70271301269531 +INFO:__main__:vgg_loss_3: 33.16744613647461 +INFO:__main__:vgg_loss_4: 60.09803009033203 +INFO:__main__:vgg_loss_5: 1.9788700342178345 +INFO:__main__:validation_loss: 2015.1007080078125 +INFO:__main__:global_step: 1501 +INFO:__main__:kl_loss: 516.0904541015625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1786.6876220703125 +INFO:__main__:loss: 1786.6881103515625 +INFO:__main__:vgg_gram_loss_0: 14.205286026000977 +INFO:__main__:vgg_gram_loss_1: 27.378259658813477 +INFO:__main__:vgg_gram_loss_2: 33.25638198852539 +INFO:__main__:vgg_gram_loss_3: 23.698928833007812 +INFO:__main__:vgg_gram_loss_4: 87.22620391845703 +INFO:__main__:vgg_gram_loss_5: 0.16732317209243774 +INFO:__main__:vgg_loss_0: 13.700509071350098 +INFO:__main__:vgg_loss_1: 30.18804359436035 +INFO:__main__:vgg_loss_2: 41.626312255859375 +INFO:__main__:vgg_loss_3: 30.353256225585938 +INFO:__main__:vgg_loss_4: 53.790164947509766 +INFO:__main__:vgg_loss_5: 1.746876835823059 
+INFO:__main__:validation_loss: 2140.793701171875 +INFO:__main__:global_step: 1751 +INFO:__main__:kl_loss: 517.904052734375 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1886.168212890625 +INFO:__main__:loss: 1886.168701171875 +INFO:__main__:vgg_gram_loss_0: 18.17301368713379 +INFO:__main__:vgg_gram_loss_1: 44.91527557373047 +INFO:__main__:vgg_gram_loss_2: 36.753273010253906 +INFO:__main__:vgg_gram_loss_3: 25.398351669311523 +INFO:__main__:vgg_gram_loss_4: 94.37198638916016 +INFO:__main__:vgg_gram_loss_5: 0.16551266610622406 +INFO:__main__:vgg_loss_0: 11.549256324768066 +INFO:__main__:vgg_loss_1: 25.97317886352539 +INFO:__main__:vgg_loss_2: 37.60198974609375 +INFO:__main__:vgg_loss_3: 28.477455139160156 +INFO:__main__:vgg_loss_4: 52.14436340332031 +INFO:__main__:vgg_loss_5: 1.7099798917770386 +INFO:__main__:validation_loss: 2123.537841796875 +INFO:__main__:Saved model to /net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 2001 +INFO:__main__:kl_loss: 724.5712280273438 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 2327.536865234375 +INFO:__main__:loss: 2327.53759765625 +INFO:__main__:vgg_gram_loss_0: 17.133886337280273 +INFO:__main__:vgg_gram_loss_1: 59.6475715637207 +INFO:__main__:vgg_gram_loss_2: 41.20440673828125 +INFO:__main__:vgg_gram_loss_3: 34.825626373291016 +INFO:__main__:vgg_gram_loss_4: 111.4234848022461 +INFO:__main__:vgg_gram_loss_5: 0.21071158349514008 +INFO:__main__:vgg_loss_0: 16.122833251953125 +INFO:__main__:vgg_loss_1: 39.45900344848633 +INFO:__main__:vgg_loss_2: 48.76203918457031 +INFO:__main__:vgg_loss_3: 34.72369384765625 +INFO:__main__:vgg_loss_4: 59.98519515991211 +INFO:__main__:vgg_loss_5: 2.008913993835449 +INFO:__main__:validation_loss: 2361.58251953125 +INFO:__main__:global_step: 2251 +INFO:__main__:kl_loss: 701.630126953125 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 2170.956298828125 +INFO:__main__:loss: 2170.95703125 +INFO:__main__:vgg_gram_loss_0: 17.107728958129883 +INFO:__main__:vgg_gram_loss_1: 36.949127197265625 +INFO:__main__:vgg_gram_loss_2: 36.3306770324707 +INFO:__main__:vgg_gram_loss_3: 31.79855728149414 +INFO:__main__:vgg_gram_loss_4: 114.5477523803711 +INFO:__main__:vgg_gram_loss_5: 0.2067674845457077 +INFO:__main__:vgg_loss_0: 16.155357360839844 +INFO:__main__:vgg_loss_1: 33.5197868347168 +INFO:__main__:vgg_loss_2: 47.23255157470703 +INFO:__main__:vgg_loss_3: 35.08713150024414 +INFO:__main__:vgg_loss_4: 63.1865119934082 +INFO:__main__:vgg_loss_5: 2.0693235397338867 +INFO:__main__:validation_loss: 1757.99365234375 +INFO:__main__:global_step: 2501 +INFO:__main__:kl_loss: 813.126953125 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1948.814697265625 +INFO:__main__:loss: 1948.8155517578125 +INFO:__main__:vgg_gram_loss_0: 13.936678886413574 +INFO:__main__:vgg_gram_loss_1: 32.551326751708984 +INFO:__main__:vgg_gram_loss_2: 30.84714698791504 +INFO:__main__:vgg_gram_loss_3: 24.257200241088867 +INFO:__main__:vgg_gram_loss_4: 98.40998077392578 +INFO:__main__:vgg_gram_loss_5: 0.19481153786182404 +INFO:__main__:vgg_loss_0: 14.516462326049805 +INFO:__main__:vgg_loss_1: 31.28249740600586 +INFO:__main__:vgg_loss_2: 45.52101516723633 +INFO:__main__:vgg_loss_3: 34.17013168334961 +INFO:__main__:vgg_loss_4: 61.984283447265625 +INFO:__main__:vgg_loss_5: 2.0914158821105957 +INFO:__main__:validation_loss: 2176.75439453125 +INFO:__main__:global_step: 2751 
+INFO:__main__:kl_loss: 876.168212890625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1803.515380859375 +INFO:__main__:loss: 1803.5162353515625 +INFO:__main__:vgg_gram_loss_0: 19.57283592224121 +INFO:__main__:vgg_gram_loss_1: 41.463653564453125 +INFO:__main__:vgg_gram_loss_2: 34.83030319213867 +INFO:__main__:vgg_gram_loss_3: 23.803367614746094 +INFO:__main__:vgg_gram_loss_4: 88.15823364257812 +INFO:__main__:vgg_gram_loss_5: 0.16864289343357086 +INFO:__main__:vgg_loss_0: 12.400886535644531 +INFO:__main__:vgg_loss_1: 26.177406311035156 +INFO:__main__:vgg_loss_2: 36.32575225830078 +INFO:__main__:vgg_loss_3: 27.017194747924805 +INFO:__main__:vgg_loss_4: 49.116207122802734 +INFO:__main__:vgg_loss_5: 1.6685727834701538 +INFO:__main__:validation_loss: 2109.174560546875 +INFO:__main__:Saved model to /net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 3001 +INFO:__main__:kl_loss: 874.681396484375 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1993.727783203125 +INFO:__main__:loss: 1993.7286376953125 +INFO:__main__:vgg_gram_loss_0: 9.39432430267334 +INFO:__main__:vgg_gram_loss_1: 49.747406005859375 +INFO:__main__:vgg_gram_loss_2: 40.818172454833984 +INFO:__main__:vgg_gram_loss_3: 28.133386611938477 +INFO:__main__:vgg_gram_loss_4: 92.25126647949219 +INFO:__main__:vgg_gram_loss_5: 0.1733321249485016 +INFO:__main__:vgg_loss_0: 14.199066162109375 +INFO:__main__:vgg_loss_1: 33.83330535888672 +INFO:__main__:vgg_loss_2: 42.886558532714844 +INFO:__main__:vgg_loss_3: 30.94998550415039 +INFO:__main__:vgg_loss_4: 54.553794860839844 +INFO:__main__:vgg_loss_5: 1.8049606084823608 +INFO:__main__:validation_loss: 1760.3115234375 +INFO:__main__:global_step: 3251 +INFO:__main__:kl_loss: 832.4716186523438 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1796.2969970703125 +INFO:__main__:loss: 1796.2978515625 +INFO:__main__:vgg_gram_loss_0: 13.52221393585205 +INFO:__main__:vgg_gram_loss_1: 29.603153228759766 +INFO:__main__:vgg_gram_loss_2: 26.941171646118164 +INFO:__main__:vgg_gram_loss_3: 22.704923629760742 +INFO:__main__:vgg_gram_loss_4: 89.89064025878906 +INFO:__main__:vgg_gram_loss_5: 0.1799435317516327 +INFO:__main__:vgg_loss_0: 13.937674522399902 +INFO:__main__:vgg_loss_1: 31.861469268798828 +INFO:__main__:vgg_loss_2: 42.74617004394531 +INFO:__main__:vgg_loss_3: 30.874834060668945 +INFO:__main__:vgg_loss_4: 55.147701263427734 +INFO:__main__:vgg_loss_5: 1.849489450454712 +INFO:__main__:validation_loss: 2409.61962890625 +INFO:__main__:global_step: 3501 +INFO:__main__:kl_loss: 928.4254150390625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1850.1971435546875 +INFO:__main__:loss: 1850.1981201171875 +INFO:__main__:vgg_gram_loss_0: 18.71107292175293 +INFO:__main__:vgg_gram_loss_1: 30.63387107849121 +INFO:__main__:vgg_gram_loss_2: 28.906375885009766 +INFO:__main__:vgg_gram_loss_3: 23.24925422668457 +INFO:__main__:vgg_gram_loss_4: 93.26155853271484 +INFO:__main__:vgg_gram_loss_5: 0.19740276038646698 +INFO:__main__:vgg_loss_0: 13.973001480102539 +INFO:__main__:vgg_loss_1: 29.986095428466797 +INFO:__main__:vgg_loss_2: 42.69750213623047 +INFO:__main__:vgg_loss_3: 31.014432907104492 +INFO:__main__:vgg_loss_4: 55.51621627807617 +INFO:__main__:vgg_loss_5: 1.8926173448562622 +INFO:__main__:validation_loss: 1825.427734375 +INFO:__main__:global_step: 3751 +INFO:__main__:kl_loss: 1118.3740234375 +INFO:__main__:kl_weight: 
9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1970.6109619140625 +INFO:__main__:loss: 1970.612060546875 +INFO:__main__:vgg_gram_loss_0: 13.173460960388184 +INFO:__main__:vgg_gram_loss_1: 39.9528694152832 +INFO:__main__:vgg_gram_loss_2: 34.67822265625 +INFO:__main__:vgg_gram_loss_3: 26.1641845703125 +INFO:__main__:vgg_gram_loss_4: 106.43123626708984 +INFO:__main__:vgg_gram_loss_5: 0.18200846016407013 +INFO:__main__:vgg_loss_0: 13.137272834777832 +INFO:__main__:vgg_loss_1: 31.23973274230957 +INFO:__main__:vgg_loss_2: 41.655094146728516 +INFO:__main__:vgg_loss_3: 30.921598434448242 +INFO:__main__:vgg_loss_4: 54.81023406982422 +INFO:__main__:vgg_loss_5: 1.7762824296951294 +INFO:__main__:validation_loss: 2355.878662109375 +INFO:__main__:Saved model to /net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 4001 +INFO:__main__:kl_loss: 1141.31201171875 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1659.1766357421875 +INFO:__main__:loss: 1659.177734375 +INFO:__main__:vgg_gram_loss_0: 15.296905517578125 +INFO:__main__:vgg_gram_loss_1: 29.74534034729004 +INFO:__main__:vgg_gram_loss_2: 26.242666244506836 +INFO:__main__:vgg_gram_loss_3: 19.549055099487305 +INFO:__main__:vgg_gram_loss_4: 84.08551788330078 +INFO:__main__:vgg_gram_loss_5: 0.1774490922689438 +INFO:__main__:vgg_loss_0: 11.836042404174805 +INFO:__main__:vgg_loss_1: 25.887290954589844 +INFO:__main__:vgg_loss_2: 37.43425369262695 +INFO:__main__:vgg_loss_3: 28.02289581298828 +INFO:__main__:vgg_loss_4: 51.81277847290039 +INFO:__main__:vgg_loss_5: 1.7451210021972656 +INFO:__main__:validation_loss: 2517.59521484375 +INFO:__main__:global_step: 4251 +INFO:__main__:kl_loss: 1199.059814453125 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 2222.4951171875 +INFO:__main__:loss: 2222.496337890625 +INFO:__main__:vgg_gram_loss_0: 14.938468933105469 +INFO:__main__:vgg_gram_loss_1: 40.962032318115234 +INFO:__main__:vgg_gram_loss_2: 33.206573486328125 +INFO:__main__:vgg_gram_loss_3: 36.72642135620117 +INFO:__main__:vgg_gram_loss_4: 111.7970962524414 +INFO:__main__:vgg_gram_loss_5: 0.2122902125120163 +INFO:__main__:vgg_loss_0: 17.67517852783203 +INFO:__main__:vgg_loss_1: 39.86125564575195 +INFO:__main__:vgg_loss_2: 47.52143096923828 +INFO:__main__:vgg_loss_3: 35.75613784790039 +INFO:__main__:vgg_loss_4: 63.707515716552734 +INFO:__main__:vgg_loss_5: 2.134636163711548 +INFO:__main__:validation_loss: 1828.6475830078125 +INFO:__main__:global_step: 4501 +INFO:__main__:kl_loss: 1247.668212890625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1640.203369140625 +INFO:__main__:loss: 1640.20458984375 +INFO:__main__:vgg_gram_loss_0: 12.333768844604492 +INFO:__main__:vgg_gram_loss_1: 20.17340660095215 +INFO:__main__:vgg_gram_loss_2: 23.368614196777344 +INFO:__main__:vgg_gram_loss_3: 19.387720108032227 +INFO:__main__:vgg_gram_loss_4: 90.00624084472656 +INFO:__main__:vgg_gram_loss_5: 0.1868402510881424 +INFO:__main__:vgg_loss_0: 11.576725959777832 +INFO:__main__:vgg_loss_1: 24.910717010498047 +INFO:__main__:vgg_loss_2: 38.58959197998047 +INFO:__main__:vgg_loss_3: 29.907052993774414 +INFO:__main__:vgg_loss_4: 55.688236236572266 +INFO:__main__:vgg_loss_5: 1.9117759466171265 +INFO:__main__:validation_loss: 1857.666748046875 +INFO:__main__:global_step: 4751 +INFO:__main__:kl_loss: 1330.21435546875 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1772.7276611328125 
+INFO:__main__:Saved model to /net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt
+INFO:__main__:global_step: 5001
+INFO:__main__:kl_loss: 1371.990234375
+INFO:__main__:kl_weight: 9.999999974752427e-07
+INFO:__main__:likelihood_loss: 2060.505859375
+INFO:__main__:loss: 2060.50732421875
+INFO:__main__:vgg_gram_loss_0: 10.114445686340332
+INFO:__main__:vgg_gram_loss_1: 27.754196166992188
+INFO:__main__:vgg_gram_loss_2: 42.70623779296875
+INFO:__main__:vgg_gram_loss_3: 31.155282974243164
+INFO:__main__:vgg_gram_loss_4: 106.2396469116211
+INFO:__main__:vgg_gram_loss_5: 0.2120528221130371
+INFO:__main__:vgg_loss_0: 13.734189987182617
+INFO:__main__:vgg_loss_1: 33.53791809082031
+INFO:__main__:vgg_loss_2: 47.36857604980469
+INFO:__main__:vgg_loss_3: 34.70136642456055
+INFO:__main__:vgg_loss_4: 62.465728759765625
+INFO:__main__:vgg_loss_5: 2.1115729808807373
+INFO:__main__:validation_loss: 2520.490966796875
+[training log truncated: an identical block is logged every 250 global steps from step 5001 through step 25751, reporting kl_loss, kl_weight (constant ~1e-06), likelihood_loss, loss, vgg_gram_loss_0-5, vgg_loss_0-5 and validation_loss, with a checkpoint saved to the path above roughly every 1000 steps]
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 26001 +INFO:__main__:kl_loss: 6218.8818359375 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1682.552490234375 +INFO:__main__:loss: 1682.5587158203125 +INFO:__main__:vgg_gram_loss_0: 7.330090522766113 +INFO:__main__:vgg_gram_loss_1: 23.091108322143555 +INFO:__main__:vgg_gram_loss_2: 23.08942985534668 +INFO:__main__:vgg_gram_loss_3: 19.701995849609375 +INFO:__main__:vgg_gram_loss_4: 89.66356658935547 +INFO:__main__:vgg_gram_loss_5: 0.1859285980463028 +INFO:__main__:vgg_loss_0: 13.050247192382812 +INFO:__main__:vgg_loss_1: 30.06724739074707 +INFO:__main__:vgg_loss_2: 41.38309097290039 +INFO:__main__:vgg_loss_3: 30.49485206604004 +INFO:__main__:vgg_loss_4: 56.499664306640625 +INFO:__main__:vgg_loss_5: 1.9533076286315918 +INFO:__main__:validation_loss: 1918.3074951171875 +INFO:__main__:global_step: 26251 +INFO:__main__:kl_loss: 6852.59375 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1588.879638671875 +INFO:__main__:loss: 1588.886474609375 +INFO:__main__:vgg_gram_loss_0: 8.004632949829102 +INFO:__main__:vgg_gram_loss_1: 32.57810592651367 +INFO:__main__:vgg_gram_loss_2: 24.929096221923828 +INFO:__main__:vgg_gram_loss_3: 20.542936325073242 +INFO:__main__:vgg_gram_loss_4: 77.0531234741211 +INFO:__main__:vgg_gram_loss_5: 0.15604647994041443 +INFO:__main__:vgg_loss_0: 11.412250518798828 +INFO:__main__:vgg_loss_1: 27.01420021057129 +INFO:__main__:vgg_loss_2: 38.5195426940918 +INFO:__main__:vgg_loss_3: 28.043766021728516 +INFO:__main__:vgg_loss_4: 47.96466064453125 +INFO:__main__:vgg_loss_5: 1.5575368404388428 +INFO:__main__:validation_loss: 2249.79248046875 +INFO:__main__:global_step: 26501 +INFO:__main__:kl_loss: 6980.28857421875 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1704.5946044921875 +INFO:__main__:loss: 1704.6015625 +INFO:__main__:vgg_gram_loss_0: 12.008533477783203 +INFO:__main__:vgg_gram_loss_1: 21.613807678222656 +INFO:__main__:vgg_gram_loss_2: 26.681095123291016 +INFO:__main__:vgg_gram_loss_3: 20.401952743530273 +INFO:__main__:vgg_gram_loss_4: 84.60264587402344 +INFO:__main__:vgg_gram_loss_5: 0.1894301474094391 +INFO:__main__:vgg_loss_0: 12.829402923583984 +INFO:__main__:vgg_loss_1: 29.313905715942383 +INFO:__main__:vgg_loss_2: 43.35658264160156 +INFO:__main__:vgg_loss_3: 32.1512565612793 +INFO:__main__:vgg_loss_4: 55.89781951904297 +INFO:__main__:vgg_loss_5: 1.8724706172943115 +INFO:__main__:validation_loss: 1873.46240234375 +INFO:__main__:global_step: 26751 +INFO:__main__:kl_loss: 6226.38330078125 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1691.88623046875 +INFO:__main__:loss: 1691.8924560546875 +INFO:__main__:vgg_gram_loss_0: 7.683363437652588 +INFO:__main__:vgg_gram_loss_1: 21.128986358642578 +INFO:__main__:vgg_gram_loss_2: 22.99751091003418 +INFO:__main__:vgg_gram_loss_3: 19.45563316345215 +INFO:__main__:vgg_gram_loss_4: 86.91204071044922 +INFO:__main__:vgg_gram_loss_5: 0.18538795411586761 +INFO:__main__:vgg_loss_0: 13.23610782623291 +INFO:__main__:vgg_loss_1: 31.90540313720703 +INFO:__main__:vgg_loss_2: 43.89427947998047 +INFO:__main__:vgg_loss_3: 31.82846450805664 +INFO:__main__:vgg_loss_4: 57.198577880859375 +INFO:__main__:vgg_loss_5: 1.9515100717544556 +INFO:__main__:validation_loss: 2264.475830078125 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 27001 +INFO:__main__:kl_loss: 6897.892578125 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1583.3160400390625 +INFO:__main__:loss: 1583.322998046875 +INFO:__main__:vgg_gram_loss_0: 8.693373680114746 +INFO:__main__:vgg_gram_loss_1: 21.26497459411621 +INFO:__main__:vgg_gram_loss_2: 23.890304565429688 +INFO:__main__:vgg_gram_loss_3: 17.55023765563965 +INFO:__main__:vgg_gram_loss_4: 79.44637298583984 +INFO:__main__:vgg_gram_loss_5: 0.17783643305301666 +INFO:__main__:vgg_loss_0: 11.692310333251953 +INFO:__main__:vgg_loss_1: 27.731632232666016 +INFO:__main__:vgg_loss_2: 40.75484085083008 +INFO:__main__:vgg_loss_3: 30.193740844726562 +INFO:__main__:vgg_loss_4: 53.523582458496094 +INFO:__main__:vgg_loss_5: 1.7439730167388916 +INFO:__main__:validation_loss: 1912.774169921875 +INFO:__main__:global_step: 27251 +INFO:__main__:kl_loss: 5178.7509765625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1626.1431884765625 +INFO:__main__:loss: 1626.1483154296875 +INFO:__main__:vgg_gram_loss_0: 12.652191162109375 +INFO:__main__:vgg_gram_loss_1: 32.19399642944336 +INFO:__main__:vgg_gram_loss_2: 25.745038986206055 +INFO:__main__:vgg_gram_loss_3: 21.77153778076172 +INFO:__main__:vgg_gram_loss_4: 80.12590789794922 +INFO:__main__:vgg_gram_loss_5: 0.1634579300880432 +INFO:__main__:vgg_loss_0: 11.062541007995605 +INFO:__main__:vgg_loss_1: 27.467317581176758 +INFO:__main__:vgg_loss_2: 38.08487319946289 +INFO:__main__:vgg_loss_3: 27.266159057617188 +INFO:__main__:vgg_loss_4: 47.11213684082031 +INFO:__main__:vgg_loss_5: 1.5835005044937134 +INFO:__main__:validation_loss: 1783.89697265625 +INFO:__main__:global_step: 27501 +INFO:__main__:kl_loss: 6521.7353515625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1723.310302734375 +INFO:__main__:loss: 1723.3167724609375 +INFO:__main__:vgg_gram_loss_0: 11.551633834838867 +INFO:__main__:vgg_gram_loss_1: 26.776636123657227 +INFO:__main__:vgg_gram_loss_2: 25.140085220336914 +INFO:__main__:vgg_gram_loss_3: 19.369688034057617 +INFO:__main__:vgg_gram_loss_4: 83.29908752441406 +INFO:__main__:vgg_gram_loss_5: 0.19278185069561005 +INFO:__main__:vgg_loss_0: 12.514986038208008 +INFO:__main__:vgg_loss_1: 29.113794326782227 +INFO:__main__:vgg_loss_2: 44.04909896850586 +INFO:__main__:vgg_loss_3: 32.55424118041992 +INFO:__main__:vgg_loss_4: 58.1593017578125 +INFO:__main__:vgg_loss_5: 1.9407258033752441 +INFO:__main__:validation_loss: 1973.332275390625 +INFO:__main__:global_step: 27751 +INFO:__main__:kl_loss: 7555.8271484375 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1700.685791015625 +INFO:__main__:loss: 1700.693359375 +INFO:__main__:vgg_gram_loss_0: 10.452322959899902 +INFO:__main__:vgg_gram_loss_1: 24.14700698852539 +INFO:__main__:vgg_gram_loss_2: 26.62457275390625 +INFO:__main__:vgg_gram_loss_3: 19.78400421142578 +INFO:__main__:vgg_gram_loss_4: 84.8041000366211 +INFO:__main__:vgg_gram_loss_5: 0.1840541511774063 +INFO:__main__:vgg_loss_0: 12.366714477539062 +INFO:__main__:vgg_loss_1: 31.705739974975586 +INFO:__main__:vgg_loss_2: 42.88370895385742 +INFO:__main__:vgg_loss_3: 30.60978126525879 +INFO:__main__:vgg_loss_4: 54.733646392822266 +INFO:__main__:vgg_loss_5: 1.8414698839187622 +INFO:__main__:validation_loss: 1748.7911376953125 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 28001 +INFO:__main__:kl_loss: 7006.484375 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1801.0430908203125 +INFO:__main__:loss: 1801.050048828125 +INFO:__main__:vgg_gram_loss_0: 13.374608039855957 +INFO:__main__:vgg_gram_loss_1: 22.997451782226562 +INFO:__main__:vgg_gram_loss_2: 26.51910400390625 +INFO:__main__:vgg_gram_loss_3: 21.808897018432617 +INFO:__main__:vgg_gram_loss_4: 88.30428314208984 +INFO:__main__:vgg_gram_loss_5: 0.18019089102745056 +INFO:__main__:vgg_loss_0: 14.381964683532715 +INFO:__main__:vgg_loss_1: 34.07027053833008 +INFO:__main__:vgg_loss_2: 46.85667037963867 +INFO:__main__:vgg_loss_3: 32.78913497924805 +INFO:__main__:vgg_loss_4: 57.058799743652344 +INFO:__main__:vgg_loss_5: 1.8672504425048828 +INFO:__main__:validation_loss: 2233.9267578125 +INFO:__main__:global_step: 28251 +INFO:__main__:kl_loss: 6991.6796875 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1570.6463623046875 +INFO:__main__:loss: 1570.6533203125 +INFO:__main__:vgg_gram_loss_0: 8.678009986877441 +INFO:__main__:vgg_gram_loss_1: 25.753881454467773 +INFO:__main__:vgg_gram_loss_2: 21.553869247436523 +INFO:__main__:vgg_gram_loss_3: 17.74287986755371 +INFO:__main__:vgg_gram_loss_4: 75.90467834472656 +INFO:__main__:vgg_gram_loss_5: 0.15736109018325806 +INFO:__main__:vgg_loss_0: 12.255733489990234 +INFO:__main__:vgg_loss_1: 30.231689453125 +INFO:__main__:vgg_loss_2: 40.39585876464844 +INFO:__main__:vgg_loss_3: 29.1583251953125 +INFO:__main__:vgg_loss_4: 50.635379791259766 +INFO:__main__:vgg_loss_5: 1.6616016626358032 +INFO:__main__:validation_loss: 2021.83544921875 +INFO:__main__:global_step: 28501 +INFO:__main__:kl_loss: 7319.66455078125 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1507.8668212890625 +INFO:__main__:loss: 1507.8741455078125 +INFO:__main__:vgg_gram_loss_0: 13.68748664855957 +INFO:__main__:vgg_gram_loss_1: 29.370996475219727 +INFO:__main__:vgg_gram_loss_2: 24.294782638549805 +INFO:__main__:vgg_gram_loss_3: 18.50814437866211 +INFO:__main__:vgg_gram_loss_4: 72.04717254638672 +INFO:__main__:vgg_gram_loss_5: 0.15147070586681366 +INFO:__main__:vgg_loss_0: 11.467558860778809 +INFO:__main__:vgg_loss_1: 27.06207275390625 +INFO:__main__:vgg_loss_2: 34.5655517578125 +INFO:__main__:vgg_loss_3: 25.13874626159668 +INFO:__main__:vgg_loss_4: 43.76496505737305 +INFO:__main__:vgg_loss_5: 1.5144022703170776 +INFO:__main__:validation_loss: 2028.8160400390625 +INFO:__main__:global_step: 28751 +INFO:__main__:kl_loss: 6046.599609375 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1541.998291015625 +INFO:__main__:loss: 1542.00439453125 +INFO:__main__:vgg_gram_loss_0: 13.567819595336914 +INFO:__main__:vgg_gram_loss_1: 24.398635864257812 +INFO:__main__:vgg_gram_loss_2: 21.604875564575195 +INFO:__main__:vgg_gram_loss_3: 18.050750732421875 +INFO:__main__:vgg_gram_loss_4: 80.14633178710938 +INFO:__main__:vgg_gram_loss_5: 0.15791325271129608 +INFO:__main__:vgg_loss_0: 11.199384689331055 +INFO:__main__:vgg_loss_1: 26.727035522460938 +INFO:__main__:vgg_loss_2: 37.31371307373047 +INFO:__main__:vgg_loss_3: 26.51299285888672 +INFO:__main__:vgg_loss_4: 47.15300369262695 +INFO:__main__:vgg_loss_5: 1.5671954154968262 +INFO:__main__:validation_loss: 1630.343505859375 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 29001 +INFO:__main__:kl_loss: 6740.71826171875 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1682.2861328125 +INFO:__main__:loss: 1682.2928466796875 +INFO:__main__:vgg_gram_loss_0: 8.695304870605469 +INFO:__main__:vgg_gram_loss_1: 19.400619506835938 +INFO:__main__:vgg_gram_loss_2: 22.67316436767578 +INFO:__main__:vgg_gram_loss_3: 20.046979904174805 +INFO:__main__:vgg_gram_loss_4: 87.79853820800781 +INFO:__main__:vgg_gram_loss_5: 0.19452379643917084 +INFO:__main__:vgg_loss_0: 12.044966697692871 +INFO:__main__:vgg_loss_1: 29.201589584350586 +INFO:__main__:vgg_loss_2: 44.07809066772461 +INFO:__main__:vgg_loss_3: 32.421363830566406 +INFO:__main__:vgg_loss_4: 57.96095657348633 +INFO:__main__:vgg_loss_5: 1.9411317110061646 +INFO:__main__:validation_loss: 1976.0982666015625 +INFO:__main__:global_step: 29251 +INFO:__main__:kl_loss: 6531.47607421875 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1465.192626953125 +INFO:__main__:loss: 1465.19921875 +INFO:__main__:vgg_gram_loss_0: 5.189122676849365 +INFO:__main__:vgg_gram_loss_1: 20.3861141204834 +INFO:__main__:vgg_gram_loss_2: 20.574844360351562 +INFO:__main__:vgg_gram_loss_3: 17.603164672851562 +INFO:__main__:vgg_gram_loss_4: 77.5077896118164 +INFO:__main__:vgg_gram_loss_5: 0.16504782438278198 +INFO:__main__:vgg_loss_0: 10.536651611328125 +INFO:__main__:vgg_loss_1: 25.29227066040039 +INFO:__main__:vgg_loss_2: 38.12633514404297 +INFO:__main__:vgg_loss_3: 27.339921951293945 +INFO:__main__:vgg_loss_4: 48.62360763549805 +INFO:__main__:vgg_loss_5: 1.69362211227417 +INFO:__main__:validation_loss: 2093.66845703125 +INFO:__main__:global_step: 29501 +INFO:__main__:kl_loss: 7027.560546875 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1550.742431640625 +INFO:__main__:loss: 1550.74951171875 +INFO:__main__:vgg_gram_loss_0: 9.524287223815918 +INFO:__main__:vgg_gram_loss_1: 18.086713790893555 +INFO:__main__:vgg_gram_loss_2: 21.886558532714844 +INFO:__main__:vgg_gram_loss_3: 18.569007873535156 +INFO:__main__:vgg_gram_loss_4: 83.06659698486328 +INFO:__main__:vgg_gram_loss_5: 0.17091219127178192 +INFO:__main__:vgg_loss_0: 10.751038551330566 +INFO:__main__:vgg_loss_1: 24.940603256225586 +INFO:__main__:vgg_loss_2: 39.313690185546875 +INFO:__main__:vgg_loss_3: 29.500207901000977 +INFO:__main__:vgg_loss_4: 52.622894287109375 +INFO:__main__:vgg_loss_5: 1.7159700393676758 +INFO:__main__:validation_loss: 1794.6380615234375 +INFO:__main__:global_step: 29751 +INFO:__main__:kl_loss: 8569.4365234375 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1732.98583984375 +INFO:__main__:loss: 1732.994384765625 +INFO:__main__:vgg_gram_loss_0: 12.27071762084961 +INFO:__main__:vgg_gram_loss_1: 24.522642135620117 +INFO:__main__:vgg_gram_loss_2: 29.224720001220703 +INFO:__main__:vgg_gram_loss_3: 20.72979736328125 +INFO:__main__:vgg_gram_loss_4: 79.61004638671875 +INFO:__main__:vgg_gram_loss_5: 0.17849066853523254 +INFO:__main__:vgg_loss_0: 13.388283729553223 +INFO:__main__:vgg_loss_1: 31.589082717895508 +INFO:__main__:vgg_loss_2: 46.221923828125 +INFO:__main__:vgg_loss_3: 32.76070022583008 +INFO:__main__:vgg_loss_4: 54.363521575927734 +INFO:__main__:vgg_loss_5: 1.7372430562973022 +INFO:__main__:validation_loss: 2095.364501953125 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 30001 +INFO:__main__:kl_loss: 7812.275390625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1689.72021484375 +INFO:__main__:loss: 1689.72802734375 +INFO:__main__:vgg_gram_loss_0: 7.3410797119140625 +INFO:__main__:vgg_gram_loss_1: 22.4700984954834 +INFO:__main__:vgg_gram_loss_2: 21.892820358276367 +INFO:__main__:vgg_gram_loss_3: 18.213821411132812 +INFO:__main__:vgg_gram_loss_4: 85.97173309326172 +INFO:__main__:vgg_gram_loss_5: 0.19198797643184662 +INFO:__main__:vgg_loss_0: 12.74738597869873 +INFO:__main__:vgg_loss_1: 30.815444946289062 +INFO:__main__:vgg_loss_2: 44.6407470703125 +INFO:__main__:vgg_loss_3: 32.8369026184082 +INFO:__main__:vgg_loss_4: 58.828250885009766 +INFO:__main__:vgg_loss_5: 1.993740439414978 +INFO:__main__:validation_loss: 2419.019287109375 +INFO:__main__:global_step: 30251 +INFO:__main__:kl_loss: 8521.3984375 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1964.75830078125 +INFO:__main__:loss: 1964.766845703125 +INFO:__main__:vgg_gram_loss_0: 6.659420967102051 +INFO:__main__:vgg_gram_loss_1: 22.764286041259766 +INFO:__main__:vgg_gram_loss_2: 33.33052444458008 +INFO:__main__:vgg_gram_loss_3: 27.01814079284668 +INFO:__main__:vgg_gram_loss_4: 101.89287567138672 +INFO:__main__:vgg_gram_loss_5: 0.21862566471099854 +INFO:__main__:vgg_loss_0: 15.357515335083008 +INFO:__main__:vgg_loss_1: 34.13472366333008 +INFO:__main__:vgg_loss_2: 50.27109146118164 +INFO:__main__:vgg_loss_3: 36.350791931152344 +INFO:__main__:vgg_loss_4: 62.87358093261719 +INFO:__main__:vgg_loss_5: 2.0800621509552 +INFO:__main__:validation_loss: 2112.103515625 +INFO:__main__:global_step: 30501 +INFO:__main__:kl_loss: 7397.51220703125 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1605.8050537109375 +INFO:__main__:loss: 1605.8125 +INFO:__main__:vgg_gram_loss_0: 8.002279281616211 +INFO:__main__:vgg_gram_loss_1: 23.73541831970215 +INFO:__main__:vgg_gram_loss_2: 24.308568954467773 +INFO:__main__:vgg_gram_loss_3: 19.924631118774414 +INFO:__main__:vgg_gram_loss_4: 80.81922912597656 +INFO:__main__:vgg_gram_loss_5: 0.16787424683570862 +INFO:__main__:vgg_loss_0: 11.824690818786621 +INFO:__main__:vgg_loss_1: 29.90940284729004 +INFO:__main__:vgg_loss_2: 42.416412353515625 +INFO:__main__:vgg_loss_3: 28.987524032592773 +INFO:__main__:vgg_loss_4: 49.466102600097656 +INFO:__main__:vgg_loss_5: 1.5988966226577759 +INFO:__main__:validation_loss: 1885.314208984375 +INFO:__main__:global_step: 30751 +INFO:__main__:kl_loss: 8702.7744140625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1596.2640380859375 +INFO:__main__:loss: 1596.272705078125 +INFO:__main__:vgg_gram_loss_0: 4.084274768829346 +INFO:__main__:vgg_gram_loss_1: 18.62978744506836 +INFO:__main__:vgg_gram_loss_2: 21.505178451538086 +INFO:__main__:vgg_gram_loss_3: 17.89800453186035 +INFO:__main__:vgg_gram_loss_4: 81.99454498291016 +INFO:__main__:vgg_gram_loss_5: 0.19694426655769348 +INFO:__main__:vgg_loss_0: 11.946273803710938 +INFO:__main__:vgg_loss_1: 29.416860580444336 +INFO:__main__:vgg_loss_2: 43.647804260253906 +INFO:__main__:vgg_loss_3: 31.734018325805664 +INFO:__main__:vgg_loss_4: 56.293212890625 +INFO:__main__:vgg_loss_5: 1.9059001207351685 +INFO:__main__:validation_loss: 1580.9931640625 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 31001 +INFO:__main__:kl_loss: 7422.7265625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1665.5206298828125 +INFO:__main__:loss: 1665.528076171875 +INFO:__main__:vgg_gram_loss_0: 7.4895195960998535 +INFO:__main__:vgg_gram_loss_1: 20.219242095947266 +INFO:__main__:vgg_gram_loss_2: 27.897180557250977 +INFO:__main__:vgg_gram_loss_3: 19.956405639648438 +INFO:__main__:vgg_gram_loss_4: 85.25379180908203 +INFO:__main__:vgg_gram_loss_5: 0.1990671306848526 +INFO:__main__:vgg_loss_0: 12.088481903076172 +INFO:__main__:vgg_loss_1: 29.120532989501953 +INFO:__main__:vgg_loss_2: 42.53718566894531 +INFO:__main__:vgg_loss_3: 31.43267059326172 +INFO:__main__:vgg_loss_4: 55.092994689941406 +INFO:__main__:vgg_loss_5: 1.8170398473739624 +INFO:__main__:validation_loss: 1984.239501953125 +INFO:__main__:global_step: 31251 +INFO:__main__:kl_loss: 8185.1015625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1604.841552734375 +INFO:__main__:loss: 1604.8497314453125 +INFO:__main__:vgg_gram_loss_0: 13.089444160461426 +INFO:__main__:vgg_gram_loss_1: 22.364084243774414 +INFO:__main__:vgg_gram_loss_2: 21.805578231811523 +INFO:__main__:vgg_gram_loss_3: 18.109394073486328 +INFO:__main__:vgg_gram_loss_4: 77.81670379638672 +INFO:__main__:vgg_gram_loss_5: 0.17303717136383057 +INFO:__main__:vgg_loss_0: 13.159098625183105 +INFO:__main__:vgg_loss_1: 28.713388442993164 +INFO:__main__:vgg_loss_2: 40.76849365234375 +INFO:__main__:vgg_loss_3: 30.154808044433594 +INFO:__main__:vgg_loss_4: 53.038516998291016 +INFO:__main__:vgg_loss_5: 1.7757582664489746 +INFO:__main__:validation_loss: 1555.4007568359375 +INFO:__main__:global_step: 31501 +INFO:__main__:kl_loss: 7402.59375 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1490.0303955078125 +INFO:__main__:loss: 1490.037841796875 +INFO:__main__:vgg_gram_loss_0: 9.725199699401855 +INFO:__main__:vgg_gram_loss_1: 23.177865982055664 +INFO:__main__:vgg_gram_loss_2: 25.40339469909668 +INFO:__main__:vgg_gram_loss_3: 18.384431838989258 +INFO:__main__:vgg_gram_loss_4: 75.72759246826172 +INFO:__main__:vgg_gram_loss_5: 0.1574365347623825 +INFO:__main__:vgg_loss_0: 9.718292236328125 +INFO:__main__:vgg_loss_1: 23.763614654541016 +INFO:__main__:vgg_loss_2: 36.29725646972656 +INFO:__main__:vgg_loss_3: 26.906002044677734 +INFO:__main__:vgg_loss_4: 47.20603561401367 +INFO:__main__:vgg_loss_5: 1.5389440059661865 +INFO:__main__:validation_loss: 1775.9581298828125 +INFO:__main__:global_step: 31751 +INFO:__main__:kl_loss: 8372.71875 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1771.30517578125 +INFO:__main__:loss: 1771.3135986328125 +INFO:__main__:vgg_gram_loss_0: 12.17890453338623 +INFO:__main__:vgg_gram_loss_1: 26.358612060546875 +INFO:__main__:vgg_gram_loss_2: 30.13270378112793 +INFO:__main__:vgg_gram_loss_3: 23.5117244720459 +INFO:__main__:vgg_gram_loss_4: 88.96038055419922 +INFO:__main__:vgg_gram_loss_5: 0.18301668763160706 +INFO:__main__:vgg_loss_0: 12.808883666992188 +INFO:__main__:vgg_loss_1: 30.362239837646484 +INFO:__main__:vgg_loss_2: 42.860774993896484 +INFO:__main__:vgg_loss_3: 31.0824031829834 +INFO:__main__:vgg_loss_4: 54.00221252441406 +INFO:__main__:vgg_loss_5: 1.8191654682159424 +INFO:__main__:validation_loss: 2132.423828125 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 32001 +INFO:__main__:kl_loss: 7507.0712890625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1368.962646484375 +INFO:__main__:loss: 1368.9700927734375 +INFO:__main__:vgg_gram_loss_0: 7.300660133361816 +INFO:__main__:vgg_gram_loss_1: 15.08447551727295 +INFO:__main__:vgg_gram_loss_2: 19.11664390563965 +INFO:__main__:vgg_gram_loss_3: 15.4403657913208 +INFO:__main__:vgg_gram_loss_4: 71.96074676513672 +INFO:__main__:vgg_gram_loss_5: 0.15559887886047363 +INFO:__main__:vgg_loss_0: 9.949869155883789 +INFO:__main__:vgg_loss_1: 23.242774963378906 +INFO:__main__:vgg_loss_2: 35.481956481933594 +INFO:__main__:vgg_loss_3: 26.68964385986328 +INFO:__main__:vgg_loss_4: 47.822391510009766 +INFO:__main__:vgg_loss_5: 1.547431468963623 +INFO:__main__:validation_loss: 1999.1343994140625 +INFO:__main__:global_step: 32251 +INFO:__main__:kl_loss: 8140.81494140625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1863.8477783203125 +INFO:__main__:loss: 1863.85595703125 +INFO:__main__:vgg_gram_loss_0: 6.013930797576904 +INFO:__main__:vgg_gram_loss_1: 35.12582778930664 +INFO:__main__:vgg_gram_loss_2: 34.75369644165039 +INFO:__main__:vgg_gram_loss_3: 28.84720230102539 +INFO:__main__:vgg_gram_loss_4: 87.96319580078125 +INFO:__main__:vgg_gram_loss_5: 0.1937442272901535 +INFO:__main__:vgg_loss_0: 13.145959854125977 +INFO:__main__:vgg_loss_1: 32.48296356201172 +INFO:__main__:vgg_loss_2: 45.67557907104492 +INFO:__main__:vgg_loss_3: 32.40920639038086 +INFO:__main__:vgg_loss_4: 54.372718811035156 +INFO:__main__:vgg_loss_5: 1.7855448722839355 +INFO:__main__:validation_loss: 2302.90673828125 +INFO:__main__:global_step: 32501 +INFO:__main__:kl_loss: 7173.57470703125 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1423.6370849609375 +INFO:__main__:loss: 1423.644287109375 +INFO:__main__:vgg_gram_loss_0: 10.145112991333008 +INFO:__main__:vgg_gram_loss_1: 21.457168579101562 +INFO:__main__:vgg_gram_loss_2: 20.230438232421875 +INFO:__main__:vgg_gram_loss_3: 16.04782485961914 +INFO:__main__:vgg_gram_loss_4: 72.99710083007812 +INFO:__main__:vgg_gram_loss_5: 0.1581924557685852 +INFO:__main__:vgg_loss_0: 11.063898086547852 +INFO:__main__:vgg_loss_1: 24.61655616760254 +INFO:__main__:vgg_loss_2: 34.753379821777344 +INFO:__main__:vgg_loss_3: 25.293195724487305 +INFO:__main__:vgg_loss_4: 46.3483772277832 +INFO:__main__:vgg_loss_5: 1.6162017583847046 +INFO:__main__:validation_loss: 1436.0740966796875 +INFO:__main__:global_step: 32751 +INFO:__main__:kl_loss: 6524.986328125 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1567.75244140625 +INFO:__main__:loss: 1567.7589111328125 +INFO:__main__:vgg_gram_loss_0: 15.303736686706543 +INFO:__main__:vgg_gram_loss_1: 23.70351219177246 +INFO:__main__:vgg_gram_loss_2: 26.473554611206055 +INFO:__main__:vgg_gram_loss_3: 19.5965633392334 +INFO:__main__:vgg_gram_loss_4: 78.61532592773438 +INFO:__main__:vgg_gram_loss_5: 0.15331611037254333 +INFO:__main__:vgg_loss_0: 11.39196491241455 +INFO:__main__:vgg_loss_1: 27.400014877319336 +INFO:__main__:vgg_loss_2: 37.67536544799805 +INFO:__main__:vgg_loss_3: 25.966724395751953 +INFO:__main__:vgg_loss_4: 45.72749328613281 +INFO:__main__:vgg_loss_5: 1.5428931713104248 +INFO:__main__:validation_loss: 2331.234130859375 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 33001 +INFO:__main__:kl_loss: 10939.5009765625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1900.011962890625 +INFO:__main__:loss: 1900.02294921875 +INFO:__main__:vgg_gram_loss_0: 7.07985258102417 +INFO:__main__:vgg_gram_loss_1: 27.271387100219727 +INFO:__main__:vgg_gram_loss_2: 35.25288391113281 +INFO:__main__:vgg_gram_loss_3: 30.127553939819336 +INFO:__main__:vgg_gram_loss_4: 99.07093048095703 +INFO:__main__:vgg_gram_loss_5: 0.1930004358291626 +INFO:__main__:vgg_loss_0: 13.147265434265137 +INFO:__main__:vgg_loss_1: 33.134429931640625 +INFO:__main__:vgg_loss_2: 46.22199630737305 +INFO:__main__:vgg_loss_3: 32.368289947509766 +INFO:__main__:vgg_loss_4: 54.38119888305664 +INFO:__main__:vgg_loss_5: 1.75357985496521 +INFO:__main__:validation_loss: 1508.9962158203125 +INFO:__main__:global_step: 33251 +INFO:__main__:kl_loss: 7553.2265625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1798.7579345703125 +INFO:__main__:loss: 1798.7655029296875 +INFO:__main__:vgg_gram_loss_0: 12.825403213500977 +INFO:__main__:vgg_gram_loss_1: 29.995691299438477 +INFO:__main__:vgg_gram_loss_2: 26.814319610595703 +INFO:__main__:vgg_gram_loss_3: 23.321575164794922 +INFO:__main__:vgg_gram_loss_4: 88.9933090209961 +INFO:__main__:vgg_gram_loss_5: 0.1796519160270691 +INFO:__main__:vgg_loss_0: 13.508710861206055 +INFO:__main__:vgg_loss_1: 34.84256362915039 +INFO:__main__:vgg_loss_2: 42.6619758605957 +INFO:__main__:vgg_loss_3: 30.537158966064453 +INFO:__main__:vgg_loss_4: 54.273677825927734 +INFO:__main__:vgg_loss_5: 1.7975609302520752 +INFO:__main__:validation_loss: 2110.814208984375 +INFO:__main__:global_step: 33501 +INFO:__main__:kl_loss: 7539.8232421875 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1470.26708984375 +INFO:__main__:loss: 1470.274658203125 +INFO:__main__:vgg_gram_loss_0: 7.521893501281738 +INFO:__main__:vgg_gram_loss_1: 18.998716354370117 +INFO:__main__:vgg_gram_loss_2: 20.80803108215332 +INFO:__main__:vgg_gram_loss_3: 17.182662963867188 +INFO:__main__:vgg_gram_loss_4: 73.57921600341797 +INFO:__main__:vgg_gram_loss_5: 0.15982714295387268 +INFO:__main__:vgg_loss_0: 11.317607879638672 +INFO:__main__:vgg_loss_1: 27.017763137817383 +INFO:__main__:vgg_loss_2: 38.52604293823242 +INFO:__main__:vgg_loss_3: 28.11553382873535 +INFO:__main__:vgg_loss_4: 49.25281524658203 +INFO:__main__:vgg_loss_5: 1.5733047723770142 +INFO:__main__:validation_loss: 1999.73974609375 +INFO:__main__:global_step: 33751 +INFO:__main__:kl_loss: 6667.193359375 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1280.2393798828125 +INFO:__main__:loss: 1280.24609375 +INFO:__main__:vgg_gram_loss_0: 8.491766929626465 +INFO:__main__:vgg_gram_loss_1: 16.49424171447754 +INFO:__main__:vgg_gram_loss_2: 17.525793075561523 +INFO:__main__:vgg_gram_loss_3: 13.442642211914062 +INFO:__main__:vgg_gram_loss_4: 63.419002532958984 +INFO:__main__:vgg_gram_loss_5: 0.14162833988666534 +INFO:__main__:vgg_loss_0: 9.988456726074219 +INFO:__main__:vgg_loss_1: 24.418048858642578 +INFO:__main__:vgg_loss_2: 34.24618148803711 +INFO:__main__:vgg_loss_3: 24.316545486450195 +INFO:__main__:vgg_loss_4: 42.19325637817383 +INFO:__main__:vgg_loss_5: 1.370316743850708 +INFO:__main__:validation_loss: 1770.0706787109375 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 34001 +INFO:__main__:kl_loss: 7476.705078125 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1807.9027099609375 +INFO:__main__:loss: 1807.91015625 +INFO:__main__:vgg_gram_loss_0: 9.691965103149414 +INFO:__main__:vgg_gram_loss_1: 25.24032211303711 +INFO:__main__:vgg_gram_loss_2: 31.78862953186035 +INFO:__main__:vgg_gram_loss_3: 23.491683959960938 +INFO:__main__:vgg_gram_loss_4: 90.4525375366211 +INFO:__main__:vgg_gram_loss_5: 0.2146189957857132 +INFO:__main__:vgg_loss_0: 12.932808876037598 +INFO:__main__:vgg_loss_1: 32.66667556762695 +INFO:__main__:vgg_loss_2: 46.28645706176758 +INFO:__main__:vgg_loss_3: 32.0604248046875 +INFO:__main__:vgg_loss_4: 54.852962493896484 +INFO:__main__:vgg_loss_5: 1.9014643430709839 +INFO:__main__:validation_loss: 1683.9722900390625 +INFO:__main__:global_step: 34251 +INFO:__main__:kl_loss: 7209.8486328125 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1582.0489501953125 +INFO:__main__:loss: 1582.05615234375 +INFO:__main__:vgg_gram_loss_0: 7.021142482757568 +INFO:__main__:vgg_gram_loss_1: 23.712890625 +INFO:__main__:vgg_gram_loss_2: 22.4034423828125 +INFO:__main__:vgg_gram_loss_3: 19.306621551513672 +INFO:__main__:vgg_gram_loss_4: 83.2828598022461 +INFO:__main__:vgg_gram_loss_5: 0.16146491467952728 +INFO:__main__:vgg_loss_0: 11.05384349822998 +INFO:__main__:vgg_loss_1: 27.824371337890625 +INFO:__main__:vgg_loss_2: 40.05014419555664 +INFO:__main__:vgg_loss_3: 28.684080123901367 +INFO:__main__:vgg_loss_4: 51.2217903137207 +INFO:__main__:vgg_loss_5: 1.6871557235717773 +INFO:__main__:validation_loss: 1809.0576171875 +INFO:__main__:global_step: 34501 +INFO:__main__:kl_loss: 8045.5849609375 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1911.607666015625 +INFO:__main__:loss: 1911.61572265625 +INFO:__main__:vgg_gram_loss_0: 7.64352560043335 +INFO:__main__:vgg_gram_loss_1: 30.03386878967285 +INFO:__main__:vgg_gram_loss_2: 35.1494026184082 +INFO:__main__:vgg_gram_loss_3: 27.489959716796875 +INFO:__main__:vgg_gram_loss_4: 92.80669403076172 +INFO:__main__:vgg_gram_loss_5: 0.1786065399646759 +INFO:__main__:vgg_loss_0: 13.750406265258789 +INFO:__main__:vgg_loss_1: 35.86495590209961 +INFO:__main__:vgg_loss_2: 48.537879943847656 +INFO:__main__:vgg_loss_3: 33.572113037109375 +INFO:__main__:vgg_loss_4: 55.488426208496094 +INFO:__main__:vgg_loss_5: 1.8057047128677368 +INFO:__main__:validation_loss: 1884.7188720703125 +INFO:__main__:global_step: 34751 +INFO:__main__:kl_loss: 8501.81640625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1621.1163330078125 +INFO:__main__:loss: 1621.1248779296875 +INFO:__main__:vgg_gram_loss_0: 8.063578605651855 +INFO:__main__:vgg_gram_loss_1: 27.075748443603516 +INFO:__main__:vgg_gram_loss_2: 23.694639205932617 +INFO:__main__:vgg_gram_loss_3: 19.012617111206055 +INFO:__main__:vgg_gram_loss_4: 76.67565155029297 +INFO:__main__:vgg_gram_loss_5: 0.16126571595668793 +INFO:__main__:vgg_loss_0: 13.592759132385254 +INFO:__main__:vgg_loss_1: 31.755699157714844 +INFO:__main__:vgg_loss_2: 41.26490020751953 +INFO:__main__:vgg_loss_3: 29.42359733581543 +INFO:__main__:vgg_loss_4: 51.78695297241211 +INFO:__main__:vgg_loss_5: 1.7158440351486206 +INFO:__main__:validation_loss: 2097.615478515625 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 35001 +INFO:__main__:kl_loss: 7987.68798828125 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1503.52880859375 +INFO:__main__:loss: 1503.5367431640625 +INFO:__main__:vgg_gram_loss_0: 9.600152969360352 +INFO:__main__:vgg_gram_loss_1: 18.655702590942383 +INFO:__main__:vgg_gram_loss_2: 18.9547176361084 +INFO:__main__:vgg_gram_loss_3: 16.299245834350586 +INFO:__main__:vgg_gram_loss_4: 74.2104721069336 +INFO:__main__:vgg_gram_loss_5: 0.18371054530143738 +INFO:__main__:vgg_loss_0: 12.533797264099121 +INFO:__main__:vgg_loss_1: 29.307140350341797 +INFO:__main__:vgg_loss_2: 40.745513916015625 +INFO:__main__:vgg_loss_3: 28.7817325592041 +INFO:__main__:vgg_loss_4: 49.73834991455078 +INFO:__main__:vgg_loss_5: 1.6951854228973389 +INFO:__main__:validation_loss: 1906.821044921875 +INFO:__main__:global_step: 35251 +INFO:__main__:kl_loss: 8501.720703125 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1648.5947265625 +INFO:__main__:loss: 1648.603271484375 +INFO:__main__:vgg_gram_loss_0: 10.086333274841309 +INFO:__main__:vgg_gram_loss_1: 22.291767120361328 +INFO:__main__:vgg_gram_loss_2: 22.4310359954834 +INFO:__main__:vgg_gram_loss_3: 19.004484176635742 +INFO:__main__:vgg_gram_loss_4: 89.95596313476562 +INFO:__main__:vgg_gram_loss_5: 0.18401210010051727 +INFO:__main__:vgg_loss_0: 11.595192909240723 +INFO:__main__:vgg_loss_1: 27.05489730834961 +INFO:__main__:vgg_loss_2: 40.13462448120117 +INFO:__main__:vgg_loss_3: 30.07659339904785 +INFO:__main__:vgg_loss_4: 55.03064727783203 +INFO:__main__:vgg_loss_5: 1.8733744621276855 +INFO:__main__:validation_loss: 1783.89013671875 +INFO:__main__:global_step: 35501 +INFO:__main__:kl_loss: 8287.830078125 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1577.493896484375 +INFO:__main__:loss: 1577.502197265625 +INFO:__main__:vgg_gram_loss_0: 10.192741394042969 +INFO:__main__:vgg_gram_loss_1: 20.201032638549805 +INFO:__main__:vgg_gram_loss_2: 21.043058395385742 +INFO:__main__:vgg_gram_loss_3: 17.567325592041016 +INFO:__main__:vgg_gram_loss_4: 82.69979095458984 +INFO:__main__:vgg_gram_loss_5: 0.17941467463970184 +INFO:__main__:vgg_loss_0: 11.9807767868042 +INFO:__main__:vgg_loss_1: 28.976285934448242 +INFO:__main__:vgg_loss_2: 40.867698669433594 +INFO:__main__:vgg_loss_3: 29.05699920654297 +INFO:__main__:vgg_loss_4: 51.03157424926758 +INFO:__main__:vgg_loss_5: 1.7021023035049438 +INFO:__main__:validation_loss: 1376.5712890625 +INFO:__main__:global_step: 35751 +INFO:__main__:kl_loss: 8451.173828125 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1559.8070068359375 +INFO:__main__:loss: 1559.8154296875 +INFO:__main__:vgg_gram_loss_0: 6.388759136199951 +INFO:__main__:vgg_gram_loss_1: 16.789342880249023 +INFO:__main__:vgg_gram_loss_2: 19.687841415405273 +INFO:__main__:vgg_gram_loss_3: 16.294286727905273 +INFO:__main__:vgg_gram_loss_4: 76.1805648803711 +INFO:__main__:vgg_gram_loss_5: 0.17934273183345795 +INFO:__main__:vgg_loss_0: 13.143385887145996 +INFO:__main__:vgg_loss_1: 29.143680572509766 +INFO:__main__:vgg_loss_2: 42.55323791503906 +INFO:__main__:vgg_loss_3: 31.910526275634766 +INFO:__main__:vgg_loss_4: 57.76618576049805 +INFO:__main__:vgg_loss_5: 1.9242489337921143 +INFO:__main__:validation_loss: 2288.017578125 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 36001 +INFO:__main__:kl_loss: 7553.9130859375 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1746.185302734375 +INFO:__main__:loss: 1746.19287109375 +INFO:__main__:vgg_gram_loss_0: 9.255935668945312 +INFO:__main__:vgg_gram_loss_1: 20.793149948120117 +INFO:__main__:vgg_gram_loss_2: 25.787322998046875 +INFO:__main__:vgg_gram_loss_3: 22.0164852142334 +INFO:__main__:vgg_gram_loss_4: 88.27578735351562 +INFO:__main__:vgg_gram_loss_5: 0.18374089896678925 +INFO:__main__:vgg_loss_0: 13.735885620117188 +INFO:__main__:vgg_loss_1: 34.52604293823242 +INFO:__main__:vgg_loss_2: 47.120784759521484 +INFO:__main__:vgg_loss_3: 31.969921112060547 +INFO:__main__:vgg_loss_4: 53.78569793701172 +INFO:__main__:vgg_loss_5: 1.7862669229507446 +INFO:__main__:validation_loss: 1900.9381103515625 +INFO:__main__:global_step: 36251 +INFO:__main__:kl_loss: 7772.583984375 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1503.9161376953125 +INFO:__main__:loss: 1503.9239501953125 +INFO:__main__:vgg_gram_loss_0: 4.804553031921387 +INFO:__main__:vgg_gram_loss_1: 14.753111839294434 +INFO:__main__:vgg_gram_loss_2: 21.612346649169922 +INFO:__main__:vgg_gram_loss_3: 16.24247932434082 +INFO:__main__:vgg_gram_loss_4: 74.70458221435547 +INFO:__main__:vgg_gram_loss_5: 0.17574135959148407 +INFO:__main__:vgg_loss_0: 12.151427268981934 +INFO:__main__:vgg_loss_1: 29.248201370239258 +INFO:__main__:vgg_loss_2: 41.967437744140625 +INFO:__main__:vgg_loss_3: 29.91663932800293 +INFO:__main__:vgg_loss_4: 53.397701263427734 +INFO:__main__:vgg_loss_5: 1.8090234994888306 +INFO:__main__:validation_loss: 1785.992431640625 +INFO:__main__:global_step: 36501 +INFO:__main__:kl_loss: 7209.14208984375 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1599.328369140625 +INFO:__main__:loss: 1599.3355712890625 +INFO:__main__:vgg_gram_loss_0: 7.356851100921631 +INFO:__main__:vgg_gram_loss_1: 20.298858642578125 +INFO:__main__:vgg_gram_loss_2: 21.875749588012695 +INFO:__main__:vgg_gram_loss_3: 17.733182907104492 +INFO:__main__:vgg_gram_loss_4: 80.12234497070312 +INFO:__main__:vgg_gram_loss_5: 0.19216053187847137 +INFO:__main__:vgg_loss_0: 12.841423034667969 +INFO:__main__:vgg_loss_1: 32.35765838623047 +INFO:__main__:vgg_loss_2: 42.91802978515625 +INFO:__main__:vgg_loss_3: 30.298646926879883 +INFO:__main__:vgg_loss_4: 52.12350082397461 +INFO:__main__:vgg_loss_5: 1.747230887413025 +INFO:__main__:validation_loss: 1815.7471923828125 +INFO:__main__:global_step: 36751 +INFO:__main__:kl_loss: 9152.4345703125 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1610.016845703125 +INFO:__main__:loss: 1610.0260009765625 +INFO:__main__:vgg_gram_loss_0: 7.259591579437256 +INFO:__main__:vgg_gram_loss_1: 26.751981735229492 +INFO:__main__:vgg_gram_loss_2: 24.15738868713379 +INFO:__main__:vgg_gram_loss_3: 19.41010093688965 +INFO:__main__:vgg_gram_loss_4: 76.37455749511719 +INFO:__main__:vgg_gram_loss_5: 0.15342143177986145 +INFO:__main__:vgg_loss_0: 13.114911079406738 +INFO:__main__:vgg_loss_1: 34.795658111572266 +INFO:__main__:vgg_loss_2: 42.658931732177734 +INFO:__main__:vgg_loss_3: 27.849403381347656 +INFO:__main__:vgg_loss_4: 47.87040710449219 +INFO:__main__:vgg_loss_5: 1.6070008277893066 +INFO:__main__:validation_loss: 1735.09716796875 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 37001 +INFO:__main__:kl_loss: 7648.14599609375 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1302.353759765625 +INFO:__main__:loss: 1302.3614501953125 +INFO:__main__:vgg_gram_loss_0: 7.602331638336182 +INFO:__main__:vgg_gram_loss_1: 14.79955005645752 +INFO:__main__:vgg_gram_loss_2: 18.46120262145996 +INFO:__main__:vgg_gram_loss_3: 14.231940269470215 +INFO:__main__:vgg_gram_loss_4: 65.24879455566406 +INFO:__main__:vgg_gram_loss_5: 0.14482997357845306 +INFO:__main__:vgg_loss_0: 9.934188842773438 +INFO:__main__:vgg_loss_1: 23.894054412841797 +INFO:__main__:vgg_loss_2: 34.83245849609375 +INFO:__main__:vgg_loss_3: 25.21493911743164 +INFO:__main__:vgg_loss_4: 44.64652633666992 +INFO:__main__:vgg_loss_5: 1.4599418640136719 +INFO:__main__:validation_loss: 1903.701416015625 +INFO:__main__:global_step: 37251 +INFO:__main__:kl_loss: 9111.056640625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1678.5345458984375 +INFO:__main__:loss: 1678.543701171875 +INFO:__main__:vgg_gram_loss_0: 6.289675235748291 +INFO:__main__:vgg_gram_loss_1: 29.444677352905273 +INFO:__main__:vgg_gram_loss_2: 33.45845031738281 +INFO:__main__:vgg_gram_loss_3: 23.495359420776367 +INFO:__main__:vgg_gram_loss_4: 77.74526977539062 +INFO:__main__:vgg_gram_loss_5: 0.16624127328395844 +INFO:__main__:vgg_loss_0: 11.726822853088379 +INFO:__main__:vgg_loss_1: 29.57559585571289 +INFO:__main__:vgg_loss_2: 41.910484313964844 +INFO:__main__:vgg_loss_3: 29.389162063598633 +INFO:__main__:vgg_loss_4: 50.77119064331055 +INFO:__main__:vgg_loss_5: 1.7339656352996826 +INFO:__main__:validation_loss: 2013.730224609375 +INFO:__main__:global_step: 37501 +INFO:__main__:kl_loss: 10201.34765625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1423.797119140625 +INFO:__main__:loss: 1423.807373046875 +INFO:__main__:vgg_gram_loss_0: 7.088632106781006 +INFO:__main__:vgg_gram_loss_1: 18.344762802124023 +INFO:__main__:vgg_gram_loss_2: 19.61504554748535 +INFO:__main__:vgg_gram_loss_3: 15.51315975189209 +INFO:__main__:vgg_gram_loss_4: 72.98799896240234 +INFO:__main__:vgg_gram_loss_5: 0.1583849936723709 +INFO:__main__:vgg_loss_0: 10.556436538696289 +INFO:__main__:vgg_loss_1: 26.214061737060547 +INFO:__main__:vgg_loss_2: 38.40428924560547 +INFO:__main__:vgg_loss_3: 27.458547592163086 +INFO:__main__:vgg_loss_4: 46.876461029052734 +INFO:__main__:vgg_loss_5: 1.5416526794433594 +INFO:__main__:validation_loss: 1880.207275390625 +INFO:__main__:global_step: 37751 +INFO:__main__:kl_loss: 8125.533203125 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1850.907958984375 +INFO:__main__:loss: 1850.9161376953125 +INFO:__main__:vgg_gram_loss_0: 11.285836219787598 +INFO:__main__:vgg_gram_loss_1: 30.047155380249023 +INFO:__main__:vgg_gram_loss_2: 29.37656021118164 +INFO:__main__:vgg_gram_loss_3: 22.120805740356445 +INFO:__main__:vgg_gram_loss_4: 91.99359130859375 +INFO:__main__:vgg_gram_loss_5: 0.1854284703731537 +INFO:__main__:vgg_loss_0: 13.541316986083984 +INFO:__main__:vgg_loss_1: 31.008007049560547 +INFO:__main__:vgg_loss_2: 45.592323303222656 +INFO:__main__:vgg_loss_3: 33.829322814941406 +INFO:__main__:vgg_loss_4: 59.274654388427734 +INFO:__main__:vgg_loss_5: 1.9265990257263184 +INFO:__main__:validation_loss: 1972.7264404296875 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 38001 +INFO:__main__:kl_loss: 9419.595703125 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1733.5814208984375 +INFO:__main__:loss: 1733.5908203125 +INFO:__main__:vgg_gram_loss_0: 12.361004829406738 +INFO:__main__:vgg_gram_loss_1: 23.803571701049805 +INFO:__main__:vgg_gram_loss_2: 23.572237014770508 +INFO:__main__:vgg_gram_loss_3: 18.59601402282715 +INFO:__main__:vgg_gram_loss_4: 86.97547912597656 +INFO:__main__:vgg_gram_loss_5: 0.19118832051753998 +INFO:__main__:vgg_loss_0: 12.74040412902832 +INFO:__main__:vgg_loss_1: 30.129796981811523 +INFO:__main__:vgg_loss_2: 45.47703170776367 +INFO:__main__:vgg_loss_3: 32.6150016784668 +INFO:__main__:vgg_loss_4: 58.343318939208984 +INFO:__main__:vgg_loss_5: 1.9112049341201782 +INFO:__main__:validation_loss: 2338.38916015625 +INFO:__main__:global_step: 38251 +INFO:__main__:kl_loss: 9518.783203125 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 2060.427490234375 +INFO:__main__:loss: 2060.43701171875 +INFO:__main__:vgg_gram_loss_0: 10.313047409057617 +INFO:__main__:vgg_gram_loss_1: 27.976511001586914 +INFO:__main__:vgg_gram_loss_2: 38.32465362548828 +INFO:__main__:vgg_gram_loss_3: 27.022119522094727 +INFO:__main__:vgg_gram_loss_4: 97.69241333007812 +INFO:__main__:vgg_gram_loss_5: 0.2003655731678009 +INFO:__main__:vgg_loss_0: 15.831891059875488 +INFO:__main__:vgg_loss_1: 39.451725006103516 +INFO:__main__:vgg_loss_2: 54.24762725830078 +INFO:__main__:vgg_loss_3: 36.66573715209961 +INFO:__main__:vgg_loss_4: 62.35550308227539 +INFO:__main__:vgg_loss_5: 2.00393009185791 +INFO:__main__:validation_loss: 2011.1722412109375 +INFO:__main__:global_step: 38501 +INFO:__main__:kl_loss: 9474.318359375 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1508.028564453125 +INFO:__main__:loss: 1508.0380859375 +INFO:__main__:vgg_gram_loss_0: 7.7902727127075195 +INFO:__main__:vgg_gram_loss_1: 22.38382911682129 +INFO:__main__:vgg_gram_loss_2: 20.956754684448242 +INFO:__main__:vgg_gram_loss_3: 19.129777908325195 +INFO:__main__:vgg_gram_loss_4: 75.7132797241211 +INFO:__main__:vgg_gram_loss_5: 0.17522607743740082 +INFO:__main__:vgg_loss_0: 10.89272403717041 +INFO:__main__:vgg_loss_1: 26.571640014648438 +INFO:__main__:vgg_loss_2: 38.16643524169922 +INFO:__main__:vgg_loss_3: 28.097518920898438 +INFO:__main__:vgg_loss_4: 50.052101135253906 +INFO:__main__:vgg_loss_5: 1.6761561632156372 +INFO:__main__:validation_loss: 1843.180419921875 +INFO:__main__:global_step: 38751 +INFO:__main__:kl_loss: 9249.4697265625 +INFO:__main__:kl_weight: 9.999999974752427e-07 +INFO:__main__:likelihood_loss: 1742.8668212890625 +INFO:__main__:loss: 1742.8760986328125 +INFO:__main__:vgg_gram_loss_0: 7.83588171005249 +INFO:__main__:vgg_gram_loss_1: 20.856687545776367 +INFO:__main__:vgg_gram_loss_2: 33.193946838378906 +INFO:__main__:vgg_gram_loss_3: 26.880416870117188 +INFO:__main__:vgg_gram_loss_4: 87.89495086669922 +INFO:__main__:vgg_gram_loss_5: 0.19870135188102722 +INFO:__main__:vgg_loss_0: 11.750406265258789 +INFO:__main__:vgg_loss_1: 29.24363136291504 +INFO:__main__:vgg_loss_2: 43.41839599609375 +INFO:__main__:vgg_loss_3: 31.520845413208008 +INFO:__main__:vgg_loss_4: 53.951114654541016 +INFO:__main__:vgg_loss_5: 1.8283920288085938 +INFO:__main__:validation_loss: 2239.486083984375 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt
[Condensed training log, global_step 39001-49751: every 250 steps the script reports global_step, kl_loss, kl_weight, likelihood_loss, loss, vgg_loss_0-5, vgg_gram_loss_0-5, and validation_loss, and every 1000 steps it logs "Saved model to /net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt". Over this window kl_weight stays clamped at ~1e-6, kl_loss fluctuates between roughly 7500 and 12500, likelihood_loss between roughly 1170 and 1880, and validation_loss oscillates between roughly 1600 and 2310 with no clear trend.]
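The totals in this log are internally consistent with the objective loss = likelihood_loss + kl_weight * kl_loss; how the per-layer vgg_loss_* and vgg_gram_loss_* terms are weighted into likelihood_loss is not recoverable from the log alone. The snippet below is a minimal sketch, not repository code, that checks this relation against values copied from the original log's step-39001 entry:

    # Minimal sketch, not repository code: checks that the logged totals are
    # consistent with loss == likelihood_loss + kl_weight * kl_loss.
    # Values copied from the step-39001 entry of the original log.
    kl_loss = 7569.99658203125
    kl_weight = 9.999999974752427e-07
    likelihood_loss = 1423.7969970703125
    logged_loss = 1423.8045654296875

    reconstructed = likelihood_loss + kl_weight * kl_loss
    # With kl_weight ~= 1e-6 the KL term contributes only ~0.0076 to the total.
    assert abs(reconstructed - logged_loss) < 1e-3, reconstructed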
[Condensed training log, global_step 50001-59751, same report format and checkpoint cadence as above: KL annealing starts after step 50001; kl_weight jumps from ~1e-6 to ~0.01 at step 50251 and then rises by 0.01 every 250 steps, reaching ~0.39 at step 59751. Under the growing weight, kl_loss collapses from ~10500 down to the 180-370 range, likelihood_loss stays between roughly 1240 and 1950, and validation_loss keeps oscillating between roughly 1590 and 2470.]
+INFO:__main__:Saved model to
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 60001 +INFO:__main__:kl_loss: 197.79312133789062 +INFO:__main__:kl_weight: 0.40000060200691223 +INFO:__main__:likelihood_loss: 1480.00732421875 +INFO:__main__:loss: 1559.124755859375 +INFO:__main__:vgg_gram_loss_0: 8.505050659179688 +INFO:__main__:vgg_gram_loss_1: 16.957721710205078 +INFO:__main__:vgg_gram_loss_2: 19.72979736328125 +INFO:__main__:vgg_gram_loss_3: 15.914692878723145 +INFO:__main__:vgg_gram_loss_4: 73.58789825439453 +INFO:__main__:vgg_gram_loss_5: 0.15875029563903809 +INFO:__main__:vgg_loss_0: 12.003179550170898 +INFO:__main__:vgg_loss_1: 26.718629837036133 +INFO:__main__:vgg_loss_2: 38.910369873046875 +INFO:__main__:vgg_loss_3: 28.952842712402344 +INFO:__main__:vgg_loss_4: 52.769004821777344 +INFO:__main__:vgg_loss_5: 1.7935148477554321 +INFO:__main__:validation_loss: 2121.648681640625 +INFO:__main__:global_step: 60251 +INFO:__main__:kl_loss: 181.4539031982422 +INFO:__main__:kl_weight: 0.41000059247016907 +INFO:__main__:likelihood_loss: 1709.8804931640625 +INFO:__main__:loss: 1784.2767333984375 +INFO:__main__:vgg_gram_loss_0: 8.733294486999512 +INFO:__main__:vgg_gram_loss_1: 17.923580169677734 +INFO:__main__:vgg_gram_loss_2: 23.080129623413086 +INFO:__main__:vgg_gram_loss_3: 19.587923049926758 +INFO:__main__:vgg_gram_loss_4: 86.67509460449219 +INFO:__main__:vgg_gram_loss_5: 0.20076461136341095 +INFO:__main__:vgg_loss_0: 12.751214981079102 +INFO:__main__:vgg_loss_1: 31.6510009765625 +INFO:__main__:vgg_loss_2: 44.42028045654297 +INFO:__main__:vgg_loss_3: 33.183746337890625 +INFO:__main__:vgg_loss_4: 61.778175354003906 +INFO:__main__:vgg_loss_5: 1.9908862113952637 +INFO:__main__:validation_loss: 1997.012939453125 +INFO:__main__:global_step: 60501 +INFO:__main__:kl_loss: 170.43324279785156 +INFO:__main__:kl_weight: 0.4200005829334259 +INFO:__main__:likelihood_loss: 1556.390869140625 +INFO:__main__:loss: 1627.972900390625 +INFO:__main__:vgg_gram_loss_0: 10.932792663574219 +INFO:__main__:vgg_gram_loss_1: 22.852108001708984 +INFO:__main__:vgg_gram_loss_2: 23.604936599731445 +INFO:__main__:vgg_gram_loss_3: 18.7296199798584 +INFO:__main__:vgg_gram_loss_4: 75.07914733886719 +INFO:__main__:vgg_gram_loss_5: 0.17030248045921326 +INFO:__main__:vgg_loss_0: 10.895041465759277 +INFO:__main__:vgg_loss_1: 25.68528175354004 +INFO:__main__:vgg_loss_2: 39.50252151489258 +INFO:__main__:vgg_loss_3: 29.35719108581543 +INFO:__main__:vgg_loss_4: 52.71746063232422 +INFO:__main__:vgg_loss_5: 1.751753568649292 +INFO:__main__:validation_loss: 1990.8094482421875 +INFO:__main__:global_step: 60751 +INFO:__main__:kl_loss: 183.24961853027344 +INFO:__main__:kl_weight: 0.43000057339668274 +INFO:__main__:likelihood_loss: 2108.6884765625 +INFO:__main__:loss: 2187.48583984375 +INFO:__main__:vgg_gram_loss_0: 9.331361770629883 +INFO:__main__:vgg_gram_loss_1: 32.97922897338867 +INFO:__main__:vgg_gram_loss_2: 31.343585968017578 +INFO:__main__:vgg_gram_loss_3: 25.617319107055664 +INFO:__main__:vgg_gram_loss_4: 102.14390563964844 +INFO:__main__:vgg_gram_loss_5: 0.19482062757015228 +INFO:__main__:vgg_loss_0: 18.63156509399414 +INFO:__main__:vgg_loss_1: 41.982215881347656 +INFO:__main__:vgg_loss_2: 55.34001922607422 +INFO:__main__:vgg_loss_3: 38.46071243286133 +INFO:__main__:vgg_loss_4: 63.79774475097656 +INFO:__main__:vgg_loss_5: 1.9152261018753052 +INFO:__main__:validation_loss: 1520.69873046875 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 61001 +INFO:__main__:kl_loss: 167.36868286132812 +INFO:__main__:kl_weight: 0.4400005638599396 +INFO:__main__:likelihood_loss: 1539.850830078125 +INFO:__main__:loss: 1613.4931640625 +INFO:__main__:vgg_gram_loss_0: 9.395010948181152 +INFO:__main__:vgg_gram_loss_1: 21.405475616455078 +INFO:__main__:vgg_gram_loss_2: 24.553144454956055 +INFO:__main__:vgg_gram_loss_3: 17.760648727416992 +INFO:__main__:vgg_gram_loss_4: 74.90666961669922 +INFO:__main__:vgg_gram_loss_5: 0.1572108119726181 +INFO:__main__:vgg_loss_0: 11.96183967590332 +INFO:__main__:vgg_loss_1: 29.305347442626953 +INFO:__main__:vgg_loss_2: 40.379425048828125 +INFO:__main__:vgg_loss_3: 27.906919479370117 +INFO:__main__:vgg_loss_4: 48.647972106933594 +INFO:__main__:vgg_loss_5: 1.5904710292816162 +INFO:__main__:validation_loss: 1659.5078125 +INFO:__main__:global_step: 61251 +INFO:__main__:kl_loss: 150.88949584960938 +INFO:__main__:kl_weight: 0.4500005543231964 +INFO:__main__:likelihood_loss: 1341.9801025390625 +INFO:__main__:loss: 1409.8804931640625 +INFO:__main__:vgg_gram_loss_0: 7.739903926849365 +INFO:__main__:vgg_gram_loss_1: 17.0947208404541 +INFO:__main__:vgg_gram_loss_2: 18.212779998779297 +INFO:__main__:vgg_gram_loss_3: 14.0092134475708 +INFO:__main__:vgg_gram_loss_4: 65.4217758178711 +INFO:__main__:vgg_gram_loss_5: 0.1537393480539322 +INFO:__main__:vgg_loss_0: 9.86634349822998 +INFO:__main__:vgg_loss_1: 24.50064468383789 +INFO:__main__:vgg_loss_2: 36.40787124633789 +INFO:__main__:vgg_loss_3: 26.246822357177734 +INFO:__main__:vgg_loss_4: 47.179168701171875 +INFO:__main__:vgg_loss_5: 1.5630505084991455 +INFO:__main__:validation_loss: 2023.6031494140625 +INFO:__main__:global_step: 61501 +INFO:__main__:kl_loss: 172.0615234375 +INFO:__main__:kl_weight: 0.46000054478645325 +INFO:__main__:likelihood_loss: 1423.3935546875 +INFO:__main__:loss: 1502.5419921875 +INFO:__main__:vgg_gram_loss_0: 6.877858638763428 +INFO:__main__:vgg_gram_loss_1: 18.043676376342773 +INFO:__main__:vgg_gram_loss_2: 20.73009490966797 +INFO:__main__:vgg_gram_loss_3: 17.797643661499023 +INFO:__main__:vgg_gram_loss_4: 74.64494323730469 +INFO:__main__:vgg_gram_loss_5: 0.1636204570531845 +INFO:__main__:vgg_loss_0: 9.85766315460205 +INFO:__main__:vgg_loss_1: 23.75548553466797 +INFO:__main__:vgg_loss_2: 35.95502853393555 +INFO:__main__:vgg_loss_3: 27.236677169799805 +INFO:__main__:vgg_loss_4: 48.01709747314453 +INFO:__main__:vgg_loss_5: 1.598944067955017 +INFO:__main__:validation_loss: 2065.132080078125 +INFO:__main__:global_step: 61751 +INFO:__main__:kl_loss: 153.73365783691406 +INFO:__main__:kl_weight: 0.4700005352497101 +INFO:__main__:likelihood_loss: 1526.9849853515625 +INFO:__main__:loss: 1599.2398681640625 +INFO:__main__:vgg_gram_loss_0: 7.539271831512451 +INFO:__main__:vgg_gram_loss_1: 17.918149948120117 +INFO:__main__:vgg_gram_loss_2: 24.20075798034668 +INFO:__main__:vgg_gram_loss_3: 18.901962280273438 +INFO:__main__:vgg_gram_loss_4: 77.95181274414062 +INFO:__main__:vgg_gram_loss_5: 0.16425113379955292 +INFO:__main__:vgg_loss_0: 11.178567886352539 +INFO:__main__:vgg_loss_1: 28.170799255371094 +INFO:__main__:vgg_loss_2: 40.80046463012695 +INFO:__main__:vgg_loss_3: 28.324134826660156 +INFO:__main__:vgg_loss_4: 48.68288040161133 +INFO:__main__:vgg_loss_5: 1.5639485120773315 +INFO:__main__:validation_loss: 1952.921142578125 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 62001 +INFO:__main__:kl_loss: 157.3220977783203 +INFO:__main__:kl_weight: 0.4800005257129669 +INFO:__main__:likelihood_loss: 1395.3570556640625 +INFO:__main__:loss: 1470.8717041015625 +INFO:__main__:vgg_gram_loss_0: 8.790541648864746 +INFO:__main__:vgg_gram_loss_1: 16.917423248291016 +INFO:__main__:vgg_gram_loss_2: 19.832860946655273 +INFO:__main__:vgg_gram_loss_3: 15.707832336425781 +INFO:__main__:vgg_gram_loss_4: 69.8316650390625 +INFO:__main__:vgg_gram_loss_5: 0.15188977122306824 +INFO:__main__:vgg_loss_0: 10.436554908752441 +INFO:__main__:vgg_loss_1: 25.277132034301758 +INFO:__main__:vgg_loss_2: 36.39839172363281 +INFO:__main__:vgg_loss_3: 26.555042266845703 +INFO:__main__:vgg_loss_4: 47.579185485839844 +INFO:__main__:vgg_loss_5: 1.5928845405578613 +INFO:__main__:validation_loss: 1986.4376220703125 +INFO:__main__:global_step: 62251 +INFO:__main__:kl_loss: 155.29299926757812 +INFO:__main__:kl_weight: 0.49000051617622375 +INFO:__main__:likelihood_loss: 1563.59130859375 +INFO:__main__:loss: 1639.6849365234375 +INFO:__main__:vgg_gram_loss_0: 5.029366970062256 +INFO:__main__:vgg_gram_loss_1: 19.014877319335938 +INFO:__main__:vgg_gram_loss_2: 24.09842872619629 +INFO:__main__:vgg_gram_loss_3: 17.659481048583984 +INFO:__main__:vgg_gram_loss_4: 78.6868896484375 +INFO:__main__:vgg_gram_loss_5: 0.18552082777023315 +INFO:__main__:vgg_loss_0: 11.27612018585205 +INFO:__main__:vgg_loss_1: 29.093385696411133 +INFO:__main__:vgg_loss_2: 42.61553192138672 +INFO:__main__:vgg_loss_3: 30.585060119628906 +INFO:__main__:vgg_loss_4: 52.72942352294922 +INFO:__main__:vgg_loss_5: 1.7441712617874146 +INFO:__main__:validation_loss: 1859.8572998046875 +INFO:__main__:global_step: 62501 +INFO:__main__:kl_loss: 181.54559326171875 +INFO:__main__:kl_weight: 0.5000004768371582 +INFO:__main__:likelihood_loss: 1895.2548828125 +INFO:__main__:loss: 1986.0277099609375 +INFO:__main__:vgg_gram_loss_0: 7.939996242523193 +INFO:__main__:vgg_gram_loss_1: 19.90495491027832 +INFO:__main__:vgg_gram_loss_2: 26.79852294921875 +INFO:__main__:vgg_gram_loss_3: 21.490812301635742 +INFO:__main__:vgg_gram_loss_4: 96.40921783447266 +INFO:__main__:vgg_gram_loss_5: 0.2147810310125351 +INFO:__main__:vgg_loss_0: 14.152655601501465 +INFO:__main__:vgg_loss_1: 34.47138595581055 +INFO:__main__:vgg_loss_2: 52.4765625 +INFO:__main__:vgg_loss_3: 37.76268768310547 +INFO:__main__:vgg_loss_4: 65.34140014648438 +INFO:__main__:vgg_loss_5: 2.0880179405212402 +INFO:__main__:validation_loss: 2046.990478515625 +INFO:__main__:global_step: 62751 +INFO:__main__:kl_loss: 154.36138916015625 +INFO:__main__:kl_weight: 0.510000467300415 +INFO:__main__:likelihood_loss: 1456.501220703125 +INFO:__main__:loss: 1535.2255859375 +INFO:__main__:vgg_gram_loss_0: 5.498467445373535 +INFO:__main__:vgg_gram_loss_1: 17.481721878051758 +INFO:__main__:vgg_gram_loss_2: 20.46467399597168 +INFO:__main__:vgg_gram_loss_3: 17.913406372070312 +INFO:__main__:vgg_gram_loss_4: 73.32341003417969 +INFO:__main__:vgg_gram_loss_5: 0.14709335565567017 +INFO:__main__:vgg_loss_0: 12.278597831726074 +INFO:__main__:vgg_loss_1: 29.25261116027832 +INFO:__main__:vgg_loss_2: 38.8270263671875 +INFO:__main__:vgg_loss_3: 27.46396255493164 +INFO:__main__:vgg_loss_4: 47.117122650146484 +INFO:__main__:vgg_loss_5: 1.532151460647583 +INFO:__main__:validation_loss: 1875.308837890625 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 63001 +INFO:__main__:kl_loss: 158.30812072753906 +INFO:__main__:kl_weight: 0.5200004577636719 +INFO:__main__:likelihood_loss: 1332.3291015625 +INFO:__main__:loss: 1414.6494140625 +INFO:__main__:vgg_gram_loss_0: 6.255046844482422 +INFO:__main__:vgg_gram_loss_1: 14.249056816101074 +INFO:__main__:vgg_gram_loss_2: 16.584993362426758 +INFO:__main__:vgg_gram_loss_3: 13.93603801727295 +INFO:__main__:vgg_gram_loss_4: 71.7183609008789 +INFO:__main__:vgg_gram_loss_5: 0.15620115399360657 +INFO:__main__:vgg_loss_0: 9.447062492370605 +INFO:__main__:vgg_loss_1: 24.19516372680664 +INFO:__main__:vgg_loss_2: 35.44914627075195 +INFO:__main__:vgg_loss_3: 25.659456253051758 +INFO:__main__:vgg_loss_4: 47.22391891479492 +INFO:__main__:vgg_loss_5: 1.5913810729980469 +INFO:__main__:validation_loss: 1799.3363037109375 +INFO:__main__:global_step: 63251 +INFO:__main__:kl_loss: 148.7079315185547 +INFO:__main__:kl_weight: 0.5300004482269287 +INFO:__main__:likelihood_loss: 1577.3358154296875 +INFO:__main__:loss: 1656.151123046875 +INFO:__main__:vgg_gram_loss_0: 7.008537292480469 +INFO:__main__:vgg_gram_loss_1: 19.99354362487793 +INFO:__main__:vgg_gram_loss_2: 21.231698989868164 +INFO:__main__:vgg_gram_loss_3: 17.07937240600586 +INFO:__main__:vgg_gram_loss_4: 80.08322143554688 +INFO:__main__:vgg_gram_loss_5: 0.183013454079628 +INFO:__main__:vgg_loss_0: 12.919233322143555 +INFO:__main__:vgg_loss_1: 29.230680465698242 +INFO:__main__:vgg_loss_2: 41.317047119140625 +INFO:__main__:vgg_loss_3: 30.217702865600586 +INFO:__main__:vgg_loss_4: 54.374755859375 +INFO:__main__:vgg_loss_5: 1.8283731937408447 +INFO:__main__:validation_loss: 1956.83349609375 +INFO:__main__:global_step: 63501 +INFO:__main__:kl_loss: 148.82266235351562 +INFO:__main__:kl_weight: 0.5400004386901855 +INFO:__main__:likelihood_loss: 1506.35009765625 +INFO:__main__:loss: 1586.71435546875 +INFO:__main__:vgg_gram_loss_0: 11.792441368103027 +INFO:__main__:vgg_gram_loss_1: 22.938583374023438 +INFO:__main__:vgg_gram_loss_2: 22.563390731811523 +INFO:__main__:vgg_gram_loss_3: 17.500226974487305 +INFO:__main__:vgg_gram_loss_4: 72.64840698242188 +INFO:__main__:vgg_gram_loss_5: 0.1615828573703766 +INFO:__main__:vgg_loss_0: 10.978843688964844 +INFO:__main__:vgg_loss_1: 26.082365036010742 +INFO:__main__:vgg_loss_2: 38.441402435302734 +INFO:__main__:vgg_loss_3: 28.088895797729492 +INFO:__main__:vgg_loss_4: 48.461326599121094 +INFO:__main__:vgg_loss_5: 1.6125644445419312 +INFO:__main__:validation_loss: 1992.5570068359375 +INFO:__main__:global_step: 63751 +INFO:__main__:kl_loss: 144.92318725585938 +INFO:__main__:kl_weight: 0.5500004291534424 +INFO:__main__:likelihood_loss: 1538.49755859375 +INFO:__main__:loss: 1618.205322265625 +INFO:__main__:vgg_gram_loss_0: 9.499394416809082 +INFO:__main__:vgg_gram_loss_1: 17.774600982666016 +INFO:__main__:vgg_gram_loss_2: 20.655994415283203 +INFO:__main__:vgg_gram_loss_3: 16.993106842041016 +INFO:__main__:vgg_gram_loss_4: 77.11156463623047 +INFO:__main__:vgg_gram_loss_5: 0.17355114221572876 +INFO:__main__:vgg_loss_0: 11.731160163879395 +INFO:__main__:vgg_loss_1: 27.712064743041992 +INFO:__main__:vgg_loss_2: 41.247802734375 +INFO:__main__:vgg_loss_3: 30.156343460083008 +INFO:__main__:vgg_loss_4: 52.89293670654297 +INFO:__main__:vgg_loss_5: 1.7510243654251099 +INFO:__main__:validation_loss: 1677.9342041015625 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 64001 +INFO:__main__:kl_loss: 130.691650390625 +INFO:__main__:kl_weight: 0.5600004196166992 +INFO:__main__:likelihood_loss: 1296.4849853515625 +INFO:__main__:loss: 1369.67236328125 +INFO:__main__:vgg_gram_loss_0: 4.836056709289551 +INFO:__main__:vgg_gram_loss_1: 15.333206176757812 +INFO:__main__:vgg_gram_loss_2: 18.758073806762695 +INFO:__main__:vgg_gram_loss_3: 14.820637702941895 +INFO:__main__:vgg_gram_loss_4: 63.64361572265625 +INFO:__main__:vgg_gram_loss_5: 0.1568421572446823 +INFO:__main__:vgg_loss_0: 9.76473617553711 +INFO:__main__:vgg_loss_1: 23.370521545410156 +INFO:__main__:vgg_loss_2: 35.16050720214844 +INFO:__main__:vgg_loss_3: 25.989593505859375 +INFO:__main__:vgg_loss_4: 45.89315414428711 +INFO:__main__:vgg_loss_5: 1.570067286491394 +INFO:__main__:validation_loss: 1913.1480712890625 +INFO:__main__:global_step: 64251 +INFO:__main__:kl_loss: 131.55601501464844 +INFO:__main__:kl_weight: 0.570000410079956 +INFO:__main__:likelihood_loss: 1356.806396484375 +INFO:__main__:loss: 1431.7933349609375 +INFO:__main__:vgg_gram_loss_0: 5.879093170166016 +INFO:__main__:vgg_gram_loss_1: 12.707127571105957 +INFO:__main__:vgg_gram_loss_2: 16.545490264892578 +INFO:__main__:vgg_gram_loss_3: 14.249430656433105 +INFO:__main__:vgg_gram_loss_4: 71.69304656982422 +INFO:__main__:vgg_gram_loss_5: 0.17117194831371307 +INFO:__main__:vgg_loss_0: 10.482495307922363 +INFO:__main__:vgg_loss_1: 25.258399963378906 +INFO:__main__:vgg_loss_2: 37.025882720947266 +INFO:__main__:vgg_loss_3: 27.014118194580078 +INFO:__main__:vgg_loss_4: 48.70873260498047 +INFO:__main__:vgg_loss_5: 1.6262907981872559 +INFO:__main__:validation_loss: 1797.1842041015625 +INFO:__main__:global_step: 64501 +INFO:__main__:kl_loss: 142.2903289794922 +INFO:__main__:kl_weight: 0.5800004005432129 +INFO:__main__:likelihood_loss: 1376.1630859375 +INFO:__main__:loss: 1458.6915283203125 +INFO:__main__:vgg_gram_loss_0: 4.241140842437744 +INFO:__main__:vgg_gram_loss_1: 17.07489585876465 +INFO:__main__:vgg_gram_loss_2: 20.38210105895996 +INFO:__main__:vgg_gram_loss_3: 16.10785484313965 +INFO:__main__:vgg_gram_loss_4: 68.23886108398438 +INFO:__main__:vgg_gram_loss_5: 0.16461364924907684 +INFO:__main__:vgg_loss_0: 9.85578727722168 +INFO:__main__:vgg_loss_1: 24.325496673583984 +INFO:__main__:vgg_loss_2: 36.98074722290039 +INFO:__main__:vgg_loss_3: 27.342815399169922 +INFO:__main__:vgg_loss_4: 48.85261154174805 +INFO:__main__:vgg_loss_5: 1.6656838655471802 +INFO:__main__:validation_loss: 1885.7972412109375 +INFO:__main__:global_step: 64751 +INFO:__main__:kl_loss: 149.5614471435547 +INFO:__main__:kl_weight: 0.5900003910064697 +INFO:__main__:likelihood_loss: 1452.321533203125 +INFO:__main__:loss: 1540.5628662109375 +INFO:__main__:vgg_gram_loss_0: 9.444628715515137 +INFO:__main__:vgg_gram_loss_1: 17.266794204711914 +INFO:__main__:vgg_gram_loss_2: 20.748369216918945 +INFO:__main__:vgg_gram_loss_3: 15.716171264648438 +INFO:__main__:vgg_gram_loss_4: 73.0754623413086 +INFO:__main__:vgg_gram_loss_5: 0.16180644929409027 +INFO:__main__:vgg_loss_0: 10.525382041931152 +INFO:__main__:vgg_loss_1: 25.975059509277344 +INFO:__main__:vgg_loss_2: 38.856319427490234 +INFO:__main__:vgg_loss_3: 27.922163009643555 +INFO:__main__:vgg_loss_4: 49.148658752441406 +INFO:__main__:vgg_loss_5: 1.6234883069992065 +INFO:__main__:validation_loss: 1734.2523193359375 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 65001 +INFO:__main__:kl_loss: 130.78785705566406 +INFO:__main__:kl_weight: 0.6000003814697266 +INFO:__main__:likelihood_loss: 1521.681396484375 +INFO:__main__:loss: 1600.1541748046875 +INFO:__main__:vgg_gram_loss_0: 5.774623394012451 +INFO:__main__:vgg_gram_loss_1: 15.3505277633667 +INFO:__main__:vgg_gram_loss_2: 22.47835350036621 +INFO:__main__:vgg_gram_loss_3: 17.383214950561523 +INFO:__main__:vgg_gram_loss_4: 76.5810775756836 +INFO:__main__:vgg_gram_loss_5: 0.16186819970607758 +INFO:__main__:vgg_loss_0: 11.943792343139648 +INFO:__main__:vgg_loss_1: 28.103734970092773 +INFO:__main__:vgg_loss_2: 41.24433898925781 +INFO:__main__:vgg_loss_3: 29.874759674072266 +INFO:__main__:vgg_loss_4: 53.72663116455078 +INFO:__main__:vgg_loss_5: 1.7133642435073853 +INFO:__main__:validation_loss: 1988.6507568359375 +INFO:__main__:global_step: 65251 +INFO:__main__:kl_loss: 133.06964111328125 +INFO:__main__:kl_weight: 0.6100003719329834 +INFO:__main__:likelihood_loss: 1607.8204345703125 +INFO:__main__:loss: 1688.992919921875 +INFO:__main__:vgg_gram_loss_0: 7.787967205047607 +INFO:__main__:vgg_gram_loss_1: 18.953765869140625 +INFO:__main__:vgg_gram_loss_2: 24.768461227416992 +INFO:__main__:vgg_gram_loss_3: 19.00188446044922 +INFO:__main__:vgg_gram_loss_4: 77.5338363647461 +INFO:__main__:vgg_gram_loss_5: 0.17573629319667816 +INFO:__main__:vgg_loss_0: 13.058457374572754 +INFO:__main__:vgg_loss_1: 32.80005645751953 +INFO:__main__:vgg_loss_2: 43.70207214355469 +INFO:__main__:vgg_loss_3: 30.06410026550293 +INFO:__main__:vgg_loss_4: 51.99100875854492 +INFO:__main__:vgg_loss_5: 1.7267571687698364 +INFO:__main__:validation_loss: 2065.5517578125 +INFO:__main__:global_step: 65501 +INFO:__main__:kl_loss: 137.739501953125 +INFO:__main__:kl_weight: 0.6200003623962402 +INFO:__main__:likelihood_loss: 1674.1041259765625 +INFO:__main__:loss: 1759.502685546875 +INFO:__main__:vgg_gram_loss_0: 7.077807903289795 +INFO:__main__:vgg_gram_loss_1: 17.701295852661133 +INFO:__main__:vgg_gram_loss_2: 21.997289657592773 +INFO:__main__:vgg_gram_loss_3: 19.126741409301758 +INFO:__main__:vgg_gram_loss_4: 86.34031677246094 +INFO:__main__:vgg_gram_loss_5: 0.19935135543346405 +INFO:__main__:vgg_loss_0: 12.725826263427734 +INFO:__main__:vgg_loss_1: 29.975820541381836 +INFO:__main__:vgg_loss_2: 44.98339080810547 +INFO:__main__:vgg_loss_3: 33.735801696777344 +INFO:__main__:vgg_loss_4: 59.039859771728516 +INFO:__main__:vgg_loss_5: 1.9173345565795898 +INFO:__main__:validation_loss: 1923.3988037109375 +INFO:__main__:global_step: 65751 +INFO:__main__:kl_loss: 143.68910217285156 +INFO:__main__:kl_weight: 0.6300003528594971 +INFO:__main__:likelihood_loss: 1703.1085205078125 +INFO:__main__:loss: 1793.6326904296875 +INFO:__main__:vgg_gram_loss_0: 4.728102207183838 +INFO:__main__:vgg_gram_loss_1: 21.864030838012695 +INFO:__main__:vgg_gram_loss_2: 25.741498947143555 +INFO:__main__:vgg_gram_loss_3: 20.477821350097656 +INFO:__main__:vgg_gram_loss_4: 85.23058319091797 +INFO:__main__:vgg_gram_loss_5: 0.18515996634960175 +INFO:__main__:vgg_loss_0: 12.521238327026367 +INFO:__main__:vgg_loss_1: 30.961219787597656 +INFO:__main__:vgg_loss_2: 47.269073486328125 +INFO:__main__:vgg_loss_3: 33.10855484008789 +INFO:__main__:vgg_loss_4: 56.687747955322266 +INFO:__main__:vgg_loss_5: 1.8466848134994507 +INFO:__main__:validation_loss: 2012.11572265625 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 66001 +INFO:__main__:kl_loss: 140.4301300048828 +INFO:__main__:kl_weight: 0.6400003433227539 +INFO:__main__:likelihood_loss: 1508.233154296875 +INFO:__main__:loss: 1598.1085205078125 +INFO:__main__:vgg_gram_loss_0: 6.333065032958984 +INFO:__main__:vgg_gram_loss_1: 15.689684867858887 +INFO:__main__:vgg_gram_loss_2: 18.358047485351562 +INFO:__main__:vgg_gram_loss_3: 16.155691146850586 +INFO:__main__:vgg_gram_loss_4: 77.42984008789062 +INFO:__main__:vgg_gram_loss_5: 0.17993436753749847 +INFO:__main__:vgg_loss_0: 11.451391220092773 +INFO:__main__:vgg_loss_1: 29.1550350189209 +INFO:__main__:vgg_loss_2: 42.23038864135742 +INFO:__main__:vgg_loss_3: 29.86561393737793 +INFO:__main__:vgg_loss_4: 53.03596496582031 +INFO:__main__:vgg_loss_5: 1.761966347694397 +INFO:__main__:validation_loss: 2053.955810546875 +INFO:__main__:global_step: 66251 +INFO:__main__:kl_loss: 138.91708374023438 +INFO:__main__:kl_weight: 0.6500003337860107 +INFO:__main__:likelihood_loss: 1539.1015625 +INFO:__main__:loss: 1629.397705078125 +INFO:__main__:vgg_gram_loss_0: 8.766200065612793 +INFO:__main__:vgg_gram_loss_1: 17.94972801208496 +INFO:__main__:vgg_gram_loss_2: 23.18792724609375 +INFO:__main__:vgg_gram_loss_3: 18.552114486694336 +INFO:__main__:vgg_gram_loss_4: 78.57548522949219 +INFO:__main__:vgg_gram_loss_5: 0.16626155376434326 +INFO:__main__:vgg_loss_0: 10.935999870300293 +INFO:__main__:vgg_loss_1: 26.230270385742188 +INFO:__main__:vgg_loss_2: 40.5624885559082 +INFO:__main__:vgg_loss_3: 29.772716522216797 +INFO:__main__:vgg_loss_4: 51.44537353515625 +INFO:__main__:vgg_loss_5: 1.6757436990737915 +INFO:__main__:validation_loss: 2003.36767578125 +INFO:__main__:global_step: 66501 +INFO:__main__:kl_loss: 132.59556579589844 +INFO:__main__:kl_weight: 0.6600003242492676 +INFO:__main__:likelihood_loss: 1206.0162353515625 +INFO:__main__:loss: 1293.529296875 +INFO:__main__:vgg_gram_loss_0: 4.618630409240723 +INFO:__main__:vgg_gram_loss_1: 12.44128131866455 +INFO:__main__:vgg_gram_loss_2: 15.721549987792969 +INFO:__main__:vgg_gram_loss_3: 12.134242057800293 +INFO:__main__:vgg_gram_loss_4: 60.20072555541992 +INFO:__main__:vgg_gram_loss_5: 0.13847239315509796 +INFO:__main__:vgg_loss_0: 8.987995147705078 +INFO:__main__:vgg_loss_1: 22.49643898010254 +INFO:__main__:vgg_loss_2: 34.06087112426758 +INFO:__main__:vgg_loss_3: 24.920372009277344 +INFO:__main__:vgg_loss_4: 44.036216735839844 +INFO:__main__:vgg_loss_5: 1.4464504718780518 +INFO:__main__:validation_loss: 1772.6448974609375 +INFO:__main__:global_step: 66751 +INFO:__main__:kl_loss: 121.65013122558594 +INFO:__main__:kl_weight: 0.6700003147125244 +INFO:__main__:likelihood_loss: 1269.3359375 +INFO:__main__:loss: 1350.841552734375 +INFO:__main__:vgg_gram_loss_0: 5.661892890930176 +INFO:__main__:vgg_gram_loss_1: 14.447422981262207 +INFO:__main__:vgg_gram_loss_2: 18.51140594482422 +INFO:__main__:vgg_gram_loss_3: 13.463394165039062 +INFO:__main__:vgg_gram_loss_4: 64.50457000732422 +INFO:__main__:vgg_gram_loss_5: 0.1402585804462433 +INFO:__main__:vgg_loss_0: 9.957453727722168 +INFO:__main__:vgg_loss_1: 23.48259162902832 +INFO:__main__:vgg_loss_2: 34.38383865356445 +INFO:__main__:vgg_loss_3: 24.68840789794922 +INFO:__main__:vgg_loss_4: 43.223114013671875 +INFO:__main__:vgg_loss_5: 1.4028245210647583 +INFO:__main__:validation_loss: 1835.481689453125 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 67001 +INFO:__main__:kl_loss: 139.10964965820312 +INFO:__main__:kl_weight: 0.6800003051757812 +INFO:__main__:likelihood_loss: 1433.261962890625 +INFO:__main__:loss: 1527.8565673828125 +INFO:__main__:vgg_gram_loss_0: 5.023831367492676 +INFO:__main__:vgg_gram_loss_1: 14.9660062789917 +INFO:__main__:vgg_gram_loss_2: 21.340497970581055 +INFO:__main__:vgg_gram_loss_3: 16.20815658569336 +INFO:__main__:vgg_gram_loss_4: 74.86559295654297 +INFO:__main__:vgg_gram_loss_5: 0.17463155090808868 +INFO:__main__:vgg_loss_0: 10.094112396240234 +INFO:__main__:vgg_loss_1: 25.098695755004883 +INFO:__main__:vgg_loss_2: 38.61017990112305 +INFO:__main__:vgg_loss_3: 28.326242446899414 +INFO:__main__:vgg_loss_4: 50.30464553833008 +INFO:__main__:vgg_loss_5: 1.6397920846939087 +INFO:__main__:validation_loss: 2387.88916015625 +INFO:__main__:global_step: 67251 +INFO:__main__:kl_loss: 122.5555648803711 +INFO:__main__:kl_weight: 0.6900002956390381 +INFO:__main__:likelihood_loss: 1456.716064453125 +INFO:__main__:loss: 1541.2794189453125 +INFO:__main__:vgg_gram_loss_0: 7.547300815582275 +INFO:__main__:vgg_gram_loss_1: 17.3953914642334 +INFO:__main__:vgg_gram_loss_2: 18.41188621520996 +INFO:__main__:vgg_gram_loss_3: 16.024436950683594 +INFO:__main__:vgg_gram_loss_4: 72.1866455078125 +INFO:__main__:vgg_gram_loss_5: 0.16891400516033173 +INFO:__main__:vgg_loss_0: 11.940223693847656 +INFO:__main__:vgg_loss_1: 28.571102142333984 +INFO:__main__:vgg_loss_2: 39.23056411743164 +INFO:__main__:vgg_loss_3: 28.315532684326172 +INFO:__main__:vgg_loss_4: 49.885337829589844 +INFO:__main__:vgg_loss_5: 1.6658669710159302 +INFO:__main__:validation_loss: 1848.583251953125 +INFO:__main__:global_step: 67501 +INFO:__main__:kl_loss: 122.26922607421875 +INFO:__main__:kl_weight: 0.7000002861022949 +INFO:__main__:likelihood_loss: 1392.700439453125 +INFO:__main__:loss: 1478.2889404296875 +INFO:__main__:vgg_gram_loss_0: 6.852509498596191 +INFO:__main__:vgg_gram_loss_1: 17.724885940551758 +INFO:__main__:vgg_gram_loss_2: 19.158313751220703 +INFO:__main__:vgg_gram_loss_3: 15.150555610656738 +INFO:__main__:vgg_gram_loss_4: 69.83219909667969 +INFO:__main__:vgg_gram_loss_5: 0.15133295953273773 +INFO:__main__:vgg_loss_0: 10.726128578186035 +INFO:__main__:vgg_loss_1: 26.22296905517578 +INFO:__main__:vgg_loss_2: 36.78580856323242 +INFO:__main__:vgg_loss_3: 26.704618453979492 +INFO:__main__:vgg_loss_4: 47.64253234863281 +INFO:__main__:vgg_loss_5: 1.5882437229156494 +INFO:__main__:validation_loss: 2041.9786376953125 +INFO:__main__:global_step: 67751 +INFO:__main__:kl_loss: 123.15654754638672 +INFO:__main__:kl_weight: 0.7100002765655518 +INFO:__main__:likelihood_loss: 1531.4691162109375 +INFO:__main__:loss: 1618.9102783203125 +INFO:__main__:vgg_gram_loss_0: 3.985860824584961 +INFO:__main__:vgg_gram_loss_1: 15.77622127532959 +INFO:__main__:vgg_gram_loss_2: 19.705886840820312 +INFO:__main__:vgg_gram_loss_3: 17.437889099121094 +INFO:__main__:vgg_gram_loss_4: 74.76033020019531 +INFO:__main__:vgg_gram_loss_5: 0.1705358475446701 +INFO:__main__:vgg_loss_0: 12.997247695922852 +INFO:__main__:vgg_loss_1: 31.911155700683594 +INFO:__main__:vgg_loss_2: 42.923057556152344 +INFO:__main__:vgg_loss_3: 30.909244537353516 +INFO:__main__:vgg_loss_4: 53.91752624511719 +INFO:__main__:vgg_loss_5: 1.7988643646240234 +INFO:__main__:validation_loss: 1930.9964599609375 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 68001 +INFO:__main__:kl_loss: 120.66307067871094 +INFO:__main__:kl_weight: 0.7200002670288086 +INFO:__main__:likelihood_loss: 1660.11083984375 +INFO:__main__:loss: 1746.98828125 +INFO:__main__:vgg_gram_loss_0: 4.675761699676514 +INFO:__main__:vgg_gram_loss_1: 17.88255500793457 +INFO:__main__:vgg_gram_loss_2: 27.682275772094727 +INFO:__main__:vgg_gram_loss_3: 20.549049377441406 +INFO:__main__:vgg_gram_loss_4: 85.3249740600586 +INFO:__main__:vgg_gram_loss_5: 0.19199660420417786 +INFO:__main__:vgg_loss_0: 12.904491424560547 +INFO:__main__:vgg_loss_1: 29.32299041748047 +INFO:__main__:vgg_loss_2: 44.046905517578125 +INFO:__main__:vgg_loss_3: 31.680994033813477 +INFO:__main__:vgg_loss_4: 55.882442474365234 +INFO:__main__:vgg_loss_5: 1.8777046203613281 +INFO:__main__:validation_loss: 1966.3978271484375 +INFO:__main__:global_step: 68251 +INFO:__main__:kl_loss: 124.3830795288086 +INFO:__main__:kl_weight: 0.7300002574920654 +INFO:__main__:likelihood_loss: 1543.2431640625 +INFO:__main__:loss: 1634.0428466796875 +INFO:__main__:vgg_gram_loss_0: 4.517995357513428 +INFO:__main__:vgg_gram_loss_1: 17.603374481201172 +INFO:__main__:vgg_gram_loss_2: 21.23562240600586 +INFO:__main__:vgg_gram_loss_3: 16.44476890563965 +INFO:__main__:vgg_gram_loss_4: 75.24378204345703 +INFO:__main__:vgg_gram_loss_5: 0.18510901927947998 +INFO:__main__:vgg_loss_0: 12.260743141174316 +INFO:__main__:vgg_loss_1: 31.31107521057129 +INFO:__main__:vgg_loss_2: 43.945106506347656 +INFO:__main__:vgg_loss_3: 30.964588165283203 +INFO:__main__:vgg_loss_4: 53.162113189697266 +INFO:__main__:vgg_loss_5: 1.7743662595748901 +INFO:__main__:validation_loss: 1975.024658203125 +INFO:__main__:global_step: 68501 +INFO:__main__:kl_loss: 120.29890441894531 +INFO:__main__:kl_weight: 0.7400002479553223 +INFO:__main__:likelihood_loss: 1573.298095703125 +INFO:__main__:loss: 1662.3193359375 +INFO:__main__:vgg_gram_loss_0: 6.859035015106201 +INFO:__main__:vgg_gram_loss_1: 19.82688331604004 +INFO:__main__:vgg_gram_loss_2: 22.367755889892578 +INFO:__main__:vgg_gram_loss_3: 17.733129501342773 +INFO:__main__:vgg_gram_loss_4: 77.9203109741211 +INFO:__main__:vgg_gram_loss_5: 0.16752628982067108 +INFO:__main__:vgg_loss_0: 13.409920692443848 +INFO:__main__:vgg_loss_1: 30.256275177001953 +INFO:__main__:vgg_loss_2: 41.42337417602539 +INFO:__main__:vgg_loss_3: 29.832212448120117 +INFO:__main__:vgg_loss_4: 53.11823654174805 +INFO:__main__:vgg_loss_5: 1.7449506521224976 +INFO:__main__:validation_loss: 2240.46875 +INFO:__main__:global_step: 68751 +INFO:__main__:kl_loss: 125.92501068115234 +INFO:__main__:kl_weight: 0.7500002384185791 +INFO:__main__:likelihood_loss: 1751.3173828125 +INFO:__main__:loss: 1845.76123046875 +INFO:__main__:vgg_gram_loss_0: 2.8916208744049072 +INFO:__main__:vgg_gram_loss_1: 20.429540634155273 +INFO:__main__:vgg_gram_loss_2: 26.835046768188477 +INFO:__main__:vgg_gram_loss_3: 21.94961929321289 +INFO:__main__:vgg_gram_loss_4: 85.21687316894531 +INFO:__main__:vgg_gram_loss_5: 0.18313011527061462 +INFO:__main__:vgg_loss_0: 14.87549877166748 +INFO:__main__:vgg_loss_1: 37.31819534301758 +INFO:__main__:vgg_loss_2: 48.62551498413086 +INFO:__main__:vgg_loss_3: 33.33528137207031 +INFO:__main__:vgg_loss_4: 56.715576171875 +INFO:__main__:vgg_loss_5: 1.8875964879989624 +INFO:__main__:validation_loss: 2493.322998046875 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 69001 +INFO:__main__:kl_loss: 125.6292724609375 +INFO:__main__:kl_weight: 0.7600002288818359 +INFO:__main__:likelihood_loss: 1536.2408447265625 +INFO:__main__:loss: 1631.7191162109375 +INFO:__main__:vgg_gram_loss_0: 5.972162246704102 +INFO:__main__:vgg_gram_loss_1: 16.523221969604492 +INFO:__main__:vgg_gram_loss_2: 21.487659454345703 +INFO:__main__:vgg_gram_loss_3: 19.820798873901367 +INFO:__main__:vgg_gram_loss_4: 83.37060546875 +INFO:__main__:vgg_gram_loss_5: 0.17578040063381195 +INFO:__main__:vgg_loss_0: 10.990117073059082 +INFO:__main__:vgg_loss_1: 27.160594940185547 +INFO:__main__:vgg_loss_2: 38.87975311279297 +INFO:__main__:vgg_loss_3: 29.251968383789062 +INFO:__main__:vgg_loss_4: 51.939918518066406 +INFO:__main__:vgg_loss_5: 1.6756130456924438 +INFO:__main__:validation_loss: 2174.975341796875 +INFO:__main__:global_step: 69251 +INFO:__main__:kl_loss: 123.2645263671875 +INFO:__main__:kl_weight: 0.7700002193450928 +INFO:__main__:likelihood_loss: 1596.554931640625 +INFO:__main__:loss: 1691.4686279296875 +INFO:__main__:vgg_gram_loss_0: 5.541776180267334 +INFO:__main__:vgg_gram_loss_1: 20.66242790222168 +INFO:__main__:vgg_gram_loss_2: 21.898353576660156 +INFO:__main__:vgg_gram_loss_3: 21.246999740600586 +INFO:__main__:vgg_gram_loss_4: 84.6812973022461 +INFO:__main__:vgg_gram_loss_5: 0.1856197565793991 +INFO:__main__:vgg_loss_0: 11.673689842224121 +INFO:__main__:vgg_loss_1: 30.411474227905273 +INFO:__main__:vgg_loss_2: 41.39936828613281 +INFO:__main__:vgg_loss_3: 29.173614501953125 +INFO:__main__:vgg_loss_4: 50.74263381958008 +INFO:__main__:vgg_loss_5: 1.6937453746795654 +INFO:__main__:validation_loss: 1977.4515380859375 +INFO:__main__:global_step: 69501 +INFO:__main__:kl_loss: 134.11952209472656 +INFO:__main__:kl_weight: 0.7800002098083496 +INFO:__main__:likelihood_loss: 1518.17626953125 +INFO:__main__:loss: 1622.78955078125 +INFO:__main__:vgg_gram_loss_0: 6.403439521789551 +INFO:__main__:vgg_gram_loss_1: 23.630760192871094 +INFO:__main__:vgg_gram_loss_2: 25.288049697875977 +INFO:__main__:vgg_gram_loss_3: 18.252885818481445 +INFO:__main__:vgg_gram_loss_4: 69.32908630371094 +INFO:__main__:vgg_gram_loss_5: 0.14441928267478943 +INFO:__main__:vgg_loss_0: 12.037620544433594 +INFO:__main__:vgg_loss_1: 30.384794235229492 +INFO:__main__:vgg_loss_2: 40.51081466674805 +INFO:__main__:vgg_loss_3: 27.982465744018555 +INFO:__main__:vgg_loss_4: 48.102970123291016 +INFO:__main__:vgg_loss_5: 1.5679389238357544 +INFO:__main__:validation_loss: 1976.461181640625 +INFO:__main__:global_step: 69751 +INFO:__main__:kl_loss: 101.4223403930664 +INFO:__main__:kl_weight: 0.7900002002716064 +INFO:__main__:likelihood_loss: 1594.7515869140625 +INFO:__main__:loss: 1674.875244140625 +INFO:__main__:vgg_gram_loss_0: 6.184321880340576 +INFO:__main__:vgg_gram_loss_1: 19.901065826416016 +INFO:__main__:vgg_gram_loss_2: 22.50301170349121 +INFO:__main__:vgg_gram_loss_3: 17.64005470275879 +INFO:__main__:vgg_gram_loss_4: 78.64226531982422 +INFO:__main__:vgg_gram_loss_5: 0.19677816331386566 +INFO:__main__:vgg_loss_0: 12.225306510925293 +INFO:__main__:vgg_loss_1: 30.046615600585938 +INFO:__main__:vgg_loss_2: 44.126468658447266 +INFO:__main__:vgg_loss_3: 31.57122230529785 +INFO:__main__:vgg_loss_4: 54.11068344116211 +INFO:__main__:vgg_loss_5: 1.8025399446487427 +INFO:__main__:validation_loss: 1647.0897216796875 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 70001 +INFO:__main__:kl_loss: 114.62054443359375 +INFO:__main__:kl_weight: 0.8000001907348633 +INFO:__main__:likelihood_loss: 1259.611083984375 +INFO:__main__:loss: 1351.3074951171875 +INFO:__main__:vgg_gram_loss_0: 11.280505180358887 +INFO:__main__:vgg_gram_loss_1: 17.14234161376953 +INFO:__main__:vgg_gram_loss_2: 17.81877899169922 +INFO:__main__:vgg_gram_loss_3: 13.883241653442383 +INFO:__main__:vgg_gram_loss_4: 60.410888671875 +INFO:__main__:vgg_gram_loss_5: 0.1228264570236206 +INFO:__main__:vgg_loss_0: 10.088823318481445 +INFO:__main__:vgg_loss_1: 22.671152114868164 +INFO:__main__:vgg_loss_2: 32.982757568359375 +INFO:__main__:vgg_loss_3: 23.601299285888672 +INFO:__main__:vgg_loss_4: 40.587345123291016 +INFO:__main__:vgg_loss_5: 1.33225679397583 +INFO:__main__:validation_loss: 1970.864990234375 +INFO:__main__:global_step: 70251 +INFO:__main__:kl_loss: 122.21644592285156 +INFO:__main__:kl_weight: 0.8100001811981201 +INFO:__main__:likelihood_loss: 1489.11279296875 +INFO:__main__:loss: 1588.108154296875 +INFO:__main__:vgg_gram_loss_0: 5.7129950523376465 +INFO:__main__:vgg_gram_loss_1: 19.401472091674805 +INFO:__main__:vgg_gram_loss_2: 22.117023468017578 +INFO:__main__:vgg_gram_loss_3: 17.118972778320312 +INFO:__main__:vgg_gram_loss_4: 70.82239532470703 +INFO:__main__:vgg_gram_loss_5: 0.1722775101661682 +INFO:__main__:vgg_loss_0: 11.55367660522461 +INFO:__main__:vgg_loss_1: 28.541751861572266 +INFO:__main__:vgg_loss_2: 40.3692626953125 +INFO:__main__:vgg_loss_3: 29.07376480102539 +INFO:__main__:vgg_loss_4: 51.23691940307617 +INFO:__main__:vgg_loss_5: 1.7020846605300903 +INFO:__main__:validation_loss: 1889.73486328125 +INFO:__main__:global_step: 70501 +INFO:__main__:kl_loss: 100.42434692382812 +INFO:__main__:kl_weight: 0.820000171661377 +INFO:__main__:likelihood_loss: 1476.46875 +INFO:__main__:loss: 1558.8167724609375 +INFO:__main__:vgg_gram_loss_0: 7.895826816558838 +INFO:__main__:vgg_gram_loss_1: 18.152254104614258 +INFO:__main__:vgg_gram_loss_2: 19.197431564331055 +INFO:__main__:vgg_gram_loss_3: 16.37513542175293 +INFO:__main__:vgg_gram_loss_4: 74.21931457519531 +INFO:__main__:vgg_gram_loss_5: 0.17780187726020813 +INFO:__main__:vgg_loss_0: 11.138297080993652 +INFO:__main__:vgg_loss_1: 28.024188995361328 +INFO:__main__:vgg_loss_2: 39.97074508666992 +INFO:__main__:vgg_loss_3: 28.34901237487793 +INFO:__main__:vgg_loss_4: 50.11932373046875 +INFO:__main__:vgg_loss_5: 1.6744214296340942 +INFO:__main__:validation_loss: 1503.5784912109375 +INFO:__main__:global_step: 70751 +INFO:__main__:kl_loss: 110.1641845703125 +INFO:__main__:kl_weight: 0.8300001621246338 +INFO:__main__:likelihood_loss: 1849.4932861328125 +INFO:__main__:loss: 1940.9295654296875 +INFO:__main__:vgg_gram_loss_0: 5.697214603424072 +INFO:__main__:vgg_gram_loss_1: 29.907506942749023 +INFO:__main__:vgg_gram_loss_2: 25.6283016204834 +INFO:__main__:vgg_gram_loss_3: 26.94020652770996 +INFO:__main__:vgg_gram_loss_4: 94.22338104248047 +INFO:__main__:vgg_gram_loss_5: 0.1919695883989334 +INFO:__main__:vgg_loss_0: 14.222413063049316 +INFO:__main__:vgg_loss_1: 35.892452239990234 +INFO:__main__:vgg_loss_2: 46.05123519897461 +INFO:__main__:vgg_loss_3: 32.38946533203125 +INFO:__main__:vgg_loss_4: 56.8395881652832 +INFO:__main__:vgg_loss_5: 1.9149153232574463 +INFO:__main__:validation_loss: 2107.130615234375 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 71001 +INFO:__main__:kl_loss: 123.37052917480469 +INFO:__main__:kl_weight: 0.8400001525878906 +INFO:__main__:likelihood_loss: 1531.439453125 +INFO:__main__:loss: 1635.0706787109375 +INFO:__main__:vgg_gram_loss_0: 7.619284152984619 +INFO:__main__:vgg_gram_loss_1: 18.909658432006836 +INFO:__main__:vgg_gram_loss_2: 19.578994750976562 +INFO:__main__:vgg_gram_loss_3: 16.31967544555664 +INFO:__main__:vgg_gram_loss_4: 78.66064453125 +INFO:__main__:vgg_gram_loss_5: 0.17580434679985046 +INFO:__main__:vgg_loss_0: 11.272994995117188 +INFO:__main__:vgg_loss_1: 27.591440200805664 +INFO:__main__:vgg_loss_2: 40.4105224609375 +INFO:__main__:vgg_loss_3: 30.06654167175293 +INFO:__main__:vgg_loss_4: 53.899295806884766 +INFO:__main__:vgg_loss_5: 1.7830345630645752 +INFO:__main__:validation_loss: 2099.07861328125 +INFO:__main__:global_step: 71251 +INFO:__main__:kl_loss: 114.05953979492188 +INFO:__main__:kl_weight: 0.8500001430511475 +INFO:__main__:likelihood_loss: 1501.0948486328125 +INFO:__main__:loss: 1598.04541015625 +INFO:__main__:vgg_gram_loss_0: 7.028250217437744 +INFO:__main__:vgg_gram_loss_1: 18.595855712890625 +INFO:__main__:vgg_gram_loss_2: 19.473913192749023 +INFO:__main__:vgg_gram_loss_3: 16.306814193725586 +INFO:__main__:vgg_gram_loss_4: 78.75574493408203 +INFO:__main__:vgg_gram_loss_5: 0.18558269739151 +INFO:__main__:vgg_loss_0: 10.752283096313477 +INFO:__main__:vgg_loss_1: 25.481149673461914 +INFO:__main__:vgg_loss_2: 39.071964263916016 +INFO:__main__:vgg_loss_3: 29.390300750732422 +INFO:__main__:vgg_loss_4: 53.36310577392578 +INFO:__main__:vgg_loss_5: 1.8139877319335938 +INFO:__main__:validation_loss: 2232.254638671875 +INFO:__main__:global_step: 71501 +INFO:__main__:kl_loss: 100.27262878417969 +INFO:__main__:kl_weight: 0.8600001335144043 +INFO:__main__:likelihood_loss: 1220.089111328125 +INFO:__main__:loss: 1306.3236083984375 +INFO:__main__:vgg_gram_loss_0: 4.074394226074219 +INFO:__main__:vgg_gram_loss_1: 12.995302200317383 +INFO:__main__:vgg_gram_loss_2: 16.70504379272461 +INFO:__main__:vgg_gram_loss_3: 13.237479209899902 +INFO:__main__:vgg_gram_loss_4: 62.839412689208984 +INFO:__main__:vgg_gram_loss_5: 0.13876856863498688 +INFO:__main__:vgg_loss_0: 9.493607521057129 +INFO:__main__:vgg_loss_1: 23.230228424072266 +INFO:__main__:vgg_loss_2: 34.059608459472656 +INFO:__main__:vgg_loss_3: 24.03904151916504 +INFO:__main__:vgg_loss_4: 41.82756423950195 +INFO:__main__:vgg_loss_5: 1.3773560523986816 +INFO:__main__:validation_loss: 1982.9581298828125 +INFO:__main__:global_step: 71751 +INFO:__main__:kl_loss: 113.09410858154297 +INFO:__main__:kl_weight: 0.8700001239776611 +INFO:__main__:likelihood_loss: 1513.96337890625 +INFO:__main__:loss: 1612.355224609375 +INFO:__main__:vgg_gram_loss_0: 7.728318214416504 +INFO:__main__:vgg_gram_loss_1: 16.285978317260742 +INFO:__main__:vgg_gram_loss_2: 18.800724029541016 +INFO:__main__:vgg_gram_loss_3: 16.16196060180664 +INFO:__main__:vgg_gram_loss_4: 78.34253692626953 +INFO:__main__:vgg_gram_loss_5: 0.19268853962421417 +INFO:__main__:vgg_loss_0: 11.200359344482422 +INFO:__main__:vgg_loss_1: 27.516067504882812 +INFO:__main__:vgg_loss_2: 41.83439636230469 +INFO:__main__:vgg_loss_3: 30.327730178833008 +INFO:__main__:vgg_loss_4: 52.63300323486328 +INFO:__main__:vgg_loss_5: 1.768884301185608 +INFO:__main__:validation_loss: 1924.7484130859375 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 72001 +INFO:__main__:kl_loss: 112.17937469482422 +INFO:__main__:kl_weight: 0.880000114440918 +INFO:__main__:likelihood_loss: 1309.4434814453125 +INFO:__main__:loss: 1408.161376953125 +INFO:__main__:vgg_gram_loss_0: 6.0181403160095215 +INFO:__main__:vgg_gram_loss_1: 15.1677885055542 +INFO:__main__:vgg_gram_loss_2: 17.82264518737793 +INFO:__main__:vgg_gram_loss_3: 13.64476490020752 +INFO:__main__:vgg_gram_loss_4: 66.82454681396484 +INFO:__main__:vgg_gram_loss_5: 0.16753584146499634 +INFO:__main__:vgg_loss_0: 10.165095329284668 +INFO:__main__:vgg_loss_1: 24.31793975830078 +INFO:__main__:vgg_loss_2: 35.764686584472656 +INFO:__main__:vgg_loss_3: 25.912620544433594 +INFO:__main__:vgg_loss_4: 44.63575744628906 +INFO:__main__:vgg_loss_5: 1.447156310081482 +INFO:__main__:validation_loss: 1988.7105712890625 +INFO:__main__:global_step: 72251 +INFO:__main__:kl_loss: 97.12775421142578 +INFO:__main__:kl_weight: 0.8900001049041748 +INFO:__main__:likelihood_loss: 1309.65673828125 +INFO:__main__:loss: 1396.1004638671875 +INFO:__main__:vgg_gram_loss_0: 6.9812140464782715 +INFO:__main__:vgg_gram_loss_1: 14.923712730407715 +INFO:__main__:vgg_gram_loss_2: 17.462730407714844 +INFO:__main__:vgg_gram_loss_3: 14.518150329589844 +INFO:__main__:vgg_gram_loss_4: 63.82509994506836 +INFO:__main__:vgg_gram_loss_5: 0.14979802072048187 +INFO:__main__:vgg_loss_0: 10.59226131439209 +INFO:__main__:vgg_loss_1: 24.807628631591797 +INFO:__main__:vgg_loss_2: 36.32770538330078 +INFO:__main__:vgg_loss_3: 25.696069717407227 +INFO:__main__:vgg_loss_4: 45.144996643066406 +INFO:__main__:vgg_loss_5: 1.5019564628601074 +INFO:__main__:validation_loss: 1940.3394775390625 +INFO:__main__:global_step: 72501 +INFO:__main__:kl_loss: 99.95515441894531 +INFO:__main__:kl_weight: 0.9000000953674316 +INFO:__main__:likelihood_loss: 1565.877685546875 +INFO:__main__:loss: 1655.8372802734375 +INFO:__main__:vgg_gram_loss_0: 5.93556022644043 +INFO:__main__:vgg_gram_loss_1: 16.67200469970703 +INFO:__main__:vgg_gram_loss_2: 23.253843307495117 +INFO:__main__:vgg_gram_loss_3: 18.802677154541016 +INFO:__main__:vgg_gram_loss_4: 80.59404754638672 +INFO:__main__:vgg_gram_loss_5: 0.17576146125793457 +INFO:__main__:vgg_loss_0: 11.554752349853516 +INFO:__main__:vgg_loss_1: 29.07073211669922 +INFO:__main__:vgg_loss_2: 42.50658416748047 +INFO:__main__:vgg_loss_3: 30.3881778717041 +INFO:__main__:vgg_loss_4: 52.49169158935547 +INFO:__main__:vgg_loss_5: 1.7296836376190186 +INFO:__main__:validation_loss: 2278.136474609375 +INFO:__main__:global_step: 72751 +INFO:__main__:kl_loss: 113.69381713867188 +INFO:__main__:kl_weight: 0.9100000858306885 +INFO:__main__:likelihood_loss: 1682.2967529296875 +INFO:__main__:loss: 1785.7581787109375 +INFO:__main__:vgg_gram_loss_0: 9.08698558807373 +INFO:__main__:vgg_gram_loss_1: 22.309814453125 +INFO:__main__:vgg_gram_loss_2: 26.631031036376953 +INFO:__main__:vgg_gram_loss_3: 22.305757522583008 +INFO:__main__:vgg_gram_loss_4: 84.04031372070312 +INFO:__main__:vgg_gram_loss_5: 0.17333360016345978 +INFO:__main__:vgg_loss_0: 12.1488676071167 +INFO:__main__:vgg_loss_1: 30.9793758392334 +INFO:__main__:vgg_loss_2: 43.765037536621094 +INFO:__main__:vgg_loss_3: 30.647972106933594 +INFO:__main__:vgg_loss_4: 52.655250549316406 +INFO:__main__:vgg_loss_5: 1.7155909538269043 +INFO:__main__:validation_loss: 2161.9501953125 +INFO:__main__:Saved model to 
/net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt
+INFO:__main__:global_step: 73001
+INFO:__main__:kl_loss: 119.75785064697266
+INFO:__main__:kl_weight: 0.9200000762939453
+INFO:__main__:likelihood_loss: 1499.76416015625
+INFO:__main__:loss: 1609.94140625
+INFO:__main__:vgg_gram_loss_0: 5.143685340881348
+INFO:__main__:vgg_gram_loss_1: 18.755769729614258
+INFO:__main__:vgg_gram_loss_2: 24.124832153320312
+INFO:__main__:vgg_gram_loss_3: 18.614320755004883
+INFO:__main__:vgg_gram_loss_4: 74.55846405029297
+INFO:__main__:vgg_gram_loss_5: 0.16492317616939545
+INFO:__main__:vgg_loss_0: 10.954079627990723
+INFO:__main__:vgg_loss_1: 28.294960021972656
+INFO:__main__:vgg_loss_2: 40.197471618652344
+INFO:__main__:vgg_loss_3: 29.055871963500977
+INFO:__main__:vgg_loss_4: 48.49783706665039
+INFO:__main__:vgg_loss_5: 1.5905847549438477
+INFO:__main__:validation_loss: 2446.968994140625
[... the same 19-line metric block repeats every 250 steps through global_step 93501: kl_weight is annealed by 0.01 per 250 steps until it reaches 1.0 at step 75001 and stays there; "INFO:__main__:Saved model to" is logged with the same checkpoint path every 1000 steps; loss fluctuates between roughly 1312 and 2011 and validation_loss between roughly 1449 and 2589, with no clear trend over this window ...]
34.215274810791016 +INFO:__main__:vgg_loss_2: 46.57057189941406 +INFO:__main__:vgg_loss_3: 31.594818115234375 +INFO:__main__:vgg_loss_4: 53.76302719116211 +INFO:__main__:vgg_loss_5: 1.711329460144043 +INFO:__main__:validation_loss: 1774.981201171875 +INFO:__main__:global_step: 93751 +INFO:__main__:kl_loss: 95.56268310546875 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1587.37060546875 +INFO:__main__:loss: 1682.933349609375 +INFO:__main__:vgg_gram_loss_0: 3.1884894371032715 +INFO:__main__:vgg_gram_loss_1: 19.36784553527832 +INFO:__main__:vgg_gram_loss_2: 24.69904327392578 +INFO:__main__:vgg_gram_loss_3: 19.751630783081055 +INFO:__main__:vgg_gram_loss_4: 79.70296478271484 +INFO:__main__:vgg_gram_loss_5: 0.17828328907489777 +INFO:__main__:vgg_loss_0: 12.465038299560547 +INFO:__main__:vgg_loss_1: 32.27489471435547 +INFO:__main__:vgg_loss_2: 43.15052032470703 +INFO:__main__:vgg_loss_3: 29.957197189331055 +INFO:__main__:vgg_loss_4: 51.080204010009766 +INFO:__main__:vgg_loss_5: 1.65802001953125 +INFO:__main__:validation_loss: 2090.6181640625 +INFO:__main__:Saved model to /net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 94001 +INFO:__main__:kl_loss: 95.87492370605469 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1553.4979248046875 +INFO:__main__:loss: 1649.372802734375 +INFO:__main__:vgg_gram_loss_0: 8.404108047485352 +INFO:__main__:vgg_gram_loss_1: 17.118783950805664 +INFO:__main__:vgg_gram_loss_2: 21.06252098083496 +INFO:__main__:vgg_gram_loss_3: 16.949443817138672 +INFO:__main__:vgg_gram_loss_4: 76.5724105834961 +INFO:__main__:vgg_gram_loss_5: 0.18410156667232513 +INFO:__main__:vgg_loss_0: 12.407322883605957 +INFO:__main__:vgg_loss_1: 30.708627700805664 +INFO:__main__:vgg_loss_2: 42.60052490234375 +INFO:__main__:vgg_loss_3: 30.030920028686523 +INFO:__main__:vgg_loss_4: 52.876834869384766 +INFO:__main__:vgg_loss_5: 1.7839725017547607 +INFO:__main__:validation_loss: 1884.85888671875 +INFO:__main__:global_step: 94251 +INFO:__main__:kl_loss: 92.54812622070312 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1401.46337890625 +INFO:__main__:loss: 1494.011474609375 +INFO:__main__:vgg_gram_loss_0: 3.0854320526123047 +INFO:__main__:vgg_gram_loss_1: 16.0695858001709 +INFO:__main__:vgg_gram_loss_2: 20.078083038330078 +INFO:__main__:vgg_gram_loss_3: 14.960135459899902 +INFO:__main__:vgg_gram_loss_4: 67.0829086303711 +INFO:__main__:vgg_gram_loss_5: 0.1540273278951645 +INFO:__main__:vgg_loss_0: 11.4345703125 +INFO:__main__:vgg_loss_1: 28.948116302490234 +INFO:__main__:vgg_loss_2: 40.20440673828125 +INFO:__main__:vgg_loss_3: 27.985851287841797 +INFO:__main__:vgg_loss_4: 48.64845657348633 +INFO:__main__:vgg_loss_5: 1.641096830368042 +INFO:__main__:validation_loss: 1796.9822998046875 +INFO:__main__:global_step: 94501 +INFO:__main__:kl_loss: 94.13693237304688 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1449.8443603515625 +INFO:__main__:loss: 1543.9813232421875 +INFO:__main__:vgg_gram_loss_0: 5.324034214019775 +INFO:__main__:vgg_gram_loss_1: 16.135190963745117 +INFO:__main__:vgg_gram_loss_2: 19.41175651550293 +INFO:__main__:vgg_gram_loss_3: 16.938533782958984 +INFO:__main__:vgg_gram_loss_4: 75.16490936279297 +INFO:__main__:vgg_gram_loss_5: 0.15807634592056274 +INFO:__main__:vgg_loss_0: 10.632599830627441 +INFO:__main__:vgg_loss_1: 25.875459671020508 +INFO:__main__:vgg_loss_2: 38.49040985107422 +INFO:__main__:vgg_loss_3: 28.628623962402344 +INFO:__main__:vgg_loss_4: 
51.511234283447266 +INFO:__main__:vgg_loss_5: 1.6980198621749878 +INFO:__main__:validation_loss: 1924.233154296875 +INFO:__main__:global_step: 94751 +INFO:__main__:kl_loss: 91.59573364257812 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1399.910888671875 +INFO:__main__:loss: 1491.506591796875 +INFO:__main__:vgg_gram_loss_0: 4.512006759643555 +INFO:__main__:vgg_gram_loss_1: 15.360969543457031 +INFO:__main__:vgg_gram_loss_2: 18.77492332458496 +INFO:__main__:vgg_gram_loss_3: 15.434577941894531 +INFO:__main__:vgg_gram_loss_4: 72.95138549804688 +INFO:__main__:vgg_gram_loss_5: 0.17269916832447052 +INFO:__main__:vgg_loss_0: 10.283180236816406 +INFO:__main__:vgg_loss_1: 24.56536865234375 +INFO:__main__:vgg_loss_2: 37.27871322631836 +INFO:__main__:vgg_loss_3: 28.04778289794922 +INFO:__main__:vgg_loss_4: 50.876163482666016 +INFO:__main__:vgg_loss_5: 1.7244006395339966 +INFO:__main__:validation_loss: 1961.497314453125 +INFO:__main__:Saved model to /net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 95001 +INFO:__main__:kl_loss: 82.17642211914062 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1258.5758056640625 +INFO:__main__:loss: 1340.752197265625 +INFO:__main__:vgg_gram_loss_0: 7.235963344573975 +INFO:__main__:vgg_gram_loss_1: 13.288131713867188 +INFO:__main__:vgg_gram_loss_2: 14.8350830078125 +INFO:__main__:vgg_gram_loss_3: 12.24756145477295 +INFO:__main__:vgg_gram_loss_4: 62.040496826171875 +INFO:__main__:vgg_gram_loss_5: 0.1588146835565567 +INFO:__main__:vgg_loss_0: 9.302810668945312 +INFO:__main__:vgg_loss_1: 22.538555145263672 +INFO:__main__:vgg_loss_2: 34.76185989379883 +INFO:__main__:vgg_loss_3: 26.115793228149414 +INFO:__main__:vgg_loss_4: 47.59431838989258 +INFO:__main__:vgg_loss_5: 1.5957850217819214 +INFO:__main__:validation_loss: 2095.010498046875 +INFO:__main__:global_step: 95251 +INFO:__main__:kl_loss: 84.28132629394531 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1552.5518798828125 +INFO:__main__:loss: 1636.833251953125 +INFO:__main__:vgg_gram_loss_0: 5.086937427520752 +INFO:__main__:vgg_gram_loss_1: 20.81475257873535 +INFO:__main__:vgg_gram_loss_2: 20.502317428588867 +INFO:__main__:vgg_gram_loss_3: 16.32721519470215 +INFO:__main__:vgg_gram_loss_4: 72.61461639404297 +INFO:__main__:vgg_gram_loss_5: 0.1618853509426117 +INFO:__main__:vgg_loss_0: 12.905353546142578 +INFO:__main__:vgg_loss_1: 34.28908157348633 +INFO:__main__:vgg_loss_2: 44.21366882324219 +INFO:__main__:vgg_loss_3: 30.061050415039062 +INFO:__main__:vgg_loss_4: 51.858219146728516 +INFO:__main__:vgg_loss_5: 1.6752629280090332 +INFO:__main__:validation_loss: 1584.3267822265625 +INFO:__main__:global_step: 95501 +INFO:__main__:kl_loss: 94.78337860107422 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1478.048095703125 +INFO:__main__:loss: 1572.8314208984375 +INFO:__main__:vgg_gram_loss_0: 4.939256191253662 +INFO:__main__:vgg_gram_loss_1: 15.274670600891113 +INFO:__main__:vgg_gram_loss_2: 22.212146759033203 +INFO:__main__:vgg_gram_loss_3: 16.293222427368164 +INFO:__main__:vgg_gram_loss_4: 70.33675384521484 +INFO:__main__:vgg_gram_loss_5: 0.1675006002187729 +INFO:__main__:vgg_loss_0: 12.024515151977539 +INFO:__main__:vgg_loss_1: 30.23062515258789 +INFO:__main__:vgg_loss_2: 42.67368698120117 +INFO:__main__:vgg_loss_3: 29.679075241088867 +INFO:__main__:vgg_loss_4: 50.133636474609375 +INFO:__main__:vgg_loss_5: 1.644529938697815 +INFO:__main__:validation_loss: 1775.843994140625 
+INFO:__main__:global_step: 95751 +INFO:__main__:kl_loss: 98.19869232177734 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1651.919921875 +INFO:__main__:loss: 1750.11865234375 +INFO:__main__:vgg_gram_loss_0: 4.925362586975098 +INFO:__main__:vgg_gram_loss_1: 22.744434356689453 +INFO:__main__:vgg_gram_loss_2: 21.628042221069336 +INFO:__main__:vgg_gram_loss_3: 18.437442779541016 +INFO:__main__:vgg_gram_loss_4: 82.43343353271484 +INFO:__main__:vgg_gram_loss_5: 0.1811065375804901 +INFO:__main__:vgg_loss_0: 13.052212715148926 +INFO:__main__:vgg_loss_1: 32.287330627441406 +INFO:__main__:vgg_loss_2: 43.62583541870117 +INFO:__main__:vgg_loss_3: 31.823040008544922 +INFO:__main__:vgg_loss_4: 57.32054901123047 +INFO:__main__:vgg_loss_5: 1.9251935482025146 +INFO:__main__:validation_loss: 2073.5029296875 +INFO:__main__:Saved model to /net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 96001 +INFO:__main__:kl_loss: 91.30955505371094 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1342.24267578125 +INFO:__main__:loss: 1433.55224609375 +INFO:__main__:vgg_gram_loss_0: 3.0729892253875732 +INFO:__main__:vgg_gram_loss_1: 13.904098510742188 +INFO:__main__:vgg_gram_loss_2: 16.82509994506836 +INFO:__main__:vgg_gram_loss_3: 13.945189476013184 +INFO:__main__:vgg_gram_loss_4: 69.04952239990234 +INFO:__main__:vgg_gram_loss_5: 0.16531990468502045 +INFO:__main__:vgg_loss_0: 10.11836051940918 +INFO:__main__:vgg_loss_1: 24.504222869873047 +INFO:__main__:vgg_loss_2: 37.013423919677734 +INFO:__main__:vgg_loss_3: 27.802108764648438 +INFO:__main__:vgg_loss_4: 50.36000442504883 +INFO:__main__:vgg_loss_5: 1.6882240772247314 +INFO:__main__:validation_loss: 1685.74755859375 +INFO:__main__:global_step: 96251 +INFO:__main__:kl_loss: 92.66862487792969 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1331.661376953125 +INFO:__main__:loss: 1424.3299560546875 +INFO:__main__:vgg_gram_loss_0: 5.342665195465088 +INFO:__main__:vgg_gram_loss_1: 16.640743255615234 +INFO:__main__:vgg_gram_loss_2: 20.831180572509766 +INFO:__main__:vgg_gram_loss_3: 15.615817070007324 +INFO:__main__:vgg_gram_loss_4: 64.38277435302734 +INFO:__main__:vgg_gram_loss_5: 0.16786842048168182 +INFO:__main__:vgg_loss_0: 9.960103034973145 +INFO:__main__:vgg_loss_1: 24.2823543548584 +INFO:__main__:vgg_loss_2: 36.11655807495117 +INFO:__main__:vgg_loss_3: 26.21121597290039 +INFO:__main__:vgg_loss_4: 45.203758239746094 +INFO:__main__:vgg_loss_5: 1.5772391557693481 +INFO:__main__:validation_loss: 1770.73681640625 +INFO:__main__:global_step: 96501 +INFO:__main__:kl_loss: 95.00555419921875 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1360.47998046875 +INFO:__main__:loss: 1455.485595703125 +INFO:__main__:vgg_gram_loss_0: 6.228001594543457 +INFO:__main__:vgg_gram_loss_1: 15.008150100708008 +INFO:__main__:vgg_gram_loss_2: 18.683805465698242 +INFO:__main__:vgg_gram_loss_3: 15.087199211120605 +INFO:__main__:vgg_gram_loss_4: 70.29594421386719 +INFO:__main__:vgg_gram_loss_5: 0.1775958389043808 +INFO:__main__:vgg_loss_0: 9.800933837890625 +INFO:__main__:vgg_loss_1: 24.071191787719727 +INFO:__main__:vgg_loss_2: 36.9195671081543 +INFO:__main__:vgg_loss_3: 26.72220230102539 +INFO:__main__:vgg_loss_4: 47.4777717590332 +INFO:__main__:vgg_loss_5: 1.6236779689788818 +INFO:__main__:validation_loss: 1926.9075927734375 +INFO:__main__:global_step: 96751 +INFO:__main__:kl_loss: 98.5336685180664 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 
1305.034423828125 +INFO:__main__:loss: 1403.568115234375 +INFO:__main__:vgg_gram_loss_0: 4.0622382164001465 +INFO:__main__:vgg_gram_loss_1: 14.667439460754395 +INFO:__main__:vgg_gram_loss_2: 17.532617568969727 +INFO:__main__:vgg_gram_loss_3: 14.533390998840332 +INFO:__main__:vgg_gram_loss_4: 66.78302001953125 +INFO:__main__:vgg_gram_loss_5: 0.1547304391860962 +INFO:__main__:vgg_loss_0: 9.376812934875488 +INFO:__main__:vgg_loss_1: 24.609041213989258 +INFO:__main__:vgg_loss_2: 36.89531326293945 +INFO:__main__:vgg_loss_3: 26.01327133178711 +INFO:__main__:vgg_loss_4: 44.89105987548828 +INFO:__main__:vgg_loss_5: 1.4879685640335083 +INFO:__main__:validation_loss: 1800.1673583984375 +INFO:__main__:Saved model to /net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 97001 +INFO:__main__:kl_loss: 89.10330200195312 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1280.3338623046875 +INFO:__main__:loss: 1369.4371337890625 +INFO:__main__:vgg_gram_loss_0: 4.3500075340271 +INFO:__main__:vgg_gram_loss_1: 12.08788776397705 +INFO:__main__:vgg_gram_loss_2: 16.261762619018555 +INFO:__main__:vgg_gram_loss_3: 13.150627136230469 +INFO:__main__:vgg_gram_loss_4: 62.9202766418457 +INFO:__main__:vgg_gram_loss_5: 0.1583937555551529 +INFO:__main__:vgg_loss_0: 9.513590812683105 +INFO:__main__:vgg_loss_1: 24.43714714050293 +INFO:__main__:vgg_loss_2: 37.3764533996582 +INFO:__main__:vgg_loss_3: 27.102861404418945 +INFO:__main__:vgg_loss_4: 47.16448211669922 +INFO:__main__:vgg_loss_5: 1.5432795286178589 +INFO:__main__:validation_loss: 2101.328125 +INFO:__main__:global_step: 97251 +INFO:__main__:kl_loss: 89.55133819580078 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1571.9305419921875 +INFO:__main__:loss: 1661.48193359375 +INFO:__main__:vgg_gram_loss_0: 6.366440296173096 +INFO:__main__:vgg_gram_loss_1: 17.186567306518555 +INFO:__main__:vgg_gram_loss_2: 22.23857307434082 +INFO:__main__:vgg_gram_loss_3: 19.055540084838867 +INFO:__main__:vgg_gram_loss_4: 82.12162780761719 +INFO:__main__:vgg_gram_loss_5: 0.17332540452480316 +INFO:__main__:vgg_loss_0: 11.947830200195312 +INFO:__main__:vgg_loss_1: 30.00872230529785 +INFO:__main__:vgg_loss_2: 42.158203125 +INFO:__main__:vgg_loss_3: 29.955196380615234 +INFO:__main__:vgg_loss_4: 51.495567321777344 +INFO:__main__:vgg_loss_5: 1.6785327196121216 +INFO:__main__:validation_loss: 2119.100830078125 +INFO:__main__:global_step: 97501 +INFO:__main__:kl_loss: 85.68862915039062 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1413.4996337890625 +INFO:__main__:loss: 1499.188232421875 +INFO:__main__:vgg_gram_loss_0: 5.056457042694092 +INFO:__main__:vgg_gram_loss_1: 16.240156173706055 +INFO:__main__:vgg_gram_loss_2: 18.919513702392578 +INFO:__main__:vgg_gram_loss_3: 15.154683113098145 +INFO:__main__:vgg_gram_loss_4: 70.04598999023438 +INFO:__main__:vgg_gram_loss_5: 0.167255237698555 +INFO:__main__:vgg_loss_0: 11.275044441223145 +INFO:__main__:vgg_loss_1: 27.611595153808594 +INFO:__main__:vgg_loss_2: 39.640625 +INFO:__main__:vgg_loss_3: 28.003564834594727 +INFO:__main__:vgg_loss_4: 48.94350051879883 +INFO:__main__:vgg_loss_5: 1.6415457725524902 +INFO:__main__:validation_loss: 1701.56982421875 +INFO:__main__:global_step: 97751 +INFO:__main__:kl_loss: 84.13835144042969 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1457.4774169921875 +INFO:__main__:loss: 1541.61572265625 +INFO:__main__:vgg_gram_loss_0: 4.690040588378906 +INFO:__main__:vgg_gram_loss_1: 
17.211193084716797 +INFO:__main__:vgg_gram_loss_2: 19.969812393188477 +INFO:__main__:vgg_gram_loss_3: 15.622014045715332 +INFO:__main__:vgg_gram_loss_4: 70.93714904785156 +INFO:__main__:vgg_gram_loss_5: 0.17449937760829926 +INFO:__main__:vgg_loss_0: 11.50278377532959 +INFO:__main__:vgg_loss_1: 28.694732666015625 +INFO:__main__:vgg_loss_2: 40.758872985839844 +INFO:__main__:vgg_loss_3: 29.34892463684082 +INFO:__main__:vgg_loss_4: 50.8730583190918 +INFO:__main__:vgg_loss_5: 1.7124176025390625 +INFO:__main__:validation_loss: 1932.9327392578125 +INFO:__main__:Saved model to /net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 98001 +INFO:__main__:kl_loss: 94.17662811279297 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1593.685546875 +INFO:__main__:loss: 1687.8621826171875 +INFO:__main__:vgg_gram_loss_0: 3.502932071685791 +INFO:__main__:vgg_gram_loss_1: 20.326452255249023 +INFO:__main__:vgg_gram_loss_2: 23.129535675048828 +INFO:__main__:vgg_gram_loss_3: 18.049726486206055 +INFO:__main__:vgg_gram_loss_4: 72.21501159667969 +INFO:__main__:vgg_gram_loss_5: 0.17129026353359222 +INFO:__main__:vgg_loss_0: 14.718999862670898 +INFO:__main__:vgg_loss_1: 36.75680160522461 +INFO:__main__:vgg_loss_2: 45.4874382019043 +INFO:__main__:vgg_loss_3: 31.008583068847656 +INFO:__main__:vgg_loss_4: 51.65411376953125 +INFO:__main__:vgg_loss_5: 1.7162220478057861 +INFO:__main__:validation_loss: 2026.9364013671875 +INFO:__main__:global_step: 98251 +INFO:__main__:kl_loss: 86.96865844726562 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1322.3330078125 +INFO:__main__:loss: 1409.3016357421875 +INFO:__main__:vgg_gram_loss_0: 4.844265937805176 +INFO:__main__:vgg_gram_loss_1: 12.612086296081543 +INFO:__main__:vgg_gram_loss_2: 17.624357223510742 +INFO:__main__:vgg_gram_loss_3: 13.71751594543457 +INFO:__main__:vgg_gram_loss_4: 66.47831726074219 +INFO:__main__:vgg_gram_loss_5: 0.15031905472278595 +INFO:__main__:vgg_loss_0: 10.065250396728516 +INFO:__main__:vgg_loss_1: 25.307106018066406 +INFO:__main__:vgg_loss_2: 37.3557243347168 +INFO:__main__:vgg_loss_3: 26.990291595458984 +INFO:__main__:vgg_loss_4: 47.74794006347656 +INFO:__main__:vgg_loss_5: 1.573432207107544 +INFO:__main__:validation_loss: 1809.330322265625 +INFO:__main__:global_step: 98501 +INFO:__main__:kl_loss: 94.30606079101562 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1458.9276123046875 +INFO:__main__:loss: 1553.233642578125 +INFO:__main__:vgg_gram_loss_0: 5.4191575050354 +INFO:__main__:vgg_gram_loss_1: 13.50175952911377 +INFO:__main__:vgg_gram_loss_2: 20.31715202331543 +INFO:__main__:vgg_gram_loss_3: 16.224205017089844 +INFO:__main__:vgg_gram_loss_4: 73.14997863769531 +INFO:__main__:vgg_gram_loss_5: 0.16184565424919128 +INFO:__main__:vgg_loss_0: 12.239108085632324 +INFO:__main__:vgg_loss_1: 30.096282958984375 +INFO:__main__:vgg_loss_2: 41.898929595947266 +INFO:__main__:vgg_loss_3: 28.560882568359375 +INFO:__main__:vgg_loss_4: 48.65317153930664 +INFO:__main__:vgg_loss_5: 1.5630378723144531 +INFO:__main__:validation_loss: 1835.910400390625 +INFO:__main__:global_step: 98751 +INFO:__main__:kl_loss: 94.64007568359375 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1659.61083984375 +INFO:__main__:loss: 1754.2509765625 +INFO:__main__:vgg_gram_loss_0: 4.645040035247803 +INFO:__main__:vgg_gram_loss_1: 24.5362606048584 +INFO:__main__:vgg_gram_loss_2: 27.10276222229004 +INFO:__main__:vgg_gram_loss_3: 21.342594146728516 
+INFO:__main__:vgg_gram_loss_4: 77.76422882080078 +INFO:__main__:vgg_gram_loss_5: 0.15825985372066498 +INFO:__main__:vgg_loss_0: 13.069344520568848 +INFO:__main__:vgg_loss_1: 34.16756820678711 +INFO:__main__:vgg_loss_2: 45.76872634887695 +INFO:__main__:vgg_loss_3: 30.8070125579834 +INFO:__main__:vgg_loss_4: 50.9334602355957 +INFO:__main__:vgg_loss_5: 1.6269283294677734 +INFO:__main__:validation_loss: 1906.8941650390625 +INFO:__main__:Saved model to /net/hci-storage02/groupfolders/compvis/pesser/ma/unet/log/2017-11-06T17:04:12/checkpoints/model.ckpt +INFO:__main__:global_step: 99001 +INFO:__main__:kl_loss: 90.21466064453125 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1269.030029296875 +INFO:__main__:loss: 1359.24462890625 +INFO:__main__:vgg_gram_loss_0: 3.1984193325042725 +INFO:__main__:vgg_gram_loss_1: 10.904694557189941 +INFO:__main__:vgg_gram_loss_2: 14.369215965270996 +INFO:__main__:vgg_gram_loss_3: 12.169541358947754 +INFO:__main__:vgg_gram_loss_4: 64.69881439208984 +INFO:__main__:vgg_gram_loss_5: 0.1661791205406189 +INFO:__main__:vgg_loss_0: 10.225075721740723 +INFO:__main__:vgg_loss_1: 24.334396362304688 +INFO:__main__:vgg_loss_2: 35.81004333496094 +INFO:__main__:vgg_loss_3: 26.561912536621094 +INFO:__main__:vgg_loss_4: 49.67290115356445 +INFO:__main__:vgg_loss_5: 1.6948087215423584 +INFO:__main__:validation_loss: 1952.415771484375 +INFO:__main__:global_step: 99251 +INFO:__main__:kl_loss: 108.31912231445312 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1511.6051025390625 +INFO:__main__:loss: 1619.9241943359375 +INFO:__main__:vgg_gram_loss_0: 6.157376766204834 +INFO:__main__:vgg_gram_loss_1: 18.036466598510742 +INFO:__main__:vgg_gram_loss_2: 20.51165771484375 +INFO:__main__:vgg_gram_loss_3: 17.4020938873291 +INFO:__main__:vgg_gram_loss_4: 76.51419830322266 +INFO:__main__:vgg_gram_loss_5: 0.181868776679039 +INFO:__main__:vgg_loss_0: 11.374635696411133 +INFO:__main__:vgg_loss_1: 28.961132049560547 +INFO:__main__:vgg_loss_2: 41.69327926635742 +INFO:__main__:vgg_loss_3: 29.664945602416992 +INFO:__main__:vgg_loss_4: 50.14850616455078 +INFO:__main__:vgg_loss_5: 1.674843430519104 +INFO:__main__:validation_loss: 1989.2823486328125 +INFO:__main__:global_step: 99501 +INFO:__main__:kl_loss: 99.76805114746094 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1542.787841796875 +INFO:__main__:loss: 1642.555908203125 +INFO:__main__:vgg_gram_loss_0: 7.629061222076416 +INFO:__main__:vgg_gram_loss_1: 16.972585678100586 +INFO:__main__:vgg_gram_loss_2: 24.3531436920166 +INFO:__main__:vgg_gram_loss_3: 19.431045532226562 +INFO:__main__:vgg_gram_loss_4: 77.38356018066406 +INFO:__main__:vgg_gram_loss_5: 0.1715278923511505 +INFO:__main__:vgg_loss_0: 11.751837730407715 +INFO:__main__:vgg_loss_1: 27.20474624633789 +INFO:__main__:vgg_loss_2: 40.37405014038086 +INFO:__main__:vgg_loss_3: 29.8823184967041 +INFO:__main__:vgg_loss_4: 51.6987190246582 +INFO:__main__:vgg_loss_5: 1.7049895524978638 +INFO:__main__:validation_loss: 2011.5941162109375 +INFO:__main__:global_step: 99751 +INFO:__main__:kl_loss: 93.1473388671875 +INFO:__main__:kl_weight: 1.0 +INFO:__main__:likelihood_loss: 1233.4144287109375 +INFO:__main__:loss: 1326.561767578125 +INFO:__main__:vgg_gram_loss_0: 5.318906784057617 +INFO:__main__:vgg_gram_loss_1: 12.55135440826416 +INFO:__main__:vgg_gram_loss_2: 14.961703300476074 +INFO:__main__:vgg_gram_loss_3: 12.32796573638916 +INFO:__main__:vgg_gram_loss_4: 62.49756622314453 +INFO:__main__:vgg_gram_loss_5: 0.15295223891735077 +INFO:__main__:vgg_loss_0: 
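For orientation, the logged scalars are tied together by the objective defined in `demo/model.py` further below: `loss = likelihood_loss + kl_weight * kl_loss`, which can be checked directly against the log (e.g. 1499.42 + 1.0 * 89.895 = 1589.315 at global_step 90751). A minimal sketch of that relation follows; `total_loss` is an illustrative helper, not part of the diff, and the per-layer weighting behind the vgg_loss_*/vgg_gram_loss_* terms lives in `deeploss.VGG19Features`, which this diff does not include:

```python
def total_loss(likelihood_loss, kl_loss, kl_weight):
    # demo/model.py: likelihood_loss = 5.0 * vgg19.make_loss_op(x, params)
    # (a VGG19 perceptual loss), kl_loss sums models.latent_kl(q, p) over
    # the latent scales, and kl_weight ramps linearly from ~0 to 1.0
    # between lr_decay_end/2 and 3*lr_decay_end/4 (nn.make_linear_var);
    # it has fully annealed to 1.0 in the log excerpt above.
    return likelihood_loss + kl_weight * kl_loss

# e.g. the block at global_step 90751:
assert abs(total_loss(1499.4200439453125, 89.89542388916016, 1.0)
           - 1589.3154296875) < 1e-3
```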
diff --git a/demo/main.py b/demo/main.py
new file mode 100644
index 00000000..99d082ed
--- /dev/null
+++ b/demo/main.py
@@ -0,0 +1,149 @@
+import tensorflow as tf
+import os
+import math
+import pickle
+import numpy as np
+import PIL
+import cv2
+import nn
+import models
+import deeploss
+
+from tqdm import trange
+from model import Model
+from batches_pg2 import plot_batch
+from get_batches import get_batches
+from utils import init_logging, process_batches
+from parser import parse_arguments
+from config import default_log_dir, config, session
+
+if __name__ == "__main__":
+    opt = parse_arguments()
+    if not os.path.exists(opt.data_index):
+        raise Exception("Invalid data index: {}".format(opt.data_index))
+
+    out_dir, logger = init_logging(opt.log_dir)
+    logger.info(opt)
+
+    if opt.mode == "train":
+        batch_size = opt.batch_size
+        img_shape = 2*[opt.spatial_size] + [3]
+        data_shape = [batch_size] + img_shape
+        init_shape = [opt.init_batches * batch_size] + img_shape
+
+        batches = get_batches(data_shape, opt.data_index, mask = opt.mask, train = True)
+        init_batches = get_batches(init_shape, opt.data_index, mask = opt.mask, train = True)
+        valid_batches = get_batches(data_shape, opt.data_index, mask = opt.mask, train = False)
+        logger.info("Number of training samples: {}".format(batches.n))
+        logger.info("Number of validation samples: {}".format(valid_batches.n))
+        if valid_batches.n == 0:
+            valid_batches = None
+
+        model = Model(opt, out_dir, logger)
+        if opt.checkpoint is not None:
+            model.restore_graph(opt.checkpoint)
+        else:
+            model.init_graph(next(init_batches))
+        model.fit(batches, valid_batches)
+
+    elif opt.mode == "test":
+        if not opt.checkpoint:
+            raise Exception("Testing requires --checkpoint")
+        batch_size = opt.batch_size
+        img_shape = 2*[opt.spatial_size] + [3]
+        data_shape = [batch_size] + img_shape
+        valid_batches = get_batches(data_shape, opt.data_index, mask = opt.mask, train = False)
+        model = Model(opt, out_dir, logger)
+        model.restore_graph(opt.checkpoint)
+
+        for i in trange(valid_batches.n // batch_size):
+            X_batch, C_batch = next(valid_batches)
+            x_gen = model.test(C_batch)
+            for k in x_gen:
+                plot_batch(x_gen[k], os.path.join(
+                    out_dir,
+                    "testing_{}_{:07}.png".format(k, i)))
+
+    elif opt.mode == "add_reconstructions":
+        if not opt.checkpoint:
+            raise Exception("add_reconstructions requires --checkpoint")
+        batch_size = opt.batch_size
+        img_shape = 2*[opt.spatial_size] + [3]
+        data_shape = [batch_size] + img_shape
+        batches = get_batches(data_shape, opt.data_index, mask = opt.mask,
+                train = True, return_index_id = True)
+        valid_batches = get_batches(data_shape, opt.data_index,
+                mask = opt.mask, train = False, return_index_id = True)
+        model = Model(opt, out_dir, logger)
+        model.restore_graph(opt.checkpoint)
+
+        # open index file to get image filenames and update with
+        # reconstruction data
+        with open(opt.data_index, "rb") as f:
+            index = pickle.load(f)
+        index_dir = os.path.dirname(opt.data_index)
+        index["reconstruction"] = len(index["imgs"]) * [None]
+        index["sample"] = len(index["imgs"]) * [None]
+
+        process_batches(model, index_dir, batches, batch_size, index)
+        process_batches(model, index_dir, valid_batches, batch_size, index)
+
+        # write updated index
+        with open(opt.data_index, "wb") as f:
+            pickle.dump(index, f)
+        logger.info("Wrote {}".format(opt.data_index))
+
+    elif opt.mode == "transfer":
+        if not opt.checkpoint:
+            opt.checkpoint = "log/2017-10-25T16:31:50/checkpoints/model.ckpt-100000"
+        batch_size = opt.batch_size
+        img_shape = 2*[opt.spatial_size] + [3]   # e.g. [256, 256, 3]
+        data_shape = [batch_size] + img_shape    # e.g. [8, 256, 256, 3]
+        valid_batches = get_batches(data_shape, opt.data_index,
+                mask = opt.mask, train = False)
+        model = Model(opt, out_dir, logger)
+        model.restore_graph(opt.checkpoint)
+
+        ids = ["00038", "00281", "01166", "x", "06909", "y", "07586", "07607", "z", "09874"]
+        for step in trange(10):
+            X_batch, C_batch, XN_batch, CN_batch = next(valid_batches)
+            bs = X_batch.shape[0]
+            imgs = list()
+            imgs.append(np.zeros_like(X_batch[0,...]))
+            for r in range(bs):
+                imgs.append(C_batch[r,...])
+            for i in range(bs):
+                x_infer = XN_batch[i,...]
+                c_infer = CN_batch[i,...]
+                imgs.append(X_batch[i,...])
+
+                x_infer_batch = x_infer[None,...].repeat(bs, axis = 0)
+                c_infer_batch = c_infer[None,...].repeat(bs, axis = 0)
+                c_generate_batch = C_batch
+                results = model.transfer(x_infer_batch, c_infer_batch, c_generate_batch)
+                for j in range(bs):
+                    imgs.append(results[j,...])
+            imgs = np.stack(imgs, axis = 0)
+            plot_batch(imgs, os.path.join(
+                out_dir,
+                "transfer_{}.png".format(ids[step])))
+
+    elif opt.mode == "mcmc":
+        if not opt.checkpoint:
+            raise Exception("mcmc requires --checkpoint")
+        batch_size = opt.batch_size
+        img_shape = 2*[opt.spatial_size] + [3]
+        data_shape = [batch_size] + img_shape
+        valid_batches = get_batches(data_shape, opt.data_index, mask = opt.mask, train = False)
+        model = Model(opt, out_dir, logger)
+        model.restore_graph(opt.checkpoint)
+
+        for i in trange(valid_batches.n // batch_size):
+            X_batch, C_batch = next(valid_batches)
+            x_gen = model.mcmc(C_batch)
+            for k in x_gen:
+                plot_batch(x_gen[k], os.path.join(
+                    out_dir,
+                    "mcmc_{}_{:07}.png".format(k, i)))
+    else:
+        raise NotImplementedError()
diff --git a/demo/model.py b/demo/model.py
new file mode 100644
index 00000000..d1cb040a
--- /dev/null
+++ b/demo/model.py
@@ -0,0 +1,416 @@
+import tensorflow as tf
+import os
+import numpy as np
+import nn
+import models
+import deeploss
+
+from tqdm import tqdm, trange
+from batches_pg2 import plot_batch, postprocess
+from get_batches import get_batches
+from config import config, session, N_BOXES
+
+class Model(object):
+    def __init__(self, opt, out_dir, logger):
+        self.batch_size = opt.batch_size
+        self.img_shape = 2*[opt.spatial_size] + [3]
+        redux = 2
+        self.imgn_shape = 2*[opt.spatial_size//(2**redux)] + [N_BOXES*3]
+        self.init_batches = opt.init_batches
+
+        self.initial_lr = opt.lr
+        self.lr_decay_begin = opt.lr_decay_begin
+        self.lr_decay_end = opt.lr_decay_end
+
+        self.out_dir = out_dir
+        self.logger = logger
+        self.log_frequency = opt.log_freq
+        self.ckpt_frequency = opt.ckpt_freq
+        self.test_frequency = opt.test_freq
+        self.checkpoint_best = False
+
+        self.dropout_p = opt.drop_prob
+
+        self.best_loss = float("inf")
+        self.checkpoint_dir = os.path.join(self.out_dir, "checkpoints")
+        os.makedirs(self.checkpoint_dir, exist_ok = True)
+
+        self.define_models()
+        self.define_graph()
+
+
+    def define_models(self):
+        n_latent_scales = 2
+        n_scales = 1 + int(np.round(np.log2(self.img_shape[0]))) - 2
+        n_filters = 32
+        redux = 2
+        self.enc_up_pass =
models.make_model( + "enc_up", models.enc_up, + n_scales = n_scales - redux, + n_filters = n_filters*2**redux) + self.enc_down_pass = models.make_model( + "enc_down", models.enc_down, + n_scales = n_scales - redux, + n_latent_scales = n_latent_scales) + self.dec_up_pass = models.make_model( + "dec_up", models.dec_up, + n_scales = n_scales, + n_filters = n_filters) + self.dec_down_pass = models.make_model( + "dec_down", models.dec_down, + n_scales = n_scales, + n_latent_scales = n_latent_scales) + self.dec_params = models.make_model( + "dec_params", models.dec_parameters) + + + def train_forward_pass(self, x, c, xn, cn, dropout_p, init = False): + kwargs = {"init": init, "dropout_p": dropout_p} + # encoder + hs = self.enc_up_pass(xn, cn, **kwargs) + es, qs, zs_posterior = self.enc_down_pass(hs, **kwargs) + # decoder + gs = self.dec_up_pass(c, **kwargs) + ds, ps, zs_prior = self.dec_down_pass(gs, zs_posterior, training = True, **kwargs) + params = self.dec_params(ds[-1], **kwargs) + activations = hs + es + gs + ds + return params, qs, ps, activations + + + def test_forward_pass(self, c): + kwargs = {"init": False, "dropout_p": 0.0} + # decoder + gs = self.dec_up_pass(c, **kwargs) + ds, ps, zs_prior = self.dec_down_pass(gs, [], training = False, **kwargs) + params = self.dec_params(ds[-1], **kwargs) + return params + + + def transfer_pass(self, infer_x, infer_c, generate_c): + kwargs = {"init": False, "dropout_p": 0.0} + # infer latent code + hs = self.enc_up_pass(infer_x, infer_c, **kwargs) + es, qs, zs_posterior = self.enc_down_pass(hs, **kwargs) + zs_mean = list(qs) + # generate from inferred latent code and conditioning + gs = self.dec_up_pass(generate_c, **kwargs) + use_mean = True + if use_mean: + ds, ps, zs_prior = self.dec_down_pass(gs, zs_mean, training = True, **kwargs) + else: + ds, ps, zs_prior = self.dec_down_pass(gs, zs_posterior, training = True, **kwargs) + params = self.dec_params(ds[-1], **kwargs) + return params + + + def sample(self, params, **kwargs): + return params + + + def likelihood_loss(self, x, params): + return 5.0*self.vgg19.make_loss_op(x, params) + + + def define_graph(self): + # pretrained net for perceptual loss + #self.vgg19 = deeploss.JigsawFeatures(session) + self.vgg19 = deeploss.VGG19Features(session) + + global_step = tf.Variable(0, trainable = False, name = "global_step") + lr = nn.make_linear_var( + global_step, + self.lr_decay_begin, self.lr_decay_end, + self.initial_lr, 0.0, + 0.0, self.initial_lr) + kl_weight = nn.make_linear_var( + global_step, + self.lr_decay_end // 2, 3 * self.lr_decay_end // 4, + 1e-6, 1.0, + 1e-6, 1.0) + #kl_weight = tf.to_float(0.1) + + # initialization + self.x_init = tf.placeholder( + tf.float32, + shape = [self.init_batches * self.batch_size] + self.img_shape) + self.c_init = tf.placeholder( + tf.float32, + shape = [self.init_batches * self.batch_size] + self.img_shape) + self.xn_init = tf.placeholder( + tf.float32, + shape = [self.init_batches * self.batch_size] + self.imgn_shape) + self.cn_init = tf.placeholder( + tf.float32, + shape = [self.init_batches * self.batch_size] + self.imgn_shape) + _ = self.train_forward_pass( + self.x_init, self.c_init, + self.xn_init, self.cn_init, + dropout_p = self.dropout_p, init = True) + + # training + self.x = tf.placeholder( + tf.float32, + shape = [self.batch_size] + self.img_shape) + self.c = tf.placeholder( + tf.float32, + shape = [self.batch_size] + self.img_shape) + self.xn = tf.placeholder( + tf.float32, + shape = [self.batch_size] + self.imgn_shape) + self.cn = tf.placeholder( 
+ tf.float32, + shape = [self.batch_size] + self.imgn_shape) + # compute parameters of model distribution + params, qs, ps, activations = self.train_forward_pass( + self.x, self.c, + self.xn, self.cn, + dropout_p = self.dropout_p) + # sample from model distribution + sample = self.sample(params) + # maximize likelihood + likelihood_loss = self.likelihood_loss(self.x, params) + kl_loss = tf.to_float(0.0) + for q, p in zip(qs, ps): + self.logger.info("Latent shape: {}".format(q.shape.as_list())) + kl_loss += models.latent_kl(q, p) + loss = likelihood_loss + kl_weight * kl_loss + + # testing + test_forward = self.test_forward_pass(self.c) + test_sample = self.sample(test_forward) + + # reconstruction + reconstruction_params, _, _, _ = self.train_forward_pass( + self.x, self.c, + self.xn, self.cn, + dropout_p = 0.0) + self.reconstruction = self.sample(reconstruction_params) + + # optimization + self.trainable_variables = [v for v in tf.trainable_variables() + if not v in self.vgg19.variables] + optimizer = tf.train.AdamOptimizer(learning_rate = lr, beta1 = 0.5, beta2 = 0.9) + opt_op = optimizer.minimize(loss, var_list = self.trainable_variables) + with tf.control_dependencies([opt_op]): + self.train_op = tf.assign(global_step, global_step + 1) + + + # logging and visualization + self.log_ops = dict() + self.log_ops["global_step"] = global_step + self.log_ops["likelihood_loss"] = likelihood_loss + self.log_ops["kl_loss"] = kl_loss + self.log_ops["kl_weight"] = kl_weight + self.log_ops["loss"] = loss + self.img_ops = dict() + self.img_ops["sample"] = sample + self.img_ops["test_sample"] = test_sample + self.img_ops["x"] = self.x + self.img_ops["c"] = self.c + for i in range(N_BOXES): + self.img_ops["xn{}".format(i)] = self.xn[:,:,:,i*3:(i+1)*3] + for i, l in enumerate(self.vgg19.losses): + self.log_ops["vgg_loss_{}".format(i)] = l + for i, l in enumerate(self.vgg19.gram_losses): + self.log_ops["vgg_gram_loss_{}".format(i)] = l + + # keep seperate train and validation summaries + # only training summary contains histograms + train_summaries = list() + for k, v in self.log_ops.items(): + train_summaries.append(tf.summary.scalar(k, v)) + self.train_summary_op = tf.summary.merge_all() + + valid_summaries = list() + for k, v in self.log_ops.items(): + valid_summaries.append(tf.summary.scalar(k+"_valid", v)) + self.valid_summary_op = tf.summary.merge(valid_summaries) + + # all variables for initialization + self.variables = [v for v in tf.global_variables() + if not v in self.vgg19.variables] + + self.logger.info("Defined graph") + + + def init_graph(self, init_batch): + self.writer = tf.summary.FileWriter( + self.out_dir, + session.graph) + self.saver = tf.train.Saver(self.variables) + initializer_op = tf.variables_initializer(self.variables) + session.run(initializer_op, { + self.xn_init: init_batch[2], + self.cn_init: init_batch[3], + self.x_init: init_batch[0], + self.c_init: init_batch[1]}) + self.logger.info("Initialized model from scratch") + + + def restore_graph(self, restore_path): + self.writer = tf.summary.FileWriter( + self.out_dir, + session.graph) + self.saver = tf.train.Saver(self.variables) + self.saver.restore(session, restore_path) + self.logger.info("Restored model from {}".format(restore_path)) + + + def fit(self, batches, valid_batches = None): + session.run(tf.assign(self.log_ops["global_step"], 0)) + start_step = self.log_ops["global_step"].eval(session) + self.valid_batches = valid_batches + for batch in trange(start_step, self.lr_decay_end): + X_batch, C_batch, XN_batch, 
CN_batch = next(batches) + feed_dict = { + self.xn: XN_batch, + self.cn: CN_batch, + self.x: X_batch, + self.c: C_batch} + fetch_dict = {"train": self.train_op} + if self.log_ops["global_step"].eval(session) % self.log_frequency == 0: + fetch_dict["log"] = self.log_ops + fetch_dict["img"] = self.img_ops + fetch_dict["summary"] = self.train_summary_op + result = session.run(fetch_dict, feed_dict) + self.log_result(result) + + + def log_result(self, result, **kwargs): + global_step = self.log_ops["global_step"].eval(session) + if "summary" in result: + self.writer.add_summary(result["summary"], global_step) + self.writer.flush() + if "log" in result: + for k in sorted(result["log"]): + v = result["log"][k] + self.logger.info("{}: {}".format(k, v)) + if "img" in result: + for k, v in result["img"].items(): + plot_batch(v, os.path.join( + self.out_dir, + k + "_{:07}.png".format(global_step))) + + if self.valid_batches is not None: + # validation run + X_batch, C_batch, XN_batch, CN_batch = next(self.valid_batches) + feed_dict = { + self.xn: XN_batch, + self.cn: CN_batch, + self.x: X_batch, + self.c: C_batch} + fetch_dict = dict() + fetch_dict["imgs"] = self.img_ops + fetch_dict["summary"] = self.valid_summary_op + fetch_dict["validation_loss"] = self.log_ops["loss"] + result = session.run(fetch_dict, feed_dict) + self.writer.add_summary(result["summary"], global_step) + self.writer.flush() + # display samples + imgs = result["imgs"] + for k, v in imgs.items(): + plot_batch(v, os.path.join( + self.out_dir, + "valid_" + k + "_{:07}.png".format(global_step))) + # log validation loss + validation_loss = result["validation_loss"] + self.logger.info("{}: {}".format("validation_loss", validation_loss)) + if self.checkpoint_best and validation_loss < self.best_loss: + # checkpoint if validation loss improved + self.logger.info("step {}: Validation loss improved from {:.4e} to {:.4e}".format(global_step, self.best_loss, validation_loss)) + self.best_loss = validation_loss + self.make_checkpoint(global_step, prefix = "best_") + if global_step % self.test_frequency == 0: + if self.valid_batches is not None: + # testing + X_batch, C_batch, XN_batch, CN_batch = next(self.valid_batches) + x_gen = self.test(C_batch) + for k in x_gen: + plot_batch(x_gen[k], os.path.join( + self.out_dir, + "testing_{}_{:07}.png".format(k, global_step))) + # transfer + bs = X_batch.shape[0] + imgs = list() + imgs.append(np.zeros_like(X_batch[0,...])) + for r in range(bs): + imgs.append(C_batch[r,...]) + for i in range(bs): + x_infer = XN_batch[i,...] + c_infer = CN_batch[i,...] 
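+                    # Transfer grid layout: row 0 holds the conditioning
+                    # (pose) images C_batch; each following row i shows the
+                    # original appearance image X_batch[i] followed by that
+                    # appearance re-rendered in every pose of the batch.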
+                    # imgs.append(x_infer)
+                    imgs.append(X_batch[i,...])
+
+                    x_infer_batch = x_infer[None,...].repeat(bs, axis = 0)
+                    c_infer_batch = c_infer[None,...].repeat(bs, axis = 0)
+                    c_generate_batch = C_batch
+                    results = self.transfer(x_infer_batch, c_infer_batch, c_generate_batch)
+                    for j in range(bs):
+                        imgs.append(results[j,...])
+                imgs = np.stack(imgs, axis = 0)
+                plot_batch(imgs, os.path.join(
+                    self.out_dir,
+                    "transfer_{:07}.png".format(global_step)))
+        if global_step % self.ckpt_frequency == 0:
+            self.make_checkpoint(global_step)
+
+
+    def make_checkpoint(self, global_step, prefix = ""):
+        fname = os.path.join(self.checkpoint_dir, prefix + "model.ckpt")
+        self.saver.save(
+            session,
+            fname,
+            global_step = global_step)
+        self.logger.info("Saved model to {}".format(fname))
+
+
+    def test(self, c_batch):
+        results = dict()
+        results["cond"] = c_batch
+        sample = session.run(self.img_ops["test_sample"],
+                {self.c: c_batch})
+        results["test_sample"] = sample
+        return results
+
+
+    def mcmc(self, c_batch, n_iters = 10):
+        results = dict()
+        results["cond"] = c_batch
+        sample = session.run(
+            self.img_ops["test_sample"], {self.c: c_batch})
+        results["sample_{}".format(0)] = sample
+        for i in range(n_iters - 1):
+            sample = session.run(
+                self.img_ops["sample"], {
+                    self.x: sample,
+                    self.c: c_batch})
+            results["sample_{:03}".format(i+1)] = sample
+        return results
+
+
+    def reconstruct(self, x_batch, c_batch):
+        return session.run(
+            self.reconstruction,
+            {self.x: x_batch, self.c: c_batch})
+
+
+    def transfer(self, x_encode, c_encode, c_decode):
+        initialized = getattr(self, "_init_transfer", False)
+        if not initialized:
+            # transfer
+            self.c_generator = tf.placeholder(
+                tf.float32,
+                shape = [self.batch_size] + self.img_shape)
+            infer_x = self.xn
+            infer_c = self.cn
+            generate_c = self.c_generator
+            transfer_params = self.transfer_pass(infer_x, infer_c, generate_c)
+            self.transfer_mean_sample = self.sample(transfer_params)
+            self._init_transfer = True
+
+        return session.run(
+            self.transfer_mean_sample, {
+                self.xn: x_encode,
+                self.cn: c_encode,
+                self.c_generator: c_decode})
diff --git a/demo/models.py b/demo/models.py
new file mode 100644
index 00000000..b0023b89
--- /dev/null
+++ b/demo/models.py
@@ -0,0 +1,355 @@
+import tensorflow as tf
+import numpy as np
+from tensorflow.contrib.framework.python.ops import arg_scope
+import nn
+import math
+
+
+def model_arg_scope(**kwargs):
+    """Create new counter and apply arg scope to all arg scoped nn
+    operations."""
+    counters = {}
+    return arg_scope(
+        [nn.conv2d, nn.deconv2d, nn.residual_block, nn.dense, nn.activate],
+        counters = counters, **kwargs)
+
+
+def make_model(name, template, **kwargs):
+    """Create model with fixed kwargs."""
+    run = lambda *args, **kw: template(*args, **dict((k, v) for kws in (kw, kwargs) for k, v in kws.items()))
+    return tf.make_template(name, run, unique_name_ = name)
+
+
+def dec_up(
+        c, init = False, dropout_p = 0.5,
+        n_scales = 1, n_residual_blocks = 2, activation = "elu", n_filters = 64, max_filters = 128):
+    with model_arg_scope(
+            init = init, dropout_p = dropout_p, activation = activation):
+        # outputs
+        hs = []
+        # prepare input
+        h = nn.nin(c, n_filters)
+        for l in range(n_scales):
+            # level module
+            for i in range(n_residual_blocks):
+                h = nn.residual_block(h)
+                hs.append(h)
+            # prepare input to next level
+            if l + 1 < n_scales:
+                n_filters = min(2*n_filters, max_filters)
+                h = nn.downsample(h, n_filters)
+        return hs
+
+
+def dec_down(
+        gs, zs_posterior, training, init = False, dropout_p = 0.5,
+        n_scales
= 1, n_residual_blocks = 2, activation = "elu", + n_latent_scales = 2): + assert n_residual_blocks % 2 == 0 + gs = list(gs) + zs_posterior = list(zs_posterior) + with model_arg_scope( + init = init, dropout_p = dropout_p, activation = activation): + # outputs + hs = [] # hidden units + ps = [] # priors + zs = [] # prior samples + # prepare input + n_filters = gs[-1].shape.as_list()[-1] + h = nn.nin(gs[-1], n_filters) + for l in range(n_scales): + # level module + ## hidden units + for i in range(n_residual_blocks // 2): + h = nn.residual_block(h, gs.pop()) + hs.append(h) + if l < n_latent_scales: + ## prior + spatial_shape = h.shape.as_list()[1] + n_h_channels = h.shape.as_list()[-1] + if spatial_shape == 1: + ### no spatial correlations + p = latent_parameters(h) + ps.append(p) + z_prior = latent_sample(p) + zs.append(z_prior) + else: + ### four autoregressively modeled groups + if training: + z_posterior_groups = nn.split_groups(zs_posterior[0]) + p_groups = [] + z_groups = [] + p_features = tf.space_to_depth(nn.residual_block(h), 2) + for i in range(4): + p_group = latent_parameters(p_features, num_filters = n_h_channels) + p_groups.append(p_group) + z_group = latent_sample(p_group) + z_groups.append(z_group) + # ar feedback sampled from + if training: + feedback = z_posterior_groups.pop(0) + else: + feedback = z_group + # prepare input for next group + if i + 1 < 4: + p_features = nn.residual_block(p_features, feedback) + if training: + assert not z_posterior_groups + # complete prior parameters + p = nn.merge_groups(p_groups) + ps.append(p) + # complete prior sample + z_prior = nn.merge_groups(z_groups) + zs.append(z_prior) + ## vae feedback sampled from + if training: + ## posterior + z = zs_posterior.pop(0) + else: + ## prior + z = z_prior + for i in range(n_residual_blocks // 2): + n_h_channels = h.shape.as_list()[-1] + h = tf.concat([h, z], axis = -1) + h = nn.nin(h, n_h_channels) + h = nn.residual_block(h, gs.pop()) + hs.append(h) + else: + for i in range(n_residual_blocks // 2): + h = nn.residual_block(h, gs.pop()) + hs.append(h) + # prepare input to next level + if l + 1 < n_scales: + n_filters = gs[-1].shape.as_list()[-1] + h = nn.upsample(h, n_filters) + + assert not gs + if training: + assert not zs_posterior + + return hs, ps, zs + + +def encoder( + x, n_out, init = False, dropout_p = 0.5, + n_scales = 1, n_residual_blocks = 2, activation = "elu", n_filters = 64, max_filters = 128): + with model_arg_scope( + init = init, dropout_p = dropout_p, activation = activation): + # outputs + hs = [] + # prepare input + xc = x + h = nn.nin(xc, n_filters) + for l in range(n_scales): + # level module + for i in range(n_residual_blocks): + h = nn.residual_block(h) + hs.append(h) + # prepare input to next level + if l + 1 < n_scales: + n_filters = min(2*n_filters, max_filters) + h = nn.downsample(h, n_filters) + h = nn.nin(h, n_out) + hs.append(h) + return hs + + +def feature_encoder( + x, init = False, dropout_p = 0.5, + n_scales = 1, n_residual_blocks = 2, activation = "elu", n_filters = 64, max_filters = 128): + with model_arg_scope( + init = init, dropout_p = dropout_p, activation = activation): + # outputs + hs = [] + # prepare input + xc = x + h = nn.nin(xc, n_filters) + for l in range(n_scales): + # level module + for i in range(n_residual_blocks): + h = nn.residual_block(h) + hs.append(h) + # prepare input to next level + if l + 1 < n_scales: + n_filters = min(2*n_filters, max_filters) + h = nn.downsample(h, n_filters) + return hs + + +def cfn( + x, init = False, dropout_p = 0.5, 
+ n_scales = 1, n_residual_blocks = 2, activation = "elu", n_filters = 64, max_filters = 128): + with model_arg_scope( + init = init, dropout_p = dropout_p, activation = activation): + # outputs + hs = [] + # prepare input + xc = x + h = nn.nin(xc, n_filters) + for l in range(n_scales): + # level module + for i in range(n_residual_blocks): + h = nn.residual_block(h) + hs.append(h) + # prepare input to next level + if l + 1 < n_scales: + n_filters = min(2*n_filters, max_filters) + h = nn.downsample(h, n_filters) + h_shape = h.shape.as_list() + h = tf.reshape(h, [h_shape[0],1,1,h_shape[1]*h_shape[2]*h_shape[3]]) + h = nn.nin(h, 2*max_filters) + hs.append(h) + return hs + + +def cfn_features( + x, init = False, dropout_p = 0.5, + n_scales = 1, n_residual_blocks = 2, activation = "elu", n_filters = 64, max_filters = 128): + with model_arg_scope( + init = init, dropout_p = dropout_p, activation = activation): + # outputs + hs = [] + # prepare input + xc = x + h = nn.nin(xc, n_filters) + for l in range(n_scales): + # level module + for i in range(n_residual_blocks): + h = nn.residual_block(h) + hs.append(h) + # prepare input to next level + if l + 1 < n_scales: + n_filters = min(2*n_filters, max_filters) + h = nn.downsample(h, n_filters) + return hs + + +def classifier( + x, n_out, init = False, dropout_p = 0.5, + activation = "elu"): + with model_arg_scope( + init = init, dropout_p = dropout_p, activation = activation): + # outputs + hs = [] + # prepare input + x_shape = x.shape.as_list()#tf.shape(x) + h = tf.reshape(x, [x_shape[0], 1, 1, x_shape[1]*x_shape[2]*x_shape[3]]) + h = nn.activate(h) + h = nn.nin(h, 1024) + h = nn.activate(h) + h = nn.nin(h, n_out) + h = tf.reshape(h, [x_shape[0], n_out]) + return h + + +def enc_up( + x, c, init = False, dropout_p = 0.5, + n_scales = 1, n_residual_blocks = 2, activation = "elu", n_filters = 64, max_filters = 128): + with model_arg_scope( + init = init, dropout_p = dropout_p, activation = activation): + # outputs + hs = [] + # prepare input + #xc = tf.concat([x,c], axis = -1) + xc = x + h = nn.nin(xc, n_filters) + for l in range(n_scales): + # level module + for i in range(n_residual_blocks): + h = nn.residual_block(h) + hs.append(h) + # prepare input to next level + if l + 1 < n_scales: + n_filters = min(2*n_filters, max_filters) + h = nn.downsample(h, n_filters) + return hs + + +def enc_down( + gs, init = False, dropout_p = 0.5, + n_scales = 1, n_residual_blocks = 2, activation = "elu", + n_latent_scales = 2): + assert n_residual_blocks % 2 == 0 + gs = list(gs) + with model_arg_scope( + init = init, dropout_p = dropout_p, activation = activation): + # outputs + hs = [] # hidden units + qs = [] # posteriors + zs = [] # samples from posterior + # prepare input + n_filters = gs[-1].shape.as_list()[-1] + h = nn.nin(gs[-1], n_filters) + for l in range(n_scales): + # level module + ## hidden units + for i in range(n_residual_blocks // 2): + h = nn.residual_block(h, gs.pop()) + hs.append(h) + if l < n_latent_scales: + ## posterior parameters + q = latent_parameters(h) + qs.append(q) + ## posterior sample + z = latent_sample(q) + zs.append(z) + ## sample feedback + for i in range(n_residual_blocks // 2): + gz = tf.concat([gs.pop(), z], axis = -1) + h = nn.residual_block(h, gz) + hs.append(h) + else: + """ no need to go down any further + for i in range(n_residual_blocks // 2): + h = nn.residual_block(h, gs.pop()) + hs.append(h) + """ + break + # prepare input to next level + if l + 1 < n_scales: + n_filters = gs[-1].shape.as_list()[-1] + h = nn.upsample(h, 
n_filters) + + #assert not gs # not true anymore since we break out of the loop + + return hs, qs, zs + + +# Distributions + + +def dec_parameters( + h, init = False, **kwargs): + with model_arg_scope(init = init): + num_filters = 3 + return nn.conv2d(h, num_filters) + + +def latent_parameters( + h, init = False, **kwargs): + num_filters = kwargs.get("num_filters", h.shape.as_list()[-1]) + return nn.conv2d(h, num_filters) + + +def logvarvar(u): + cutoff = tf.to_float(-5) + logvar = tf.maximum(cutoff, u) + var = tf.exp(logvar) + return logvar, var + + +def latent_sample(p): + mean = p + stddev = 1.0 + eps = tf.random_normal(mean.shape, mean = 0.0, stddev = 1.0) + return mean + stddev * eps + + +def latent_kl(q, p): + mean1 = q + mean2 = p + + kl = 0.5 * tf.square(mean2 - mean1) + kl = tf.reduce_sum(kl, axis = [1,2,3]) + kl = tf.reduce_mean(kl) + return kl diff --git a/demo/nn.py b/demo/nn.py new file mode 100644 index 00000000..51f1f3c2 --- /dev/null +++ b/demo/nn.py @@ -0,0 +1,521 @@ +""" +modified from pixelcnn++ +Various tensorflow utilities +""" + +import numpy as np +import tensorflow as tf +from tensorflow.contrib.framework.python.ops import add_arg_scope + + +def int_shape(x): + return x.shape.as_list() + + +def concat_elu(x): + """ like concatenated ReLU (http://arxiv.org/abs/1603.05201), but then with ELU """ + axis = len(x.get_shape()) - 1 + return tf.nn.elu(tf.concat([x, -x], axis)) + + +def log_sum_exp(x): + """ numerically stable log_sum_exp implementation that prevents overflow """ + axis = len(x.get_shape()) - 1 + m = tf.reduce_max(x, axis) + m2 = tf.reduce_max(x, axis, keep_dims=True) + return m + tf.log(tf.reduce_sum(tf.exp(x - m2), axis)) + + +def log_prob_from_logits(x): + """ numerically stable log_softmax implementation that prevents overflow """ + axis = len(x.get_shape()) - 1 + m = tf.reduce_max(x, axis, keep_dims=True) + return x - m - tf.log(tf.reduce_sum(tf.exp(x - m), axis, keep_dims=True)) + + +def ce_loss(x, l): + x = (x + 1.0) / 2.0 + x = tf.clip_by_value(255 * x, 0, 255) + x = tf.cast(x, tf.int32) + reconst_cost = tf.reduce_mean( + tf.nn.sparse_softmax_cross_entropy_with_logits( + logits=tf.reshape(l, [-1, 256]), + labels=tf.reshape(x, [-1]) + ) + ) + return reconst_cost + + +def ce_sample(logits, temp = 1.0): + temp = tf.maximum(tf.convert_to_tensor(1e-5), tf.convert_to_tensor(temp)) + noise = tf.random_uniform(logits.shape, minval = 1e-5, maxval = 1.0 - 1e-5) + pixels = tf.argmax(logits / temp - tf.log(-tf.log(noise)), 4) + pixels = tf.cast(pixels, tf.float32) / 127.5 - 1.0 + return pixels + + +def discretized_mix_logistic_loss(x, l, sum_all=True): + """ log-likelihood for mixture of discretized logistics, assumes the data has been rescaled to [-1,1] interval """ + xs = int_shape( + x) # true image (i.e. labels) to regress to, e.g. (B,32,32,3) + ls = int_shape(l) # predicted distribution, e.g. (B,32,32,100) + # here and below: unpacking the params of the mixture of logistics + nr_mix = int(ls[-1] / 10) + logit_probs = l[:, :, :, :nr_mix] + l = tf.reshape(l[:, :, :, nr_mix:], xs + [nr_mix * 3]) + means = l[:, :, :, :, :nr_mix] + log_scales = tf.maximum(l[:, :, :, :, nr_mix:2 * nr_mix], -2.) 
+ coeffs = tf.nn.tanh(l[:, :, :, :, 2 * nr_mix:3 * nr_mix]) + # here and below: getting the means and adjusting them based on preceding + # sub-pixels + x = tf.reshape(x, xs + [1]) + tf.zeros(xs + [nr_mix]) + m2 = tf.reshape(means[:, :, :, 1, :] + coeffs[:, :, :, 0, :] + * x[:, :, :, 0, :], [xs[0], xs[1], xs[2], 1, nr_mix]) + m3 = tf.reshape(means[:, :, :, 2, :] + coeffs[:, :, :, 1, :] * x[:, :, :, 0, :] + + coeffs[:, :, :, 2, :] * x[:, :, :, 1, :], [xs[0], xs[1], xs[2], 1, nr_mix]) + means = tf.concat([tf.reshape(means[:, :, :, 0, :], [ + xs[0], xs[1], xs[2], 1, nr_mix]), m2, m3], 3) + centered_x = x - means + inv_stdv = tf.exp(-log_scales) + plus_in = inv_stdv * (centered_x + 1. / 255.) + cdf_plus = tf.nn.sigmoid(plus_in) + min_in = inv_stdv * (centered_x - 1. / 255.) + cdf_min = tf.nn.sigmoid(min_in) + # log probability for edge case of 0 (before scaling) + log_cdf_plus = plus_in - tf.nn.softplus(plus_in) + # log probability for edge case of 255 (before scaling) + log_one_minus_cdf_min = -tf.nn.softplus(min_in) + cdf_delta = cdf_plus - cdf_min # probability for all other cases + mid_in = inv_stdv * centered_x + # log probability in the center of the bin, to be used in extreme cases + # (not actually used in our code) + log_pdf_mid = -mid_in - log_scales - 2. * tf.nn.softplus(-mid_in) + + # now select the right output: left edge case, right edge case, normal + # case, extremely low prob case (doesn't actually happen for us) + + # this is what we are really doing, but using the robust version below for extreme cases in other applications and to avoid NaN issue with tf.select() + # log_probs = tf.select(x < -0.999, log_cdf_plus, tf.select(x > 0.999, log_one_minus_cdf_min, tf.log(cdf_delta))) + + # robust version, that still works if probabilities are below 1e-5 (which never happens in our code) + # tensorflow backpropagates through tf.select() by multiplying with zero instead of selecting: this requires use to use some ugly tricks to avoid potential NaNs + # the 1e-12 in tf.maximum(cdf_delta, 1e-12) is never actually used as output, it's purely there to get around the tf.select() gradient issue + # if the probability on a sub-pixel is below 1e-5, we use an approximation + # based on the assumption that the log-density is constant in the bin of + # the observed sub-pixel value + log_probs = tf.where(x < -0.999, log_cdf_plus, tf.where(x > 0.999, log_one_minus_cdf_min, + tf.where(cdf_delta > 1e-5, tf.log(tf.maximum(cdf_delta, 1e-12)), log_pdf_mid - np.log(127.5)))) + + log_probs = tf.reduce_sum(log_probs, 3) + log_prob_from_logits(logit_probs) + return -tf.reduce_mean(tf.reduce_sum(log_sum_exp(log_probs), [1, 2])) + + +def sample_from_discretized_mix_logistic(l, nr_mix, temp1 = 1.0, temp2 = 1.0, mean = False): + if mean: + temp2 = 0.0 + ls = int_shape(l) + xs = ls[:-1] + [3] + # unpack parameters + logit_probs = l[:, :, :, :nr_mix] + l = tf.reshape(l[:, :, :, nr_mix:], xs + [nr_mix * 3]) + # sample mixture indicator from softmax + if not mean: + if temp1 < 1e-5: + sel = tf.one_hot(tf.argmax(logit_probs, 3), depth=nr_mix, dtype=tf.float32) + else: + sel = tf.one_hot(tf.argmax(logit_probs/temp1 - tf.log(-tf.log(tf.random_uniform( + logit_probs.get_shape(), minval=1e-5, maxval=1. - 1e-5))), 3), depth=nr_mix, dtype=tf.float32) + else: + sel = tf.nn.softmax(logit_probs) + sel = tf.reshape(sel, xs[:-1] + [1, nr_mix]) + # select logistic parameters + means = tf.reduce_sum(l[:, :, :, :, :nr_mix] * sel, 4) + log_scales = tf.maximum(tf.reduce_sum( + l[:, :, :, :, nr_mix:2 * nr_mix] * sel, 4), -2.) 
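+    # temp1 controls how the mixture indicator is drawn: a Gumbel-max
+    # sample over logit_probs / temp1 (approaching argmax as temp1 -> 0).
+    # With mean=True, sel is instead the softmax itself and temp2 is
+    # forced to 0.0, so the logistic noise term below vanishes and the
+    # component means are averaged deterministically.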
+
+
+def sample_from_discretized_mix_logistic(l, nr_mix, temp1 = 1.0, temp2 = 1.0, mean = False):
+    if mean:
+        temp2 = 0.0
+    ls = int_shape(l)
+    xs = ls[:-1] + [3]
+    # unpack parameters
+    logit_probs = l[:, :, :, :nr_mix]
+    l = tf.reshape(l[:, :, :, nr_mix:], xs + [nr_mix * 3])
+    # sample mixture indicator from softmax
+    if not mean:
+        if temp1 < 1e-5:
+            sel = tf.one_hot(tf.argmax(logit_probs, 3), depth=nr_mix, dtype=tf.float32)
+        else:
+            sel = tf.one_hot(tf.argmax(logit_probs/temp1 - tf.log(-tf.log(tf.random_uniform(
+                logit_probs.get_shape(), minval=1e-5, maxval=1. - 1e-5))), 3), depth=nr_mix, dtype=tf.float32)
+    else:
+        sel = tf.nn.softmax(logit_probs)
+    sel = tf.reshape(sel, xs[:-1] + [1, nr_mix])
+    # select logistic parameters
+    means = tf.reduce_sum(l[:, :, :, :, :nr_mix] * sel, 4)
+    log_scales = tf.maximum(tf.reduce_sum(
+        l[:, :, :, :, nr_mix:2 * nr_mix] * sel, 4), -2.)
+    coeffs = tf.reduce_sum(tf.nn.tanh(
+        l[:, :, :, :, 2 * nr_mix:3 * nr_mix]) * sel, 4)
+    # sample from logistic & clip to interval
+    # we don't actually round to the nearest 8bit value when sampling
+    u = tf.random_uniform(means.get_shape(), minval=1e-5, maxval=1. - 1e-5)
+    # inverse CDF of the logistic: x = mean + scale * (log(u) - log(1 - u))
+    x = means + temp2 * tf.exp(log_scales) * (tf.log(u) - tf.log(1. - u))
+    x0 = tf.minimum(tf.maximum(x[:, :, :, 0], -1.), 1.)
+    x1 = tf.minimum(tf.maximum(
+        x[:, :, :, 1] + coeffs[:, :, :, 0] * x0, -1.), 1.)
+    x2 = tf.minimum(tf.maximum(
+        x[:, :, :, 2] + coeffs[:, :, :, 1] * x0 + coeffs[:, :, :, 2] * x1, -1.), 1.)
+    return tf.concat([tf.reshape(x0, xs[:-1] + [1]), tf.reshape(x1, xs[:-1] + [1]), tf.reshape(x2, xs[:-1] + [1])], 3)
+
+
+''' layers containing trainable variables. '''
+
+def get_name(layer_name, counters):
+    ''' utility for keeping track of layer names '''
+    if layer_name not in counters:
+        counters[layer_name] = 0
+    name = layer_name + '_' + str(counters[layer_name])
+    counters[layer_name] += 1
+    return name
+
+
+@add_arg_scope
+def dense(x, num_units, init_scale=1., counters={}, init=False, **kwargs):
+    ''' fully connected layer '''
+    name = get_name('dense', counters)
+    with tf.variable_scope(name):
+        if init:
+            xs = x.shape.as_list()
+            # data based initialization of parameters
+            V = tf.get_variable('V', [xs[1], num_units], tf.float32, tf.random_normal_initializer(0, 0.05))
+            V_norm = tf.nn.l2_normalize(V.initialized_value(), [0])
+            x_init = tf.matmul(x, V_norm)
+            m_init, v_init = tf.nn.moments(x_init, [0])
+            scale_init = init_scale / tf.sqrt(v_init + 1e-10)
+            g = tf.get_variable('g', dtype=tf.float32, initializer=scale_init)
+            b = tf.get_variable('b', dtype=tf.float32, initializer=-m_init * scale_init)
+            x_init = tf.reshape(scale_init, [1, num_units]) * (x_init - tf.reshape(m_init, [1, num_units]))
+
+            return x_init
+        else:
+            V = tf.get_variable("V")
+            g = tf.get_variable("g")
+            b = tf.get_variable("b")
+            with tf.control_dependencies([tf.assert_variables_initialized([V, g, b])]):
+                # use weight normalization (Salimans & Kingma, 2016)
+                x = tf.matmul(x, V)
+                scaler = g / tf.sqrt(tf.reduce_sum(tf.square(V), [0]))
+                x = tf.reshape(scaler, [1, num_units]) * x + tf.reshape(b, [1, num_units])
+
+                return x
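+
+# dense above and conv2d/deconv2d below implement weight normalization
+# (Salimans & Kingma, 2016): the weight is reparameterized as
+#   W = g * V / ||V||
+# so that the direction V and the scale g are learned independently. On the
+# first pass (init=True) the layers run a data-dependent initialization: g and
+# b are set so that the pre-activations over the init batch have zero mean and
+# standard deviation init_scale.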
+
+
+@add_arg_scope
+def conv2d(x, num_filters, filter_size=[3, 3], stride=[1, 1], pad='SAME', init_scale=1., counters={}, init=False, **kwargs):
+    ''' convolutional layer '''
+    num_filters = int(num_filters)
+    strides = [1] + stride + [1]
+    name = get_name('conv2d', counters)
+    with tf.variable_scope(name):
+        if init:
+            xs = x.shape.as_list()
+            # data based initialization of parameters
+            V = tf.get_variable('V', filter_size + [xs[-1], num_filters],
+                                tf.float32, tf.random_normal_initializer(0, 0.05))
+            V_norm = tf.nn.l2_normalize(V.initialized_value(), [0, 1, 2])
+            x_init = tf.nn.conv2d(x, V_norm, strides, pad)
+            m_init, v_init = tf.nn.moments(x_init, [0, 1, 2])
+            scale_init = init_scale / tf.sqrt(v_init + 1e-8)
+            g = tf.get_variable('g', dtype=tf.float32, initializer = scale_init)
+            b = tf.get_variable('b', dtype=tf.float32, initializer = -m_init * scale_init)
+            x_init = tf.reshape(scale_init, [1, 1, 1, num_filters]) * (x_init - tf.reshape(m_init, [1, 1, 1, num_filters]))
+
+            return x_init
+        else:
+            V = tf.get_variable("V")
+            g = tf.get_variable("g")
+            b = tf.get_variable("b")
+            with tf.control_dependencies([tf.assert_variables_initialized([V, g, b])]):
+                # use weight normalization (Salimans & Kingma, 2016)
+                W = tf.reshape(g, [1, 1, 1, num_filters]) * tf.nn.l2_normalize(V, [0, 1, 2])
+
+                # calculate convolutional layer output
+                x = tf.nn.bias_add(tf.nn.conv2d(x, W, strides, pad), b)
+
+                return x
+
+
+@add_arg_scope
+def deconv2d(x, num_filters, filter_size=[3, 3], stride=[1, 1], pad='SAME', init_scale=1., counters={}, init=False, **kwargs):
+    ''' transposed convolutional layer '''
+    num_filters = int(num_filters)
+    name = get_name('deconv2d', counters)
+    xs = int_shape(x)
+    strides = [1] + stride + [1]
+    if pad == 'SAME':
+        target_shape = [xs[0], xs[1] * stride[0],
+                        xs[2] * stride[1], num_filters]
+    else:
+        target_shape = [xs[0], xs[1] * stride[0] + filter_size[0] - 1,
+                        xs[2] * stride[1] + filter_size[1] - 1, num_filters]
+    with tf.variable_scope(name):
+        if init:
+            # data based initialization of parameters
+            V = tf.get_variable('V', filter_size + [num_filters, xs[-1]], tf.float32, tf.random_normal_initializer(0, 0.05))
+            V_norm = tf.nn.l2_normalize(V.initialized_value(), [0, 1, 3])
+            x_init = tf.nn.conv2d_transpose(x, V_norm, target_shape, strides, padding=pad)
+            m_init, v_init = tf.nn.moments(x_init, [0, 1, 2])
+            scale_init = init_scale / tf.sqrt(v_init + 1e-8)
+            g = tf.get_variable('g', dtype=tf.float32, initializer=scale_init)
+            b = tf.get_variable('b', dtype=tf.float32, initializer=-m_init * scale_init)
+            x_init = tf.reshape(scale_init, [1, 1, 1, num_filters]) * (x_init - tf.reshape(m_init, [1, 1, 1, num_filters]))
+
+            return x_init
+        else:
+            V = tf.get_variable("V")
+            g = tf.get_variable("g")
+            b = tf.get_variable("b")
+            with tf.control_dependencies([tf.assert_variables_initialized([V, g, b])]):
+                # use weight normalization (Salimans & Kingma, 2016)
+                W = tf.reshape(g, [1, 1, num_filters, 1]) * tf.nn.l2_normalize(V, [0, 1, 3])
+
+                # calculate convolutional layer output
+                x = tf.nn.conv2d_transpose(x, W, target_shape, strides, padding=pad)
+                x = tf.nn.bias_add(x, b)
+
+                return x
+
+
+@add_arg_scope
+def activate(x, activation, **kwargs):
+    if activation is None:
+        return x
+    elif activation == "elu":
+        return tf.nn.elu(x)
+    else:
+        raise NotImplementedError(activation)
+
+
+''' meta-layer consisting of multiple base layers '''
+
+def nin(x, num_units):
+    """ a network in network layer (1x1 CONV) """
+    s = int_shape(x)
+    x = tf.reshape(x, [np.prod(s[:-1]), s[-1]])
+    x = dense(x, num_units)
+    return tf.reshape(x, s[:-1] + [num_units])
+
+
+def downsample(x, num_units):
+    return conv2d(x, num_units, stride = [2, 2])
+
+
+def upsample(x, num_units, method = "subpixel"):
+    if method == "conv_transposed":
+        return deconv2d(x, num_units, stride = [2, 2])
+    elif method == "subpixel":
+        # subpixel upsampling: predict 4x the channels, then rearrange each
+        # group of 4 channels into a 2x2 spatial block
+        x = conv2d(x, 4*num_units)
+        x = tf.depth_to_space(x, 2)
+        return x
+
+
+@add_arg_scope
+def residual_block(x, a = None, conv=conv2d, init=False, dropout_p=0.0, gated = False, **kwargs):
+    """Slight variation of original."""
+    xs = int_shape(x)
+    num_filters = xs[-1]
+
+    residual = x
+    if a is not None:
+        a = nin(activate(a), num_filters)
+        residual = tf.concat([residual, a], axis = -1)
+    residual = activate(residual)
+    residual = tf.nn.dropout(residual, keep_prob = 1.0 - dropout_p)
+    residual = conv(residual, num_filters)
+    if gated:
+        residual = activate(residual)
+        residual = tf.nn.dropout(residual, keep_prob = 1.0 - dropout_p)
+        residual = conv(residual, 2*num_filters)
+        a, b = tf.split(residual, 2, 3)
+        residual = a * tf.nn.sigmoid(b)
+
+    return x + residual
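+
+# With gated = True, residual_block ends in a gated linear unit: the last conv
+# produces 2*num_filters channels which are split into (a, b), and the output
+# is a * sigmoid(b). sigmoid(b) acts as a learned per-feature gate in [0, 1]
+# that controls how much of the residual is added back to x.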
+
+
+''' utilities for shifting the image around, efficient alternative to masking convolutions '''
+
+def down_shift(x):
+    xs = int_shape(x)
+    return tf.concat([tf.zeros([xs[0], 1, xs[2], xs[3]]), x[:, :xs[1] - 1, :, :]], 1)
+
+
+def right_shift(x):
+    xs = int_shape(x)
+    return tf.concat([tf.zeros([xs[0], xs[1], 1, xs[3]]), x[:, :, :xs[2] - 1, :]], 2)
+
+
+@add_arg_scope
+def down_shifted_conv2d(x, num_filters, filter_size=[2, 3], stride=[1, 1], **kwargs):
+    x = tf.pad(x, [[0, 0], [filter_size[0] - 1, 0],
+                   [int((filter_size[1] - 1) / 2), int((filter_size[1] - 1) / 2)], [0, 0]])
+    return conv2d(x, num_filters, filter_size=filter_size, pad='VALID', stride=stride, **kwargs)
+
+
+@add_arg_scope
+def down_shifted_deconv2d(x, num_filters, filter_size=[2, 3], stride=[1, 1], **kwargs):
+    x = deconv2d(x, num_filters, filter_size=filter_size,
+                 pad='VALID', stride=stride, **kwargs)
+    xs = int_shape(x)
+    return x[:, :(xs[1] - filter_size[0] + 1), int((filter_size[1] - 1) / 2):(xs[2] - int((filter_size[1] - 1) / 2)), :]
+
+
+@add_arg_scope
+def down_right_shifted_conv2d(x, num_filters, filter_size=[2, 2], stride=[1, 1], **kwargs):
+    x = tf.pad(x, [[0, 0], [filter_size[0] - 1, 0],
+                   [filter_size[1] - 1, 0], [0, 0]])
+    return conv2d(x, num_filters, filter_size=filter_size, pad='VALID', stride=stride, **kwargs)
+
+
+@add_arg_scope
+def down_right_shifted_deconv2d(x, num_filters, filter_size=[2, 2], stride=[1, 1], **kwargs):
+    x = deconv2d(x, num_filters, filter_size=filter_size,
+                 pad='VALID', stride=stride, **kwargs)
+    xs = int_shape(x)
+    return x[:, :(xs[1] - filter_size[0] + 1), :(xs[2] - filter_size[1] + 1), :]
+
+
+################################################################ random tf stuff
+
+
+def make_linear_var(
+        step,
+        start, end,
+        start_value, end_value,
+        clip_min = 0.0, clip_max = 1.0):
+    """linear from (a, alpha) to (b, beta), i.e.
+    (beta - alpha)/(b - a) * (x - a) + alpha"""
+    linear = (
+        (end_value - start_value) /
+        (end - start) *
+        (tf.cast(step, tf.float32) - start) + start_value)
+    return tf.clip_by_value(linear, clip_min, clip_max)
+
+"""Simple approximation of 2d gaussian kernel."""
+k = np.float32([1,4,6,4,1])
+k = np.outer(k, k)
+# normalize and extend to three independent input and output channels
+kernel = k[:,:,None,None]/k.sum()*np.eye(3, dtype=np.float32)
+def tf_gaussian_subsample(x):
+    return tf.nn.conv2d(
+        input = x,
+        filter = kernel,
+        strides = [1, 2, 2, 1],
+        padding = "SAME")
+
+
+# stride 2 subsampling (keeps the top-left pixel of each 2x2 block)
+nnkernel = np.eye(3, dtype=np.float32)[None,None,:,:]
+def tf_subsample(x):
+    return tf.nn.conv2d(
+        input = x,
+        filter = nnkernel,
+        strides = [1, 2, 2, 1],
+        padding = "SAME")
+
+
+# downsampling used by tf_pyramid below; replace with tf_subsample for
+# nearest-neighbor subsampling
+tf_downsample = tf_gaussian_subsample
+
+
+def tf_pyramid(x, ps, p2 = None):
+    """Pyramid of x, coarse to fine"""
+    nd = round(math.log(ps, 2))
+    assert 2**nd == ps
+    xs = x.get_shape().as_list()
+    assert len(xs) == 4
+    b, h, w, c = xs
+    assert h == w
+    if p2 is None:
+        p2 = round(math.log(h, ps))
+    assert ps**p2 == h, "{}, {}, {}".format(ps, p2, h)
+    pyramid = [x]
+    for i in range(p2):
+        p = pyramid[-1]
+        for j in range(nd):
+            p = tf_downsample(p)
+        pyramid.append(p)
+    return list(reversed(pyramid))
+
+
+def tf_concat_coarse_cond(coarse, cond):
+    groups = tf.concat([coarse] + 3 * [tf.zeros_like(coarse)], axis = 3)
+    coarse_up = tf.depth_to_space(groups, 2)
+    return tf.concat([coarse_up, cond], axis = 3)
+
+
+def np_concat_coarse_cond(coarse, cond):
+    groups = np.concatenate([coarse] + 3 * [np.zeros_like(coarse)], axis = 3)
+    coarse_up = np_depth_to_space(groups, 2)
+    return np.concatenate([coarse_up, cond], axis = 3)
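+
+# tf_concat_coarse_cond upsamples without interpolation: concatenating the
+# coarse image with three zero-filled channel groups and applying
+# tf.depth_to_space(..., 2) rearranges every group of 4 channels into a 2x2
+# spatial block, so each coarse pixel lands in the top-left corner of its
+# block and the remaining three positions stay zero. The result is then
+# concatenated channel-wise with the conditioning tensor.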
+
+
+smoothing1d = np.float32([1,2,1])
+difference1d = np.float32([1,0,-1])
+sobelx = np.outer(smoothing1d, difference1d)
+sobely = np.transpose(sobelx)
+# one dim for number of input channels
+sobelx = sobelx[:,:,None]
+sobely = sobely[:,:,None]
+# stack along new dim for output channels
+sobel = np.stack([sobelx, sobely], axis = -1)
+
+fdx = np.zeros([3,3], dtype = np.float32)
+fdx[1,:] = difference1d
+fdx = fdx[:,:,None]
+
+fdy = np.zeros([3,3], dtype = np.float32)
+fdy[:,1] = difference1d
+fdy = fdy[:,:,None]
+fd = np.stack([fdx, fdy], axis = -1)
+def tf_img_grad(x, use_sobel = True):
+    """Sobel approximation of gradient."""
+    gray = tf.reduce_mean(x, axis = -1, keep_dims = True)
+    if use_sobel:
+        filter_ = sobel
+    else:
+        filter_ = fd
+    grad = tf.nn.conv2d(
+        input = gray,
+        filter = filter_,
+        strides = 4*[1],
+        padding = "SAME")
+    return grad
+
+
+def tf_grad_loss(x, y):
+    """Mean squared L2 difference of gradients."""
+    gx = tf_img_grad(x)
+    gy = tf_img_grad(y)
+    return tf.reduce_mean(tf.contrib.layers.flatten(tf.square(gx - gy)))
+
+
+def tf_grad_mag(x):
+    """Pointwise L2 norm of gradient."""
+    gx = tf_img_grad(x)
+    return tf.sqrt(tf.reduce_sum(tf.square(gx), axis = -1, keep_dims = True))
+
+
+def tv_loss(x):
+    h = 1.0 / x.shape.as_list()[1]
+    g = tf_img_grad(x, use_sobel = False)
+    hgl1 = h * tf.sqrt(
+        tf.reduce_sum(
+            tf.square(g),
+            axis = 3))
+    return tf.reduce_mean(
+        tf.reduce_sum(
+            hgl1,
+            axis = [1,2]))
+
+
+def likelihood_loss(target, tail_decoding, loss):
+    if loss == "l2":
+        rec_loss = tf.reduce_mean(tf.contrib.layers.flatten(
+            tf.square(target - tail_decoding)))
+    elif loss == "l1":
+        rec_loss = tf.reduce_mean(tf.contrib.layers.flatten(
+            tf.abs(target - tail_decoding)))
+    elif loss == "h1":
+        rec_loss = tf.reduce_mean(tf.contrib.layers.flatten(
+            tf.square(target - tail_decoding)))
+        rec_loss += tf_grad_loss(target, tail_decoding)
+    else:
+        raise NotImplementedError("Unknown loss function: {}".format(loss))
+    return rec_loss
+
+
+def split_groups(x, bs = 2):
+    return tf.split(tf.space_to_depth(x, bs), bs**2, axis = 3)
+
+
+def merge_groups(xs, bs = 2):
+    return tf.depth_to_space(tf.concat(xs, axis = 3), bs)
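+
+# Usage sketch for the reconstruction losses (illustrative only; `target` and
+# `decoding` are hypothetical placeholders, not names defined in this file):
+#
+#   target = tf.placeholder(tf.float32, [8, 256, 256, 3])
+#   decoding = tf.placeholder(tf.float32, [8, 256, 256, 3])
+#   rec_loss = likelihood_loss(target, decoding, loss = "h1")
+#
+# "l2"/"l1" penalize pixel differences only; "h1" adds tf_grad_loss, which
+# also matches Sobel image gradients and thus emphasizes edge fidelity.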
diff --git a/demo/parser.py b/demo/parser.py
new file mode 100644
index 00000000..a99fb949
--- /dev/null
+++ b/demo/parser.py
@@ -0,0 +1,25 @@
+import argparse
+from config import default_log_dir
+
+def parse_arguments():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--data_index", required = True, help = "path to training or testing data index")
+    parser.add_argument("--mode", default = "train",
+                        choices=["train", "test", "mcmc", "add_reconstructions", "transfer"])
+    parser.add_argument("--log_dir", default = default_log_dir, help = "path to log into")
+    parser.add_argument("--batch_size", default = 8, type = int, help = "batch size")
+    parser.add_argument("--init_batches", default = 4, type = int, help = "number of batches for initialization")
+    parser.add_argument("--checkpoint", help = "path to checkpoint to restore")
+    parser.add_argument("--spatial_size", default = 256, type = int, help = "spatial size to resize images to")
+    parser.add_argument("--lr", default = 1e-3, type = float, help = "initial learning rate")
+    parser.add_argument("--lr_decay_begin", default = 1000, type = int, help = "steps after which to begin linear lr decay")
+    parser.add_argument("--lr_decay_end", default = 100000, type = int, help = "step at which lr is zero, i.e. number of training steps")
+    parser.add_argument("--log_freq", default = 250, type = int, help = "frequency to log")
+    parser.add_argument("--ckpt_freq", default = 1000, type = int, help = "frequency to checkpoint")
+    parser.add_argument("--test_freq", default = 1000, type = int, help = "frequency to test")
+    parser.add_argument("--drop_prob", default = 0.1, type = float, help = "Dropout probability")
+    parser.add_argument("--mask", dest = "mask", action = "store_true", help = "Use masked data")
+    parser.add_argument("--no-mask", dest = "mask", action = "store_false", help = "Do not use mask")
+    parser.set_defaults(mask = True)
+    return parser.parse_args()
+
diff --git a/demo/requirements.txt b/demo/requirements.txt
new file mode 100644
index 00000000..5411ad05
--- /dev/null
+++ b/demo/requirements.txt
@@ -0,0 +1,7 @@
+tensorflow==1.2.1
+numpy==1.14.5
+opencv-python==3.4.3.18
+Pillow==5.2.0
+tqdm==4.26.0
+PyYAML==3.13
+h5py==2.8.0
diff --git a/demo/run.sh b/demo/run.sh
new file mode 100755
index 00000000..ccb6d602
--- /dev/null
+++ b/demo/run.sh
@@ -0,0 +1,19 @@
+#! /usr/bin/env bash
+
+python main.py --mode "transfer" \
+    --data_index ./datasets/deepfashion/index.p \
+    --log_dir ./log \
+    --batch_size 8 \
+    --init_batches 4 \
+    --checkpoint ./checkpoints/model.ckpt-100000 \
+    --spatial_size 256 \
+    --lr 0.001 \
+    --lr_decay_begin 1000 \
+    --lr_decay_end 100000 \
+    --log_freq 250 \
+    --ckpt_freq 1000 \
+    --test_freq 1000 \
+    --drop_prob 0.1 \
+    --mask
+    # --no-mask
+
diff --git a/demo/test.py b/demo/test.py
new file mode 100644
index 00000000..04b8389b
--- /dev/null
+++ b/demo/test.py
@@ -0,0 +1,55 @@
+from PIL import Image, ImageDraw, ImageFont
+import numpy as np
+
+def draw_rotated_text(image, angle, xy, text, font):
+    """ Draw text at an angle into an image, takes the same arguments
+    as ImageDraw.text() except for:
+
+    :param image: Image to write text into
+    :param angle: Angle to write text at
+    """
+    width, height = image.size
+    max_dim = max(width, height)
+    mask_size = (max_dim * 2, max_dim * 2)
+    mask = Image.new('L', mask_size, 0)
+    draw = ImageDraw.Draw(mask)
+    draw.text((max_dim, max_dim), text, 255, font=font)
+    bigger_mask = mask.resize((max_dim*8, max_dim*8),
+                              resample=Image.BICUBIC)
+    rotated_mask = bigger_mask.rotate(angle).resize(
+        mask_size, resample=Image.LANCZOS)
+    mask_xy = (max_dim - xy[0], max_dim - xy[1])
+    b_box = mask_xy + (mask_xy[0] + width, mask_xy[1] + height)
+    mask = rotated_mask.crop(b_box)
+    color_image = Image.new('RGBA', image.size, (255,255,255))
+    image.paste(color_image, mask)
+
+def draw_middle_line(image, angle, xy):
+    width, height = image.size
+    max_dim = max(width, height)
+    mask_size = (max_dim * 2, max_dim * 2)
+    mask = Image.new('L', mask_size, 0)
+    draw = ImageDraw.Draw(mask)
+    draw.line([(0,0), (width, height)], fill=255, width=7)
+    bigger_mask = mask.resize((max_dim*8, max_dim*8),
+                              resample=Image.BICUBIC)
+    rotated_mask = bigger_mask.rotate(angle).resize(
+        mask_size, resample=Image.LANCZOS)
+    mask_xy = (max_dim - xy[0], max_dim - xy[1])
+    b_box = mask_xy + (mask_xy[0] + width, mask_xy[1] + height)
+    mask = rotated_mask.crop(b_box)
+    color_image = Image.new('RGBA', image.size, (255,255,255))
+    image.paste(color_image, mask)
+
+img = np.zeros((256, 256, 3), dtype=np.uint8)
+img = Image.fromarray(img)
+font = ImageFont.truetype("./arial.ttf", 32)
+w,h = img.size
+
+draw_middle_line(img, -45, (100,100))
+draw_rotated_text(img, -45, (150, 25), 'shape', font=font)
+draw_rotated_text(img, -45, (30, 100), 'appearance', font=font)
+
+img.show()
+
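+# How the two helpers above work: the text (or line) is first drawn into an
+# oversized grayscale mask, the mask is upsampled 8x, rotated, and downsampled
+# again (which antialiases the rotation), then cropped so that xy lands at the
+# requested position, and finally used as the alpha mask when pasting a solid
+# white layer onto the target image. The script presumably renders the
+# diagonal shape/appearance legend used to label transfer grids.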
+
diff --git a/demo/timer.py b/demo/timer.py
new file mode 100644
index 00000000..538e1b37
--- /dev/null
+++ b/demo/timer.py
@@ -0,0 +1,17 @@
+import time
+
+class Timer(object):
+    def __init__(self):
+        self.tick()
+
+
+    def tick(self):
+        self.start_time = time.time()
+
+
+    def tock(self):
+        self.end_time = time.time()
+        time_since_tick = self.end_time - self.start_time
+        self.tick()
+        return time_since_tick
+
diff --git a/demo/utils.py b/demo/utils.py
new file mode 100644
index 00000000..e75a4abe
--- /dev/null
+++ b/demo/utils.py
@@ -0,0 +1,54 @@
+import os
+import datetime
+import shutil
+import glob
+import logging
+import math
+import PIL
+
+from tqdm import trange
+from batches_pg2 import postprocess
+
+
+def init_logging(out_base_dir):
+    # get unique output directory based on current time
+    os.makedirs(out_base_dir, exist_ok = True)
+    now = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S")
+    out_dir = os.path.join(out_base_dir, now)
+    os.makedirs(out_dir, exist_ok = False)
+    # copy source code to logging dir to have an idea what the run was about
+    this_file = os.path.realpath(__file__)
+    assert(this_file.endswith(".py"))
+    shutil.copy(this_file, out_dir)
+    # copy all py files to logging dir
+    src_dir = os.path.dirname(this_file)
+    py_files = glob.glob(os.path.join(src_dir, "*.py"))
+    for py_file in py_files:
+        shutil.copy(py_file, out_dir)
+    # init logging
+    logging.basicConfig(filename = os.path.join(out_dir, 'log.txt'))
+    logger = logging.getLogger(__name__)
+    logger.setLevel(logging.DEBUG)
+    return out_dir, logger
+
+
+def process_batches(model, index_dir, batches, batch_size, index):
+    for _ in trange(math.ceil(batches.n / batch_size)):
+        X_batch, C_batch, I_batch = next(batches)
+        # reconstructions
+        R_batch = model.reconstruct(X_batch, C_batch)
+        R_batch = postprocess(R_batch)  # to uint8 for saving
+        # samples from pose
+        S_batch = model.test(C_batch)["test_sample"]
+        S_batch = postprocess(S_batch)  # to uint8 for saving
+        for batch_i, img_idx in enumerate(I_batch):
+            original_fname = index["imgs"][img_idx]
+            reconstr_fname = original_fname.rsplit(".", 1)[0] + "_reconstruction.png"
+            reconstr_path = os.path.join(index_dir, reconstr_fname)
+            sample_fname = original_fname.rsplit(".", 1)[0] + "_sample.png"
+            sample_path = os.path.join(index_dir, sample_fname)
+            index["reconstruction"][img_idx] = reconstr_path
+            index["sample"][img_idx] = sample_path
+            PIL.Image.fromarray(R_batch[batch_i,...]).save(reconstr_path)
+            PIL.Image.fromarray(S_batch[batch_i,...]).save(sample_path)
+
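+# Usage sketch (hypothetical glue code; `model`, `batches`, and `index` are
+# built elsewhere in the repo, e.g. in main.py, which is not part of this
+# excerpt):
+#
+#   out_dir, logger = init_logging("./log")
+#   # model must expose reconstruct(X, C) and test(C)["test_sample"];
+#   # batches must yield (image, condition, index) triples and expose .n
+#   process_batches(model, out_dir, batches, batch_size = 8, index = index)
+#   # afterwards index["reconstruction"][i] and index["sample"][i] point to
+#   # the png files written for image i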