You can do this by adding your variable as a custom metric.
--
You received this message because you are subscribed to the Google Groups "Keras-users" group.
To unsubscribe from this group and stop receiving emails from it, send an email to keras-users+unsubscribe@googlegroups.com.
To view this discussion on the web, visit https://groups.google.com/d/msgid/keras-users/87f080e3-b59f-4391-8c55-1b5d79986f70%40googlegroups.com.
For more options, visit https://groups.google.com/d/optout.
# Entropy-regularized policy loss: the entropy bonus (scaled by
# X_ENTROPY_BETA) encourages exploration.
# NOTE(review): p_t, res_t, X_ENTROPY_BETA, loss_dict, args, x_batch,
# y_batch and iter_idx are defined elsewhere in the surrounding script.
x_entropy_t = K.sum(p_t * K.log(K.epsilon() + p_t), axis=-1, keepdims=True)
full_policy_loss_t = -res_t + X_ENTROPY_BETA * x_entropy_t
tf.summary.scalar("loss_entropy", K.sum(x_entropy_t))
tf.summary.scalar("loss_policy", K.sum(-res_t))
tf.summary.scalar("loss_full", K.sum(full_policy_loss_t))

summary_writer = tf.summary.FileWriter("logs/" + args.name)

# BUG FIX: tf.summary.merge_all() returns a *string* tensor (a serialized
# Summary protobuf).  Registering it as a Keras metric makes Keras call
# K.mean() on it, which fails with:
#   TypeError: Value passed to parameter 'input' has DataType string ...
# Metrics must return numeric tensors, so the merged summary op is kept
# aside and evaluated explicitly after each training step instead.
merged_summary_t = tf.summary.merge_all()

value_policy_model.compile(optimizer=Adagrad(), loss=loss_dict)
l = value_policy_model.train_on_batch(x_batch, y_batch)
l_dict = dict(zip(value_policy_model.metrics_names, l))

# Evaluate the summaries with the same inputs used for the training step
# and write the serialized result to the event file.
# NOTE(review): adjust the feed_dict if the model takes multiple inputs.
summary_str = K.get_session().run(
    merged_summary_t,
    feed_dict={value_policy_model.input: x_batch})
summary_writer.add_summary(summary_str, global_step=iter_idx)
summary_writer.flush()
--------------------------------------------------------------------------- TypeError Traceback (most recent call last) <ipython-input-14-fac1a5253ff5> in <module>() 1 vae = Model(x, x_decoded_mean) ----> 2 vae.compile(optimizer='rmsprop', loss=vae_loss, metrics=[summary]) /home/david/machineLearning/DeepLearningProteins/venv/lib/python2.7/site-packages/keras/engine/training.pyc in compile(self, optimizer, loss, metrics, loss_weights, sample_weight_mode, **kwargs) 954 metric_fn = metrics_module.get(metric) 955 masked_metric_fn = _masked_objective(metric_fn) --> 956 metric_result = masked_metric_fn(y_true, y_pred, mask=masks[i]) 957 metric_result = { 958 metric_fn.__name__: metric_result /home/david/machineLearning/DeepLearningProteins/venv/lib/python2.7/site-packages/keras/engine/training.pyc in masked(y_true, y_pred, mask) 487 score_array /= K.mean(mask) 488 --> 489 return K.mean(score_array) 490 return masked 491 /home/david/machineLearning/DeepLearningProteins/venv/lib/python2.7/site-packages/keras/backend/tensorflow_backend.pyc in mean(x, axis, keepdims) 1121 if x.dtype.base_dtype == tf.bool: 1122 x = tf.cast(x, floatx()) -> 1123 return tf.reduce_mean(x, reduction_indices=axis, keep_dims=keepdims) 1124 1125 /home/david/machineLearning/DeepLearningProteins/venv/lib/python2.7/site-packages/tensorflow/python/ops/math_ops.pyc in reduce_mean(input_tensor, axis, keep_dims, name, reduction_indices) 1308 _ReductionDims(input_tensor, axis, reduction_indices), 1309 keep_dims, -> 1310 name=name) 1311 1312 /home/david/machineLearning/DeepLearningProteins/venv/lib/python2.7/site-packages/tensorflow/python/ops/gen_math_ops.pyc in _mean(input, reduction_indices, keep_dims, name) 1531 result = _op_def_lib.apply_op("Mean", input=input, 1532 reduction_indices=reduction_indices, -> 1533 keep_dims=keep_dims, name=name) 1534 return result 1535 /home/david/machineLearning/DeepLearningProteins/venv/lib/python2.7/site-packages/tensorflow/python/framework/op_def_library.pyc in 
apply_op(self, op_type_name, name, **keywords) 583 _SatisfiesTypeConstraint(base_type, 584 _Attr(op_def, input_arg.type_attr), --> 585 param_name=input_name) 586 attrs[input_arg.type_attr] = attr_value 587 inferred_from[input_arg.type_attr] = input_name /home/david/machineLearning/DeepLearningProteins/venv/lib/python2.7/site-packages/tensorflow/python/framework/op_def_library.pyc in _SatisfiesTypeConstraint(dtype, attr_def, param_name) 59 "allowed values: %s" % 60 (param_name, dtypes.as_dtype(dtype).name, ---> 61 ", ".join(dtypes.as_dtype(x).name for x in allowed_list))) 62 63 TypeError: Value passed to parameter 'input' has DataType string not in list of allowed values: float32, float64, int64, int32, uint8, uint16, int16, int8, complex64, complex128, qint8, quint8, qint32, float16
class CustomTensorBoard(tf.keras.callbacks.TensorBoard):
    """TensorBoard callback that can also run user-defined summaries.

    Arguments:
      user_defined_freq: how often, in epochs, to evaluate summaries that
        the user created by calling tf.summary in the model code.  A value
        of 0 disables user-defined summaries entirely.  Validation data
        must be supplied for the summaries to be visualized.
      kwargs: forwarded unchanged to tf.keras.callbacks.TensorBoard.
    """

    def __init__(self, user_defined_freq=0, **kwargs):
        self.user_defined_freq = user_defined_freq
        super(CustomTensorBoard, self).__init__(**kwargs)

    def on_epoch_begin(self, epoch, logs=None):
        """Attach the merged user summary op to the model's eval function."""
        # Only hook in the summary op on epochs where it is due to run.
        due_this_epoch = (
            self.user_defined_freq and epoch % self.user_defined_freq == 0)
        if due_this_epoch:
            self._epoch = epoch
            # pylint: disable=protected-access
            self.model._make_eval_function()
            eval_fn = self.model._eval_function
            if self.merged not in eval_fn.fetches:
                eval_fn.fetches.append(self.merged)
                eval_fn.fetch_callbacks[self.merged] = self._fetch_callback
            # pylint: enable=protected-access
        super(CustomTensorBoard, self).on_epoch_begin(epoch, logs=None)

    def on_epoch_end(self, epoch, logs=None):
        """Detach the user summary op and log the scalar summaries."""
        # Pop the user-defined summary op after each epoch so it does not
        # run on epochs where it is not scheduled.
        if self.user_defined_freq:
            # pylint: disable=protected-access
            eval_fn = self.model._eval_function
            if self.merged in eval_fn.fetches:
                eval_fn.fetches.remove(self.merged)
            if self.merged in eval_fn.fetch_callbacks:
                eval_fn.fetch_callbacks.pop(self.merged)
            # pylint: enable=protected-access
        super(CustomTensorBoard, self).on_epoch_end(epoch, logs=logs)
TypeError: Value passed to parameter 'input' has DataType string not in list of allowed values: float32, float64, int32, uint8, int16, int8, complex64, int64, qint8, quint8, qint32, bfloat16, uint16, complex128, float16, uint32, uint64
import tensorflow as tf  # needed for tf.summary / tf.reduce_mean below

from keras.layers import Input, Dense
from keras.models import Model

# This returns a tensor
inputs = Input(shape=(784,))

# a layer instance is callable on a tensor, and returns a tensor
x = Dense(64, activation='relu')(inputs)
x = Dense(64, activation='relu')(x)

# BUG FIX: tf.summary.scalar requires a name as its first argument.  The
# original call, tf.summary.scalar(tf.reduce_mean(x)), passed the tensor
# where the name string is expected and raised a TypeError.
tf.summary.scalar("dense_activation_mean", tf.reduce_mean(x))

predictions = Dense(10, activation='softmax')(x)

# This creates a model that includes
# the Input layer and three Dense layers
model = Model(inputs=inputs, outputs=predictions)

# BUG FIX: the model must be compiled before fit(), and the callback class
# is spelled CustomTensorBoard (capital B), not CustomTensorboard.
# NOTE(review): pick the loss/optimizer appropriate for your task, and set
# user_defined_freq > 0 so the tf.summary ops above are actually evaluated.
model.compile(optimizer='rmsprop', loss='categorical_crossentropy')
model.fit(x=x_data, y=y_data,
          callbacks=[CustomTensorBoard(user_defined_freq=1, log_dir='./logs')])