How to add custom summaries to TensorBoard when training with tf.keras.Model.fit
I'm training a model as:
with tf.Graph().as_default():
    with tf.Session(config=tf.ConfigProto(allow_soft_placement=True)) as sess:
        K.set_session(sess)
        tf.train.create_global_step()
        # with tf.device('/gpu:0'):
        m = GAReader.Model(nlayers, data.vocab_size, data.num_chars, W_init,
                           nhidden, embed_dim, dropout, train_emb,
                           char_dim, use_feat, gating_fn, words).build_network()
        m.compile(optimizer=tf.train.AdamOptimizer(0.01),
                  loss=tf.keras.losses.categorical_crossentropy,
                  metrics=[tf.keras.metrics.categorical_accuracy])
        tensorboard = TensorBoardCustom(log_dir="logs", sess=sess)
        m.fit_generator(generator=batch_loader_train,
                        steps_per_epoch=len(batch_loader_train.batch_pool),
                        epochs=100, callbacks=[tensorboard])
and I defined a custom callback extending keras.callbacks.TensorBoard as:
class TensorBoardCustom(TensorBoard):
    def __init__(self, log_dir, sess, **kwargs):
        super(TensorBoardCustom, self).__init__(log_dir, **kwargs)
        self.sess = sess

    def on_batch_end(self, batch, logs={}):
        summary = tf.summary.merge_all()
        writer = tf.summary.FileWriter(self.log_dir)
        s = self.sess.run(summary)
        writer.add_summary(s, batch)
        writer.close()
        super(TensorBoardCustom, self).on_batch_end(batch, logs)
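(A side note on the callback itself: as far as I can tell, the stock TensorBoard callback already opens a writer in set_model and exposes it as self.writer, so opening and closing a new FileWriter on every batch is probably redundant. Assuming that attribute exists, the write could simply be:

    self.writer.add_summary(s, batch)  # reuse the writer created by the parent class
    self.writer.flush()

but the error below happens either way.)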
and I'm adding a new summary as:
l_docin = tf.keras.layers.Input(shape=(None,))
with tf.name_scope('summaries'):
    table = tf.contrib.lookup.index_to_string_table_from_tensor(
        self.mapping_string, default_value="UNKNOWN")
    words = table.lookup(tf.cast(l_qin, tf.int64))
    text = tf.reduce_join(words, 1, separator=' ')
    tf.summary.text('text', text)
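(One assumption worth stating explicitly: a tf.contrib.lookup table has to be initialized before any op that uses it can run, typically by evaluating tf.tables_initializer() once after the graph is built:

    sess.run(tf.tables_initializer())  # initialize the index-to-string lookup table

I mention it only in case it is relevant to what follows.)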
However, this is not working and I'm getting the following error:
InvalidArgumentError (see above for traceback): You must feed a value for placeholder tensor 'input_2' with dtype float and shape [?,?]
[[node input_2 = Placeholder[dtype=DT_FLOAT, shape=[?,?], _device="/job:localhost/replica:0/task:0/device:CPU:0"]()]]
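I suspect tf.summary.merge_all() is also collecting the text summary above, whose lookup chain starts at a Keras Input (i.e. a placeholder), so sess.run(summary) ends up evaluating a subgraph with nothing fed into input_2. A minimal sketch of the kind of feed I imagine would be needed (self.current_batch is hypothetical here, since on_batch_end only receives loss/metric logs and not the batch data):

    def on_batch_end(self, batch, logs={}):
        summary = tf.summary.merge_all()
        # Hypothetical: the callback would have to keep a reference to the
        # current batch itself, e.g. self.current_batch filled by the generator.
        feed = dict(zip(self.model.inputs, self.current_batch))
        s = self.sess.run(summary, feed_dict=feed)
        self.writer.add_summary(s, batch)
        super(TensorBoardCustom, self).on_batch_end(batch, logs)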
Can someone explain why this is happening and how I can correct it? Is there a simpler/better way of adding custom summaries?
python tensorflow machine-learning keras
asked Nov 10 at 6:04 by obh, edited Nov 11 at 5:23