From 3071f8f9de37b973c4eba3194a7279deb5d8a9e0 Mon Sep 17 00:00:00 2001
From: CatherineSue
Date: Wed, 30 May 2018 09:54:45 -0700
Subject: [PATCH] Merge tensorboard summary and tensorboard output

Merge tensorboard_summary.Summary into tensorboard_output.TensorBoardOutput,
which now supports graph, scalar, tensor, and histogram logging to
TensorBoard. Private functions in TensorBoardOutput are prefixed with '_';
all other functions are public.

What this commit does:
1. Optimize imports
2. Merge the two classes
3. Fix duplicated fields in the custom scalars layout
   (https://github.com/ryanjulian/rllab/pull/88#issuecomment-392245839)
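
Example usage of the merged logger API (a sketch only: the log directory,
loop bound, and logged values are illustrative, and dump_tensorboard() is
normally also invoked for you via logger.dump_tabular()):

    import numpy as np

    from rllab.misc import logger

    logger.set_tensorboard_dir('data/local/experiment/tb')
    for itr in range(100):
        # Stand-ins for real training statistics.
        returns = np.random.rand(50)
        logger.record_tensor('policy/first_returns', returns[:3])
        logger.record_histogram('rollout/returns', returns)
        logger.record_histogram_by_type(
            'normal', key='exploration_noise', mean=0.0, stddev=0.1)
        logger.dump_tensorboard()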
---
 rllab/misc/logger.py              |  30 ++----
 rllab/misc/tensorboard_output.py  | 168 +++++++++++++++++++++++++++++-
 rllab/misc/tensorboard_summary.py | 143 -------------------------
 3 files changed, 174 insertions(+), 167 deletions(-)
 delete mode 100644 rllab/misc/tensorboard_summary.py

diff --git a/rllab/misc/logger.py b/rllab/misc/logger.py
index dd4890b8a..a86a7f830 100644
--- a/rllab/misc/logger.py
+++ b/rllab/misc/logger.py
@@ -1,26 +1,22 @@
-import os
-import os.path as osp
-import pickle
-import sys
+import base64
 import csv
 import datetime
-import dateutil.tz
-import joblib
 import json
+import os
+import os.path as osp
 import pickle
-import base64
-
+import sys
 from contextlib import contextmanager
 from enum import Enum
+
+import dateutil.tz
+import joblib
 import numpy as np
-import tensorflow as tf

-from rllab.misc.tabulate import tabulate
-from rllab.misc.console import mkdir_p
-from rllab.misc.console import colorize
 from rllab.misc.autoargs import get_all_parameters
+from rllab.misc.console import mkdir_p, colorize
+from rllab.misc.tabulate import tabulate
 from rllab.misc.tensorboard_output import TensorBoardOutput
-from rllab.misc.tensorboard_summary import Summary

 _prefixes = []
 _prefix_str = ''
@@ -47,7 +43,6 @@
 _tensorboard_step_key = None

 _tensorboard = TensorBoardOutput()
-_tensorboard_summary = Summary()


 def _add_output(file_name, arr, fds, mode='a'):
@@ -90,7 +85,6 @@ def remove_tabular_output(file_name):

 def set_tensorboard_dir(dir_name):
     _tensorboard.set_dir(dir_name)
-    _tensorboard_summary.set_dir(dir_name)


 def set_snapshot_dir(dir_name):
@@ -164,12 +158,11 @@ def record_tensor(key, val):


 def record_histogram(key, val):
-    _tensorboard_summary.record_histogram(str(key), val)
+    _tensorboard.record_histogram(str(key), val)


 def record_histogram_by_type(histogram_type, key=None, shape=[1000], **kwargs):
-    _tensorboard_summary.record_histogram_by_type(histogram_type, key, shape,
-                                                  **kwargs)
+    _tensorboard.record_histogram_by_type(histogram_type, key, shape, **kwargs)


 def push_tabular_prefix(key):
@@ -232,7 +225,6 @@ def dump_tensorboard(*args, **kwargs):
     if _tensorboard_step_key and _tensorboard_step_key in tabular_dict:
         step = tabular_dict[_tensorboard_step_key]
     _tensorboard.dump_tensorboard(step)
-    _tensorboard_summary.dump_tensorboard(step)


 def dump_tabular(*args, **kwargs):
diff --git a/rllab/misc/tensorboard_output.py b/rllab/misc/tensorboard_output.py
index 04babe417..90d12a688 100644
--- a/rllab/misc/tensorboard_output.py
+++ b/rllab/misc/tensorboard_output.py
@@ -1,9 +1,14 @@
-import os
+import shutil
+from os.path import dirname, abspath

+import google.protobuf.json_format as json_format
 import numpy as np
 import tensorflow as tf
+from jsonmerge import merge
 from tensorboard import summary as summary_lib
+from tensorboard.backend.event_processing import plugin_event_multiplexer as event_multiplexer
 from tensorboard.plugins.custom_scalar import layout_pb2
+from tensorboard.plugins.custom_scalar import metadata

 import rllab.misc.logger
 from rllab.misc.console import mkdir_p
@@ -13,9 +18,22 @@ class TensorBoardOutput:
     def __init__(self):
         self._scalars = tf.Summary()
         self._scope_tensor = {}
+        self._has_recorded_tensor = False
+        self._has_dumped_graph = False
+
+        self._histogram_ds = {}
+        self._histogram_summary_op = []
+        self._session = tf.Session()
+        self._histogram_distribute_list = [
+            'normal', 'gamma', 'poisson', 'uniform'
+        ]
+        self._feed = {}

         self._default_step = 0
         self._writer = None
+        self._writer_dir = None
+        self._layout_writer = None
+        self._layout_writer_dir = None

     def set_dir(self, dir_name):
         if not dir_name:
@@ -23,8 +41,14 @@ def set_dir(self, dir_name):
                 self._writer.close()
                 self._writer = None
         else:
-            mkdir_p(os.path.dirname(dir_name))
+            mkdir_p(dirname(dir_name))
+            self._writer_dir = dir_name
             self._writer = tf.summary.FileWriter(dir_name)
+
+            self._layout_writer_dir = dirname(dirname(
+                abspath(dir_name))) + '/custom_scalar_config'
+            mkdir_p(self._layout_writer_dir)
+
             self._default_step = 0
         assert self._writer is not None
         rllab.misc.logger.log("tensorboard data will be logged into:" +
@@ -41,12 +65,55 @@ def dump_tensorboard(self, step=None):
         self._dump_graph()
         self._dump_scalars(run_step)
+        self._dump_histogram(run_step)
         self._dump_tensors()

+    def record_histogram(self, key, val):
+        if str(key) not in self._histogram_ds:
+            self._histogram_ds[str(key)] = tf.Variable(val)
+            self._histogram_summary_op.append(
+                tf.summary.histogram(str(key), self._histogram_ds[str(key)]))
+            self._histogram_summary_op_merge = tf.summary.merge(
+                self._histogram_summary_op)
+
+        self._feed[self._histogram_ds[str(key)]] = val
+
+    def record_histogram_by_type(self,
+                                 histogram_type,
+                                 key=None,
+                                 shape=[1000],
+                                 **kwargs):
+        '''
+        distribution type and args:
+            normal: mean, stddev
+            gamma: alpha
+            poisson: lam
+            uniform: maxval
+        '''
+        if histogram_type not in self._histogram_distribute_list:
+            raise Exception('histogram type error %s' % histogram_type,
+                            'builtin type', self._histogram_distribute_list)
+
+        if str(key) not in self._histogram_ds:
+            self._histogram_ds[str(key)] = self._get_histogram_var_by_type(
+                histogram_type, shape, **kwargs)
+            self._histogram_summary_op.append(
+                tf.summary.histogram(
+                    str(key), self._histogram_ds[str(key)][0]))
+            self._histogram_summary_op_merge = tf.summary.merge(
+                self._histogram_summary_op)
+
+        key_list = self._histogram_ds[str(key)][1]
+        val_list = self._get_histogram_val_by_type(histogram_type, **kwargs)
+
+        for key, val in zip(key_list, val_list):
+            self._feed[key] = val
+
     def record_scalar(self, key, val):
         self._scalars.value.add(tag=str(key), simple_value=float(val))

     def record_tensor(self, key, val):
+        self._has_recorded_tensor = True
         scope = str(key).split('/', 1)[0]
         if scope not in self._scope_tensor:
             self._scope_tensor[scope] = [key]
@@ -58,7 +125,51 @@ def record_tensor(self, key, val):
             self._scalars.value.add(
                 tag=key + '/' + str(idx).strip('()'), simple_value=float(v))

+    def _get_histogram_var_by_type(self, histogram_type, shape, **kwargs):
+        if histogram_type == "normal":
+            # Make a normal distribution, with a shifting mean
+            mean = tf.Variable(kwargs['mean'])
+            stddev = tf.Variable(kwargs['stddev'])
+            return tf.random_normal(
+                shape=shape, mean=mean, stddev=stddev), [mean, stddev]
+        elif histogram_type == "gamma":
+            # Add a gamma distribution
+            alpha = tf.Variable(kwargs['alpha'])
+            return tf.random_gamma(shape=shape, alpha=alpha), [alpha]
+        elif histogram_type == "poisson":
+            lam = tf.Variable(kwargs['lam'])
+            return tf.random_poisson(shape=shape, lam=lam), [lam]
+        elif histogram_type == "uniform":
+            # Add a uniform distribution
+            maxval = tf.Variable(kwargs['maxval'])
+            return tf.random_uniform(shape=shape, maxval=maxval), [maxval]
+
+        raise Exception('histogram type error %s' % histogram_type,
+                        'builtin type', self._histogram_distribute_list)
+
+    def _get_histogram_val_by_type(self, histogram_type, **kwargs):
+        if histogram_type == "normal":
+            # Make a normal distribution, with a shifting mean
+            return [kwargs['mean'], kwargs['stddev']]
+        elif histogram_type == "gamma":
+            # Add a gamma distribution
+            self.alpha_v = kwargs['alpha']
+            return [kwargs['alpha']]
+        elif histogram_type == "poisson":
+            return [kwargs['lam']]
+        elif histogram_type == "uniform":
+            # Add a uniform distribution
+            return [kwargs['maxval']]
+
+        raise Exception('histogram type error %s' % histogram_type,
+                        'builtin type', self._histogram_distribute_list)
+
     def _dump_graph(self):
+        # We only need to write the graph event once (instead of per step).
+        if self._has_dumped_graph:
+            return
+
+        self._has_dumped_graph = True
         self._writer.add_graph(tf.get_default_graph())
         self._writer.flush()
@@ -67,7 +178,17 @@ def _dump_scalars(self, step):
         self._writer.flush()
         del self._scalars.value[:]

+    def _dump_histogram(self, step):
+        if len(self._histogram_summary_op):
+            summary_str = self._session.run(
+                self._histogram_summary_op_merge, feed_dict=self._feed)
+            self._writer.add_summary(summary_str, global_step=step)
+            self._writer.flush()
+
     def _dump_tensors(self):
+        if not self._has_recorded_tensor:
+            return
+
         layout_categories = []

         for scope in self._scope_tensor:
@@ -83,6 +204,43 @@ def _dump_tensors(self):
             layout_categories.append(category)

         if layout_categories:
-            layout_summary = summary_lib.custom_scalar_pb(
-                layout_pb2.Layout(category=layout_categories))
-            self._writer.add_summary(layout_summary)
+            layout_proto_to_write = layout_pb2.Layout(
+                category=layout_categories)
+
+            try:
+                # Load the former layout proto from self._layout_writer_dir.
+                multiplexer = event_multiplexer.EventMultiplexer()
+                multiplexer.AddRunsFromDirectory(self._layout_writer_dir)
+                multiplexer.Reload()
+                tensor_events = multiplexer.Tensors(
+                    '.', metadata.CONFIG_SUMMARY_TAG)
+                shutil.rmtree(self._layout_writer_dir)
+
+                # Parse the layout proto from disk.
+                string_array = tf.make_ndarray(tensor_events[0].tensor_proto)
+                content = np.asscalar(string_array)
+                layout_proto_from_disk = layout_pb2.Layout()
+                layout_proto_from_disk.ParseFromString(
+                    tf.compat.as_bytes(content))
+
+                # Merge the two layout protos.
+                merged_layout_json = merge(
+                    json_format.MessageToJson(layout_proto_from_disk),
+                    json_format.MessageToJson(layout_proto_to_write))
+                merged_layout_proto = layout_pb2.Layout()
+                json_format.Parse(str(merged_layout_json), merged_layout_proto)
+
+                self._layout_writer = tf.summary.FileWriter(
+                    self._layout_writer_dir)
+                layout_summary = summary_lib.custom_scalar_pb(
+                    merged_layout_proto)
+                self._layout_writer.add_summary(layout_summary)
+                self._layout_writer.close()
+            except KeyError:
+                # Write the current layout proto when there is no layout on disk.
+                self._layout_writer = tf.summary.FileWriter(
+                    self._layout_writer_dir)
+                layout_summary = summary_lib.custom_scalar_pb(
+                    layout_proto_to_write)
+                self._layout_writer.add_summary(layout_summary)
+                self._layout_writer.close()
diff --git a/rllab/misc/tensorboard_summary.py b/rllab/misc/tensorboard_summary.py
deleted file mode 100644
index 0e43f2589..000000000
--- a/rllab/misc/tensorboard_summary.py
+++ /dev/null
@@ -1,143 +0,0 @@
-import os
-
-import tensorflow as tf
-
-from rllab.misc.console import mkdir_p
-
-
-class Summary:
-    def __init__(self):
-        self._summary_scale = tf.Summary()
-        self._histogram_ds = {}
-        self._histogram_summary_op = []
-
-        self._session = tf.Session()
-
-        self._default_step = 0
-        self._step_key = None
-        self._writer = None
-        self._histogram_distribute_list = [
-            'normal', 'gamma', 'poisson', 'uniform'
-        ]
-        self._feed = {}
-
-    def record_histogram(self, key, val):
-        if str(key) not in self._histogram_ds:
-            self._histogram_ds[str(key)] = tf.Variable(val)
-            self._histogram_summary_op.append(
-                tf.summary.histogram(str(key), self._histogram_ds[str(key)]))
-            self._histogram_summary_op_merge = tf.summary.merge(
-                self._histogram_summary_op)
-
-        self._feed[self._histogram_ds[str(key)]] = val
-
-    def record_histogram_by_type(self,
-                                 histogram_type,
-                                 key=None,
-                                 shape=[1000],
-                                 **kwargs):
-        '''
-        distribution type and args:
-            normal: mean, stddev
-            gamma: alpha
-            poisson: lam
-            uniform: maxval
-        '''
-        if histogram_type not in self._histogram_distribute_list:
-            raise Exception('histogram type error %s' % histogram_type,
-                            'builtin type', self._histogram_distribute_list)
-
-        if str(key) not in self._histogram_ds:
-            self._histogram_ds[str(key)] = self._get_histogram_var_by_type(
-                histogram_type, shape, **kwargs)
-            self._histogram_summary_op.append(
-                tf.summary.histogram(
-                    str(key), self._histogram_ds[str(key)][0]))
-            self._histogram_summary_op_merge = tf.summary.merge(
-                self._histogram_summary_op)
-
-        key_list = self._histogram_ds[str(key)][1]
-        val_list = self._get_histogram_val_by_type(histogram_type, **kwargs)
-
-        for key, val in zip(key_list, val_list):
-            self._feed[key] = val
-
-    def record_scale(self, key, val):
-        self._summary_scale.value.add(tag=str(key), simple_value=float(val))
-
-    def dump_tensorboard(self, step=None):
-        if not self._writer:
-            return
-        run_step = self._default_step
-        if step:
-            run_step = step
-        else:
-            self._default_step += 1
-
-        self._dump_histogram(run_step)
-        self._dump_scale(run_step)
-
-    def set_dir(self, dir_name):
-        if not dir_name:
-            if self._writer:
-                self._writer.close()
-                self._writer = None
-        else:
-            if dir_name[-1] != '/':
-                dir_name += '/'
-            mkdir_p(os.path.dirname(dir_name))
-            self._writer = tf.summary.FileWriter(dir_name)
-            self._default_step = 0
-            assert self._writer is not None
-            print("tensorboard data will be logged into:", dir_name)
-
-    def _dump_histogram(self, step):
-        if len(self._histogram_summary_op):
-            summary_str = self._session.run(
-                self._histogram_summary_op_merge, feed_dict=self._feed)
-            self._writer.add_summary(summary_str, global_step=step)
-            self._writer.flush()
-
-    def _dump_scale(self, step):
-        self._writer.add_summary(self._summary_scale, step)
-        self._writer.flush()
-        del self._summary_scale.value[:]
-
-    def _get_histogram_var_by_type(self, histogram_type, shape, **kwargs):
-        if histogram_type == "normal":
-            # Make a normal distribution, with a shifting mean
-            mean = tf.Variable(kwargs['mean'])
-            stddev = tf.Variable(kwargs['stddev'])
-            return tf.random_normal(
-                shape=shape, mean=mean, stddev=stddev), [mean, stddev]
-        elif histogram_type == "gamma":
-            # Add a gamma distribution
-            alpha = tf.Variable(kwargs['alpha'])
-            return tf.random_gamma(shape=shape, alpha=alpha), [alpha]
-        elif histogram_type == "poisson":
-            lam = tf.Variable(kwargs['lam'])
-            return tf.random_poisson(shape=shape, lam=lam), [lam]
-        elif histogram_type == "uniform":
-            # And a uniform distribution
-            maxval = tf.Variable(kwargs['maxval'])
-            return tf.random_uniform(shape=shape, maxval=maxval), [maxval]
-
-        raise Exception('histogram type error %s' % histogram_type,
-                        'builtin type', self._histogram_distribute_list)
-
-    def _get_histogram_val_by_type(self, histogram_type, **kwargs):
-        if histogram_type == "normal":
-            # Make a normal distribution, with a shifting mean
-            return [kwargs['mean'], kwargs['stddev']]
-        elif histogram_type == "gamma":
-            # Add a gamma distribution
-            self.alpha_v = kwargs['alpha']
-            return [kwargs['alpha']]
-        elif histogram_type == "poisson":
-            return [kwargs['lam']]
-        elif histogram_type == "uniform":
-            # And a uniform distribution
-            return [kwargs['maxval']]
-
-        raise Exception('histogram type error %s' % histogram_type,
-                        'builtin type', self._histogram_distribute_list)
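
Note: TensorBoardOutput never runs a variable initializer; record_histogram()
and record_histogram_by_type() stash values in self._feed, and
_dump_histogram() pushes them in through feed_dict when the merged summary op
runs. A minimal standalone sketch of that pattern (TensorFlow 1.x; the tag,
directory, and data are invented for illustration):

    import numpy as np
    import tensorflow as tf

    val = np.random.normal(size=1000)
    var = tf.Variable(val)  # never initialized
    summary_op = tf.summary.merge([tf.summary.histogram('returns', var)])

    sess = tf.Session()
    writer = tf.summary.FileWriter('/tmp/tb_sketch')
    # Feeding the variable sidesteps initialization, mirroring how
    # _dump_histogram() runs the merged op with self._feed.
    summary_str = sess.run(summary_op, feed_dict={var: val})
    writer.add_summary(summary_str, global_step=0)
    writer.flush()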