Skip to content

Commit

Permalink
Merge tensorboard summary and tensorboard output
Browse files Browse the repository at this point in the history
Merge tensorboard_summary.Summary into
tensorboard_output.TensorBoardOutput, which supports
graph, scalar, tensor, and histogram logging to
tensorboard.

Private functions in TensorBoardOutput are prefixed with '_';
all other functions are public.

What this commit does:
1. Optimize imports
2. Merge class
3. Resolve the duplicate fields in custom scalars.
(#88 (comment))
  • Loading branch information
CatherineSue committed May 30, 2018
1 parent 4315906 commit 3071f8f
Show file tree
Hide file tree
Showing 3 changed files with 174 additions and 167 deletions.
30 changes: 11 additions & 19 deletions rllab/misc/logger.py
Original file line number Diff line number Diff line change
@@ -1,26 +1,22 @@
import os
import os.path as osp
import pickle
import sys
import base64
import csv
import datetime
import dateutil.tz
import joblib
import json
import os
import os.path as osp
import pickle
import base64

import sys
from contextlib import contextmanager
from enum import Enum

import dateutil.tz
import joblib
import numpy as np
import tensorflow as tf

from rllab.misc.tabulate import tabulate
from rllab.misc.console import mkdir_p
from rllab.misc.console import colorize
from rllab.misc.autoargs import get_all_parameters
from rllab.misc.console import mkdir_p, colorize
from rllab.misc.tabulate import tabulate
from rllab.misc.tensorboard_output import TensorBoardOutput
from rllab.misc.tensorboard_summary import Summary

_prefixes = []
_prefix_str = ''
Expand All @@ -47,7 +43,6 @@
_tensorboard_step_key = None

_tensorboard = TensorBoardOutput()
_tensorboard_summary = Summary()


def _add_output(file_name, arr, fds, mode='a'):
Expand Down Expand Up @@ -90,7 +85,6 @@ def remove_tabular_output(file_name):

def set_tensorboard_dir(dir_name):
_tensorboard.set_dir(dir_name)
_tensorboard_summary.set_dir(dir_name)


def set_snapshot_dir(dir_name):
Expand Down Expand Up @@ -164,12 +158,11 @@ def record_tensor(key, val):


def record_histogram(key, val):
_tensorboard_summary.record_histogram(str(key), val)
_tensorboard.record_histogram(str(key), val)


def record_histogram_by_type(histogram_type, key=None, shape=[1000], **kwargs):
_tensorboard_summary.record_histogram_by_type(histogram_type, key, shape,
**kwargs)
_tensorboard.record_histogram_by_type(histogram_type, key, shape, **kwargs)


def push_tabular_prefix(key):
Expand Down Expand Up @@ -232,7 +225,6 @@ def dump_tensorboard(*args, **kwargs):
if _tensorboard_step_key and _tensorboard_step_key in tabular_dict:
step = tabular_dict[_tensorboard_step_key]
_tensorboard.dump_tensorboard(step)
_tensorboard_summary.dump_tensorboard(step)


def dump_tabular(*args, **kwargs):
Expand Down
168 changes: 163 additions & 5 deletions rllab/misc/tensorboard_output.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,14 @@
import os
import shutil
from os.path import dirname, abspath

import google.protobuf.json_format as json_format
import numpy as np
import tensorflow as tf
from jsonmerge import merge
from tensorboard import summary as summary_lib
from tensorboard.backend.event_processing import plugin_event_multiplexer as event_multiplexer
from tensorboard.plugins.custom_scalar import layout_pb2
from tensorboard.plugins.custom_scalar import metadata

import rllab.misc.logger
from rllab.misc.console import mkdir_p
Expand All @@ -13,18 +18,37 @@ class TensorBoardOutput:
def __init__(self):
self._scalars = tf.Summary()
self._scope_tensor = {}
self._has_recorded_tensor = False
self._has_dumped_graph = False

self._histogram_ds = {}
self._histogram_summary_op = []
self._session = tf.Session()
self._histogram_distribute_list = [
'normal', 'gamma', 'poisson', 'uniform'
]
self._feed = {}

self._default_step = 0
self._writer = None
self._writer_dir = None
self._layout_writer = None
self._layout_writer_dir = None

def set_dir(self, dir_name):
if not dir_name:
if self._writer:
self._writer.close()
self._writer = None
else:
mkdir_p(os.path.dirname(dir_name))
mkdir_p(dirname(dir_name))
self._writer_dir = dir_name
self._writer = tf.summary.FileWriter(dir_name)

self._layout_writer_dir = dirname(dirname(
abspath(dir_name))) + '/custom_scalar_config'
mkdir_p(self._layout_writer_dir)

self._default_step = 0
assert self._writer is not None
rllab.misc.logger.log("tensorboard data will be logged into:" +
Expand All @@ -41,12 +65,55 @@ def dump_tensorboard(self, step=None):

self._dump_graph()
self._dump_scalars(run_step)
self._dump_histogram(run_step)
self._dump_tensors()

def record_histogram(self, key, val):
    """Record a value for a named histogram summary.

    On the first call for a given key, a tf.Variable holding ``val`` is
    created along with a histogram summary op for it, and the merged
    histogram op is rebuilt to include it.  Every call stages ``val`` in
    the feed dict used when histograms are dumped.
    """
    tag = str(key)
    if tag not in self._histogram_ds:
        var = tf.Variable(val)
        self._histogram_ds[tag] = var
        self._histogram_summary_op.append(tf.summary.histogram(tag, var))
        # Re-merge so the freshly added histogram op is part of the dump.
        self._histogram_summary_op_merge = tf.summary.merge(
            self._histogram_summary_op)
    self._feed[self._histogram_ds[tag]] = val

def record_histogram_by_type(self,
                             histogram_type,
                             key=None,
                             shape=[1000],
                             **kwargs):
    """Record a histogram sampled from a named random distribution.

    Supported distribution types and their keyword arguments:
        normal:  mean, stddev
        gamma:   alpha
        poisson: lam
        uniform: maxval

    Raises:
        Exception: if ``histogram_type`` is not a supported distribution.
    """
    if histogram_type not in self._histogram_distribute_list:
        raise Exception('histogram type error %s' % histogram_type,
                        'builtin type', self._histogram_distribute_list)

    tag = str(key)
    if tag not in self._histogram_ds:
        # First time this key is seen: build the sampling op together
        # with its parameter variables, then refresh the merged op.
        self._histogram_ds[tag] = self._get_histogram_var_by_type(
            histogram_type, shape, **kwargs)
        self._histogram_summary_op.append(
            tf.summary.histogram(tag, self._histogram_ds[tag][0]))
        self._histogram_summary_op_merge = tf.summary.merge(
            self._histogram_summary_op)

    # Stage the current distribution parameters for the next dump.
    param_vars = self._histogram_ds[tag][1]
    param_vals = self._get_histogram_val_by_type(histogram_type, **kwargs)
    for param_var, param_val in zip(param_vars, param_vals):
        self._feed[param_var] = param_val

def record_scalar(self, key, val):
    """Buffer a scalar summary value under ``key`` until the next dump."""
    tag = str(key)
    self._scalars.value.add(tag=tag, simple_value=float(val))

def record_tensor(self, key, val):
self._has_recorded_tensor = True
scope = str(key).split('/', 1)[0]
if scope not in self._scope_tensor:
self._scope_tensor[scope] = [key]
Expand All @@ -58,7 +125,51 @@ def record_tensor(self, key, val):
self._scalars.value.add(
tag=key + '/' + str(idx).strip('()'), simple_value=float(v))

def _get_histogram_var_by_type(self, histogram_type, shape, **kwargs):
    """Build the TF sampling op for ``histogram_type`` and its parameters.

    Returns a tuple ``(sample_op, [parameter_variables])`` so callers can
    feed new parameter values at dump time.

    Raises:
        Exception: if ``histogram_type`` is not a supported distribution.
    """
    if histogram_type == "normal":
        # Normal distribution with an adjustable mean and stddev.
        mean = tf.Variable(kwargs['mean'])
        stddev = tf.Variable(kwargs['stddev'])
        return tf.random_normal(
            shape=shape, mean=mean, stddev=stddev), [mean, stddev]

    if histogram_type == "gamma":
        # Gamma distribution parameterized by alpha.
        alpha = tf.Variable(kwargs['alpha'])
        return tf.random_gamma(shape=shape, alpha=alpha), [alpha]

    if histogram_type == "poisson":
        lam = tf.Variable(kwargs['lam'])
        return tf.random_poisson(shape=shape, lam=lam), [lam]

    if histogram_type == "uniform":
        # Uniform distribution over [0, maxval).
        maxval = tf.Variable(kwargs['maxval'])
        return tf.random_uniform(shape=shape, maxval=maxval), [maxval]

    raise Exception('histogram type error %s' % histogram_type,
                    'builtin type', self._histogram_distribute_list)

def _get_histogram_val_by_type(self, histogram_type, **kwargs):
if histogram_type == "normal":
# Make a normal distribution, with a shifting mean
return [kwargs['mean'], kwargs['stddev']]
elif histogram_type == "gamma":
# Add a gamma distribution
self.alpha_v = kwargs['alpha']
return [kwargs['alpha']]
elif histogram_type == "poisson":
return [kwargs['lam']]
elif histogram_type == "uniform":
# Add a uniform distribution
return [kwargs['maxval']]

raise Exception('histogram type error %s' % histogram_type,
'builtin type', self._histogram_distribute_list)

def _dump_graph(self):
    """Write the default TF graph to the event file, at most once per run."""
    if not self._has_dumped_graph:
        self._has_dumped_graph = True
        self._writer.add_graph(tf.get_default_graph())
        self._writer.flush()

Expand All @@ -67,7 +178,17 @@ def _dump_scalars(self, step):
self._writer.flush()
del self._scalars.value[:]

def _dump_histogram(self, step):
    """Evaluate all recorded histogram summaries and write them for ``step``."""
    if not self._histogram_summary_op:
        return
    summary_str = self._session.run(
        self._histogram_summary_op_merge, feed_dict=self._feed)
    self._writer.add_summary(summary_str, global_step=step)
    self._writer.flush()

def _dump_tensors(self):
if not self._has_recorded_tensor:
return

layout_categories = []

for scope in self._scope_tensor:
Expand All @@ -83,6 +204,43 @@ def _dump_tensors(self):
layout_categories.append(category)

if layout_categories:
layout_summary = summary_lib.custom_scalar_pb(
layout_pb2.Layout(category=layout_categories))
self._writer.add_summary(layout_summary)
layout_proto_to_write = layout_pb2.Layout(
category=layout_categories)

try:
# Load former layout_proto from self._layout_writer_dir.
multiplexer = event_multiplexer.EventMultiplexer()
multiplexer.AddRunsFromDirectory(self._layout_writer_dir)
multiplexer.Reload()
tensor_events = multiplexer.Tensors(
'.', metadata.CONFIG_SUMMARY_TAG)
shutil.rmtree(self._layout_writer_dir)

# Parse layout proto from disk.
string_array = tf.make_ndarray(tensor_events[0].tensor_proto)
content = np.asscalar(string_array)
layout_proto_from_disk = layout_pb2.Layout()
layout_proto_from_disk.ParseFromString(
tf.compat.as_bytes(content))

# Merge two layout proto.
merged_layout_json = merge(
json_format.MessageToJson(layout_proto_from_disk),
json_format.MessageToJson(layout_proto_to_write))
merged_layout_proto = layout_pb2.Layout()
json_format.Parse(str(merged_layout_json), merged_layout_proto)

self._layout_writer = tf.summary.FileWriter(
self._layout_writer_dir)
layout_summary = summary_lib.custom_scalar_pb(
merged_layout_proto)
self._layout_writer.add_summary(layout_summary)
self._layout_writer.close()
except KeyError:
# Write the current layout proto when there is no layout in the disk.
self._layout_writer = tf.summary.FileWriter(
self._layout_writer_dir)
layout_summary = summary_lib.custom_scalar_pb(
layout_proto_to_write)
self._layout_writer.add_summary(layout_summary)
self._layout_writer.close()
Loading

0 comments on commit 3071f8f

Please sign in to comment.