-
Notifications
You must be signed in to change notification settings - Fork 43
Expand file tree
/
Copy path custom_plot.py
More file actions
57 lines (46 loc) · 2.52 KB
/
custom_plot.py
File metadata and controls
57 lines (46 loc) · 2.52 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
import numpy as np
from bokeh.plotting import figure, output_file, save # , show
from tensorboard.backend.event_processing import event_accumulator
# Event logs from two training runs (float16 vs. float32) under one graph dir.
file_path = "/tigress/alexeys/worked_Graphs/Graph16_momSGD_new/"


def _load_events(event_file):
    """Build an EventAccumulator for one tfevents file and load its contents."""
    acc = event_accumulator.EventAccumulator(file_path + event_file)
    acc.Reload()
    return acc


ea1 = _load_events("events.out.tfevents.1502649990.tiger-i19g10")
ea2 = _load_events("events.out.tfevents.1502652797.tiger-i19g10")
# Histogram tags are assumed identical across both runs; take them from run 1.
histograms = ea1.Tags()['histograms']
# ages': [], 'audio': [], 'histograms': ['input_2_out',
# 'time_distributed_1_out', 'lstm_1/kernel_0', 'lstm_1/kernel_0_grad',
# 'lstm_1/recurrent_kernel_0', 'lstm_1/recurrent_kernel_0_grad',
# 'lstm_1/bias_0', 'lstm_1/bias_0_grad', 'lstm_1_out', 'dropout_1_out',
# 'lstm_2/kernel_0', 'lstm_2/kernel_0_grad', 'lstm_2/recurrent_kernel_0',
# 'lstm_2/recurrent_kernel_0_grad', 'lstm_2/bias_0', 'lstm_2/bias_0_grad',
# 'lstm_2_out', 'dropout_2_out', 'time_distributed_2/kernel_0',
# 'time_distributed_2/kernel_0_grad', 'time_distributed_2/bias_0',
# 'time_distributed_2/bias_0_grad', 'time_distributed_2_out'], 'scalars':
# ['val_roc', 'val_loss', 'train_loss'], 'distributions': ['input_2_out',
# 'time_distributed_1_out', 'lstm_1/kernel_0', 'lstm_1/kernel_0_grad',
# 'lstm_1/recurrent_kernel_0', 'lstm_1/recurrent_kernel_0_grad',
# 'lstm_1/bias_0', 'lstm_1/bias_0_grad', 'lstm_1_out', 'dropout_1_out',
# 'lstm_2/kernel_0', 'lstm_2/kernel_0_grad', 'lstm_2/recurrent_kernel_0',
# 'lstm_2/recurrent_kernel_0_grad', 'lstm_2/bias_0', 'lstm_2/bias_0_grad',
# 'lstm_2_out', 'dropout_2_out', 'time_distributed_2/kernel_0',
# 'time_distributed_2/kernel_0_grad', 'time_distributed_2/bias_0',
# 'time_distributed_2/bias_0_grad', 'time_distributed_2_out'], 'tensors':
# [], 'graph': True, 'meta_graph': True, 'run_metadata': []}
for h in histograms:
    # Fetch each run's first recorded histogram for this tag ONCE, instead of
    # querying the EventAccumulator twice per run as the original code did.
    hv1 = ea1.Histograms(h)[0].histogram_value
    hv2 = ea2.Histograms(h)[0].histogram_value
    # bucket_limit holds the bin upper edges, bucket the counts; the final
    # entry is dropped (presumably the open-ended overflow bucket — TODO confirm).
    x1 = np.array(hv1.bucket_limit[:-1])
    y1 = hv1.bucket[:-1]
    x2 = np.array(hv2.bucket_limit[:-1])
    y2 = hv2.bucket[:-1]
    # Slashes in TensorFlow tags (e.g. "lstm_1/kernel_0") are not filename-safe;
    # keep the sanitized name separate rather than clobbering the loop variable.
    safe_name = h.replace("/", "_")
    p = figure(title=safe_name, y_axis_label="Arbitrary units",
               x_axis_label="Arbitrary units")
    # , y_axis_type="log")
    # NOTE(review): the `legend=` kwarg is deprecated in bokeh >= 1.4 (use
    # legend_label); kept as-is for compatibility with the bokeh version in use.
    p.line(x1, y1, legend="float16, SGD with momentum",
           line_color="green", line_width=2)
    p.line(x2, y2, legend="float32, SGD with momentum",
           line_color="indigo", line_width=2)
    p.legend.location = "top_right"
    # One standalone HTML file per tag; open in a browser to view.
    output_file("plot" + safe_name + ".html", title=safe_name)
    save(p)  # open a browser