forked from PPPLDeepLearning/plasma-python
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathcustom_plot.py
More file actions
35 lines (24 loc) · 2.46 KB
/
custom_plot.py
File metadata and controls
35 lines (24 loc) · 2.46 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
import numpy as np
from bokeh.plotting import figure, show, output_file, save
from tensorboard.backend.event_processing import event_accumulator
# Compare weight/activation histograms from two TensorBoard runs
# (float16 vs float32 training with momentum SGD) by overlaying the
# first recorded histogram of each tag in a Bokeh line plot per tag.
# NOTE(review): paths are hard-coded to a specific cluster filesystem —
# this script only runs where those event files exist.
ea1 = event_accumulator.EventAccumulator("/tigress/alexeys/worked_Graphs/Graph16_momSGD_new/events.out.tfevents.1502649990.tiger-i19g10")
ea1.Reload()
ea2 = event_accumulator.EventAccumulator("/tigress/alexeys/worked_Graphs/Graph32_momSGD_new/events.out.tfevents.1502652797.tiger-i19g10")
ea2.Reload()

# Tags are assumed identical across both runs (same model architecture),
# so iterating ea1's tags and looking each up in ea2 is safe here.
histograms = ea1.Tags()['histograms']
#ages': [], 'audio': [], 'histograms': ['input_2_out', 'time_distributed_1_out', 'lstm_1/kernel_0', 'lstm_1/kernel_0_grad', 'lstm_1/recurrent_kernel_0', 'lstm_1/recurrent_kernel_0_grad', 'lstm_1/bias_0', 'lstm_1/bias_0_grad', 'lstm_1_out', 'dropout_1_out', 'lstm_2/kernel_0', 'lstm_2/kernel_0_grad', 'lstm_2/recurrent_kernel_0', 'lstm_2/recurrent_kernel_0_grad', 'lstm_2/bias_0', 'lstm_2/bias_0_grad', 'lstm_2_out', 'dropout_2_out', 'time_distributed_2/kernel_0', 'time_distributed_2/kernel_0_grad', 'time_distributed_2/bias_0', 'time_distributed_2/bias_0_grad', 'time_distributed_2_out'], 'scalars': ['val_roc', 'val_loss', 'train_loss'], 'distributions': ['input_2_out', 'time_distributed_1_out', 'lstm_1/kernel_0', 'lstm_1/kernel_0_grad', 'lstm_1/recurrent_kernel_0', 'lstm_1/recurrent_kernel_0_grad', 'lstm_1/bias_0', 'lstm_1/bias_0_grad', 'lstm_1_out', 'dropout_1_out', 'lstm_2/kernel_0', 'lstm_2/kernel_0_grad', 'lstm_2/recurrent_kernel_0', 'lstm_2/recurrent_kernel_0_grad', 'lstm_2/bias_0', 'lstm_2/bias_0_grad', 'lstm_2_out', 'dropout_2_out', 'time_distributed_2/kernel_0', 'time_distributed_2/kernel_0_grad', 'time_distributed_2/bias_0', 'time_distributed_2/bias_0_grad', 'time_distributed_2_out'], 'tensors': [], 'graph': True, 'meta_graph': True, 'run_metadata': []}

for h in histograms:
    # Hoist the proto lookups: Histograms(h)[0] was previously evaluated
    # twice per accumulator on every iteration. [0] takes the first
    # recorded histogram event for the tag.
    hv1 = ea1.Histograms(h)[0].histogram_value
    hv2 = ea2.Histograms(h)[0].histogram_value
    # Drop the last bucket: its limit is a huge sentinel (+inf-like) that
    # would distort the x axis.
    x1 = np.array(hv1.bucket_limit[:-1])
    y1 = hv1.bucket[:-1]
    x2 = np.array(hv2.bucket_limit[:-1])
    y2 = hv2.bucket[:-1]

    # Use a filesystem-safe name for the output file/title; keep the raw
    # tag `h` intact instead of mutating the loop variable.
    safe_name = h.replace("/", "_")
    p = figure(title=safe_name, y_axis_label="Arbitrary units", x_axis_label="Arbitrary units")
    # ,y_axis_type="log")
    p.line(x1, y1, legend="float16, SGD with momentum",
           line_color="green", line_width=2)
    p.line(x2, y2, legend="float32, SGD with momentum",
           line_color="indigo", line_width=2)
    p.legend.location = "top_right"
    output_file("plot"+safe_name+".html", title=safe_name)
    save(p)  # write the HTML plot to disk (use show(p) to open a browser)