Skip to content

Commit 97da50f

Browse files
committed
Don't import "tf.X"
1 parent 80c2a77 commit 97da50f

File tree

5 files changed

+16
-14
lines changed

5 files changed

+16
-14
lines changed

plasma/models/builder.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -6,16 +6,16 @@
66
# KGF: see below synchronization--- output is launched here
77
#
88
# KGF: (was used only in hyper_build_model())
9-
from tf.keras.layers import (
9+
from tensorflow.keras.layers import (
1010
Input,
1111
Dense, Activation, Dropout, Lambda,
1212
Reshape, Flatten, Permute, # RepeatVector
1313
LSTM, CuDNNLSTM, SimpleRNN, BatchNormalization,
1414
Convolution1D, MaxPooling1D, TimeDistributed,
1515
Concatenate
1616
)
17-
from tf.keras.callbacks import Callback
18-
from tf.keras.regularizers import l2 # l1, l1_l2
17+
from tensorflow.keras.callbacks import Callback
18+
from tensorflow.keras.regularizers import l2 # l1, l1_l2
1919

2020
import re
2121
import os

plasma/models/custom_loss.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
import numpy as np
22

33
import tensorflow as tf
4-
import tf.keras.backend as K
5-
from tf.keras.losses import squared_hinge
4+
import tensorflow.keras.backend as K
5+
from tensorflow.keras.losses import squared_hinge
66

77
_EPSILON = tf.keras.backend.epsilon()
88

plasma/models/mpi_runner.py

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,7 @@
7676
# TODO(KGF): above, builder.py (bug workaround), mpi_launch_tensorflow.py,
7777
# and runner.py are the only files that import tensorflow directly
7878

79-
from tf.keras.backend import set_session
79+
from tensorflow.keras.backend import set_session
8080
# KGF: next 3 lines dump many TensorFlow diagnostics to stderr.
8181
# All MPI ranks first "Successfully opened dynamic library libcuda"
8282
# then, one by one: ID GPU, libcudart, libcublas, libcufft, ...
@@ -92,9 +92,9 @@
9292
g.comm.Barrier()
9393
if i == g.task_index:
9494
print('[{}] importing Keras'.format(g.task_index))
95-
import tf.keras.backend as K
96-
from tf.keras.utils import Progbar
97-
import tf.keras.callbacks as cbks
95+
import tensorflow.keras.backend as K
96+
from tensorflow.keras.utils import Progbar
97+
import tensorflow.keras.callbacks as cbks
9898

9999
g.flush_all_inorder()
100100
g.pprint_unique(conf)
@@ -254,7 +254,9 @@ def set_lr(self, lr):
254254
def compile(self, optimizer, clipnorm, loss='mse'):
255255
# TODO(KGF): check the following import taken from runner.py
256256
# Was not in this file, originally.
257-
from tf.keras.optimizers import SGD, Adam, RMSprop, Nadam, TFOptimizer
257+
from tensorflow.keras.optimizers import (
258+
SGD, Adam, RMSprop, Nadam, TFOptimizer
259+
)
258260
if optimizer == 'sgd':
259261
optimizer_class = SGD(lr=self.DUMMY_LR, clipnorm=clipnorm)
260262
elif optimizer == 'momentum_sgd':

plasma/models/targets.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,8 @@
88
# mae_np, squared_hinge_np,
99
)
1010
import tensorflow as tf # noqa
11-
import tf.keras.backend as K
12-
from tf.keras.losses import hinge
11+
import tensorflow.keras.backend as K
12+
from tensorflow.keras.losses import hinge
1313

1414
# synchronize output from TensorFlow initialization via Keras backend
1515
if g.comm is not None:

plasma/models/tcn.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
from typing import List, Tuple
22
import tensorflow as tf
3-
from tf.keras import optimizers
4-
from tf.keras.layers import (
3+
from tensorflow.keras import optimizers
4+
from tensorflow.keras.layers import (
55
Activation, Lambda, Conv1D, SpatialDropout1D, Dense, BatchNormalization,
66
Input, Model, Layer
77
)

0 commit comments

Comments (0)