-
Notifications
You must be signed in to change notification settings - Fork 3
Expand file tree
/
Copy pathdistributed_parameter_server.py
More file actions
46 lines (36 loc) · 1.27 KB
/
distributed_parameter_server.py
File metadata and controls
46 lines (36 loc) · 1.27 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
"""
@File : distributed_parameter_server.py
@Author: Dong Wang
@Date : 7/21/2021
"""
from threading import Thread
from socketserver import TCPServer
from SemiFlow.layer import Dense
from SemiFlow.Model import Sequential
import numpy as np
from SemiFlow.utils.distributed_tools import ParameterServer, Handler
def global_model():
    """Build and compile the shared two-class demo classifier.

    Generates a synthetic two-Gaussian dataset (class A centered at (6, 6),
    class B at (1, 1)) purely to report its shape here — actual training is
    expected to happen on the distributed workers, which pull this model
    through the parameter server.

    Returns:
        A compiled SemiFlow ``Sequential`` model: Dense(2, relu) ->
        Dense(num_classes, softmax), categorical cross-entropy, SGD with
        learning rate 0.05.
    """
    # Identity covariance for both clusters; 200 samples per class.
    Asamples = np.random.multivariate_normal([6, 6], [[1, 0], [0, 1]], 200)
    Bsamples = np.random.multivariate_normal([1, 1], [[1, 0], [0, 1]], 200)
    x_train = np.vstack((Asamples, Bsamples))
    # One-hot labels: [0, 1] for class A rows, [1, 0] for class B rows.
    y_train = np.vstack((np.array([[0, 1]] * 200), np.array([[1, 0]] * 200)))
    print(x_train.shape, y_train.shape)
    num_classes = 2
    # NOTE(review): the original also defined unused batch_size/epochs
    # locals (training hyper-parameters that belong on the workers);
    # removed as dead code.
    model = Sequential()
    model.add(Dense(units=2, activation='relu', input_shape=(2,)))
    model.add(Dense(units=num_classes, activation='softmax'))
    model.summary()
    model.compile(loss='categorical_crossentropy', optimizer='sgd', learning_rate=0.05)
    return model
if __name__ == '__main__':
    my_model = global_model()
    # Every Handler instance reads this class attribute, so all connections
    # share one ParameterServer wrapping the same model.
    Handler.global_server = ParameterServer(my_model)
    NWORKERS = 3
    serv = TCPServer(('', 10086), Handler)
    # Pre-spawned thread pool on a single listening socket: each thread
    # blocks in serve_forever(), so up to NWORKERS + 1 worker connections
    # (counting the main thread below) are handled concurrently.
    for _ in range(NWORKERS):
        pool_thread = Thread(target=serv.serve_forever, daemon=True)
        pool_thread.start()
    # The main thread serves too and keeps the process alive.
    serv.serve_forever()