# 2021-12-30 09:23:31 +01:00  (stray VCS timestamp — commented out so the file parses)
|
|
|
from data import data
|
|
|
|
import numpy as np
|
|
|
|
|
|
|
|
# Load the three coordinate series (presumably equal-length 1-D arrays —
# confirm against data.py) and stack them column-wise so each sample is a
# row of (x, y, z); the result serves as both model input and target.
x, y, z = data()
x = np.stack([x, y, z], axis=1)
|
|
|
|
|
|
|
|
|
|
|
|
from tensorflow import keras
|
|
|
|
from mu import *
|
|
|
|
from n2ulayer import ulayer
|
|
|
|
|
|
|
|
from loss import loss2d
|
|
|
|
|
|
|
|
# Number of input features per sample (3: the three stacked series from L24).
dim=int(x.shape[1])

# Reduced dimensionality used by partr()/cutdown() below.
pdim=2
|
|
|
|
|
|
|
|
# Assemble the network: project-specific partr() layers built from ulayer,
# then cutdown() which presumably truncates the output to pdim components
# (confirm against mu.py).
inp = keras.layers.Input(x.shape[1:])

h = inp
h = partr(h, pdim, dim, ulayer)
h = cutdown(h, pdim)

model = keras.models.Model(inp, h)
|
|
|
|
|
|
|
|
model.summary()

# Learning rate chosen empirically; 1e-4 and 1e-3 were also tried earlier.
# NOTE: `lr` is a deprecated alias for `learning_rate` (removed in Keras 3),
# so the modern keyword is used here.
opt = keras.optimizers.Adam(learning_rate=0.01)

# loss2d is the project-specific objective imported from loss.py.
model.compile(opt, loss=loss2d)
|
|
|
|
|
|
|
|
# Train the model to reproduce its own input (self-reconstruction target).
# NOTE(review): EarlyStopping monitors the *training* loss even though a
# validation split is configured — confirm that is intentional.
early_stop = keras.callbacks.EarlyStopping(
    patience=250,
    monitor="loss",
    restore_best_weights=True,
)

model.fit(
    x, x,
    epochs=10000,
    shuffle=False,
    validation_split=0.2,
    callbacks=[early_stop],
)
|
|
|
|
|
|
|
|
|
|
|
|
# Pull the numeric matrix out of every ulayer in the trained model.
# Layer 0 is the Input layer, hence the [1:] slice; the string-based type
# check is kept byte-for-byte to preserve the original filter semantics.
mats = [
    lay.numpify()
    for lay in model.layers[1:]
    if "ulayer" in str(type(lay))
]
|
|
|
|
|
|
|
|
# Fold the per-layer matrices into one overall linear map. Each successive
# layer acts on the previous output, so every new matrix is applied from
# the left: mat = m_n ... m_2 m_1.  (`@` is np.dot for 2-D arrays.)
mat = None
for m in mats:
    mat = m if mat is None else m @ mat

# Keep only the rows producing the pdim retained components.
mat = mat[:pdim]

print(mat)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Evaluate on the first 800 samples and visualize the learned 2-D projection.
# Fix: a stray VCS timestamp line ("2022-01-03 15:29:55 +01:00") sat between
# plt.title and plt.show and was a syntax error; it has been removed.
loss = model.evaluate(x[:800], x[:800])
print(loss)

p = model.predict(x[:800])

import matplotlib.pyplot as plt

# Scatter of the two retained components; alpha reduces overplotting.
plt.plot(p[:, 0], p[:, 1], ".", alpha=0.75)
plt.title(str(loss))

plt.show()
|
# 2021-12-30 09:23:31 +01:00  (stray VCS timestamp — commented out so the file parses)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|