# TensorFlow and tf.keras
#import tensorflow as tf
import keras

# Helper libraries
import numpy as np
import math

from data import loadfile
from sklearn.metrics import roc_curve, auc

# Size of the autoencoder bottleneck.
latent = 5


def cauc(n, a):
    """ROC AUC of separating normal scores `n` from anomalous scores `a`."""
    p = np.concatenate((n, a), axis=0)
    labels = np.concatenate((np.zeros(len(n)), np.ones(len(a))), axis=0)
    fpr, tpr, _ = roc_curve(labels, p)
    return auc(fpr, tpr)


def train(x, y, pth, typ, retry=0):
    x = x.astype("float32")
    y = y.astype("float32")
    #x -= np.mean(x, axis=0)  # no centering here
    #x /= (np.std(x, axis=0) + 0.00001)

    # Train only on normal samples (y[:, 0] == 0); anomalies are held out for testing.
    x_train = x[np.where(y[:, 0] == 0)]
    test_a = x[np.where(y[:, 0] == 1)]
    # Reserve as many normal samples for testing as there are anomalies.
    test_n = x_train[:len(test_a)]
    x_train = x_train[len(test_a):]

    dim = x.shape[1]

    # Hidden layer widths: geometric interpolation between the input dimension
    # and the latent size, giving a symmetric encoder/decoder.
    #denses = [0.8, 0.6, 0.5, 0.6, 0.8]
    #denses = [int(math.ceil(zw * dim)) for zw in denses]
    d1 = int((latent * dim * dim) ** (1 / 3))
    d2 = int((latent * latent * dim) ** (1 / 3))
    denses = [d1, d2, latent, d2, d1]

    # Fully connected autoencoder trained to reconstruct its input.
    model = keras.Sequential([
        keras.Input(shape=x_train.shape[1:]),
        keras.layers.Dense(dim, activation='relu'),
        *[keras.layers.Dense(dense, activation='relu') for dense in denses],
        keras.layers.Dense(dim, activation="linear"),
    ])
    model.compile(optimizer='adam', loss="mse", metrics=[])
    model.summary()
    model.fit(x_train, x_train, epochs=25, validation_split=0.2, callbacks=[
        #keras.callbacks.CSVLogger(pth + "history.csv"),
        #keras.callbacks.ModelCheckpoint(pth + "epoch{epoch:02d}.tf",
        #                                save_weights_only=False),
    ])
    #model.save(pth + "whole.tf")
    #model.save_weights(pth + "weights.tf")

    lss = model.evaluate(x_train, x_train, verbose=2)
    #if lss > 500 and retry < 3:
    #    print("retrying")
    #    return train(x, y, pth, typ, retry=retry + 1)

    # Anomaly score: mean squared distance of each reconstruction from the
    # mean reconstruction of the training data.
    p = model.predict(x_train)
    pn = model.predict(test_n)
    pa = model.predict(test_a)
    mp = np.mean(p, axis=0)
    dn = np.mean((pn - mp) ** 2, axis=1)
    da = np.mean((pa - mp) ** 2, axis=1)

    auc_score = cauc(dn, da)
    print(f"auc={auc_score}")


if __name__ == '__main__':
    x, y = loadfile()
    train(x, y, ".", "")
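

# A minimal synthetic smoke test (sketch; the sample counts, Gaussian shift,
# and seed below are illustrative assumptions, not from the original script).
# It only relies on what train() expects: x as an (N, dim) float feature
# matrix and y as an (N, k) label matrix whose first column is 0 for normal
# rows and 1 for anomalies. Call smoke_test() manually to sanity-check the
# pipeline on synthetic data.
def smoke_test(n_normal=2000, n_anom=100, dim=20, seed=0):
    rng = np.random.default_rng(seed)
    x_normal = rng.normal(0.0, 1.0, size=(n_normal, dim))  # "normal" cluster
    x_anom = rng.normal(3.0, 1.0, size=(n_anom, dim))      # shifted anomalies
    x = np.concatenate((x_normal, x_anom), axis=0)
    y = np.concatenate((np.zeros((n_normal, 1)), np.ones((n_anom, 1))), axis=0)
    # On this clearly separable data the printed AUC should be well above 0.5.
    train(x, y, ".", "")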