# Csar Fdez, UdL, 2025
# Changes from v1: normalization.
#   In v1, each failure type had its own normalization parameters (means and stdevs).
#   In v2, the same mean and stdev are used for all data.
# v3.py trains the models looping over TIME_STEPS (4, 8, 12, 16, 20, 24, ...) to find the optimal threshold factor.
# Derived from v3_class, which derives from v3.py with code from v1_multifailure.py.
# This code does not train for multiple time steps.
# Partial and total blocked condenser are merged into one class.
# Construction of train and test sets changed: it is now done by days.

import pandas as pd
import matplotlib.pyplot as plt
import datetime
import numpy as np
import keras
import os.path
from keras import layers
from optparse import OptionParser
import copy
import pickle

parser = OptionParser()
parser.add_option("-t", "--train", dest="train", help="Trains the models (false)", default=False, action="store_true")
parser.add_option("-n", "--timesteps", dest="timesteps", help="TIME STEPS ", default=12)
parser.add_option("-r", "--transition", dest="transition", help="Includes transition data (false)", default=False, action="store_true")
parser.add_option("-p", "--plot", dest="plot", help="Only plot data (false)", default=False, action="store_true")
#parser.add_option("-f", "--thresholdfactor", dest="TF", help="Threshold Factor ", default=1.4)
# A threshold factor makes no sense when classifying, because we apply many models and assign the class with the lowest MSE.

(options, args) = parser.parse_args()

# Data file arrays. Index:
#   0: no failure
#   1: blocked evaporator
#   2: fully blocked condenser
#   3: partially blocked condenser
#   4: condenser fan not working
#   5: open door
NumberOfFailures = 4  # So far, we only have data for the first 4 failure types

datafiles = [[], []]  # 0 for train, 1 for test
for i in range(NumberOfFailures + 1):
    datafiles[0].append([])
    datafiles[1].append([])

# The next set of data corresponds to the freezer, SP=-26
datafiles[0][0] = ['2024-08-07_5_', '2024-08-08_5_', '2025-01-25_5_', '2025-01-26_5_']
datafiles[0][1] = ['2024-12-11_5_', '2024-12-12_5_', '2024-12-13_5_']
datafiles[0][2] = ['2024-12-18_5_', '2024-12-21_5_', '2024-12-22_5_', '2024-12-23_5_', '2024-12-24_5_']
datafiles[0][3] = ['2024-12-28_5_', '2024-12-29_5_', '2024-12-30_5_']
datafiles[0][4] = ['2025-02-13_5_', '2025-02-14_5_']

if options.transition:
    datafiles[1][0] = ['2025-01-27_5_', '2025-01-28_5_']
    datafiles[1][1] = ['2024-12-14_5_', '2024-12-15_5_', '2024-12-16_5_']  # with TRANSITION
    datafiles[1][2] = ['2024-12-17_5_', '2024-12-19_5_', '2024-12-25_5_', '2024-12-26_5_']  # with TRANSITION
    datafiles[1][3] = ['2024-12-27_5_', '2024-12-31_5_', '2025-01-01_5_']  # with TRANSITION
    datafiles[1][4] = ['2025-02-12_5_', '2025-02-15_5_', '2025-02-16_5_']
else:
    datafiles[1][0] = ['2025-01-27_5_', '2025-01-28_5_']
    datafiles[1][1] = ['2024-12-14_5_', '2024-12-15_5_']
    datafiles[1][2] = ['2024-12-19_5_', '2024-12-25_5_', '2024-12-26_5_']
    datafiles[1][3] = ['2024-12-31_5_', '2025-01-01_5_']
    datafiles[1][4] = ['2025-02-15_5_', '2025-02-16_5_']

#datafiles[0][4]=['2025-02-05_5_']
#datafiles[1][4]=['2025-02-05_5_']

# r1 s5: supply air flow temperature
# r1 s1: inlet evaporator temperature
# r1 s4: condenser outlet
# Variables r1 s4 and pa1 apiii may not exist in cloud controllers.

# Feature combination suggested by AKO
#features=['r1 s1','r1 s4','r1 s5','pa1 apiii']
#features=['r1 s1','r1 s5','pa1 apiii']
#features=['r1 s5','pa1 apiii']
#features=['r1 s1','r1 s5']
#features=['r1 s5']
features = ['r1 s1', 'r1 s4', 'r1 s5']
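# Each CSV under ./data/ is assumed to hold one day of 10-second samples with at
# least the columns named in features. A quick structural check (illustrative
# only; the file name is just one of the training days listed above):
#   df = pd.read_csv('./data/2024-08-07_5_.csv')
#   assert all(c in df.columns for c in features)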
featureNames = {}
featureNames['r1 s1'] = '$T_{evap}$'
featureNames['r1 s4'] = '$T_{cond}$'
featureNames['r1 s5'] = '$T_{air}$'
featureNames['pa1 apiii'] = '$P_{elec}$'

unitNames = {}
unitNames['r1 s1'] = '$(^{o}C)$'
unitNames['r1 s4'] = '$(^{o}C)$'
unitNames['r1 s5'] = '$(^{o}C)$'
unitNames['pa1 apiii'] = '$(W)$'

NumFeatures = len(features)

df_list = [[], []]
for i in range(NumberOfFailures + 1):
    df_list[0].append([])
    df_list[1].append([])

for i in range(NumberOfFailures + 1):
    dftemp = []
    for f in datafiles[0][i]:
        print("  ", f)
        df1 = pd.read_csv('./data/' + f + '.csv')
        dftemp.append(df1)
    df_list[0][i] = pd.concat(dftemp)

for i in range(NumberOfFailures + 1):
    dftemp = []
    for f in datafiles[1][i]:
        print("  ", f)
        df1 = pd.read_csv('./data/' + f + '.csv')
        dftemp.append(df1)
    df_list[1][i] = pd.concat(dftemp)

# Subsampled to 5' = 30 * 10"
# We consider samples every 5' because, in production, we will only have data at this frequency.
subsamplingrate = 30

dataframe = [[], []]
for i in range(NumberOfFailures + 1):
    dataframe[0].append([])
    dataframe[1].append([])

for i in range(NumberOfFailures + 1):
    datalength = df_list[0][i].shape[0]
    dataframe[0][i] = df_list[0][i].iloc[range(0, datalength, subsamplingrate)][features]
    dataframe[0][i].reset_index(inplace=True, drop=True)
    dataframe[0][i].dropna(inplace=True)

for i in range(NumberOfFailures + 1):
    datalength = df_list[1][i].shape[0]
    dataframe[1][i] = df_list[1][i].iloc[range(0, datalength, subsamplingrate)][features]
    dataframe[1][i].reset_index(inplace=True, drop=True)
    dataframe[1][i].dropna(inplace=True)

# Train data is [0] and test data is [1]
dataTrain = []
dataTest = []
for i in range(NumberOfFailures + 1):
    dataTrain.append(dataframe[0][i])
    dataTest.append(dataframe[1][i])

# Calculate means and stdevs over the whole training set (shared by all classes)
a = dataTrain[0]
for i in range(1, NumberOfFailures + 1):
    a = np.vstack((a, dataTrain[i]))

means = a.mean(axis=0)
stdevs = a.std(axis=0)

def normalize2(train, test):
    return ((train - means) / stdevs, (test - means) / stdevs)

dataTrainNorm = []
dataTestNorm = []
for i in range(NumberOfFailures + 1):
    dataTrainNorm.append([])
    dataTestNorm.append([])
for i in range(NumberOfFailures + 1):
    (dataTrainNorm[i], dataTestNorm[i]) = normalize2(dataTrain[i], dataTest[i])

NumFilters = 64
KernelSize = 7
DropOut = 0.2

def create_sequences(values, time_steps):
    # Slide a window of length time_steps over the data: (N, F) -> (N - time_steps + 1, time_steps, F)
    output = []
    for i in range(len(values) - time_steps + 1):
        output.append(values[i : (i + time_steps)])
    return np.stack(output)

def listToString(l):
    r = ''
    for i in l:
        r += str(i)
    return r.replace(' ', '')

model = []
modelckpt_callback = []
es_callback = []
path_checkpoint = []
timesteps = int(options.timesteps)
x_train = []
for i in range(NumberOfFailures + 1):
    x_train.append(create_sequences(dataTrainNorm[i], timesteps))
    model.append([])
    # One convolutional autoencoder per class, trained to reconstruct only its own class
    model[i] = keras.Sequential(
        [
            layers.Input(shape=(x_train[i].shape[1], x_train[i].shape[2])),
            layers.Conv1D(filters=NumFilters, kernel_size=KernelSize, padding="same", strides=2, activation="relu"),
            layers.Dropout(rate=DropOut),
            layers.Conv1D(filters=int(NumFilters / 2), kernel_size=KernelSize, padding="same", strides=2, activation="relu"),
            layers.Conv1DTranspose(filters=int(NumFilters / 2), kernel_size=KernelSize, padding="same", strides=2, activation="relu"),
            layers.Dropout(rate=DropOut),
            layers.Conv1DTranspose(filters=NumFilters, kernel_size=KernelSize, padding="same", strides=2, activation="relu"),
            layers.Conv1DTranspose(filters=x_train[i].shape[2], kernel_size=KernelSize, padding="same"),
        ]
    )
    model[i].compile(optimizer=keras.optimizers.Adam(learning_rate=0.001), loss="mse")
    model[i].summary()
    path_checkpoint.append("model_class_v5_" + str(i) + "_" + str(timesteps) + listToString(features) + "_checkpoint.weights.h5")
    es_callback.append(keras.callbacks.EarlyStopping(monitor="val_loss", min_delta=0, patience=15))
    modelckpt_callback.append(keras.callbacks.ModelCheckpoint(
        monitor="val_loss",
        filepath=path_checkpoint[i],
        verbose=1,
        save_weights_only=True,
        save_best_only=True,
    ))
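# Sanity guard (added here as an assumption, with the default timesteps=12 in
# mind): the encoder halves the window length twice and the decoder doubles it
# twice with padding="same", so the autoencoder only reproduces the input shape
# when timesteps is a multiple of 4 (12 works; 10, for instance, would not).
assert timesteps % 4 == 0, "timesteps must be a multiple of 4"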
if options.train:
    history = []
    for i in range(NumberOfFailures + 1):
        history.append(model[i].fit(
            x_train[i],
            x_train[i],
            epochs=400,
            batch_size=128,
            validation_split=0.3,
            callbacks=[es_callback[i], modelckpt_callback[i]],
        ))
        x_train_pred = model[i].predict(x_train[i])
else:
    for i in range(NumberOfFailures + 1):
        model[i].load_weights(path_checkpoint[i])

# Let's plot some features
colorline = ['black', 'violet', 'lightcoral', 'cyan', 'lime', 'grey']
colordot = ['grey', 'darkviolet', 'red', 'blue', 'green', 'black']
#featuresToPlot=['r1 s1','r1 s2','r1 s3','pa1 apiii']
featuresToPlot = features
indexesToPlot = []
for i in featuresToPlot:
    indexesToPlot.append(features.index(i))

def plotData():
    NumFeaturesToPlot = len(indexesToPlot)
    plt.rcParams.update({'font.size': 16})
    fig, axes = plt.subplots(nrows=NumFeaturesToPlot, ncols=1, figsize=(15, 10), dpi=80,
                             facecolor="w", edgecolor="k", sharex=True)
    for i in range(NumFeaturesToPlot):
        init = 0
        end = testRanges[0][1]
        for j in range(NumberOfFailures + 1):
            ax = axes if NumFeaturesToPlot == 1 else axes[i]
            ax.plot(range(init, end),
                    x_test[testRanges[j][0]:testRanges[j][1], 0, indexesToPlot[i]]
                    * stdevs[indexesToPlot[i]] + means[indexesToPlot[i]],
                    label="Class " + str(j), color=colorline[j], linewidth=1)
            if j < NumberOfFailures:  # advance the x axis to the next class range (assumed completion)
                init = end
                end += (testRanges[j + 1][1] - testRanges[j + 1][0])
        ax.set_ylabel(featureNames[features[indexesToPlot[i]]] + ' ' + unitNames[features[indexesToPlot[i]]])
        ax.legend(loc='upper right')
        ax.grid()
    plt.show()

# Build the test set: the windows of all classes concatenated in order, keeping
# the index range of each class. (Assumed reconstruction: x_test, testRanges,
# testClasses and classes are required by the plotting and metric code below.)
x_test = create_sequences(dataTestNorm[0], timesteps)
testRanges = [[0, x_test.shape[0]]]
for i in range(1, NumberOfFailures + 1):
    w = create_sequences(dataTestNorm[i], timesteps)
    testRanges.append([x_test.shape[0], x_test.shape[0] + w.shape[0]])
    x_test = np.vstack((x_test, w))
testClasses = list(range(NumberOfFailures + 1))  # ground-truth class of each range

if options.plot:  # (assumed) honour the -p flag: only plot the data
    plotData()
    exit(0)

# Classify every test window as the class whose autoencoder reconstructs it
# with the lowest MSE
reconstructionMSE = np.zeros((NumberOfFailures + 1, x_test.shape[0]))
for m in range(NumberOfFailures + 1):
    reconstructionMSE[m] = np.mean((model[m].predict(x_test) - x_test) ** 2, axis=(1, 2))
classes = np.argmin(reconstructionMSE, axis=0)

def plotData4():
    # Stand-in (assumed behaviour): plot the first selected feature, overlaying
    # each window's predicted class as a dot coloured with colordot
    k = indexesToPlot[0]
    plt.rcParams.update({'font.size': 16})
    fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(15, 6), dpi=80)
    ax.plot(x_test[:, 0, k] * stdevs[k] + means[k], color='black', linewidth=1)
    for j in range(NumberOfFailures + 1):
        idx = np.where(classes == j)[0]
        ax.plot(idx, x_test[idx, 0, k] * stdevs[k] + means[k], 'o',
                color=colordot[j], markersize=3, label="Class " + str(j))
    ax.set_ylabel(featureNames[features[k]] + ' ' + unitNames[features[k]])
    ax.legend(loc='upper right')
    ax.grid()
    plt.show()

def anomalyMetric(classes, testRanges, testClasses):
    # Per-class sensitivity and precision (the TP/FN/FP counting scheme is an
    # assumed reconstruction; the averaging and reporting are original)
    TP = np.zeros(NumberOfFailures + 1)
    FN = np.zeros(NumberOfFailures + 1)
    FP = np.zeros(NumberOfFailures + 1)
    Sensitivity = np.zeros(NumberOfFailures + 1)
    Precision = np.zeros(NumberOfFailures + 1)
    for i in range(NumberOfFailures + 1):
        pred = classes[testRanges[i][0]:testRanges[i][1]]
        TP[i] = np.count_nonzero(pred == testClasses[i])
        FN[i] = np.count_nonzero(pred != testClasses[i])
        FP[i] = np.count_nonzero(classes == testClasses[i]) - TP[i]
    for i in range(NumberOfFailures + 1):
        if TP[i] + FN[i] > 0:
            Sensitivity[i] = TP[i] / (TP[i] + FN[i])
        else:
            Sensitivity[i] = 0
        Precision[i] = TP[i] / (TP[i] + FP[i])
    S = Sensitivity.mean()
    P = Precision.mean()
    F1 = 2 * S * P / (S + P)
    print("Sensitivity: ", Sensitivity)
    print("S: ", S)
    print("Precision: ", Precision)
    print("P: ", P)
    print("F1-Score: ", F1)

anomalyMetric(classes, testRanges, testClasses)
plotData4()
exit(0)

# Compute delay until correct detection for a list of ranges (when transition data exists)
def computeDelay(l, classes, testRanges, testClasses):
    d = np.zeros(len(l))
    NoFailsInARow = 4
    ind = 0
    for i in l:
        start = testRanges[i][0]
        count = 0
        # (assumed completion) advance until NoFailsInARow consecutive windows
        # are classified as the true class of this range
        while start < testRanges[i][1]:
            if classes[start] == testClasses[i]:
                count += 1
            else:
                count = 0
            if count == NoFailsInARow:
                break
            start += 1
        d[ind] = start - testRanges[i][0]
        ind += 1
    return d
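# Example of how computeDelay would be invoked for the four failure classes
# when transition data is included (assumed usage; unreachable as written
# because of the exit(0) above):
#   if options.transition:
#       delays = computeDelay([1, 2, 3, 4], classes, testRanges, testClasses)
#       print("Delay until detection (in 5-minute windows): ", delays)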