cesar 1 day ago
Parent
Commit e8dfd34777
2 changed files with 801 additions and 0 deletions
  1. plotFscore.py (+56, -0)
  2. v3.py (+745, -0)

plotFscore.py (+56, -0)

@@ -0,0 +1,56 @@
+import matplotlib.pyplot as plt
+import numpy as np
+import pickle
+
+listOfFeatures=[['r1 s1'], ['r1 s4'], ['r1 s5'], ['r1 s1','r1 s4'], ['r1 s1','r1 s5'], ['r1 s4','r1 s5'], ['r1 s1','r1 s4','r1 s5'] ]
+featureNames={}
+featureNames['r1 s1']='$T_{evap}$'
+featureNames['r1 s4']='$T_{cond}$'
+featureNames['r1 s5']='$T_{air}$'
+featureNames['pa1 apiii']='$P_{elec}$'
+
+
+def listToString(l):
+    r=''
+    for i in l:
+        r+=str(i)
+    return(r.replace(' ',''))
+
+FS=[]
+for l in listOfFeatures:
+    print(l)
+    file = open('FScore'+listToString(l)+'.pk', 'rb')
+    FS.append(pickle.load(file))
+    file.close()
+
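+# Editor's note (illustrative, not part of the original commit): each
+# 'FScore<features>.pk' file is assumed to hold the FScoreHash dict saved by
+# v3.py, mapping a window size ns to a list of [threshold_factor, F2score]
+# pairs. listToString() just concatenates the feature names and strips
+# spaces, e.g.:
+#   listToString(['r1 s1','r1 s4'])  ->  'r1s1r1s4'  ->  'FScorer1s1r1s4.pk'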
+plt.rcParams.update({'font.size': 16})
+fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(14, 10), dpi=80, facecolor="w", edgecolor="k",sharex=True)
+tsToPlot=[4,8,12,16]
+for row in range(2):
+    for col in range(2):
+        ind=row*2+col
+        for k in range(len(FS)):
+            ar=np.array((FS[k][tsToPlot[ind]]))
+
+            s='['
+            for i in range(len(listOfFeatures[k])):
+                s+=featureNames[listOfFeatures[k][i]]
+                if i < len(listOfFeatures[k])-1:
+                    s+=', '
+            s+=']'
+
+
+            axes[row][col].plot(ar[:,0],ar[:,1],label=s,linewidth=3)
+#axes.set_xlabel("Threshold factor")
+        if col==0:
+            axes[row][col].set_ylabel("FScore")
+        if row==1:
+            axes[row][col].set_xlabel("Threshold Factor ($TF$)")
+        axes[row][col].grid()
+        axes[row][col].set_title('$ns=$'+str(tsToPlot[ind]))
+axes[0][0].legend(loc='lower right')
+#plt.title(str(features))
+plt.show()
+
+
+

v3.py (+745, -0)

@@ -0,0 +1,745 @@
+# Csar Fdez, UdL, 2025
+# Changes from v1: normalization
+# In v1, each failure type had its own normalization parameters (means and stdevs)
+# In v2, the same mean and stdev are used for all data
+# v3.py trains the models looping over TIME_STEPS (4,8,12,16,20,24,...), finding the optimal threshold factor
+import pandas as pd
+import matplotlib.pyplot as plt
+import datetime
+import numpy as np
+import keras
+import os.path
+from keras import layers
+from optparse import OptionParser
+import copy
+import pickle
+
+
+parser = OptionParser()
+parser.add_option("-t", "--train", dest="train", help="Trains the models (false)", default=False, action="store_true")
+parser.add_option("-o", "--optimizetf", dest="optimizetf", help="Optimizes Threshold Factor (false)", default=False, action="store_true")
+parser.add_option("-n", "--timesteps", dest="timesteps", help="TIME STEPS ", default=12)
+parser.add_option("-f", "--thresholdfactor", dest="TF", help="Threshold Factor ", default=1.4)
+
+(options, args) = parser.parse_args()
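+# Editor's note: typical invocations implied by the options above
+# (illustrative, not part of the original commit):
+#   python v3.py --train                # train one model per window size ns
+#   python v3.py --optimizetf -f 1.4    # sweep the threshold factor per ns
+#   python v3.py -n 12 -f 1.4           # evaluate with ns=12 and TF=1.4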
+
+
+# data files arrays. Index:
+# 0.  No failure
+# 1.  Blocked evaporator
+# 2.  Fully blocked condenser
+# 3.  Partially blocked condenser
+# 4.  Condenser fan not working
+# 5.  Open door
+
+
+NumberOfFailures=4  # So far, we only have data for the first 4 types of failures
+datafiles=[]
+for i in range(NumberOfFailures+1):
+    datafiles.append([])
+
+# Next set of data corresponds to Freezer, SP=-26
+datafiles[0]=['2024-08-07_5_','2024-08-08_5_','2025-01-25_5_','2025-01-26_5_','2025-01-27_5_','2025-01-28_5_']
+datafiles[1]=['2024-12-11_5_', '2024-12-12_5_','2024-12-13_5_','2024-12-14_5_','2024-12-15_5_']
+#datafiles[1]=['2024-12-17_5_','2024-12-16_5_','2024-12-11_5_', '2024-12-12_5_','2024-12-13_5_','2024-12-14_5_','2024-12-15_5_'] #   These have transitions
+datafiles[2]=['2024-12-18_5_','2024-12-19_5_']
+datafiles[3]=['2024-12-21_5_','2024-12-22_5_','2024-12-23_5_','2024-12-24_5_','2024-12-25_5_','2024-12-26_5_']
+datafiles[4]=['2024-12-28_5_','2024-12-29_5_','2024-12-30_5_','2024-12-31_5_','2025-01-01_5_']
+#datafiles[4]=['2024-12-27_5_','2024-12-28_5_','2024-12-29_5_','2024-12-30_5_','2024-12-31_5_','2025-01-01_5_']  #   These have transitions
+
+#datafiles[4]=[]
+
+# Features suggested by Xavier
+# Careful with 'tc s3': on datafiles[0] it is always null
+# It seems to be incorporated in the new tests
+
+#r1s5 supply air flow temperature
+#r1s1 inlet evaporator temperature
+#r1s4 condenser outlet
+
+# Variables r1s4 and pa1 apiii may not exist in cloud controllers
+
+
+features=['r1 s1','r1 s4','r1 s5','pa1 apiii']
+features=['r1 s1','r1 s4','r1 s5']
+featureNames={}
+featureNames['r1 s1']='$T_{evap}$'
+featureNames['r1 s4']='$T_{cond}$'
+featureNames['r1 s5']='$T_{air}$'
+featureNames['pa1 apiii']='$P_{elec}$'
+
+unitNames={}
+unitNames['r1 s1']='$(^{o}C)$'
+unitNames['r1 s4']='$(^{o}C)$'
+unitNames['r1 s5']='$(^{o}C)$'
+unitNames['pa1 apiii']='$(W)$'
+
+
+#features=['r1 s1','r1 s2','r1 s3','r1 s4','r1 s5','r1 s6','r1 s7','r1 s8','r1 s9','r1 s10','r2 s1','r2 s2','r2 s3','r2 s4','r2 s5','r2 s6','r2 s7','r2 s8','r2 s9','pa1 apiii','tc s1','tc s2']
+
+#features=['r2 s2', 'tc s1','r1 s10','r1 s6','r2 s8']
+
+NumFeatures=len(features)
+
+df_list=[]
+for i in range(NumberOfFailures+1):
+    df_list.append([])
+
+for i in range(NumberOfFailures+1):
+    dftemp=[]
+    for f in datafiles[i]:
+        print("                 ", f)
+        #df1 = pd.read_csv('./data/'+f+'.csv', parse_dates=['datetime'], dayfirst=True, index_col='datetime')
+        df1 = pd.read_csv('./data/'+f+'.csv')
+        dftemp.append(df1)
+    df_list[i]=pd.concat(dftemp)
+
+
+# Subsampled to 5 min = 30 * 10 s
+# We take samples every 5 min because in production we will only have data at this frequency
+subsamplingrate=30
+
+dataframe=[]
+for i in range(NumberOfFailures+1):
+    dataframe.append([])
+
+for i in range(NumberOfFailures+1):
+    datalength=df_list[i].shape[0]
+    dataframe[i]=df_list[i].iloc[range(0,datalength,subsamplingrate)][features]
+    dataframe[i].reset_index(inplace=True,drop=True)
+    dataframe[i].dropna(inplace=True)
+
+
+# Train data is the first 2/3 of the data
+# Test data is the last 1/3 of the data
+dataTrain=[]
+dataTest=[]
+for i in range(NumberOfFailures+1):
+    dataTrain.append(dataframe[i].values[0:int(dataframe[i].shape[0]*2/3),:])
+    dataTest.append(dataframe[i].values[int(dataframe[i].shape[0]*2/3):,:])
+
+# Calculate means and stdevs
+a=dataTrain[0]
+for i in range(1,NumberOfFailures+1):
+    a=np.vstack((a,dataTrain[i]))
+
+means=a.mean(axis=0)
+stdevs=a.std(axis=0)
+def normalize2(train,test):
+    return( (train-means)/stdevs, (test-means)/stdevs )
+
+dataTrainNorm=[]
+dataTestNorm=[]
+for i in range(NumberOfFailures+1):
+    dataTrainNorm.append([])
+    dataTestNorm.append([])
+
+for i in range(NumberOfFailures+1):
+    (dataTrainNorm[i],dataTestNorm[i])=normalize2(dataTrain[i],dataTest[i])
+
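+# Editor's note: a single (means, stdevs) pair is computed from all training
+# classes stacked together, so every class is normalized with the same
+# statistics (the v2/v3 change described in the header). The plots below
+# invert it with: original = normalized * stdevs + means.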
+def plotData():
+    fig, axes = plt.subplots(
+        nrows=NumberOfFailures+1, ncols=2, figsize=(15, 20), dpi=80, facecolor="w", edgecolor="k",sharex=True
+    )
+    for i in range(NumberOfFailures+1):
+        axes[i][0].plot(np.concatenate((dataTrainNorm[i][:,0],dataTestNorm[i][:,0])),label="Fail "+str(i)+",  feature 0")
+        axes[i][1].plot(np.concatenate((dataTrainNorm[i][:,1],dataTestNorm[i][:,1])),label="Fail "+str(i)+",  feature 1")
+    #axes[1].legend()
+    #axes[0].set_ylabel(features[0])
+    #axes[1].set_ylabel(features[1])
+    plt.show()
+
+#plotData()
+#exit(0)
+
+
+NumFilters=64
+KernelSize=7
+DropOut=0.2
+ThresholdFactor=1.4
+def create_sequences(values, time_steps):
+    output = []
+    for i in range(len(values) - time_steps + 1):
+        output.append(values[i : (i + time_steps)])
+    return np.stack(output)
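+# Editor's sketch (illustrative, not part of the original commit):
+# create_sequences turns an (N, F) array into overlapping windows of shape
+# (N - time_steps + 1, time_steps, F), e.g.:
+#   create_sequences(np.arange(10.).reshape(5, 2), 3).shape  ->  (3, 3, 2)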
+
+def AtLeastOneTrue(x):
+    for i in range(NumFeatures):
+        if x[i]:
+            return True
+    return False
+
+def anomalyMetric(th,ts,testList):  # first of list is non-failure data
+    # FP, TP: false/true positives
+    # TN, FN: true/false negatives
+    # Sensitivity (recall): probability of detecting a failure given the data is a failure: TP/(TP+FN)
+    # Specificity: true negative ratio given the data is OK: TN/(TN+FP)
+    # Accuracy: rate of correct predictions: (TN+TP)/(TN+TP+FP+FN)
+    # Precision: rate of positive results: TP/(TP+FP)
+    # F1-score: predictive performance measure: 2*Precision*Sensitivity/(Precision+Sensitivity)
+    # F2-score (as defined here): 2*Specificity*Sensitivity/(Specificity+Sensitivity)
+
+    x_test = create_sequences(testList[0],ts)
+    x_test_pred = model.predict(x_test)
+    test_mae_loss = np.mean(np.abs(x_test_pred - x_test), axis=1)
+    anomalies = test_mae_loss > th
+    count=0
+    for i in range(anomalies.shape[0]):
+        if AtLeastOneTrue(anomalies[i]):
+            count+=1
+    FP=count
+    TN=anomalies.shape[0]-count
+    count=0
+    TP=np.zeros((NumberOfFailures))
+    FN=np.zeros((NumberOfFailures))
+    Sensitivity=np.zeros((NumberOfFailures))
+    Precision=np.zeros((NumberOfFailures))
+    for i in range(1,len(testList)):
+        x_test = create_sequences(testList[i],ts)
+        x_test_pred = model.predict(x_test)
+        test_mae_loss = np.mean(np.abs(x_test_pred - x_test), axis=1)
+        anomalies = test_mae_loss > th
+        count=0
+        for j in range(anomalies.shape[0]):
+            if AtLeastOneTrue(anomalies[j]):
+                count+=1
+        TP[i-1] = count
+        FN[i-1] = anomalies.shape[0]-count
+        Sensitivity[i-1]=TP[i-1]/(TP[i-1]+FN[i-1])
+        Precision[i-1]=TP[i-1]/(TP[i-1]+FP)
+
+    GlobalSensitivity=TP.sum()/(TP.sum()+FN.sum())
+    Specificity=TN/(TN+FP)
+    Accuracy=(TN+TP.sum())/(TN+TP.sum()+FP+FN.sum())
+    GlobalPrecision=TP.sum()/(TP.sum()+FP)
+    F1Score= 2*GlobalPrecision*GlobalSensitivity/(GlobalPrecision+GlobalSensitivity)
+    F2Score = 2*Specificity*GlobalSensitivity/(Specificity+GlobalSensitivity)
+
+    print("Sensitivity: ",Sensitivity)
+    print("Global Sensitivity: ",GlobalSensitivity)
+    #print("Precision: ",Precision)
+    #print("Global Precision: ",GlobalPrecision)
+    print("Specificity: ",Specificity)
+    #print("Accuracy: ",Accuracy)
+    #print("F1Score: ",F1Score)
+    print("F2Score: ",F2Score)
+    #print("FP: ",FP)
+    #return Sensitivity+Specificity
+    return F2Score
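+# Editor's worked example (illustrative): with TP=8, FN=2 on failure windows
+# and FP=1, TN=9 on normal windows:
+#   Sensitivity = 8/10 = 0.80, Specificity = 9/10 = 0.90
+#   F2Score (as defined above) = 2*0.90*0.80/(0.90+0.80) ~= 0.847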
+
+FScoreHash={}
+threshold={}
+def getFScore(timestep,datalist):
+    FScoreHash[timestep]=[]
+    # Computes FScore as a function of the threshold factor
+    tf=0.3
+    while tf<8:
+        th=threshold[timestep]*tf
+        r=anomalyMetric(th,timestep,datalist)
+        FScoreHash[timestep].append([tf,r])
+        if tf<2:
+            tf+=0.1
+        else:
+            tf+=0.5
+
+
+def plotFScore(FS):
+    plt.rcParams.update({'font.size': 16})
+    fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(14, 10), dpi=80, facecolor="w", edgecolor="k")
+    for k in FS.keys():
+        ar=np.array((FS[k]))
+        axes.plot(ar[:,0],ar[:,1],label="$ns=$"+str(k),linewidth=3)
+    axes.set_xlabel("Threshold factor ($TF$)")
+    axes.set_ylabel("FScore")
+    axes.legend()
+    axes.grid()
+    s='['
+    for i in range(len(features)):
+        s+=featureNames[features[i]]
+        if i < len(features)-1:
+            s+=', '
+    s+=']'
+    plt.title(s)
+    plt.show()
+
+def listToString(l):
+    r=''
+    for i in l:
+        r+=str(i)
+    return(r.replace(' ',''))
+
+
+if options.train:
+    for timesteps in range(4,21,4):
+        x_train=[]
+        for i in range(NumberOfFailures+1):
+            x_train.append(create_sequences(dataTrainNorm[i],timesteps))
+
+        model = keras.Sequential(
+            [
+                layers.Input(shape=(x_train[0].shape[1], x_train[0].shape[2])),
+                layers.Conv1D(
+                    filters=NumFilters,
+                    kernel_size=KernelSize,
+                    padding="same",
+                    strides=2,
+                    activation="relu",
+                ),
+                layers.Dropout(rate=DropOut),
+                layers.Conv1D(
+                    filters=int(NumFilters/2),
+                    kernel_size=KernelSize,
+                    padding="same",
+                    strides=2,
+                    activation="relu",
+                ),
+                layers.Conv1DTranspose(
+                    filters=int(NumFilters/2),
+                    kernel_size=KernelSize,
+                    padding="same",
+                    strides=2,
+                    activation="relu",
+                ),
+                layers.Dropout(rate=DropOut),
+                layers.Conv1DTranspose(
+                    filters=NumFilters,
+                    kernel_size=KernelSize,
+                    padding="same",
+                    strides=2,
+                    activation="relu",
+                ),
+                layers.Conv1DTranspose(filters=x_train[0].shape[2], kernel_size=KernelSize, padding="same"),
+            ]
+        )
+        model.compile(optimizer=keras.optimizers.Adam(learning_rate=0.001), loss="mse")
+        model.summary()
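+        # Editor's note (shape walk-through, illustrative): for ns=12 and F
+        # features, the two stride-2 Conv1D layers compress (12, F) to (6, 64)
+        # and then (3, 32); the Conv1DTranspose layers mirror this back through
+        # (6, 32) and (12, 64) to a final (12, F), so the autoencoder
+        # reconstructs its input window.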
+        path_checkpoint="model_noclass_v2_"+str(timesteps)+listToString(features)+"_checkpoint.weights.h5"
+        es_callback=keras.callbacks.EarlyStopping(monitor="val_loss", min_delta=0, patience=15)
+        modelckpt_callback=keras.callbacks.ModelCheckpoint( monitor="val_loss", filepath=path_checkpoint, verbose=1, save_weights_only=True, save_best_only=True,)
+
+        history=model.fit( x_train[0], x_train[0], epochs=400, batch_size=128, validation_split=0.3, callbacks=[  es_callback, modelckpt_callback      ],)
+
+        x_train_pred=model.predict(x_train[0])
+        train_mae_loss=np.mean(np.abs(x_train_pred - x_train[0]), axis=1)
+        threshold[timesteps]=np.max(train_mae_loss,axis=0)
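+        # Editor's note: threshold[timesteps] is a per-feature vector, the
+        # maximum training reconstruction error of each feature; it is later
+        # scaled by the threshold factor TF to flag anomalies.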
+    file = open('threshold'+listToString(features)+'.pk', 'wb')
+    pickle.dump(threshold, file)
+    file.close()
+    exit(0)
+else:
+    file = open('threshold'+listToString(features)+'.pk', 'rb')
+    threshold=pickle.load(file)
+    file.close()
+
+
+    x_train=[]
+    for i in range(NumberOfFailures+1):
+        x_train.append(create_sequences(dataTrainNorm[i],int(options.timesteps)))
+
+    model = keras.Sequential(
+        [
+            layers.Input(shape=(x_train[0].shape[1], x_train[0].shape[2])),
+            layers.Conv1D(
+                filters=NumFilters,
+                kernel_size=KernelSize,
+                padding="same",
+                strides=2,
+                activation="relu",
+            ),
+            layers.Dropout(rate=DropOut),
+            layers.Conv1D(
+                filters=int(NumFilters/2),
+                kernel_size=KernelSize,
+                padding="same",
+                strides=2,
+                activation="relu",
+            ),
+            layers.Conv1DTranspose(
+                filters=int(NumFilters/2),
+                kernel_size=KernelSize,
+                padding="same",
+                strides=2,
+                activation="relu",
+            ),
+            layers.Dropout(rate=DropOut),
+            layers.Conv1DTranspose(
+                filters=NumFilters,
+                kernel_size=KernelSize,
+                padding="same",
+                strides=2,
+                activation="relu",
+            ),
+            layers.Conv1DTranspose(filters=x_train[0].shape[2], kernel_size=KernelSize, padding="same"),
+        ]
+    )
+    model.compile(optimizer=keras.optimizers.Adam(learning_rate=0.001), loss="mse")
+    model.summary()
+
+
+    if options.optimizetf:
+        for timesteps in range(4,21,4):
+            path_checkpoint="model_noclass_v2_"+str(timesteps)+listToString(features)+"_checkpoint.weights.h5"
+            es_callback=keras.callbacks.EarlyStopping(monitor="val_loss", min_delta=0, patience=15)
+            modelckpt_callback=keras.callbacks.ModelCheckpoint( monitor="val_loss", filepath=path_checkpoint, verbose=1, save_weights_only=True, save_best_only=True,)
+            model.load_weights(path_checkpoint)
+            getFScore(timesteps,[dataTestNorm[0],dataTrainNorm[1],dataTrainNorm[2],dataTrainNorm[3],dataTrainNorm[4]])
+        file = open('FScore'+listToString(features)+'.pk', 'wb')
+        pickle.dump(FScoreHash, file)
+        file.close()
+
+
+    path_checkpoint="model_noclass_v2_"+str(options.timesteps)+listToString(features)+"_checkpoint.weights.h5"
+    es_callback=keras.callbacks.EarlyStopping(monitor="val_loss", min_delta=0, patience=15)
+    modelckpt_callback=keras.callbacks.ModelCheckpoint( monitor="val_loss", filepath=path_checkpoint, verbose=1, save_weights_only=True, save_best_only=True,)
+    model.load_weights(path_checkpoint)
+
+
+    file = open('FScore'+listToString(features)+'.pk', 'rb')
+    FS=pickle.load(file)
+    file.close()
+
+
+    #plotFScore(FS)
+    #exit(0)
+
+TIME_STEPS=int(options.timesteps)
+#  1st scenario. Detect only the anomaly.  Later, we will classify it
+# Test data = testnormal + testfail1 + testfail2 + testfail3 + testfail4 + testnormal
+#d=np.vstack((dataTestNorm[0],dataTestNorm[1],dataTestNorm[2],dataTestNorm[3],dataTestNorm[4],dataTestNorm[0]))
+# For failure data, we can use train data because it was not used for training and includes the first samples
+#datalist=[dataTestNorm[0],dataTrainNorm[1],dataTrainNorm[2],dataTrainNorm[3],dataTrainNorm[4]]
+datalist=[dataTestNorm[0],dataTestNorm[1],dataTestNorm[2],dataTestNorm[3],dataTestNorm[4]]
+d=np.vstack((datalist))
+
+x_test = create_sequences(d,int(options.timesteps))
+x_test_pred = model.predict(x_test)
+test_mae_loss = np.mean(np.abs(x_test_pred - x_test), axis=1)
+
+
+# Define ranges for plotting in different colors
+testRanges=[]
+
+r=0
+for i in range(len(datalist)):
+    testRanges.append([r,r+datalist[i].shape[0]])
+    r+=datalist[i].shape[0]
+
+#r=dataTestNorm[0].shape[0]
+#testRanges.append([0,r])
+#for i in range(1,NumberOfFailures+1):
+#    rnext=r+dataTrainNorm[i].shape[0]
+#    testRanges.append([r,rnext] )
+#    r=rnext
+
+# Drop the last TIME_STEPS for plotting
+testRanges[NumberOfFailures][1]=testRanges[NumberOfFailures][1]-TIME_STEPS
+
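+# Editor's note (illustrative): testRanges holds one [start, end) row range
+# per class in the stacked test stream; e.g. if the five sets had lengths
+# [300, 120, 80, 150, 100], testRanges would be
+# [[0,300],[300,420],[420,500],[500,650],[650,750]] before trimming the
+# last TIME_STEPS above.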
+
+anomalies = test_mae_loss > threshold[int(options.timesteps)]*float(options.TF)
+anomalous_data_indices = []
+for i in range(anomalies.shape[0]):
+    if AtLeastOneTrue(anomalies[i]):
+    #if anomalies[i][0] or anomalies[i][1] or anomalies[i][2] or anomalies[i][3]:
+        anomalous_data_indices.append(i)
+
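+# Editor's note: anomalies has shape (num_windows, NumFeatures); a window is
+# flagged as anomalous when any feature's mean absolute reconstruction error
+# exceeds its (TF-scaled) per-feature threshold.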
+# Let's plot some features
+
+colorline=['violet','lightcoral','cyan','lime','grey']
+colordot=['darkviolet','red','blue','green','black']
+
+#featuresToPlot=['r1 s1','r1 s2','r1 s3','pa1 apiii']
+featuresToPlot=features
+
+indexesToPlot=[]
+for i in featuresToPlot:
+    indexesToPlot.append(features.index(i))
+
+def plotData3():
+    NumFeaturesToPlot=len(indexesToPlot)
+    plt.rcParams.update({'font.size': 16})
+    fig, axes = plt.subplots(
+        nrows=NumFeaturesToPlot, ncols=1, figsize=(15, 10), dpi=80, facecolor="w", edgecolor="k",sharex=True
+    )
+    for i in range(NumFeaturesToPlot):
+        init=0
+        end=testRanges[0][1]
+        axes[i].plot(range(init,end),x_test[testRanges[0][0]:testRanges[0][1],0,indexesToPlot[i]]*stdevs[indexesToPlot[i]]+means[indexesToPlot[i]],label="No fail")
+        init=end
+        end+=(testRanges[1][1]-testRanges[1][0])
+        for j in range(1,NumberOfFailures+1):
+            axes[i].plot(range(init,end),x_test[testRanges[j][0]:testRanges[j][1],0,indexesToPlot[i]]*stdevs[indexesToPlot[i]]+means[indexesToPlot[i]],label="Fail type "+str(j), color=colorline[j-1])
+            if j<NumberOfFailures:
+                init=end
+                end+=(testRanges[j+1][1]-testRanges[j+1][0])
+        x=[]
+        y=[]
+        for k in anomalous_data_indices:
+            if (k+TIME_STEPS)<x_test.shape[0]:
+                x.append(k+TIME_STEPS)
+                y.append(x_test[k+TIME_STEPS,0,indexesToPlot[i]]*stdevs[indexesToPlot[i]]+means[indexesToPlot[i]])
+        axes[i].plot(x,y ,color='grey',marker='.',linewidth=0,label="Fail detection" )
+
+        if i==(NumFeaturesToPlot-1):
+            axes[i].legend(loc='right')
+        s=''
+        s+=featureNames[features[indexesToPlot[i]]]
+        s+=' '+unitNames[features[indexesToPlot[i]]]
+        axes[i].set_ylabel(s)
+        axes[i].grid()
+    axes[NumFeaturesToPlot-1].set_xlabel("Sample number")
+    plt.show()
+
+
+anomalyMetric(threshold[int(options.timesteps)]*float(options.TF), int(options.timesteps),datalist)
+
+
+#plotData3()
+
+
+def plotData5():
+    model1 = keras.Sequential(
+        [
+            layers.Input(shape=(4, 3)),
+            layers.Conv1D(
+                filters=NumFilters,
+                kernel_size=KernelSize,
+                padding="same",
+                strides=2,
+                activation="relu",
+            ),
+            layers.Dropout(rate=DropOut),
+            layers.Conv1D(
+                filters=int(NumFilters/2),
+                kernel_size=KernelSize,
+                padding="same",
+                strides=2,
+                activation="relu",
+            ),
+            layers.Conv1DTranspose(
+                filters=int(NumFilters/2),
+                kernel_size=KernelSize,
+                padding="same",
+                strides=2,
+                activation="relu",
+            ),
+            layers.Dropout(rate=DropOut),
+            layers.Conv1DTranspose(
+                filters=NumFilters,
+                kernel_size=KernelSize,
+                padding="same",
+                strides=2,
+                activation="relu",
+            ),
+            layers.Conv1DTranspose(filters=3, kernel_size=KernelSize, padding="same"),
+        ]
+    )
+    model1.compile(optimizer=keras.optimizers.Adam(learning_rate=0.001), loss="mse")
+    model1.summary()
+    path_checkpoint="model_noclass_v2_"+str(4)+listToString(features)+"_checkpoint.weights.h5"
+    es_callback=keras.callbacks.EarlyStopping(monitor="val_loss", min_delta=0, patience=15)
+    modelckpt_callback=keras.callbacks.ModelCheckpoint( monitor="val_loss", filepath=path_checkpoint, verbose=1, save_weights_only=True, save_best_only=True,)
+    model1.load_weights(path_checkpoint)
+
+    model2 = keras.Sequential(
+        [
+            layers.Input(shape=(20, 3)),
+            layers.Conv1D(
+                filters=NumFilters,
+                kernel_size=KernelSize,
+                padding="same",
+                strides=2,
+                activation="relu",
+            ),
+            layers.Dropout(rate=DropOut),
+            layers.Conv1D(
+                filters=int(NumFilters/2),
+                kernel_size=KernelSize,
+                padding="same",
+                strides=2,
+                activation="relu",
+            ),
+            layers.Conv1DTranspose(
+                filters=int(NumFilters/2),
+                kernel_size=KernelSize,
+                padding="same",
+                strides=2,
+                activation="relu",
+            ),
+            layers.Dropout(rate=DropOut),
+            layers.Conv1DTranspose(
+                filters=NumFilters,
+                kernel_size=KernelSize,
+                padding="same",
+                strides=2,
+                activation="relu",
+            ),
+            layers.Conv1DTranspose(filters=3, kernel_size=KernelSize, padding="same"),
+        ]
+    )
+    model2.compile(optimizer=keras.optimizers.Adam(learning_rate=0.001), loss="mse")
+    model2.summary()
+    path_checkpoint="model_noclass_v2_"+str(20)+listToString(features)+"_checkpoint.weights.h5"
+    es_callback=keras.callbacks.EarlyStopping(monitor="val_loss", min_delta=0, patience=15)
+    modelckpt_callback=keras.callbacks.ModelCheckpoint( monitor="val_loss", filepath=path_checkpoint, verbose=1, save_weights_only=True, save_best_only=True,)
+    model2.load_weights(path_checkpoint)
+
+    # Note the reordered datalist: ranges 1..4 hold fail types 3, 2, 1, 4
+    datalist=[dataTestNorm[0],dataTestNorm[3],dataTestNorm[2],dataTestNorm[1],dataTestNorm[4]]
+    d=np.vstack((datalist))
+    x_test = create_sequences(d,4)
+    x_test_pred = model1.predict(x_test)
+    test_mae_loss = np.mean(np.abs(x_test_pred - x_test), axis=1)
+    testRanges=[]
+    TIME_STEPS=4
+    r=0
+    for i in range(len(datalist)):
+        testRanges.append([r,r+datalist[i].shape[0]])
+        r+=datalist[i].shape[0]
+    testRanges[NumberOfFailures][1]=testRanges[NumberOfFailures][1]-TIME_STEPS
+    anomalies = test_mae_loss > threshold[4]*float(options.TF)
+    anomalous_data_indices = []
+    for i in range(anomalies.shape[0]):
+        if AtLeastOneTrue(anomalies[i]):
+            anomalous_data_indices.append(i)
+
+    plt.rcParams.update({'font.size': 16})
+    fig, axes = plt.subplots(
+        nrows=2, ncols=1, figsize=(15, 7), dpi=80, facecolor="w", edgecolor="k" , sharex=True
+    )
+    for i in range(1):
+        init=0
+        end=testRanges[0][1]
+        axes[i].plot(range(init,end),x_test[testRanges[0][0]:testRanges[0][1],0,indexesToPlot[i]]*stdevs[indexesToPlot[i]]+means[indexesToPlot[i]],label="No fail")
+        init=end
+        end+=(testRanges[1][1]-testRanges[1][0])
+        for j in range(1,NumberOfFailures+1):
+            axes[i].plot(range(init,end),x_test[testRanges[j][0]:testRanges[j][1],0,indexesToPlot[i]]*stdevs[indexesToPlot[i]]+means[indexesToPlot[i]],label="Fail type "+str([3,2,1,4][j-1]), color=colorline[j-1])
+            if j<NumberOfFailures:
+                init=end
+                end+=(testRanges[j+1][1]-testRanges[j+1][0])
+        x=[]
+        y=[]
+        for k in anomalous_data_indices:
+            if (k+TIME_STEPS)<x_test.shape[0]:
+                x.append(k+TIME_STEPS)
+                y.append(x_test[k+TIME_STEPS,0,indexesToPlot[i]]*stdevs[indexesToPlot[i]]+means[indexesToPlot[i]])
+        axes[i].plot(x,y ,color='grey',marker='.',linewidth=0,label="Fail detection" )
+
+        if i==0:
+            axes[i].legend(loc='right')
+        s=''
+        s+=featureNames[features[indexesToPlot[i]]]
+        s+=' '+unitNames[features[indexesToPlot[i]]]
+        axes[i].set_ylabel(s)
+        axes[i].grid()
+
+
+
+    x_test = create_sequences(d,20)
+    x_test_pred = model2.predict(x_test)
+    test_mae_loss = np.mean(np.abs(x_test_pred - x_test), axis=1)
+    testRanges=[]
+    r=0
+    TIME_STEPS=20
+    for i in range(len(datalist)):
+        testRanges.append([r,r+datalist[i].shape[0]])
+        r+=datalist[i].shape[0]
+    testRanges[NumberOfFailures][1]=testRanges[NumberOfFailures][1]-TIME_STEPS
+    anomalies = test_mae_loss > threshold[20]*float(options.TF)
+    anomalous_data_indices = []
+    for i in range(anomalies.shape[0]):
+        if AtLeastOneTrue(anomalies[i]):
+            anomalous_data_indices.append(i)
+    print(testRanges)
+    for i in range(1):
+        init=0
+        end=testRanges[0][1]
+        axes[i+1].plot(range(init,end),x_test[testRanges[0][0]:testRanges[0][1],0,indexesToPlot[i]]*stdevs[indexesToPlot[i]]+means[indexesToPlot[i]],label="No fail")
+        init=end
+        end+=(testRanges[1][1]-testRanges[1][0])
+        for j in range(1,NumberOfFailures+1):
+            if j==1:
+                axes[i+1].plot(range(init,end),x_test[testRanges[j][0]:testRanges[j][1],0,indexesToPlot[i]]*stdevs[indexesToPlot[i]]+means[indexesToPlot[i]],label="Fail type 3", color=colorline[j-1])
+            else:
+                axes[i+1].plot(range(init,end),x_test[testRanges[j][0]:testRanges[j][1],0,indexesToPlot[i]]*stdevs[indexesToPlot[i]]+means[indexesToPlot[i]], color=colorline[j-1])
+            if j<NumberOfFailures:
+                init=end
+                end+=(testRanges[j+1][1]-testRanges[j+1][0])
+        x=[]
+        y=[]
+        for k in anomalous_data_indices:
+            if (k+TIME_STEPS)<x_test.shape[0]:
+                x.append(k+TIME_STEPS)
+                y.append(x_test[k+TIME_STEPS,0,indexesToPlot[i]]*stdevs[indexesToPlot[i]]+means[indexesToPlot[i]])
+        axes[i+1].plot(x,y ,color='grey',marker='.',linewidth=0,label="Fail detection" )
+        if i==0:
+            axes[i+1].legend(loc='right')
+        s=''
+        s+=featureNames[features[indexesToPlot[i]]]
+        s+=' '+unitNames[features[indexesToPlot[i]]]
+        axes[i+1].set_ylabel(s)
+        axes[i+1].grid()
+
+    axes[0].set_xlim(460,480)
+    axes[1].set_xlim(460,480)
+
+    axes[0].set_title('$ns=4$')
+    axes[1].set_title('$ns=20$')
+    axes[1].set_xlabel("Sample number")
+    plt.show()
+
+
+
+plotData5()
+exit(0)
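+# Editor's note: everything below this point is unreachable while the exit(0)
+# above is in place; remove it to run the 2nd-scenario plots.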
+
+
+
+#  2nd scenario. Detect only the anomaly.  Later, we will classify it
+# Test data = testnormal + testfail1 + testfail2 + testfail3 + testfail4 + testnormal
+#d=np.vstack((dataTestNorm[0],dataTestNorm[1],dataTestNorm[2],dataTestNorm[3],dataTestNorm[4],dataTestNorm[0]))
+num=100
+d=np.vstack((dataTestNorm[0][0:num,:],dataTestNorm[1][0:num,:],dataTestNorm[0][num:2*num,:],dataTestNorm[2][70:70+num,:],dataTestNorm[0][2*num-90:3*num-90,:],dataTestNorm[3][50:num+50,:],dataTestNorm[0][150:150+num,:],dataTestNorm[4][0:num+TIME_STEPS,:]))
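+# Editor's note: the stream above alternates ~100-sample slices of normal data
+# with one slice of each failure type, so detections can be judged against a
+# normal baseline between failures.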
+
+x_test = create_sequences(d,int(options.timesteps))
+x_test_pred = model.predict(x_test)
+test_mae_loss = np.mean(np.abs(x_test_pred - x_test), axis=1)
+
+
+anomalies = test_mae_loss > threshold[int(options.timesteps)]*float(options.TF)
+anomalous_data_indices = []
+for i in range(anomalies.shape[0]):
+    if AtLeastOneTrue(anomalies[i]):
+    #if anomalies[i][0] or anomalies[i][1] or anomalies[i][2] or anomalies[i][3]:
+        anomalous_data_indices.append(i)
+
+def plotData4():
+    NumFeaturesToPlot=len(indexesToPlot)
+    plt.rcParams.update({'font.size': 16})
+    fig, axes = plt.subplots(
+        nrows=NumFeaturesToPlot, ncols=1, figsize=(15, 10), dpi=80, facecolor="w", edgecolor="k",sharex=True
+    )
+    for i in range(NumFeaturesToPlot):
+        for j in range(1,NumberOfFailures+1):
+            if j==1:
+                axes[i].plot(range((j-1)*2*num,(j-1)*2*num+num),x_test[(j-1)*2*num:(j-1)*2*num+num,0,indexesToPlot[i]],label="No fail", color='C0')
+            else:
+                axes[i].plot(range((j-1)*2*num,(j-1)*2*num+num),x_test[(j-1)*2*num:(j-1)*2*num+num,0,indexesToPlot[i]], color='C0')
+            axes[i].plot(range(j*2*num-num,j*2*num),x_test[j*2*num-num:j*2*num,0,indexesToPlot[i]],label="Fail type "+str(j),color=colorline[j-1])
+        x=[]
+        y=[]
+        for k in anomalous_data_indices:
+            if (k+TIME_STEPS)<x_test.shape[0]:
+                x.append(k+TIME_STEPS)
+                y.append(x_test[k+TIME_STEPS,0,indexesToPlot[i]])
+        axes[i].plot(x,y ,color='grey',marker='.',linewidth=0,label="Fail detection" )
+
+        if i==0:
+            axes[i].legend(bbox_to_anchor=(0.9, 0.4))
+
+        s=''
+        s+=featureNames[features[indexesToPlot[i]]]
+        axes[i].set_ylabel(s)
+        axes[i].grid()
+    axes[NumFeaturesToPlot-1].set_xlabel("Sample number")
+    plt.show()
+
+
+plotData4()
