cesar committed 2 months ago
commit cf4f677e55
3 changed files with 84 additions and 225 deletions
  1. BIN       paper/Adapt25_Paper_Template_updated_AKO_v1.docx
  2. +56 -0    plotFscore_v4.py
  3. +28 -225  v4.py

BIN  paper/Adapt25_Paper_Template_updated_AKO_v1.docx


+56 -0  plotFscore_v4.py (new file)

@@ -0,0 +1,56 @@
+import matplotlib.pyplot as plt
+import numpy as np
+import pickle
+
+listOfFeatures=[['r1 s1'], ['r1 s4'], ['r1 s5'], ['r1 s1','r1 s4'], ['r1 s1','r1 s5'], ['r1 s4','r1 s5'], ['r1 s1','r1 s4','r1 s5'] ]
+featureNames={}
+featureNames['r1 s1']='$T_{evap}$'
+featureNames['r1 s4']='$T_{cond}$'
+featureNames['r1 s5']='$T_{air}$'
+featureNames['pa1 apiii']='$P_{elec}$'
+
+
+def listToString(l):
+    r=''
+    for i in l:
+        r+=str(i)
+    return(r.replace(' ',''))
+
+FS=[]
+for l in listOfFeatures:
+    print(l)
+    file = open('FScore_v4_'+listToString(l)+'.pk', 'rb')
+    FS.append(pickle.load(file))
+    file.close()
+
+plt.rcParams.update({'font.size': 16})
+fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(14, 10), dpi=80, facecolor="w", edgecolor="k",sharex=True)
+tsToPlot=[4,8,12,16]
+for row in range(2):
+    for col in range(2):
+        ind=row*2+col
+        for k in range(len(FS)):
+            ar=np.array((FS[k][tsToPlot[ind]]))
+
+            s='['
+            for i in range(len(listOfFeatures[k])):
+                s+=featureNames[listOfFeatures[k][i]]
+                if i < len(listOfFeatures[k])-1:
+                    s+=', '
+            s+=']'
+
+
+            axes[row][col].plot(ar[:,0],ar[:,1],label=s,linewidth=3)
+#axes.set_xlabel("Threshold factor")
+        if col==0:
+            axes[row][col].set_ylabel("F1-Score")
+        if row==1:
+            axes[row][col].set_xlabel("Threshold Factor ($tf$)")
+        axes[row][col].grid()
+        axes[row][col].set_title('$ns=$'+str(tsToPlot[ind]))
+axes[0][0].legend(loc='lower right')
+#plt.title(str(features))
+plt.show()
+
+
+
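
plotFscore_v4.py reads the FScore_v4_<features>.pk pickles that v4.py (below) writes during threshold-factor optimization. A minimal sketch of the layout the plotting loop assumes, namely a mapping from sequence length ns to rows of (tf, F1-Score) pairs; 'FScore_v4_r1s1.pk' is a hypothetical example name for the single-feature run ['r1 s1']:

import pickle
import numpy as np

# Assumed layout, inferred from FS[k][tsToPlot[ind]] and ar[:,0]/ar[:,1] above:
# {ns: [[tf, f1], [tf, f1], ...], ...} per feature combination.
with open('FScore_v4_r1s1.pk', 'rb') as f:   # hypothetical file name
    fs = pickle.load(f)
for ns, curve in fs.items():
    ar = np.array(curve)                     # column 0: threshold factor, column 1: F1-Score
    print(ns, ar.shape, ar[:,0].min(), ar[:,0].max())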

+28 -225  v4.py

@@ -8,7 +8,9 @@
 # because dataTrain is not stacked before create_sequences, so
 #  the sets are not aligned in time
 
-# Optimization of threshold factor changed, because it is based on F1-Score  AKI
+# Because in this case we don't look at transitories, we keep each train set independent
+
+# Optimization of threshold factor changed, because it is based on F1-Score
 
 import pandas as pd
 import matplotlib.pyplot as plt
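
The comments above turn on how create_sequences windows each set. The function is defined elsewhere in v4.py and not shown in this diff; the following is a plausible sliding-window sketch, an assumption sized so that a set of length N yields N - ts windows, which is the count the testRanges bookkeeping later in this diff uses. Because each set is windowed separately, no window spans the boundary between two sets.

import numpy as np

# Hypothetical reconstruction of create_sequences (not shown in the diff):
# slide a window of ts samples over one set; len(values) - ts windows result.
def create_sequences(values, ts):
    output = []
    for i in range(len(values) - ts):
        output.append(values[i : i + ts])
    return np.stack(output)   # shape: (N - ts, ts, num_features)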
@@ -69,6 +71,7 @@
 
 features=['r1 s1','r1 s4','r1 s5','pa1 apiii']
 features=['r1 s1','r1 s4','r1 s5']
+features=['r1 s1','r1 s5']
 featureNames={}
 featureNames['r1 s1']='$T_{evap}$'
 featureNames['r1 s4']='$T_{cond}$'
@@ -185,7 +188,6 @@
     # Precision: Rate of positive results:  TP/(TP+FP)
     # F1-score: predictive performance measure: 2*Precision*Sensitivity/(Precision+Sensitivity)
     # F2-score: predictive performance measure:  2*Specificity*Sensitivity/(Specificity+Sensitivity)
-
     x_test = create_sequences(testList[0],ts)
     x_test_pred = model.predict(x_test)
     test_mae_loss = np.mean(np.abs(x_test_pred - x_test), axis=1)
@@ -214,31 +216,28 @@
         FN[i-1] = anomalies.shape[0]-count
         Sensitivity[i-1]=TP[i-1]/(TP[i-1]+FN[i-1])
         Precision[i-1]=TP[i-1]/(TP[i-1]+FP)
-
     GlobalSensitivity=TP.sum()/(TP.sum()+FN.sum())
     Specificity=TN/(TN+FP)
     Accuracy=(TN+TP.sum())/(TN+TP.sum()+FP+FN.sum())
     GlobalPrecision=TP.sum()/(TP.sum()+FP)
     F1Score= 2*GlobalPrecision*GlobalSensitivity/(GlobalPrecision+GlobalSensitivity)
     F2Score = 2*Specificity*GlobalSensitivity/(Specificity+GlobalSensitivity)
-
-    print("Sensitivity: ",Sensitivity)
+    print("Global Precision: ",GlobalPrecision)
+    print("Precision: ",Precision)
     print("Global Sensitivity: ",GlobalSensitivity)
-    #print("Precision: ",Precision)
-    #print("Global Precision: ",GlobalPrecision)
-    print("Specifity: ",Specificity)
+    print("Sensitivity: ",Sensitivity)
+    #print("Specifity: ",Specificity)
     #print("Accuracy: ",Accuracy)
-    #print("F1Score: ",F1Score)
-    print("F2Score: ",F2Score)
+    print("F1Score: ",F1Score)
+    #print("F2Score: ",F2Score)
     #print("FP: ",FP)
     #return Sensitivity+Specifity
-    return F2Score
+    return F1Score
 
 FScoreHash={}
 threshold={}
 def getFScore(timestep,datalist):
     FScoreHash[timestep]=[]
-    # plots FSCore as a function of Threshold  Factor
     tf=0.3
     while tf<8:
         th=threshold[timestep]*tf
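
getFScore now returns F1Score instead of F2Score and reorders the prints accordingly. The global metrics follow the formulas in the comments above; a self-contained check with invented confusion counts:

import numpy as np

# Invented per-failure-type counts (TP/FN) plus global TN/FP, mirroring v4.py's variables.
TP = np.array([90., 80., 85., 70.])
FN = np.array([10., 20., 15., 30.])
TN, FP = 950., 50.

GlobalSensitivity = TP.sum()/(TP.sum()+FN.sum())   # 0.8125
GlobalPrecision   = TP.sum()/(TP.sum()+FP)         # ~0.8667
# F1 is the harmonic mean of global precision and global sensitivity:
F1Score = 2*GlobalPrecision*GlobalSensitivity/(GlobalPrecision+GlobalSensitivity)
print(F1Score)                                     # ~0.839 with these numbers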
@@ -257,7 +256,7 @@
         ar=np.array((FS[k]))
         axes.plot(ar[:,0],ar[:,1],label="$ns=$"+str(k),linewidth=3)
     axes.set_xlabel("Threshold factor ($tf$)")
-    axes.set_ylabel("FScore")
+    axes.set_ylabel("F1-Score")
     axes.legend()
     axes.grid()
     s='['
@@ -319,7 +318,7 @@
         )
         model.compile(optimizer=keras.optimizers.Adam(learning_rate=0.001), loss="mse")
         model.summary()
-        path_checkpoint="model_noclass_v2_"+str(timesteps)+listToString(features)+"_checkpoint.weights.h5"
+        path_checkpoint="model_noclass_v4_"+str(timesteps)+listToString(features)+"_checkpoint.weights.h5"
         es_callback=keras.callbacks.EarlyStopping(monitor="val_loss", min_delta=0, patience=15)
         modelckpt_callback=keras.callbacks.ModelCheckpoint( monitor="val_loss", filepath=path_checkpoint, verbose=1, save_weights_only=True, save_best_only=True,)
 
         x_train_pred=model.predict(x_train[0])
327
         x_train_pred=model.predict(x_train[0])
329
         train_mae_loss=np.mean(np.abs(x_train_pred - x_train[0]), axis=1)
328
         train_mae_loss=np.mean(np.abs(x_train_pred - x_train[0]), axis=1)
330
         threshold[timesteps]=np.max(train_mae_loss,axis=0)
329
         threshold[timesteps]=np.max(train_mae_loss,axis=0)
331
-    file = open('threshold'+listToString(features)+'.pk', 'wb')
330
+    file = open('threshold_v4_'+listToString(features)+'.pk', 'wb')
332
     pickle.dump(threshold, file)
331
     pickle.dump(threshold, file)
333
     file.close()
332
     file.close()
334
     exit(0)
333
     exit(0)
335
 else:
334
 else:
336
-    file = open('threshold'+listToString(features)+'.pk', 'rb')
335
+    file = open('threshold_v4_'+listToString(features)+'.pk', 'rb')
337
     threshold=pickle.load(file)
336
     threshold=pickle.load(file)
338
     file.close()
337
     file.close()
339
 
338
 
384
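
The renamed threshold_v4_*.pk pickle stores, for each sequence length, the per-feature maximum of the training reconstruction error; detection later scales it by the threshold factor. A minimal sketch of that rule with invented arrays (names reused from the diff):

import numpy as np

rng = np.random.default_rng(0)
train_mae_loss = rng.random((500, 3))        # (sequences, features), invented
test_mae_loss  = 2.0*rng.random((200, 3))

threshold = np.max(train_mae_loss, axis=0)   # per-feature max training error, as in v4.py
tf = 1.5                                     # threshold factor (options.TF)
anomalies = test_mae_loss > threshold*tf     # boolean (200, 3)
flagged = anomalies.any(axis=1)              # the diff's AtLeastOneTrue, per sequence
print(flagged.sum(), "of", len(flagged), "sequences flagged")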
     
383
     
385
     if options.optimizetf:
384
     if options.optimizetf:
386
         for timesteps in range(4,21,4):
385
         for timesteps in range(4,21,4):
387
-            path_checkpoint="model_noclass_v2_"+str(timesteps)+listToString(features)+"_checkpoint.weights.h5"
386
+            path_checkpoint="model_noclass_v4_"+str(timesteps)+listToString(features)+"_checkpoint.weights.h5"
388
             es_callback=keras.callbacks.EarlyStopping(monitor="val_loss", min_delta=0, patience=15)
387
             es_callback=keras.callbacks.EarlyStopping(monitor="val_loss", min_delta=0, patience=15)
389
             modelckpt_callback=keras.callbacks.ModelCheckpoint( monitor="val_loss", filepath=path_checkpoint, verbose=1, save_weights_only=True, save_best_only=True,)
388
             modelckpt_callback=keras.callbacks.ModelCheckpoint( monitor="val_loss", filepath=path_checkpoint, verbose=1, save_weights_only=True, save_best_only=True,)
390
             model.load_weights(path_checkpoint)
389
             model.load_weights(path_checkpoint)
391
             getFScore(timesteps,[dataTestNorm[0],dataTrainNorm[1],dataTrainNorm[2],dataTrainNorm[3],dataTrainNorm[4]])
390
             getFScore(timesteps,[dataTestNorm[0],dataTrainNorm[1],dataTrainNorm[2],dataTrainNorm[3],dataTrainNorm[4]])
392
-        file = open('FScore'+listToString(features)+'.pk', 'wb')
391
+        file = open('FScore_v4_'+listToString(features)+'.pk', 'wb')
393
         pickle.dump(FScoreHash, file)
392
         pickle.dump(FScoreHash, file)
394
         file.close()
393
         file.close()
395
 
394
 
396
 
395
 
397
-    path_checkpoint="model_noclass_v2_"+str(options.timesteps)+listToString(features)+"_checkpoint.weights.h5"
396
+    path_checkpoint="model_noclass_v4_"+str(options.timesteps)+listToString(features)+"_checkpoint.weights.h5"
398
     es_callback=keras.callbacks.EarlyStopping(monitor="val_loss", min_delta=0, patience=15)
397
     es_callback=keras.callbacks.EarlyStopping(monitor="val_loss", min_delta=0, patience=15)
399
     modelckpt_callback=keras.callbacks.ModelCheckpoint( monitor="val_loss", filepath=path_checkpoint, verbose=1, save_weights_only=True, save_best_only=True,)
398
     modelckpt_callback=keras.callbacks.ModelCheckpoint( monitor="val_loss", filepath=path_checkpoint, verbose=1, save_weights_only=True, save_best_only=True,)
400
     model.load_weights(path_checkpoint)
399
     model.load_weights(path_checkpoint)
401
 
400
 
402
 
401
 
403
-    file = open('FScore'+listToString(features)+'.pk', 'rb')
402
+    file = open('FScore_v4_'+listToString(features)+'.pk', 'rb')
404
     FS=pickle.load(file)
403
     FS=pickle.load(file)
405
     file.close()
404
     file.close()
406
 
405
 
415
 # For Failure data, we can use Train data becasue not used for training and includes the firsts samples
414
 # For Failure data, we can use Train data becasue not used for training and includes the firsts samples
416
 #datalist=[dataTestNorm[0],dataTrainNorm[1],dataTrainNorm[2],dataTrainNorm[3],dataTrainNorm[4]]
415
 #datalist=[dataTestNorm[0],dataTrainNorm[1],dataTrainNorm[2],dataTrainNorm[3],dataTrainNorm[4]]
417
 datalist=[dataTestNorm[0],dataTestNorm[1],dataTestNorm[2],dataTestNorm[3],dataTestNorm[4]]
416
 datalist=[dataTestNorm[0],dataTestNorm[1],dataTestNorm[2],dataTestNorm[3],dataTestNorm[4]]
418
-d=np.vstack((datalist))
419
 
417
 
420
-x_test = create_sequences(d,int(options.timesteps))
418
+
419
+x_test=create_sequences(datalist[0],int(options.timesteps))
420
+for i in range(1,len(datalist)):
421
+    x_test=np.vstack((x_test,create_sequences(datalist[i],int(options.timesteps))))
421
 x_test_pred = model.predict(x_test)
422
 x_test_pred = model.predict(x_test)
422
 test_mae_loss = np.mean(np.abs(x_test_pred - x_test), axis=1)
423
 test_mae_loss = np.mean(np.abs(x_test_pred - x_test), axis=1)
423
 
424
 
424
 
425
 
425
 # Define ranges for plotting in different colors
426
 # Define ranges for plotting in different colors
426
 testRanges=[]
427
 testRanges=[]
427
-
428
 r=0
428
 r=0
429
 for i in range(len(datalist)):
429
 for i in range(len(datalist)):
430
-    testRanges.append([r,r+datalist[i].shape[0]])
431
-    r+=datalist[i].shape[0]
430
+    testRanges.append([r,r+datalist[i].shape[0]-int(options.timesteps)])
431
+    r+=datalist[i].shape[0]-int(options.timesteps)
432
 
432
 
433
 #r=dataTestNorm[0].shape[0]
433
 #r=dataTestNorm[0].shape[0]
434
 #testRanges.append([0,r])
434
 #testRanges.append([0,r])
437
 #    testRanges.append([r,rnext] )
437
 #    testRanges.append([r,rnext] )
438
 #    r=rnext
438
 #    r=rnext
439
 
439
 
440
-# Drop the last TIME_STEPS for plotting
441
-testRanges[NumberOfFailures][1]=testRanges[NumberOfFailures][1]-TIME_STEPS
442
 
440
 
443
 
441
 
444
 anomalies = test_mae_loss > threshold[int(options.timesteps)]*float(options.TF)
442
 anomalies = test_mae_loss > threshold[int(options.timesteps)]*float(options.TF)
470
         x=[]
468
         x=[]
471
         y=[]
469
         y=[]
472
         for k in anomalous_data_indices:
470
         for k in anomalous_data_indices:
473
-            if (k+TIME_STEPS)<x_test.shape[0]:
474
-                x.append(k+TIME_STEPS)
475
-                y.append(x_test[k+TIME_STEPS,0,indexesToPlot[i]]*stdevs[i]+means[i])
471
+            if (k)<x_test.shape[0]:
472
+                x.append(k)
473
+                y.append(x_test[k,0,indexesToPlot[i]]*stdevs[i]+means[i])
476
         axes[i].plot(x,y ,color='black',marker='.',linewidth=0,label="Fail detection" )
474
         axes[i].plot(x,y ,color='black',marker='.',linewidth=0,label="Fail detection" )
477
 
475
 
478
-
479
         init=0
476
         init=0
480
         end=testRanges[0][1]
477
         end=testRanges[0][1]
481
         axes[i].plot(range(init,end),x_test[testRanges[0][0]:testRanges[0][1],0,indexesToPlot[i]]*stdevs[i]+means[i],label="No fail")
478
         axes[i].plot(range(init,end),x_test[testRanges[0][0]:testRanges[0][1],0,indexesToPlot[i]]*stdevs[i]+means[i],label="No fail")
487
                 init=end
484
                 init=end
488
                 end+=(testRanges[j+1][1]-testRanges[j+1][0])
485
                 end+=(testRanges[j+1][1]-testRanges[j+1][0])
489
 
486
 
490
-
491
         if i==(NumFeatures-1):
487
         if i==(NumFeatures-1):
492
             axes[i].legend(loc='right')
488
             axes[i].legend(loc='right')
493
         s=''
489
         s=''
505
 plotData3()
501
 plotData3()
506
 
502
 
507
 
503
 
508
-def plotData5():
509
-    model1 = keras.Sequential(
510
-        [
511
-            layers.Input(shape=(4, 3)),
512
-            layers.Conv1D(
513
-                filters=NumFilters,
514
-                kernel_size=KernelSize,
515
-                padding="same",
516
-                strides=2,
517
-                activation="relu",
518
-            ),
519
-            layers.Dropout(rate=DropOut),
520
-            layers.Conv1D(
521
-                filters=int(NumFilters/2),
522
-                kernel_size=KernelSize,
523
-                padding="same",
524
-                strides=2,
525
-                activation="relu",
526
-            ),
527
-            layers.Conv1DTranspose(
528
-                filters=int(NumFilters/2),
529
-                kernel_size=KernelSize,
530
-                padding="same",
531
-                strides=2,
532
-                activation="relu",
533
-            ),
534
-            layers.Dropout(rate=DropOut),
535
-            layers.Conv1DTranspose(
536
-                filters=NumFilters,
537
-                kernel_size=KernelSize,
538
-                padding="same",
539
-                strides=2,
540
-                activation="relu",
541
-            ),
542
-            layers.Conv1DTranspose(filters=3, kernel_size=KernelSize, padding="same"),
543
-        ]
544
-    )
545
-    model1.compile(optimizer=keras.optimizers.Adam(learning_rate=0.001), loss="mse")
546
-    model1.summary()
547
-    path_checkpoint="model_noclass_v2_"+str(4)+listToString(features)+"_checkpoint.weights.h5"
548
-    es_callback=keras.callbacks.EarlyStopping(monitor="val_loss", min_delta=0, patience=15)
549
-    modelckpt_callback=keras.callbacks.ModelCheckpoint( monitor="val_loss", filepath=path_checkpoint, verbose=1, save_weights_only=True, save_best_only=True,)
550
-    model1.load_weights(path_checkpoint)
551
-
552
-    model2 = keras.Sequential(
553
-        [
554
-            layers.Input(shape=(20, 3)),
555
-            layers.Conv1D(
556
-                filters=NumFilters,
557
-                kernel_size=KernelSize,
558
-                padding="same",
559
-                strides=2,
560
-                activation="relu",
561
-            ),
562
-            layers.Dropout(rate=DropOut),
563
-            layers.Conv1D(
564
-                filters=int(NumFilters/2),
565
-                kernel_size=KernelSize,
566
-                padding="same",
567
-                strides=2,
568
-                activation="relu",
569
-            ),
570
-            layers.Conv1DTranspose(
571
-                filters=int(NumFilters/2),
572
-                kernel_size=KernelSize,
573
-                padding="same",
574
-                strides=2,
575
-                activation="relu",
576
-            ),
577
-            layers.Dropout(rate=DropOut),
578
-            layers.Conv1DTranspose(
579
-                filters=NumFilters,
580
-                kernel_size=KernelSize,
581
-                padding="same",
582
-                strides=2,
583
-                activation="relu",
584
-            ),
585
-            layers.Conv1DTranspose(filters=3, kernel_size=KernelSize, padding="same"),
586
-        ]
587
-    )
588
-    model2.compile(optimizer=keras.optimizers.Adam(learning_rate=0.001), loss="mse")
589
-    model2.summary()
590
-    path_checkpoint="model_noclass_v2_"+str(20)+listToString(features)+"_checkpoint.weights.h5"
591
-    es_callback=keras.callbacks.EarlyStopping(monitor="val_loss", min_delta=0, patience=15)
592
-    modelckpt_callback=keras.callbacks.ModelCheckpoint( monitor="val_loss", filepath=path_checkpoint, verbose=1, save_weights_only=True, save_best_only=True,)
593
-    model2.load_weights(path_checkpoint)
594
-
595
-    datalist=[dataTestNorm[0],dataTestNorm[3],dataTestNorm[2],dataTestNorm[1],dataTestNorm[4]]
596
-    d=np.vstack((datalist))
597
-    x_test = create_sequences(d,4)
598
-    x_test_pred = model1.predict(x_test)
599
-    test_mae_loss = np.mean(np.abs(x_test_pred - x_test), axis=1)
600
-    testRanges=[]
601
-    TIME_STEPS=4
602
-    r=0
603
-    for i in range(len(datalist)):
604
-        testRanges.append([r,r+datalist[i].shape[0]])
605
-        r+=datalist[i].shape[0]
606
-    testRanges[NumberOfFailures][1]=testRanges[NumberOfFailures][1]-TIME_STEPS
607
-    anomalies = test_mae_loss > threshold[4]*float(options.TF)
608
-    anomalous_data_indices = []
609
-    for i in range(anomalies.shape[0]):
610
-        if AtLeastOneTrue(anomalies[i]):
611
-            anomalous_data_indices.append(i)
612
-
613
-    plt.rcParams.update({'font.size': 16})
614
-    fig, axes = plt.subplots(
615
-        nrows=2, ncols=1, figsize=(15, 7), dpi=80, facecolor="w", edgecolor="k" , sharex=True
616
- )
617
-    for i in range(1):
618
-        init=0
619
-        end=testRanges[0][1]
620
-        axes[i].plot(range(init,end),x_test[testRanges[0][0]:testRanges[0][1],0,indexesToPlot[i]]*stdevs[i]+means[i],label="No fail")
621
-        init=end
622
-        end+=(testRanges[1][1]-testRanges[1][0])
623
-        for j in range(1,NumberOfFailures+1):
624
-            axes[i].plot(range(init,end),x_test[testRanges[j][0]:testRanges[j][1],0,indexesToPlot[i]]*stdevs[i]+means[i],label="Fail type "+str(j), color=colorline[j-1])
625
-            if j<NumberOfFailures:
626
-                init=end
627
-                end+=(testRanges[j+1][1]-testRanges[j+1][0])
628
-        x=[]
629
-        y=[]
630
-        for k in anomalous_data_indices:
631
-            if (k+TIME_STEPS)<x_test.shape[0]:
632
-                x.append(k+TIME_STEPS)
633
-                y.append(x_test[k+TIME_STEPS,0,indexesToPlot[i]]*stdevs[i]+means[i])
634
-        axes[i].plot(x,y ,color='black',marker='.',linewidth=0,label="Fail detection" )
635
-
636
-        if i==(NumFeatures-1):
637
-            axes[i].legend(loc='right')
638
-        s=''
639
-        s+=featureNames[features[indexesToPlot[i]]]
640
-        s+=' '+unitNames[features[indexesToPlot[i]]]
641
-        axes[i].set_ylabel(s)
642
-        axes[i].grid()
643
-
644
-
645
-    x_test = create_sequences(d,20)
646
-    x_test_pred = model2.predict(x_test)
647
-    test_mae_loss = np.mean(np.abs(x_test_pred - x_test), axis=1)
648
-    testRanges=[]
649
-    r=0
650
-    TIME_STEPS=20
651
-    for i in range(len(datalist)):
652
-        testRanges.append([r,r+datalist[i].shape[0]])
653
-        r+=datalist[i].shape[0]
654
-    testRanges[NumberOfFailures][1]=testRanges[NumberOfFailures][1]-TIME_STEPS
655
-    anomalies = test_mae_loss > threshold[20]*float(options.TF)
656
-    anomalous_data_indices = []
657
-    for i in range(anomalies.shape[0]):
658
-        if AtLeastOneTrue(anomalies[i]):
659
-            anomalous_data_indices.append(i)
660
-    print(testRanges)
661
-    for i in range(1):
662
-        init=0
663
-        end=testRanges[0][1]
664
-        axes[i+1].plot(range(init,end),x_test[testRanges[0][0]:testRanges[0][1],0,indexesToPlot[i]]*stdevs[i]+means[i],label="No fail")
665
-        init=end
666
-        end+=(testRanges[1][1]-testRanges[1][0])
667
-        for j in range(1,NumberOfFailures+1):
668
-            if j==1:
669
-                axes[i+1].plot(range(init,end),x_test[testRanges[j][0]:testRanges[j][1],0,indexesToPlot[i]]*stdevs[i]+means[i],label="Fail type 3", color=colorline[j-1])
670
-            else:
671
-                axes[i+1].plot(range(init,end),x_test[testRanges[j][0]:testRanges[j][1],0,indexesToPlot[i]]*stdevs[i]+means[i], color=colorline[j-1])
672
-            if j<NumberOfFailures:
673
-                init=end
674
-                end+=(testRanges[j+1][1]-testRanges[j+1][0])
675
-        x=[]
676
-        y=[]
677
-        for k in anomalous_data_indices:
678
-            if (k+TIME_STEPS)<x_test.shape[0]:
679
-                x.append(k+TIME_STEPS)
680
-                y.append(x_test[k+TIME_STEPS,0,indexesToPlot[i]]*stdevs[i]+means[i])
681
-        axes[i+1].plot(x,y ,color='black',marker='.',linewidth=0,label="Fail detection" )
682
-        if i==0:
683
-            axes[i+1].legend(loc='right')
684
-        s=''
685
-        s+=featureNames[features[indexesToPlot[i]]]
686
-        s+=' '+unitNames[features[indexesToPlot[i]]]
687
-        axes[i+1].set_ylabel(s)
688
-        axes[i+1].grid()
689
-    
690
-    axes[0].set_xlim(460,480)
691
-    axes[1].set_xlim(460,480)
692
-
693
-    axes[0].set_title('$ns=4$')
694
-    axes[1].set_title('$ns=20$')
695
-    axes[1].set_xlabel("Sample number")
696
-    plt.show()
697
-
698
-
699
-
700
-plotData5()
701
 exit(0)
504
 exit(0)
702
 
505
 
703
 
506
 
