cesar 1 天前
父节点
当前提交
3b9f8c6fbb

+ 7907
- 0
data/2024-08-07_3_.csv
文件差异内容过多而无法显示
查看文件


+ 7906
- 0
data/2024-08-07_5_.csv
文件差异内容过多而无法显示
查看文件


+ 7908
- 0
data/2024-08-08_3_.csv
文件差异内容过多而无法显示
查看文件


+ 2900
- 0
data/2024-08-08_5_.csv
文件差异内容过多而无法显示
查看文件


+ 7907
- 0
data/2024-08-09_3_.csv
文件差异内容过多而无法显示
查看文件


+ 7902
- 0
data/2024-08-10_5_.csv
文件差异内容过多而无法显示
查看文件


+ 7903
- 0
data/2024-08-11_5_.csv
文件差异内容过多而无法显示
查看文件


+ 7899
- 0
data/2024-08-12_5_.csv
文件差异内容过多而无法显示
查看文件


+ 7905
- 0
data/2024-08-13_5_.csv
文件差异内容过多而无法显示
查看文件


+ 7844
- 0
data/2024-12-11_5_.csv
文件差异内容过多而无法显示
查看文件


+ 7847
- 0
data/2024-12-12_5_.csv
文件差异内容过多而无法显示
查看文件


+ 7846
- 0
data/2024-12-13_5_.csv
文件差异内容过多而无法显示
查看文件


+ 7847
- 0
data/2024-12-14_5_.csv
文件差异内容过多而无法显示
查看文件


+ 7845
- 0
data/2024-12-15_5_.csv
文件差异内容过多而无法显示
查看文件


+ 7850
- 0
data/2024-12-18_5_.csv
文件差异内容过多而无法显示
查看文件


+ 7833
- 0
data/2024-12-19_5_.csv
文件差异内容过多而无法显示
查看文件


+ 7822
- 0
data/2024-12-20_5_.csv
文件差异内容过多而无法显示
查看文件


+ 7828
- 0
data/2024-12-28_5_.csv
文件差异内容过多而无法显示
查看文件


+ 7821
- 0
data/2024-12-29_5_.csv
文件差异内容过多而无法显示
查看文件


+ 7825
- 0
data/2024-12-30_5_.csv
文件差异内容过多而无法显示
查看文件


+ 7825
- 0
data/2024-12-31_5_.csv
文件差异内容过多而无法显示
查看文件


+ 7828
- 0
data/2025-01-01_5_.csv
文件差异内容过多而无法显示
查看文件


二进制
model_0._checkpoint.weights.h5 查看文件


二进制
model_1._checkpoint.weights.h5 查看文件


二进制
model_2._checkpoint.weights.h5 查看文件


二进制
model_3._checkpoint.weights.h5 查看文件


二进制
model_4._checkpoint.weights.h5 查看文件


+ 356
- 0
v0_multifailure.py 查看文件

@@ -0,0 +1,356 @@
1
# Csar Fdez, UdL, 2025
# Anomaly detection and failure classification for freezer sensor data:
# one convolutional autoencoder is trained per class (class 0 = no failure),
# and reconstruction error is used downstream to detect and classify failures.
import pandas as pd
import matplotlib.pyplot as plt
import datetime
import numpy as np
import keras
import os.path
import pickle
from keras import layers
from optparse import OptionParser


# --train enables model fitting; without it, checkpointed weights are loaded.
parser = OptionParser()
parser.add_option("-t", "--train", dest="train", help="Trains the models (false)", default=False, action="store_true")

(options, args) = parser.parse_args()


# data files arrays. Index:
# 0.  No failure
# 1.  Blocked evaporator
# 2.  Blocked condenser
# 3.  Fan condenser not working
# 4.  Open door

NumberOfFailures=4
NumberOfFailures=3  # So far, we have only data for the first 3 types of failures
datafiles=[]
for i in range(NumberOfFailures+1):
    datafiles.append([])

# Next set of data corresponds to Freezer, SP=-26
datafiles[0]=['2024-08-07_5_','2024-08-08_5_']
datafiles[1]=['2024-12-11_5_', '2024-12-12_5_','2024-12-13_5_','2024-12-14_5_','2024-12-15_5_']
datafiles[2]=['2024-12-18_5_','2024-12-19_5_','2024-12-20_5_']
datafiles[3]=['2024-12-28_5_','2024-12-29_5_','2024-12-30_5_','2024-12-31_5_','2025-01-01_5_']
#datafiles[4]=[]

# Features suggested by Xavier
# NOTE(review): these column names are assumed to exist in every CSV -- verify schema.
features=['r1 s1','r1 s4','r1 s5','pa1 apiii']
NumFeatures=len(features)

# Load and concatenate all CSV files belonging to each class.
df_list=[]
for i in range(NumberOfFailures+1):
    df_list.append([])

for i in range(NumberOfFailures+1):
    dftemp=[]
    for f in datafiles[i]:
        print("                 ", f)
        #df1 = pd.read_csv('./data/'+f+'.csv', parse_dates=['datetime'], dayfirst=True, index_col='datetime')
        df1 = pd.read_csv('./data/'+f+'.csv')
        dftemp.append(df1)
    df_list[i]=pd.concat(dftemp)


# subsampled to 5'  =  30 * 10"
# We consider samples every 5' because in production, we will only have data at this frequency
subsamplingrate=30

dataframe=[]
for i in range(NumberOfFailures+1):
    dataframe.append([])

for i in range(NumberOfFailures+1):
    datalength=df_list[i].shape[0]
    # Take every 30th row (10" raw cadence -> 5' effective cadence), keep only
    # the selected feature columns, and drop incomplete rows.
    dataframe[i]=df_list[i].iloc[range(0,datalength,subsamplingrate)][features]
    dataframe[i].reset_index(inplace=True,drop=True)
    dataframe[i].dropna(inplace=True)


# Train data is first 2/3 of data
# Test data is: last 1/3 of data
dataTrain=[]
dataTest=[]
for i in range(NumberOfFailures+1):
    dataTrain.append(dataframe[i].values[0:int(dataframe[i].shape[0]*2/3),:])
    dataTest.append(dataframe[i].values[int(dataframe[i].shape[0]*2/3):,:])
79
+
80
+
81
def normalize2(train, test):
    """Z-score normalize *train* and *test* using train-only statistics.

    Parameters
    ----------
    train, test : np.ndarray, shape (n_samples, n_features)

    Returns
    -------
    tuple (train_norm, test_norm)
        Both arrays normalized with the per-feature mean and standard
        deviation of *train* only, which avoids train/test leakage.
        (The original header comment claimed train and test were merged;
        they never were -- this docstring states the actual contract.)
    """
    means = train.mean(axis=0)
    stdevs = train.std(axis=0)
    # Guard against zero-variance (constant) columns, which would otherwise
    # produce division-by-zero NaNs/Infs downstream.
    stdevs = np.where(stdevs == 0, 1.0, stdevs)
    return ((train - means) / stdevs, (test - means) / stdevs)
89
+
90
# Per-class normalized train/test arrays.  Each class is normalized with its
# OWN training statistics (see normalize2), so reconstruction errors are
# comparable within a class but the scalers differ between classes.
dataTrainNorm=[]
dataTestNorm=[]
for i in range(NumberOfFailures+1):
    dataTrainNorm.append([])
    dataTestNorm.append([])

for i in range(NumberOfFailures+1):
    (dataTrainNorm[i],dataTestNorm[i])=normalize2(dataTrain[i],dataTest[i])
98
+
99
def plotData():
    """Plot the first two normalized training features, one subplot row per
    failure class (row 0 = no failure).

    Reads module globals ``dataTrainNorm`` and ``NumberOfFailures``;
    blocks on ``plt.show()``.
    """
    fig, axes = plt.subplots(
        nrows=NumberOfFailures+1, ncols=2, figsize=(15, 20), dpi=80, facecolor="w", edgecolor="k", sharex=True
    )
    for i in range(NumberOfFailures+1):
        # BUG FIX: labels previously read "Fail 0" on every row even though
        # row i shows class i; label with the actual class index.
        axes[i][0].plot(dataTrainNorm[i][:, 0], label="Fail "+str(i)+",  feature 0")
        axes[i][1].plot(dataTrainNorm[i][:, 1], label="Fail "+str(i)+",  feature 1")
        axes[i][0].legend()
        axes[i][1].legend()
    plt.show()
110
+
111
+#plotData()
112
+
113
+
114
# Window length (in 5-minute samples) of each autoencoder input sequence.
TIME_STEPS = 24


def create_sequences(values, time_steps=TIME_STEPS):
    """Slice *values* into overlapping sliding windows of length *time_steps*.

    Parameters
    ----------
    values : array-like, shape (n_samples, n_features)
    time_steps : int, window length (defaults to TIME_STEPS)

    Returns
    -------
    np.ndarray of shape (n_samples - time_steps + 1, time_steps, n_features).
    """
    windows = [
        values[start:start + time_steps]
        for start in range(len(values) - time_steps + 1)
    ]
    return np.stack(windows)
120
+
121
# One training tensor of sliding windows per class; each entry has shape
# (n_windows, TIME_STEPS, NumFeatures).
x_train=[]
for i in range(NumberOfFailures+1):
    x_train.append(create_sequences(dataTrainNorm[i]))
124
+
125
+
126
# One convolutional autoencoder per class (class 0 = no failure).  Each model
# learns to reconstruct its own class's windows; reconstruction error is used
# later for detection (model 0) and classification (models 1..N).
model=[]
modelckpt_callback =[]
es_callback =[]
path_checkpoint=[]
for i in range(NumberOfFailures+1):
    model.append([])
    # Encoder: two strided Conv1D layers (64 -> 32 filters) halve the time
    # dimension twice; the decoder mirrors them with Conv1DTranspose layers
    # back to the input shape.
    model[i] = keras.Sequential(
        [
            layers.Input(shape=(x_train[i].shape[1], x_train[i].shape[2])),
            layers.Conv1D(
                filters=64,
                kernel_size=7,
                padding="same",
                strides=2,
                activation="relu",
            ),
            layers.Dropout(rate=0.2),
            layers.Conv1D(
                filters=32,
                kernel_size=7,
                padding="same",
                strides=2,
                activation="relu",
            ),
            layers.Conv1DTranspose(
                filters=32,
                kernel_size=7,
                padding="same",
                strides=2,
                activation="relu",
            ),
            layers.Dropout(rate=0.2),
            layers.Conv1DTranspose(
                filters=64,
                kernel_size=7,
                padding="same",
                strides=2,
                activation="relu",
            ),
            # Final layer projects back to the original number of features.
            layers.Conv1DTranspose(filters=x_train[i].shape[2], kernel_size=7, padding="same"),
        ]
    )
    model[i].compile(optimizer=keras.optimizers.Adam(learning_rate=0.001), loss="mse")
    model[i].summary()
    # Best-validation-loss weights are checkpointed per model so that
    # inference runs (without --train) can reload them from disk.
    path_checkpoint.append("model_"+str(i)+"._checkpoint.weights.h5")
    es_callback.append(keras.callbacks.EarlyStopping(monitor="val_loss", min_delta=0, patience=15))
    modelckpt_callback.append(keras.callbacks.ModelCheckpoint( monitor="val_loss", filepath=path_checkpoint[i], verbose=1, save_weights_only=True, save_best_only=True,))
173
+
174
+
175
if options.train:
    # Train each autoencoder on its own class (input == target), with early
    # stopping and best-weights checkpointing.
    history=[]
    for i in range(NumberOfFailures+1):
        history.append(model[i].fit( x_train[i], x_train[i], epochs=400, batch_size=128, validation_split=0.3, callbacks=[  es_callback[i], modelckpt_callback[i]      ],))

    # Plot training/validation loss curves, two models per figure row.
    fig, axes = plt.subplots(
        nrows=int(np.ceil((NumberOfFailures+1)/2)), ncols=2, figsize=(15, 20), dpi=80, facecolor="w", edgecolor="k",sharex=True
    )
    for i in range(int(np.ceil((NumberOfFailures+1)/2))):
        for j in range(2):
            r=2*i+j
            # Guard the last cell when the number of models is odd.
            if r < NumberOfFailures+1:
                axes[i][j].plot(history[r].history["loss"], label="Training Loss")
                axes[i][j].plot(history[r].history["val_loss"], label="Val Loss")
                axes[i][j].legend()
    plt.show()
else:
    # Inference mode: reload the checkpointed best weights for every model.
    for i in range(NumberOfFailures+1):
        model[i].load_weights(path_checkpoint[i])
194
+
195
+
196
# Per-model anomaly threshold: the maximum training-set reconstruction MAE
# per feature (np.mean over axis=1 averages over the time dimension, leaving
# one MAE value per feature and window; np.max takes the worst window).
x_train_pred=[]
train_mae_loss=[]
threshold=[]
for i in range(NumberOfFailures+1):
    x_train_pred.append(model[i].predict(x_train[i]))
    train_mae_loss.append(np.mean(np.abs(x_train_pred[i] - x_train[i]), axis=1))
    threshold.append(np.max(train_mae_loss[i],axis=0))

print("Threshold : ",threshold)
for i in range(NumberOfFailures+1):
    threshold[i]=threshold[i]*2
# Threshold is enlarged because, otherwise, 5' subsampling yields many false positives
208
+
209
+
210
#  1st scenario. Detect only anomaly.  Later, we will classify it.
# Test data = testnormal + testfail1 + testfail2 + testfail3 + testnormal
d=np.vstack((dataTestNorm[0],dataTestNorm[1],dataTestNorm[2],dataTestNorm[3],dataTestNorm[0]))

x_test = create_sequences(d)
# Detection uses ONLY the no-failure model (index 0): a window is anomalous
# when model 0 fails to reconstruct it well.
x_test_pred = model[0].predict(x_test)
test_mae_loss = np.mean(np.abs(x_test_pred - x_test), axis=1)

# Row-index ranges of each concatenated segment within d.
# NOTE(review): these are ROW offsets into d, but they are used to index
# x_test, whose length is len(d) - TIME_STEPS + 1 and whose windows straddle
# segment boundaries -- the ranges are therefore approximate near the joins.
testRanges=[]
testRanges.append([0,dataTestNorm[0].shape[0]])
testRanges.append([dataTestNorm[0].shape[0], dataTestNorm[0].shape[0]+dataTestNorm[1].shape[0]  ])
testRanges.append([dataTestNorm[0].shape[0]+dataTestNorm[1].shape[0], dataTestNorm[0].shape[0]+dataTestNorm[1].shape[0]+ dataTestNorm[2].shape[0] ])
testRanges.append([dataTestNorm[0].shape[0]+dataTestNorm[1].shape[0]+ dataTestNorm[2].shape[0], dataTestNorm[0].shape[0]+dataTestNorm[1].shape[0]+ dataTestNorm[2].shape[0]+ dataTestNorm[3].shape[0]])
testRanges.append([dataTestNorm[0].shape[0]+dataTestNorm[1].shape[0]+ dataTestNorm[2].shape[0]+ dataTestNorm[3].shape[0] , x_test.shape[0]   ])
224
+
225
+
226
def AtLeastOneTrue(x):
    """Return True if any element of *x* is truthy, else False.

    Fix/generalization: the original iterated ``range(NumFeatures)`` -- a
    hidden dependency on a module global that silently ignored extra
    elements (or raised IndexError on shorter inputs).  For the
    length-NumFeatures rows this script passes in, behaviour is identical.
    """
    return any(bool(v) for v in x)
231
+
232
# A window is anomalous when ANY feature's reconstruction error exceeds the
# no-failure model's (enlarged) threshold; anomalies is a (n_windows,
# NumFeatures) boolean array.
anomalies = test_mae_loss > threshold[0]
anomalous_data_indices = []
for i in range(anomalies.shape[0]):
    if AtLeastOneTrue(anomalies[i]):
    #if anomalies[i][0] or anomalies[i][1] or anomalies[i][2] or anomalies[i][3]:
        anomalous_data_indices.append(i)
238
+
239
+#print(anomalous_data_indices)
240
+
241
+
242
+# Let's plot only a couple of features
243
def plotData2():
    """Plot two features of the normal training windows followed by the mixed
    test windows, overlaying the detected anomalous windows as red dots.

    Reads module globals ``x_train``, ``x_test``, ``anomalous_data_indices``
    and ``features``; blocks on ``plt.show()``.
    """
    fig, axes = plt.subplots(
        nrows=2, ncols=1, figsize=(15, 20), dpi=80, facecolor="w", edgecolor="k", sharex=True
    )
    n_train = len(x_train[0])
    marks = np.array(anomalous_data_indices)
    # Identical layout for both features, so draw each axis in one loop pass.
    for feat, ax in enumerate(axes):
        ax.plot(range(n_train), x_train[0][:, 0, feat], label="normal")
        ax.plot(range(n_train, n_train + len(x_test)), x_test[:, 0, feat], label="abnormal")
        ax.plot(n_train + marks, x_test[anomalous_data_indices, 0, feat],
                color='red', marker='.', linewidth=0, label="abnormal detection")
        ax.legend()
        ax.set_ylabel(features[feat])
    plt.show()
258
+
259
+#plotData2()
260
+
261
+
262
#   2nd scenario. Go over anomalies and classify each by smallest error.
'''   
#This code works, but too slow
anomalous_data_type=[]
for i in anomalous_data_indices:
    error=[]
    for m in range(1,NumberOfFailures+1):
        error.append(np.mean(np.mean(np.abs(model[m].predict(x_test[i:i+1,:,:])-x_test[i:i+1,:,:]),axis=1)))
    anomalous_data_type.append(np.argmin(error)+1)
'''

# Faster variant: run each model's prediction over the whole x_test once,
# then classify every anomalous window as the failure type whose autoencoder
# reconstructs it best (smallest MAE).  Model 0 (no failure) is deliberately
# excluded from the argmin, hence the +1 index offset.
anomalous_data_type=[]
x_test_predict=[]
for m in range(NumberOfFailures+1):
    x_test_predict.append(model[m].predict(x_test))


for i in anomalous_data_indices:
    error=[]
    for m in range(1,NumberOfFailures+1):
        error.append(np.mean(np.mean(np.abs(x_test_predict[m][i:i+1,:,:]-x_test[i:i+1,:,:]),axis=1)))
    anomalous_data_type.append(np.argmin(error)+1)
284
+
285
+
286
# For plotting purposes:
# bucket the anomalous window indices by their classified failure type,
# so each type can be drawn in its own colour.
anomalous_data_indices_by_failure=[]
for i in range(NumberOfFailures+1):
    anomalous_data_indices_by_failure.append([])

for i in range(len(anomalous_data_indices)):
    print(i," ",anomalous_data_type[i])
    anomalous_data_indices_by_failure[anomalous_data_type[i]].append(anomalous_data_indices[i])
296
+
297
def plotData2():
    """Summary plot: training (normal) windows, the concatenated test
    segments (normal + one colour per failure type), and the classified
    anomaly detections, for the first two features.

    Reads module globals ``x_train``, ``x_test``, ``testRanges``,
    ``anomalous_data_indices_by_failure``, ``NumberOfFailures`` and
    ``features``; blocks on ``plt.show()``.

    Refactor: the original duplicated ~40 lines verbatim between axes[0]
    (feature 0) and axes[1] (feature 1); the shared drawing logic now lives
    in one private helper, with behaviour unchanged.
    """
    # None for segment 0 lets matplotlib pick the default cycle colour,
    # matching the original code, which passed no color for "normal test".
    segment_colors = [None, "lightcoral", "cyan", "lime"]
    detection_colors = [None, "red", "blue", "green"]

    def _plot_feature(ax, feat):
        # Draw train data, the four leading test segments, and the
        # per-type anomaly markers for feature column *feat* on axis *ax*.
        offset = len(x_train[0])
        ax.plot(range(0, offset), x_train[0][:, 0, feat], label="normal train")
        pos = offset
        for seg in range(NumberOfFailures + 1):
            start, end = testRanges[seg]
            label = "normal test" if seg == 0 else "fail type " + str(seg)
            ax.plot(range(pos, pos + (end - start)), x_test[start:end, 0, feat],
                    label=label, color=segment_colors[seg])
            pos += end - start
        for t in range(1, NumberOfFailures + 1):
            idx = anomalous_data_indices_by_failure[t]
            ax.plot(offset + np.array(idx), x_test[idx, 0, feat],
                    color=detection_colors[t], marker='.', linewidth=0,
                    label="abnormal detection type " + str(t))
        ax.legend()
        ax.set_ylabel(features[feat])

    fig, axes = plt.subplots(
        nrows=2, ncols=1, figsize=(25, 20), dpi=80, facecolor="w", edgecolor="k", sharex=True
    )
    _plot_feature(axes[0], 0)
    _plot_feature(axes[1], 1)
    plt.show()
354
+
355
+plotData2()
356
+

Powered by TurnKey Linux.