Csar Fdez, 3 weeks ago
Revision ff77399490
1 file changed, 440 insertions, 0 deletions
  1. v3_unsupervised.py (+440, -0)

v3_unsupervised.py View file

@@ -0,0 +1,440 @@
+# Csar Fdez, UdL, 2025
+# Unsupervised classification. Uses tslearn
+# https://tslearn.readthedocs.io/en/stable/index.html
+
+# Be careful with v0_unsupervised and all supervised versions,
+# because dataTrain is not stacked before create_sequences, so
+# the sets are not aligned in time.
+
+# Same as v2 but including multiple KM metrics.
+# There is no contest between DTW and Euclidean: with Euclidean, on test "32" it does not distinguish between Class 0 and Class 2.
+
+import pandas as pd
+import matplotlib.pyplot as plt
+import datetime
+import numpy as np
+import os.path
+from optparse import OptionParser
+import copy
+import pickle
+from tslearn.clustering import TimeSeriesKMeans
+from tslearn.neighbors import KNeighborsTimeSeries
+from collections import Counter
+
+parser = OptionParser()
+parser.add_option("-t", "--train", dest="train", help="Trains the models (false)", default=False, action="store_true")
+parser.add_option("-p", "--plot", dest="plot", help="Plot Data (false)", default=False, action="store_true")
+parser.add_option("-n", "--timesteps", dest="timesteps", help="TIME STEPS", default=12)
+parser.add_option("-m", "--metric", dest="metric", help="KM metric (0:euclidean, 1:dtw, 2:softdtw)", default=1)
+
+(options, args) = parser.parse_args()
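+# Example invocation (using the flags defined above; train once, then re-run without -t):
+#   python v3_unsupervised.py --train -n 12 -m 1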
+
+
+# Data files arrays. Index:
+# 0. No failure
+# 1. Blocked evaporator
+# 2. Fully blocked condenser
+# 3. Condenser fan not working
+# 4. Open door
+
+NumberOfFailures=4
+datafiles={}
+listofFacilitySetpoint=['5-26','5-18','5-22','30','32','34']
+
+for j in listofFacilitySetpoint:
+    datafiles[j]=[]
+    for i in range(NumberOfFailures+1):
+        datafiles[j].append([])
+
+# Freezer, SP=-26
+datafiles['5-26'][0]=['2025-01-25_5_','2025-01-26_5_','2025-01-29_5_','2025-01-30_5_','2025-01-31_5_','2025-02-01_5_','2025-02-02_5_','2025-02-03_5_','2025-02-04_5_']
+datafiles['5-26'][1]=['2024-12-11_5_','2024-12-12_5_','2024-12-13_5_','2024-12-14_5_','2024-12-15_5_']
+datafiles['5-26'][2]=['2024-12-21_5_','2024-12-22_5_','2024-12-23_5_','2024-12-24_5_','2024-12-25_5_','2024-12-26_5_']
+datafiles['5-26'][3]=['2024-12-28_5_','2024-12-29_5_','2024-12-30_5_','2024-12-31_5_','2025-01-01_5_']
+datafiles['5-26'][4]=['2025-02-13_5_','2025-02-14_5_','2025-02-15_5_','2025-02-16_5_','2025-02-17_5_','2025-02-18_5_','2025-02-19_5_']
+
+# Freezer, SP=-18
+datafiles['5-18'][0]=['2025-01-21_5_','2025-01-22_5_','2025-01-23_5_']
+datafiles['5-18'][1]=['2025-02-17_5_','2025-02-18_5_','2025-02-19_5_']
+datafiles['5-18'][2]=['2025-03-10_5_','2025-03-11_5_','2025-03-12_5_']
+datafiles['5-18'][3]=['2025-01-04_5_','2025-01-05_5_','2025-01-06_5_','2025-01-07_5_']
+datafiles['5-18'][4]=['2025-04-30_5_','2025-05-01_5_','2025-05-02_5_','2025-05-03_5_','2025-05-04_5_','2025-05-05_5_'] # These data are missing
+
+# Freezer, SP=-22
+datafiles['5-22'][0]=['2025-03-13_5_','2025-03-14_5_','2025-03-15_5_','2025-03-16_5_']
+datafiles['5-22'][1]=['2025-03-21_5_','2025-03-22_5_','2025-03-23_5_','2025-03-24_5_','2025-03-25_5_']
+datafiles['5-22'][2]=['2025-03-26_5_','2025-03-27_5_','2025-03-28_5_']
+datafiles['5-22'][3]=['2025-03-31_5_','2025-04-01_5_','2025-04-02_5_','2025-04-03_5_']
+datafiles['5-22'][4]=['2025-03-17_5_']
+
+# Refrigerator 0
+datafiles['30'][0]=['2025-01-21_3_','2025-01-22_3_','2025-01-23_3_','2025-01-24_3_','2025-01-25_3_','2025-01-26_3_']
+datafiles['30'][1]=['2024-12-11_3_','2024-12-12_3_','2024-12-13_3_','2024-12-14_3_','2024-12-15_3_']
+datafiles['30'][2]=['2024-12-18_3_','2024-12-19_3_','2024-12-20_3_']
+datafiles['30'][3]=['2024-12-28_3_','2024-12-29_3_','2024-12-30_3_','2024-12-31_3_','2025-01-01_3_']
+datafiles['30'][4]=['2025-02-12_3_','2025-02-13_3_','2025-02-14_3_','2025-02-15_3_','2025-02-16_3_','2025-02-17_3_','2025-02-18_3_','2025-02-19_3_'] # Overlaps with the fan not working; days 20 and 21 are missing
+
+# Refrigerator 2
+datafiles['32'][0]=['2025-03-13_3_','2025-03-14_3_','2025-03-15_3_','2025-03-16_3_']
+datafiles['32'][1]=['2025-03-10_3_']
+datafiles['32'][2]=['2025-03-17_3_']
+datafiles['32'][3]=['2025-03-24_3_','2025-03-25_3_','2025-03-26_3_']
+datafiles['32'][4]=['2025-03-27_3_','2025-03-28_3_']
+
+# Refrigerator 4
+datafiles['34'][0]=['2025-03-31_3_','2025-04-01_3_','2025-04-02_3_','2025-04-03_3_']
+datafiles['34'][1]=['2025-04-25_3_','2025-04-26_3_','2025-04-27_3_','2025-04-28_3_'] # These data are missing
+datafiles['34'][2]=['2025-04-11_3_','2025-04-12_3_','2025-04-13_3_','2025-04-14_3_']
+datafiles['34'][3]=['2025-04-30_3_','2025-05-01_3_','2025-05-02_3_','2025-05-03_3_','2025-05-04_3_','2025-05-05_3_']
+datafiles['34'][4]=['2025-04-23_3_','2025-04-24_3_','2025-04-25_3_']
+
+
+facilitySetpoint='32'
+# Features suggested by Xavier.
+# Be careful with 'tc s3' because in datafiles[0] it is always null.
+# It seems to be incorporated in the new tests.
+
+# r1 s5: supply air flow temperature
+# r1 s1: inlet evaporator temperature
+# r1 s4: condenser outlet temperature
+
+# Variables 'r1 s4' and 'pa1 apiii' may not exist in cloud controllers.
+
+
+features=['r1 s1','r1 s4','r1 s5','pa1 apiii']
+features=['r1 s1','r1 s4','r1 s5']
+featureNames={}
+featureNames['r1 s1']='$T_{evap}$'
+featureNames['r1 s4']='$T_{cond}$'
+featureNames['r1 s5']='$T_{air}$'
+featureNames['pa1 apiii']='$P_{elec}$'
+
+unitNames={}
+unitNames['r1 s1']='$(^{o}C)$'
+unitNames['r1 s4']='$(^{o}C)$'
+unitNames['r1 s5']='$(^{o}C)$'
+unitNames['pa1 apiii']='$(W)$'
+
+
+#features=['r1 s1','r1 s2','r1 s3','r1 s4','r1 s5','r1 s6','r1 s7','r1 s8','r1 s9','r1 s10','r2 s1','r2 s2','r2 s3','r2 s4','r2 s5','r2 s6','r2 s7','r2 s8','r2 s9','pa1 apiii','tc s1','tc s2']
+
+#features=['r2 s2', 'tc s1','r1 s10','r1 s6','r2 s8']
+
+NumFeatures=len(features)
+
+df_list=[]
+for i in range(NumberOfFailures+1):
+    df_list.append([])
+
+for i in range(NumberOfFailures+1):
+    dftemp=[]
+    for f in datafiles[facilitySetpoint][i]:
+        print("                 ", f)
+        #df1 = pd.read_csv('./data/'+f+'.csv', parse_dates=['datetime'], dayfirst=True, index_col='datetime')
+        df1 = pd.read_csv('./data/'+f+'.csv')
+        dftemp.append(df1)
+    df_list[i]=pd.concat(dftemp)
+
+
+# Subsampled to 5 min = 30 * 10 s
+# We consider samples every 5 minutes because in production we will only have data at this frequency
+subsamplingrate=30
+
+dataframe=[]
+for i in range(NumberOfFailures+1):
+    dataframe.append([])
+
+for i in range(NumberOfFailures+1):
+    datalength=df_list[i].shape[0]
+    dataframe[i]=df_list[i].iloc[range(0,datalength,subsamplingrate)][features]
+    dataframe[i].reset_index(inplace=True,drop=True)
+    dataframe[i].dropna(inplace=True)
+
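+# Equivalent stride-based subsampling (sketch, same behavior as the range() above):
+#   dataframe[i] = df_list[i].iloc[::subsamplingrate][features]
+# keeping every 30th row turns 10-second raw samples into 5-minute samples.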
+
+# Train data is the first 2/3 of the data
+# Test data is the last 1/3 of the data
+dataTrain=[]
+dataTest=[]
+NumberOfSamplesForTest=0
+
+for i in range(NumberOfFailures+1):
+    dataTrain.append(dataframe[i].values[0:int(dataframe[i].shape[0]*2/3),:])
+    if NumberOfSamplesForTest==0:  # Take all
+        dataTest.append(dataframe[i].values[int(dataframe[i].shape[0]*2/3):,:])
+    else:
+        dataTest.append(dataframe[i].values[int(dataframe[i].shape[0]*2/3):int(dataframe[i].shape[0]*2/3)+NumberOfSamplesForTest,:])
+
+# Calculate means and stdevs
+a=dataTrain[0]
+for i in range(1,NumberOfFailures+1):
+    a=np.vstack((a,dataTrain[i]))
+
+means=a.mean(axis=0)
+stdevs=a.std(axis=0)
+
+def normalize2(train,test):
+    return( (train-means)/stdevs, (test-means)/stdevs )
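+# normalize2 applies a per-feature z-score with statistics pooled over all
+# training classes. Illustrative values (not from the data): with
+# means=[0.,10.] and stdevs=[1.,2.], the row [1.,14.] maps to [1.,2.].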
+
+dataTrainNorm=[]
+dataTestNorm=[]
+for i in range(NumberOfFailures+1):
+    dataTrainNorm.append([])
+    dataTestNorm.append([])
+
+for i in range(NumberOfFailures+1):
+    (dataTrainNorm[i],dataTestNorm[i])=normalize2(dataTrain[i],dataTest[i])
+
+def plotData():
+    fig, axes = plt.subplots(
+        nrows=NumberOfFailures+1, ncols=NumFeatures, figsize=(15, 20), dpi=80, facecolor="w", edgecolor="k", sharex=True
+    )
+    for i in range(NumberOfFailures+1):
+        for j in range(NumFeatures):
+            axes[i][j].plot(np.concatenate((dataTrainNorm[i][:,j],dataTestNorm[i][:,j])),label="Fail "+str(i)+", feature "+str(j))
+    #axes[1].legend()
+    #axes[0].set_ylabel(features[0])
+    #axes[1].set_ylabel(features[1])
+    plt.show()
+
+if options.plot:
+    plotData()
+    exit()
+
+def create_sequences(values, time_steps):
+    output = []
+    for i in range(len(values) - time_steps):
+        output.append(values[i : (i + time_steps)])
+    return np.stack(output)
+
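+# Shape example: an input of shape (100, 3) with time_steps=12 yields
+# 100-12 = 88 overlapping windows, i.e.
+#   create_sequences(np.zeros((100,3)), 12).shape == (88, 12, 3)
+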
+def listToString(l):
+    r=''
+    for i in l:
+        r+=str(i)
+    return(r.replace(' ',''))
+
+timesteps=int(options.timesteps)
+
+X=dataTrainNorm[0]
+for i in range(1,NumberOfFailures+1):
+    X=np.vstack((X,dataTrainNorm[i]))
+
+xtrain=create_sequences(X,timesteps)
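+# Note: because the classes are stacked before windowing, a few of the
+# resulting windows straddle two consecutive classes, so their true label
+# is ambiguous.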
+
+# Set metric
+metric=int(options.metric)
+metricstring="euclidean"
+if metric==0:
+    metricstring="euclidean"
+elif metric==1:
+    metricstring="dtw"
+elif metric==2:
+    metricstring="softdtw"
+else:
+    print("Error in metric: ",metric)
+    exit(0)
+
+
+km = TimeSeriesKMeans(n_clusters=NumberOfFailures+1, metric=metricstring, random_state=0)
+modelpath="model_v3_unsupervised_metric_"+metricstring+"_"+facilitySetpoint+"_"+str(timesteps)+listToString(features)+".pk"
+if options.train:
+    km.fit(xtrain)
+    km.to_pickle(modelpath)
+else:
+    # from_pickle is a class method that returns a new model, so the result must be assigned
+    km = TimeSeriesKMeans.from_pickle(modelpath)
+    #km.fit_predict(xtrain)
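+# Sketch (not the original flow): with a trained or loaded model, labels for
+# new windows can be obtained without re-fitting via km.predict(xtest);
+# fit_predict(), used further below, re-runs k-means on the data it receives.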
+
+colorline=['violet','lightcoral','cyan','lime','grey']
+colordot=['darkviolet','red','blue','green','black']
+
+
+featuresToPlot=features
+indexesToPlot=[]
+for i in featuresToPlot:
+    indexesToPlot.append(features.index(i))
+
+
+def plot(data,ranges):
+    km.fit_predict(data)
+    # Expand the data to plot with the timesteps samples of the last sample
+    datatoplot=data[:,0,:]
+    datatoplot=np.vstack((datatoplot,data[ranges[len(ranges)-1][1],:,:]))
+
+    labels=[]    # Cluster ids are assigned arbitrarily by the classifier
+    for i in range(len(ranges)):
+        b=Counter(km.labels_[ranges[i][0]:ranges[i][1]])
+        labels.append(b.most_common(1)[0][0])
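+    # Majority vote per segment; e.g. Counter([2,2,0,2]).most_common(1)
+    # returns [(2, 3)], so cluster id 2 becomes that segment's label.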
+
+    print("\n\n\n LABELS: ",labels,"\n\n")
+
+    NumFeaturesToPlot=len(indexesToPlot)
+    plt.rcParams.update({'font.size': 16})
+    fig, axes = plt.subplots(
+        nrows=NumFeaturesToPlot, ncols=1, figsize=(15, 10), dpi=80, facecolor="w", edgecolor="k", sharex=True
+    )
+    for i in range(NumFeaturesToPlot):
+        init=0
+        end=ranges[0][1]
+        labelsplotted=[]
+        for j in range(len(ranges)):
+            if j==(len(ranges)-1): # Plot the last timesteps
+                classtype=labels.index(labels[j])
+                if classtype in labelsplotted:
+                    axes[i].plot(range(init,end+timesteps),datatoplot[ranges[j][0]:ranges[j][1]+timesteps,indexesToPlot[i]]*stdevs[indexesToPlot[i]]+means[indexesToPlot[i]], color=colorline[classtype],linewidth=1)
+                else:
+                    axes[i].plot(range(init,end+timesteps),datatoplot[ranges[j][0]:ranges[j][1]+timesteps,indexesToPlot[i]]*stdevs[indexesToPlot[i]]+means[indexesToPlot[i]], label="Class: "+str(classtype), color=colorline[classtype],linewidth=1)
+                    labelsplotted.append(classtype)
+            else:
+                classtype=labels.index(labels[j])
+                if classtype in labelsplotted:
+                    axes[i].plot(range(init,end),datatoplot[ranges[j][0]:ranges[j][1],indexesToPlot[i]]*stdevs[indexesToPlot[i]]+means[indexesToPlot[i]], color=colorline[classtype],linewidth=1)
+                else:
+                    axes[i].plot(range(init,end),datatoplot[ranges[j][0]:ranges[j][1],indexesToPlot[i]]*stdevs[indexesToPlot[i]]+means[indexesToPlot[i]], label="Class: "+str(classtype), color=colorline[classtype],linewidth=1)
+                    labelsplotted.append(classtype)
+            init=end
+            if j<(len(ranges)-1):
+                end+=(ranges[j+1][1]-ranges[j+1][0])
+        x=[]
+        y=[]
+        for j in range(len(ranges)):
+            x.append([])
+            y.append([])
+
+        for j in range(len(ranges)):
+            for k in range(ranges[j][0],ranges[j][1]):
+                try:  # The lookup can fail, likely because a cluster id that never wins a majority vote is missing from 'labels'
+                    x[labels.index(km.labels_[k])].append(k+timesteps)
+                    y[labels.index(km.labels_[k])].append(datatoplot[k+timesteps,indexesToPlot[i]]*stdevs[indexesToPlot[i]]+means[indexesToPlot[i]])
+                except:
+                    x[0].append(k+timesteps)
+                    y[0].append(datatoplot[k+timesteps,indexesToPlot[i]]*stdevs[indexesToPlot[i]]+means[indexesToPlot[i]])
+
+        labelsplotted=[]
+        for j in range(len(ranges)):
+            classtype=labels.index(labels[j])
+            if classtype in labelsplotted:
+                axes[i].plot(x[j],y[j],color=colordot[labels.index(labels[j])],marker='.',linewidth=0)
+            else:
+                axes[i].plot(x[j],y[j],color=colordot[labels.index(labels[j])],marker='.',linewidth=0,label="Class type "+str(j))
+                labelsplotted.append(classtype)
+
+        if i==(NumFeaturesToPlot-1):
+            axes[i].legend(loc='right')
+        s=''
+        s+=featureNames[features[indexesToPlot[i]]]
+        s+=' '+unitNames[features[indexesToPlot[i]]]
+        axes[i].set_ylabel(s)
+        axes[i].grid()
+
+    axes[NumFeaturesToPlot-1].set_xlabel("Sample number")
+    plt.show()
+
+'''
+Ranges=[]
+r=0
+for i in range(NumberOfFailures+1):
+    Ranges.append([r,r+dataTrainNorm[i].shape[0]])
+    r+=dataTrainNorm[i].shape[0]
+# Drop the last TIME_STEPS for plotting
+Ranges[NumberOfFailures][1]=Ranges[NumberOfFailures][1]-timesteps-1
+
+plot(xtrain,Ranges)
+'''
+# Try with test data
+X=dataTestNorm[0]
+Ranges=[[0,dataTestNorm[0].shape[0]]]
+r=dataTestNorm[0].shape[0]
+for i in range(1,NumberOfFailures+1):
+    X=np.vstack((X,dataTestNorm[i]))
+    Ranges.append([r,r+dataTestNorm[i].shape[0]])
+    r+=dataTestNorm[i].shape[0]
+
+#X=np.vstack((X,dataTestNorm[0]))  # We add a last segment of no-fail data
+#Ranges.append([r,r+dataTestNorm[0].shape[0]])
+
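+# create_sequences returns len(X)-timesteps windows, so the end of the last
+# segment is trimmed to keep all indices inside the window array.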
+Ranges[len(Ranges)-1][1]=Ranges[len(Ranges)-1][1]-timesteps-1
+
+xtest=create_sequences(X,timesteps)
+
+
+def anomalyMetric(labels,ranges):
+    # Takes ONLY the first segments of Ranges
+    # FP, TP: false/true positives
+    # TN, FN: true/false negatives
+    # Sensitivity (recall): probability of detecting a failure when the data is a failure: TP/(TP+FN)
+    # Precision: rate of correct positive results: TP/(TP+FP)
+    # F1-score: predictive performance measure: 2*Precision*Sensitivity/(Precision+Sensitivity)
+
+    lab=[]    # Cluster ids are assigned arbitrarily by the classifier
+    TP=[]
+    FN=[]
+    TPFP=[]
+    for i in range(NumberOfFailures+1):
+        TP.append([])
+        FN.append([])
+        TPFP.append([])
+        b=Counter(labels[ranges[i][0]:ranges[i][1]])
+        lab.append(b.most_common(1)[0][0])
+
+    for i in range(NumberOfFailures+1):
+        counttp=0
+        countfn=0
+        for j in range(ranges[i][0],ranges[i][1]-timesteps):
+            if lab.index(labels[j])==i:
+                counttp+=1
+            else:
+                countfn+=1
+        TP[i]=counttp
+        FN[i]=countfn
+
+    for i in range(NumberOfFailures+1):
+        count=0
+        for ii in range(NumberOfFailures+1):
+            for j in range(ranges[ii][0],ranges[ii][1]-timesteps):
+                if lab.index(labels[j])==i:
+                    count+=1
+        TPFP[i]=count
+
+    segmentLength=[]
+    for i in range(NumberOfFailures+1):
+        segmentLength.append(ranges[i][1]-timesteps-ranges[i][0])
+    totalSegmentLength=0
+    for i in range(NumberOfFailures+1):
+        totalSegmentLength+=segmentLength[i]
+
+    Sensitivity=0
+    Precision=0
+    for i in range(NumberOfFailures+1):
+        Sensitivity+=TP[i]/(TP[i]+FN[i])*segmentLength[i]/totalSegmentLength
+        Precision+=TP[i]/(TPFP[i])*segmentLength[i]/totalSegmentLength
+
+    print(lab)
+    print("TP: ",TP)
+    print("FN: ",FN)
+    print("TPFP: ",TPFP)
+    print("Sensitivity: ",Sensitivity)
+    print("Precision: ",Precision)
+    print("F1-Score: ",2*Precision*Sensitivity/(Sensitivity+Precision))
+
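+# Worked example (illustrative numbers, not from the data): with two segments
+# of lengths 60 and 40 and per-class recalls 0.9 and 0.5,
+#   Sensitivity = 0.9*60/100 + 0.5*40/100 = 0.74
+# i.e. Sensitivity and Precision are segment-length-weighted averages.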
+km.fit_predict(xtest)
+anomalyMetric(km.labels_,Ranges)
+plot(xtest,Ranges)
+
