cesar 3 days ago
parent
commit
a0f11d1a83
5 changed files with 270 additions and 0 deletions
  1. BIN        Descripción tests anomalías.docx
  2. +22 -0     README.txt
  3. BIN        model._checkpoint.weights.h5
  4. +52 -0     v1.py
  5. +196 -0    v2.py

BIN
Descripción tests anomalías.docx View File


+ 22
- 0
README.txt View File

@@ -0,0 +1,22 @@
+Anomaly test document link:
+
+
+https://ako0.sharepoint.com/:x:/r/teams/InvestigacinSaaS/_layouts/15/Doc.aspx?sourcedoc=%7B5097593F-ADFA-7FC9-9652-D12395F95ACF%7D&file=Tests%20anomal%25u00edas.xlsx&action=default&mobileredirect=true
+
+
+Data lives here:
+https://ako0.sharepoint.com/teams/InvestigacinSaaS/AKOSYS/Forms/AllItems.aspx?csf=1&web=1&e=QMcdQ6&CID=49ca9884%2Dfb0c%2D4962%2D90e9%2De8fd3b929ea4&FolderCTID=0x0120005ABC4637161C864C9CB64E44FA154909&id=%2Fteams%2FInvestigacinSaaS%2FAKOSYS%2FFood%20retail%20lab%2Freport%5Fv0
+
+
+File names follow the pattern:
+        2024-07-01_X_.csv
+
+        where X is the facility type:
+                1. Arcón congelación (chest freezer)
+                2. Mural abierto refrigeración (open refrigerated wall cabinet)
+                3. Mural cerrado refrigeración (closed refrigerated wall cabinet)
+                4. Vitrina carnicería (butcher display case)
+                5. Mural cerrado congelación (closed freezer wall cabinet)    (This is our facility so far)
+
+
+
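The naming convention above maps directly to code. A minimal sketch (the helper name is hypothetical; the ./data/ folder matches the paths used by v1.py and v2.py below):

    from datetime import date

    # Facility types from the README list; type 5 is the installed facility so far.
    FACILITY_TYPES = {
        1: "Arcón congelación",
        2: "Mural abierto refrigeración",
        3: "Mural cerrado refrigeración",
        4: "Vitrina carnicería",
        5: "Mural cerrado congelación",
    }

    def datafile_path(day: date, facility_type: int) -> str:
        # e.g. datafile_path(date(2024, 7, 1), 5) -> './data/2024-07-01_5_.csv'
        assert facility_type in FACILITY_TYPES
        return f"./data/{day.isoformat()}_{facility_type}_.csv"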

BIN
model._checkpoint.weights.h5 View File


+ 52
- 0
v1.py View File

@@ -0,0 +1,52 @@
+import pandas as pd
+import matplotlib.pyplot as plt
+import datetime
+import numpy as np
+
+#normal_datafiles_list=['2025-01-08_5_','2025-01-09_5_','2025-01-10_5_','2025-01-11_5_']
+normal_datafiles_list=['2025-01-09_5_','2025-01-10_5_','2025-01-11_5_']
+anormal_datafiles_list=['2025-01-04_5_','2025-01-05_5_','2025-01-06_5_','2025-01-07_5_']
+
+cols=['r1 s1','r1 s4','r1 s5','pa1 apiii']
+
+df_list=[]
+for f in normal_datafiles_list:
+    #df1 = pd.read_csv('./data/'+f+'.csv', parse_dates=['datetime'], dayfirst=True, index_col='datetime')
+    df1 = pd.read_csv('./data/'+f+'.csv')
+    df_list.append(df1)
+
+df=pd.concat(df_list)
+datalength=df.shape[0]
+# subsampled to 5'  =  30 * 10"
+normaldataframe=df.iloc[range(0,datalength,30)][cols]
+normaldataframe.reset_index(inplace=True,drop=True)
+normaldata=normaldataframe.values
+
+
+df_list=[]
+for f in anormal_datafiles_list:
+    #df1 = pd.read_csv('./data/'+f+'.csv', parse_dates=['datetime'], dayfirst=True, index_col='datetime')
+    df1 = pd.read_csv('./data/'+f+'.csv')
+    df_list.append(df1)
+
+df=pd.concat(df_list)
+datalength=df.shape[0]
+# subsampled to 5'  =  30 * 10"
+anormaldataframe=df.iloc[range(0,datalength,30)][cols]
+anormaldataframe.reset_index(inplace=True,drop=True)
+anormaldata=anormaldataframe.values
+
+
+nplots=len(cols)
+
+plt.rcParams.update({'font.size': 10})
+f,ax = plt.subplots(int(np.ceil(nplots/2)),2,figsize=(24,17), dpi=80, facecolor='white', edgecolor='k')
+for i in range(int(np.ceil(nplots/2))):
+    for j in range(2):
+        r=i*2+j
+        if r<nplots:
+            ax[i][j].plot(normaldata[:,r],label='normal')
+            ax[i][j].plot(anormaldata[:,r],label='abnormal')
+            ax[i][j].set_title(anormaldataframe.columns[r])
+            ax[i][j].legend()
+plt.show()
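A note on the subsampling in v1.py: 30 rows at one row per 10" is 300 s, i.e. one kept sample per 5'. The commented-out read_csv call suggests each CSV has a 'datetime' column; if that holds, the positional iloc[range(0, datalength, 30)] step could be written as a time-based resample, which also tolerates missing rows. A sketch under that assumption, not part of this commit:

    import pandas as pd

    df = pd.read_csv('./data/2025-01-09_5_.csv',
                     parse_dates=['datetime'], dayfirst=True, index_col='datetime')
    # keep the first reading of each 5-minute bucket instead of every 30th row
    df_5min = df.resample('5min').first()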

+ 196
- 0
v2.py View File

@@ -0,0 +1,196 @@
+# Csar Fdez, UdL, 2025
+import pandas as pd
+import matplotlib.pyplot as plt
+import datetime
+import numpy as np
+import keras
+import os.path
+import pickle
+from keras import layers
+from optparse import OptionParser
+
+# Facility type 5: Mural cerrado de congelación (closed freezer). Set point at -18 (we will have two possible setpoints, -18 and -26)
+# This code only deals with a single failure type
+# Data for abnormal functioning corresponds to a condenser fan failure
+
+parser = OptionParser()
+parser.add_option("-t", "--train", dest="train", help="Train the models (default: False)", default=False, action="store_true")
+
+(options, args) = parser.parse_args()
+
+
+normal_datafiles_list=['2025-01-09_5_','2025-01-10_5_','2025-01-11_5_']
+anormal_datafiles_list=['2025-01-04_5_','2025-01-05_5_','2025-01-06_5_','2025-01-07_5_']
+
+# Features suggested by Xavier
+features=['r1 s1','r1 s4','r1 s5','pa1 apiii']
+NumFeatures=len(features)
+
+df_list=[]
+for f in normal_datafiles_list:
+    #df1 = pd.read_csv('./data/'+f+'.csv', parse_dates=['datetime'], dayfirst=True, index_col='datetime')
+    df1 = pd.read_csv('./data/'+f+'.csv')
+    df_list.append(df1)
+
+df=pd.concat(df_list)
+datalength=df.shape[0]
+# subsampled to 5'  =  30 * 10"
+# We consider samples every 5' because in production we will only have data at this frequency
+subsamplingrate=30
+
+
+
+normaldataframe=df.iloc[range(0,datalength,subsamplingrate)][features]
+normaldataframe.reset_index(inplace=True,drop=True)
+
+
+df_list=[]
+for f in anormal_datafiles_list:
+    #df1 = pd.read_csv('./data/'+f+'.csv', parse_dates=['datetime'], dayfirst=True, index_col='datetime')
+    df1 = pd.read_csv('./data/'+f+'.csv')
+    df_list.append(df1)
+
+df=pd.concat(df_list)
+datalength=df.shape[0]
+# subsampled to 5'  =  30 * 10"
+anormaldataframe=df.iloc[range(0,datalength,subsamplingrate)][features]
+anormaldataframe.reset_index(inplace=True,drop=True)
+
+
+# Train data is the first 2/3 of normaldata
+# Test data is: last 1/3 of normaldata + anormaldata + last 1/3 of normaldata
+dataTrain=normaldataframe.values[0:int(normaldataframe.shape[0]*2/3),:]
+dataTest=np.vstack((normaldataframe.values[int(normaldataframe.shape[0]*2/3)+1:,:],anormaldataframe.values, normaldataframe.values[int(normaldataframe.shape[0]*2/3)+1:,:] ))
+
+
+def normalize2():
+    # z-score normalization of train and test, using the train-set mean/stdev of each feature
+    means=[]
+    stdevs=[]
+    for i in range(NumFeatures):
+        means.append(dataTrain[:,i].mean())
+        stdevs.append(dataTrain[:,i].std())
+    return( (dataTrain-means)/stdevs, (dataTest-means)/stdevs )
+
+(dataTrainNorm,dataTestNorm)=normalize2()
+
+TIME_STEPS = 24
+def create_sequences(values, time_steps=TIME_STEPS):
+    output = []
+    for i in range(len(values) - time_steps + 1):
+        output.append(values[i : (i + time_steps)])
+    return np.stack(output)
+
+x_train = create_sequences(dataTrainNorm)
+
+model = keras.Sequential(
+    [
+        layers.Input(shape=(x_train.shape[1], x_train.shape[2])),
+        layers.Conv1D(
+            filters=64,
+            kernel_size=7,
+            padding="same",
+            strides=2,
+            activation="relu",
+        ),
+        layers.Dropout(rate=0.2),
+        layers.Conv1D(
+            filters=32,
+            kernel_size=7,
+            padding="same",
+            strides=2,
+            activation="relu",
+        ),
+        layers.Conv1DTranspose(
+            filters=32,
+            kernel_size=7,
+            padding="same",
+            strides=2,
+            activation="relu",
+        ),
+        layers.Dropout(rate=0.2),
+        layers.Conv1DTranspose(
+            filters=64,
+            kernel_size=7,
+            padding="same",
+            strides=2,
+            activation="relu",
+        ),
+        layers.Conv1DTranspose(filters=x_train.shape[2], kernel_size=7, padding="same"),
+    ]
+)
+model.compile(optimizer=keras.optimizers.Adam(learning_rate=0.001), loss="mse")
+model.summary()
+
+path_checkpoint = "model._checkpoint.weights.h5"
+es_callback = keras.callbacks.EarlyStopping(monitor="val_loss", min_delta=0, patience=15)
+
+modelckpt_callback = keras.callbacks.ModelCheckpoint(
+    monitor="val_loss",
+    filepath=path_checkpoint,
+    verbose=1,
+    save_weights_only=True,
+    save_best_only=True,
+)
+
+
+if options.train:
+    history = model.fit(
+        x_train,
+        x_train,
+        epochs=400,
+        batch_size=128,
+        validation_split=0.3,
+        callbacks=[es_callback, modelckpt_callback],
+    )
+
+    plt.plot(history.history["loss"], label="Training Loss")
+    plt.plot(history.history["val_loss"], label="Validation Loss")
+    plt.legend()
+    plt.show()
+else:
+    model.load_weights(path_checkpoint)
+
+
+x_train_pred = model.predict(x_train)
+train_mae_loss = np.mean(np.abs(x_train_pred - x_train), axis=1)
+threshold = np.max(train_mae_loss,axis=0)
+
+print("Threshold : ",threshold)
+threshold=threshold*2
+# The threshold is doubled because otherwise, with samples every 5', there are many false positives
+
+x_test = create_sequences(dataTestNorm)
+x_test_pred = model.predict(x_test)
+test_mae_loss = np.mean(np.abs(x_test_pred - x_test), axis=1)
+
+# a window is flagged when either of the first two features exceeds its per-feature threshold
+anomalies = test_mae_loss > threshold
+anomalous_data_indices = []
+for i in range(anomalies.shape[0]):
+    if anomalies[i][0] or anomalies[i][1]:
+        anomalous_data_indices.append(i)
+
+#print(anomalous_data_indices)
+
+
+# Let's plot only a couple of features
+def plotData2():
+    fig, axes = plt.subplots(
+        nrows=2, ncols=1, figsize=(15, 20), dpi=80, facecolor="w", edgecolor="k",sharex=True
+    )
+    axes[0].plot(range(len(x_train)),x_train[:,0,0],label="normal")
+    axes[0].plot(range(len(x_train),len(x_train)+len(x_test)),x_test[:,0,0],label="abnormal")
+    axes[0].plot(len(x_train)+np.array(anomalous_data_indices),x_test[anomalous_data_indices,0,0],color='red',marker='.',linewidth=0,label="abnormal detection")
+    axes[0].legend()
+    axes[1].plot(range(len(x_train)),x_train[:,0,1],label="normal")
+    axes[1].plot(range(len(x_train),len(x_train)+len(x_test)),x_test[:,0,1],label="abnormal")
+    axes[1].plot(len(x_train)+np.array(anomalous_data_indices),x_test[anomalous_data_indices,0,1],color='red',marker='.',linewidth=0,label="abnormal detection")
+    axes[1].legend()
+    axes[0].set_ylabel(features[0])
+    axes[1].set_ylabel(features[1])
+    plt.show()
+
+plotData2()
+
+
+
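v2.py flags whole 24-step windows and plots each flagged window at its first sample. A common refinement, sketched here in the style of the Keras timeseries anomaly-detection tutorial this pipeline resembles (not part of this commit), is to mark an individual test sample anomalous only when every window covering it was flagged, which suppresses isolated single-window false positives. window_flags is a hypothetical per-window boolean, e.g. anomalies[:,0] | anomalies[:,1] from the loop above:

    import numpy as np

    TIME_STEPS = 24
    # hypothetical per-window result; stands in for anomalies[:,0] | anomalies[:,1]
    window_flags = np.zeros(1000, dtype=bool)

    anomalous_samples = []
    for idx in range(TIME_STEPS - 1, len(window_flags)):
        # sample idx is covered by windows idx-TIME_STEPS+1 .. idx
        if np.all(window_flags[idx - TIME_STEPS + 1 : idx + 1]):
            anomalous_samples.append(idx)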
