AshmithaIRRI committed on
Commit
0b3f5ea
·
verified ·
1 Parent(s): d2dd8db

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +25 -33
app.py CHANGED
@@ -26,14 +26,6 @@ import tempfile
26
  import matplotlib.pyplot as plt
27
  import seaborn as sns
28
  #------------------------------------------GRUModel-------------------------------------
29
- import numpy as np
30
- from sklearn.preprocessing import MinMaxScaler
31
- from sklearn.ensemble import RandomForestRegressor
32
- from tensorflow.keras.models import Sequential
33
- from tensorflow.keras.layers import GRU, Dense, BatchNormalization, Dropout, LeakyReLU
34
- from tensorflow.keras import regularizers
35
- from tensorflow.keras.optimizers import Adam
36
- from tensorflow.keras.callbacks import ReduceLROnPlateau, EarlyStopping
37
 
38
  def GRUModel(trainX, trainy, testX=None, testy=None, epochs=1000, batch_size=64, learning_rate=0.0001,
39
  l1_reg=0.001, l2_reg=0.001, dropout_rate=0.2, feature_selection=True, top_k=10):
@@ -60,31 +52,31 @@ def GRUModel(trainX, trainy, testX=None, testy=None, epochs=1000, batch_size=64,
60
  model.add(GRU(512, input_shape=(trainX.shape[1], trainX.shape[2]), return_sequences=False,
61
  kernel_regularizer=regularizers.l1_l2(l1=l1_reg, l2=l2_reg)))
62
 
63
- for units in [256, 128, 64, 32]:
64
- model.add(Dense(256, kernel_initializer='he_normal', kernel_regularizer=regularizers.l1_l2(l1=l1_reg, l2=l2_reg)))
65
- model.add(BatchNormalization())
66
- model.add(Dropout(dropout_rate))
67
- model.add(LeakyReLU(alpha=0.1))
68
-
69
- model.add(Dense(256, kernel_initializer='he_normal', kernel_regularizer=regularizers.l1_l2(l1=l1_reg, l2=l2_reg)))
70
- model.add(BatchNormalization())
71
- model.add(Dropout(dropout_rate))
72
- model.add(LeakyReLU(alpha=0.1))
73
-
74
- model.add(Dense(128, kernel_initializer='he_normal', kernel_regularizer=regularizers.l1_l2(l1=l1_reg, l2=l2_reg)))
75
- model.add(BatchNormalization())
76
- model.add(Dropout(dropout_rate))
77
- model.add(LeakyReLU(alpha=0.1))
78
-
79
- model.add(Dense(64, kernel_initializer='he_normal', kernel_regularizer=regularizers.l1_l2(l1=l1_reg, l2=l2_reg)))
80
- model.add(BatchNormalization())
81
- model.add(Dropout(dropout_rate))
82
- model.add(LeakyReLU(alpha=0.1))
83
-
84
- model.add(Dense(32, kernel_initializer='he_normal', kernel_regularizer=regularizers.l1_l2(l1=l1_reg, l2=l2_reg)))
85
- model.add(BatchNormalization())
86
- model.add(Dropout(dropout_rate))
87
- model.add(LeakyReLU(alpha=0.1))
88
 
89
  model.add(Dense(1, activation="relu")) # Output layer
90
 
 
26
  import matplotlib.pyplot as plt
27
  import seaborn as sns
28
  #------------------------------------------GRUModel-------------------------------------
 
 
 
 
 
 
 
 
29
 
30
  def GRUModel(trainX, trainy, testX=None, testy=None, epochs=1000, batch_size=64, learning_rate=0.0001,
31
  l1_reg=0.001, l2_reg=0.001, dropout_rate=0.2, feature_selection=True, top_k=10):
 
52
  model.add(GRU(512, input_shape=(trainX.shape[1], trainX.shape[2]), return_sequences=False,
53
  kernel_regularizer=regularizers.l1_l2(l1=l1_reg, l2=l2_reg)))
54
 
55
+
56
+ model.add(Dense(512, kernel_initializer='he_normal', kernel_regularizer=regularizers.l1_l2(l1=l1_reg, l2=l2_reg)))
57
+ model.add(BatchNormalization())
58
+ model.add(Dropout(dropout_rate))
59
+ model.add(LeakyReLU(alpha=0.1))
60
+
61
+ model.add(Dense(256, kernel_initializer='he_normal', kernel_regularizer=regularizers.l1_l2(l1=l1_reg, l2=l2_reg)))
62
+ model.add(BatchNormalization())
63
+ model.add(Dropout(dropout_rate))
64
+ model.add(LeakyReLU(alpha=0.1))
65
+
66
+ model.add(Dense(128, kernel_initializer='he_normal', kernel_regularizer=regularizers.l1_l2(l1=l1_reg, l2=l2_reg)))
67
+ model.add(BatchNormalization())
68
+ model.add(Dropout(dropout_rate))
69
+ model.add(LeakyReLU(alpha=0.1))
70
+
71
+ model.add(Dense(64, kernel_initializer='he_normal', kernel_regularizer=regularizers.l1_l2(l1=l1_reg, l2=l2_reg)))
72
+ model.add(BatchNormalization())
73
+ model.add(Dropout(dropout_rate))
74
+ model.add(LeakyReLU(alpha=0.1))
75
+
76
+ model.add(Dense(32, kernel_initializer='he_normal', kernel_regularizer=regularizers.l1_l2(l1=l1_reg, l2=l2_reg)))
77
+ model.add(BatchNormalization())
78
+ model.add(Dropout(dropout_rate))
79
+ model.add(LeakyReLU(alpha=0.1))
80
 
81
  model.add(Dense(1, activation="relu")) # Output layer
82