-
Notifications
You must be signed in to change notification settings - Fork 785
/
Hyperparameter_Tuning_NN.py
113 lines (78 loc) · 2.3 KB
/
Hyperparameter_Tuning_NN.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
# -*- coding: utf-8 -*-
"""HyperParameter_for_NN.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1j8j0Jdi32aadwMA84qReIHVbLI6i0U65
"""
# ------------------------------------------------------------------
# Data loading and preprocessing for the churn-prediction network.
# (Bug fix: the scraped file had lost all loop/branch indentation,
# which is a SyntaxError in a .py file — restored here.)
# ------------------------------------------------------------------
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt

df = pd.read_csv('/content/customer_churn_dataset-training-master.csv')
df.head()
df.shape
df.isnull().sum()

# CustomerID is a unique row identifier with no predictive value.
df = df.drop(['CustomerID'], axis=1)
df.head()

# Drop rows with missing values rather than imputing.
df = df.dropna()
df.isnull().sum()
df.info()

# Partition columns: object dtype -> categorical, everything else numeric.
cat_col = []
num_col = []
for col in df.columns:
    if df[col].dtype == 'object':
        cat_col.append(col)
    else:
        num_col.append(col)

from sklearn.preprocessing import StandardScaler, LabelEncoder

# Label-encode each categorical column; keep the fitted encoders so the
# same category->integer mapping can be reapplied to new data later.
label_encoders = {}
for col in cat_col:
    label_encoders[col] = LabelEncoder()
    df[col] = label_encoders[col].fit_transform(df[col])
label_encoders
df.head()

# Features = every column except the last; target = 'Churn'
# (assumes 'Churn' is the last column — TODO confirm against the CSV).
X = df.iloc[:, :-1]
X
y = df['Churn']
y

from sklearn.model_selection import train_test_split

# 80/20 train/test split with a fixed seed for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=0)

# Standardize features; fit the scaler on the training set only to
# avoid leaking test-set statistics into training.
SD = StandardScaler()
X_train = SD.fit_transform(X_train)
X_test = SD.transform(X_test)
# NOTE: `!pip install keras-tuner` is IPython/Colab shell syntax and a
# SyntaxError in a plain .py file. Install from the shell instead:
#   pip install keras-tuner
from tensorflow import keras
from tensorflow.keras import layers
import keras_tuner
# Use the current package name; 'kerastuner' is the deprecated alias and
# was inconsistent with the `import keras_tuner` line above.
from keras_tuner.tuners import RandomSearch
def build_model(hp):
    """Build a tunable binary-classification MLP for Keras Tuner.

    Args:
        hp: keras_tuner.HyperParameters object used to sample the
            search space for one trial.

    Returns:
        A compiled ``keras.Sequential`` model. Tunable choices:
        number of hidden layers (2-20), units per layer (16-512 in
        steps of 32) and Adam learning rate (1e-2, 1e-3 or 1e-4).

    (Bug fix: the scraped file had lost the function body's
    indentation, which is a SyntaxError — restored here.)
    """
    model = keras.Sequential()
    # Depth and per-layer width are both hyperparameters; each layer
    # gets its own independently tuned unit count ('units_i').
    for i in range(hp.Int('num_layers', 2, 20)):
        model.add(layers.Dense(
            units=hp.Int('units_' + str(i),
                         min_value=16,
                         max_value=512,
                         step=32),
            activation='relu'))
    # Single sigmoid unit: binary churn probability.
    model.add(layers.Dense(1, activation='sigmoid'))
    model.compile(
        optimizer=keras.optimizers.Adam(
            hp.Choice('learning_rate', [1e-2, 1e-3, 1e-4])),
        loss='binary_crossentropy',
        metrics=['accuracy'])
    return model
# Random search over the space defined by build_model: 5 sampled
# configurations, each trained 3 times so the score averages out
# weight-initialization noise. Trial state is checkpointed under
# directory/project_name.
tuner = RandomSearch(
    build_model,
    objective='val_accuracy',
    max_trials=5,
    executions_per_trial=3,
    directory='/content/Hyperparameter',
    project_name='HyperParameter_Tuning',
)

# Show the search space, run the search (10 epochs per execution,
# scored on the held-out split), then report the best trials.
tuner.search_space_summary()
tuner.search(
    X_train,
    y_train,
    epochs=10,
    validation_data=(X_test, y_test),
)
tuner.results_summary()
# This script demonstrates hyperparameter tuning for a neural network.