Creating a logistic regression to predict absenteeism

Import the relevant libraries

In [1]:
# import the relevant libraries
import pandas as pd
import numpy as np

Load the data

In [5]:
# load the preprocessed CSV data
data_preprocessed = pd.read_csv('Absenteeism_preprocessed.csv')
In [6]:
# eyeball the data
data_preprocessed.head()
Out[6]:
Reason_1 Reason_2 Reason_3 Reason_4 Month Value Day of the Week Transportation Expense Distance to Work Age Daily Work Load Average Body Mass Index Education Children Pet Absenteeism Time in Hours
0 0 0 0 1 7 1 289 36 33 239.554 30 0 2 1 4
1 0 0 0 0 7 1 118 13 50 239.554 31 0 1 0 0
2 0 0 0 1 7 2 179 51 38 239.554 31 0 0 0 2
3 1 0 0 0 7 3 279 5 39 239.554 24 0 2 0 4
4 0 0 0 1 7 3 289 36 33 239.554 30 0 2 1 2
In [7]:
data_preprocessed.info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 700 entries, 0 to 699
Data columns (total 15 columns):
Reason_1                     700 non-null int64
Reason_2                     700 non-null int64
Reason_3                     700 non-null int64
Reason_4                     700 non-null int64
Month Value                  700 non-null int64
Day of the Week              700 non-null int64
Transportation Expense       700 non-null int64
Distance to Work             700 non-null int64
Age                          700 non-null int64
Daily Work Load Average      700 non-null float64
Body Mass Index              700 non-null int64
Education                    700 non-null int64
Children                     700 non-null int64
Pet                          700 non-null int64
Absenteeism Time in Hours    700 non-null int64
dtypes: float64(1), int64(14)
memory usage: 82.1 KB

Create the targets

In [8]:
# find the median of 'Absenteeism Time in Hours'
data_preprocessed['Absenteeism Time in Hours'].median()
Out[8]:
3.0
In [9]:
# what we've decided to do is to take the median of the dataset as a cut-off line
# in this way the dataset will be balanced (there will be a roughly equal number of 0s and 1s for the logistic regression)

# note that what this line does is assign 1 to anyone who has been absent for more than 3 hours (i.e. 4 hours or more)
# that is the equivalent of taking half a day off

# targets = np.where(data_preprocessed['Absenteeism Time in Hours'] > 3, 1, 0)

# parameterized code
targets = np.where(data_preprocessed['Absenteeism Time in Hours'] > 
                   data_preprocessed['Absenteeism Time in Hours'].median(), 1, 0)
In [10]:
# eyeball the targets
targets
Out[10]:
array([1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0,
       1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1,
       0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0,
       0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1,
       0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1,
       0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0,
       0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0,
       1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1,
       0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1,
       1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1,
       0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0,
       0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0,
       0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1,
       1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0,
       1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0,
       1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0,
       0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1,
       1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1,
       1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1,
       0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1,
       1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1,
       1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1,
       1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1,
       1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0,
       1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1,
       0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0,
       0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0,
       0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1,
       0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0,
       1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1,
       1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0])
In [11]:
# add a new column to the original data frame that will contain the targets for the regression
data_preprocessed['Excessive Absenteeism'] = targets
In [12]:
# check what happened
# manually verify how the targets were created
data_preprocessed.head()
Out[12]:
Reason_1 Reason_2 Reason_3 Reason_4 Month Value Day of the Week Transportation Expense Distance to Work Age Daily Work Load Average Body Mass Index Education Children Pet Absenteeism Time in Hours Excessive Absenteeism
0 0 0 0 1 7 1 289 36 33 239.554 30 0 2 1 4 1
1 0 0 0 0 7 1 118 13 50 239.554 31 0 1 0 0 0
2 0 0 0 1 7 2 179 51 38 239.554 31 0 0 0 2 0
3 1 0 0 0 7 3 279 5 39 239.554 24 0 2 0 4 1
4 0 0 0 1 7 3 289 36 33 239.554 30 0 2 1 2 0

A comment on the targets

In [13]:
# check if the dataset is balanced (i.e. what share of the targets are 1s)
# targets.sum() gives us the number of 1s
# shape[0] gives us the total length of the targets array
targets.sum() / targets.shape[0]
Out[13]:
0.45571428571428574
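About 46% of the targets are 1s, so the dataset is reasonably balanced. As a quick cross-check (a small sketch, not part of the original run), the same figure can be recovered from the raw counts:

# count the 0s and 1s directly; given 700 observations and the ratio above,
# this comes out to 381 zeros and 319 ones
values, counts = np.unique(targets, return_counts=True)
dict(zip(values, counts))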
In [14]:
# create a checkpoint by dropping the unnecessary variables
# also drop the variables we 'eliminated' after exploring the weights
data_with_targets = data_preprocessed.drop(['Absenteeism Time in Hours','Day of the Week',
                                            'Daily Work Load Average','Distance to Work'],axis=1)
In [15]:
# check if the line above created a checkpoint :)

# if 'data_with_targets is data_preprocessed' returns True, the two names point to the same object
# if it returns False, the two variables refer to completely different objects, so this is in fact a checkpoint
data_with_targets is data_preprocessed
Out[15]:
False
In [16]:
# check what's inside
data_with_targets.head()
Out[16]:
Reason_1 Reason_2 Reason_3 Reason_4 Month Value Transportation Expense Age Body Mass Index Education Children Pet Excessive Absenteeism
0 0 0 0 1 7 289 33 30 0 2 1 1
1 0 0 0 0 7 118 50 31 0 1 0 0
2 0 0 0 1 7 179 38 31 0 0 0 0
3 1 0 0 0 7 279 39 24 0 2 0 1
4 0 0 0 1 7 289 33 30 0 2 1 0

Select the inputs for the regression

In [17]:
data_with_targets.shape
Out[17]:
(700, 12)
In [18]:
# select all rows and all columns up to the 14th (excluded)
# note: since the data frame now has only 12 columns, this returns everything, including the targets
data_with_targets.iloc[:,:14]
Out[18]:
Reason_1 Reason_2 Reason_3 Reason_4 Month Value Transportation Expense Age Body Mass Index Education Children Pet Excessive Absenteeism
0 0 0 0 1 7 289 33 30 0 2 1 1
1 0 0 0 0 7 118 50 31 0 1 0 0
2 0 0 0 1 7 179 38 31 0 0 0 0
3 1 0 0 0 7 279 39 24 0 2 0 1
4 0 0 0 1 7 289 33 30 0 2 1 0
5 0 0 0 1 10 179 38 31 0 0 0 0
6 0 0 0 1 7 361 28 27 0 1 4 1
7 0 0 0 1 7 260 36 23 0 4 0 1
8 0 0 1 0 6 155 34 25 0 2 0 1
9 0 0 0 1 7 235 37 29 1 1 1 1
10 1 0 0 0 7 260 36 23 0 4 0 1
11 1 0 0 0 7 260 36 23 0 4 0 1
12 1 0 0 0 7 260 36 23 0 4 0 1
13 1 0 0 0 7 179 38 31 0 0 0 0
14 0 0 0 1 7 179 38 31 0 0 0 1
15 1 0 0 0 7 246 41 23 0 0 0 1
16 0 0 0 1 7 179 38 31 0 0 0 0
17 0 0 1 0 7 179 38 31 0 0 0 1
18 1 0 0 0 7 189 33 25 0 2 2 1
19 0 0 0 1 5 248 47 32 0 2 1 0
20 1 0 0 0 12 330 28 25 1 0 0 1
21 1 0 0 0 3 179 38 31 0 0 0 0
22 1 0 0 0 10 361 28 27 0 1 4 1
23 0 0 0 1 8 260 36 23 0 4 0 1
24 0 0 1 0 8 289 33 30 0 2 1 1
25 0 0 0 1 8 361 28 27 0 1 4 1
26 0 0 0 1 4 289 33 30 0 2 1 0
27 0 0 0 1 12 157 29 22 0 0 0 1
28 0 0 1 0 8 289 33 30 0 2 1 1
29 0 0 0 1 8 179 38 31 0 0 0 0
... ... ... ... ... ... ... ... ... ... ... ... ...
670 0 0 0 1 4 155 34 25 0 2 0 1
671 0 0 1 0 4 225 28 24 0 1 2 1
672 1 0 0 0 4 118 50 31 0 1 0 0
673 0 0 0 1 4 179 30 19 1 0 0 0
674 0 0 0 1 7 235 37 29 1 1 1 1
675 0 0 1 0 9 225 41 28 1 2 2 0
676 0 0 0 1 9 235 32 25 1 0 0 0
677 1 0 0 0 9 118 37 28 0 0 0 0
678 0 0 0 1 9 235 43 38 0 1 0 1
679 1 0 0 0 10 179 30 19 1 0 0 0
680 0 0 0 1 10 291 40 25 0 1 1 0
681 1 0 0 0 10 225 41 28 1 2 2 1
682 0 0 1 0 11 300 43 25 0 2 1 1
683 0 0 0 1 11 225 41 28 1 2 2 1
684 0 0 0 1 11 179 30 19 1 0 0 0
685 0 0 0 1 5 118 50 31 0 1 0 0
686 1 0 0 0 5 118 50 31 0 1 0 0
687 0 0 0 1 5 118 37 28 0 0 0 0
688 0 0 0 0 5 118 50 31 0 1 0 0
689 0 0 0 1 5 179 30 19 1 0 0 0
690 0 0 0 0 5 378 36 21 0 2 4 0
691 0 1 0 0 5 179 40 22 1 2 0 0
692 1 0 0 0 5 155 34 25 0 2 0 1
693 1 0 0 0 5 235 32 25 1 0 0 1
694 0 0 0 1 5 291 40 25 0 1 1 1
695 1 0 0 0 5 179 40 22 1 2 0 1
696 1 0 0 0 5 225 28 24 0 1 2 0
697 1 0 0 0 5 330 28 25 1 0 0 1
698 0 0 0 1 5 235 32 25 1 0 0 0
699 0 0 0 1 5 291 40 25 0 1 1 0

700 rows × 12 columns

In [19]:
# select all rows and all columns but the last one (unlike the slice above, this excludes the targets)
data_with_targets.iloc[:,:-1]
Out[19]:
Reason_1 Reason_2 Reason_3 Reason_4 Month Value Transportation Expense Age Body Mass Index Education Children Pet
0 0 0 0 1 7 289 33 30 0 2 1
1 0 0 0 0 7 118 50 31 0 1 0
2 0 0 0 1 7 179 38 31 0 0 0
3 1 0 0 0 7 279 39 24 0 2 0
4 0 0 0 1 7 289 33 30 0 2 1
5 0 0 0 1 10 179 38 31 0 0 0
6 0 0 0 1 7 361 28 27 0 1 4
7 0 0 0 1 7 260 36 23 0 4 0
8 0 0 1 0 6 155 34 25 0 2 0
9 0 0 0 1 7 235 37 29 1 1 1
10 1 0 0 0 7 260 36 23 0 4 0
11 1 0 0 0 7 260 36 23 0 4 0
12 1 0 0 0 7 260 36 23 0 4 0
13 1 0 0 0 7 179 38 31 0 0 0
14 0 0 0 1 7 179 38 31 0 0 0
15 1 0 0 0 7 246 41 23 0 0 0
16 0 0 0 1 7 179 38 31 0 0 0
17 0 0 1 0 7 179 38 31 0 0 0
18 1 0 0 0 7 189 33 25 0 2 2
19 0 0 0 1 5 248 47 32 0 2 1
20 1 0 0 0 12 330 28 25 1 0 0
21 1 0 0 0 3 179 38 31 0 0 0
22 1 0 0 0 10 361 28 27 0 1 4
23 0 0 0 1 8 260 36 23 0 4 0
24 0 0 1 0 8 289 33 30 0 2 1
25 0 0 0 1 8 361 28 27 0 1 4
26 0 0 0 1 4 289 33 30 0 2 1
27 0 0 0 1 12 157 29 22 0 0 0
28 0 0 1 0 8 289 33 30 0 2 1
29 0 0 0 1 8 179 38 31 0 0 0
... ... ... ... ... ... ... ... ... ... ... ...
670 0 0 0 1 4 155 34 25 0 2 0
671 0 0 1 0 4 225 28 24 0 1 2
672 1 0 0 0 4 118 50 31 0 1 0
673 0 0 0 1 4 179 30 19 1 0 0
674 0 0 0 1 7 235 37 29 1 1 1
675 0 0 1 0 9 225 41 28 1 2 2
676 0 0 0 1 9 235 32 25 1 0 0
677 1 0 0 0 9 118 37 28 0 0 0
678 0 0 0 1 9 235 43 38 0 1 0
679 1 0 0 0 10 179 30 19 1 0 0
680 0 0 0 1 10 291 40 25 0 1 1
681 1 0 0 0 10 225 41 28 1 2 2
682 0 0 1 0 11 300 43 25 0 2 1
683 0 0 0 1 11 225 41 28 1 2 2
684 0 0 0 1 11 179 30 19 1 0 0
685 0 0 0 1 5 118 50 31 0 1 0
686 1 0 0 0 5 118 50 31 0 1 0
687 0 0 0 1 5 118 37 28 0 0 0
688 0 0 0 0 5 118 50 31 0 1 0
689 0 0 0 1 5 179 30 19 1 0 0
690 0 0 0 0 5 378 36 21 0 2 4
691 0 1 0 0 5 179 40 22 1 2 0
692 1 0 0 0 5 155 34 25 0 2 0
693 1 0 0 0 5 235 32 25 1 0 0
694 0 0 0 1 5 291 40 25 0 1 1
695 1 0 0 0 5 179 40 22 1 2 0
696 1 0 0 0 5 225 28 24 0 1 2
697 1 0 0 0 5 330 28 25 1 0 0
698 0 0 0 1 5 235 32 25 1 0 0
699 0 0 0 1 5 291 40 25 0 1 1

700 rows × 11 columns

In [20]:
# Create a variable that will contain the inputs (everything without the targets)
unscaled_inputs = data_with_targets.iloc[:,:-1]

Standardize the data

In [21]:
# import the relevant library
from sklearn.preprocessing import StandardScaler


# define scaler as an object
absenteeism_scaler = StandardScaler()
In [22]:
# import the libraries needed to create the Custom Scaler
# note that all of them are a part of the sklearn package

from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.preprocessing import StandardScaler

# create the Custom Scaler class

class CustomScaler(BaseEstimator,TransformerMixin): 
    
    # init or what information we need to declare a CustomScaler object
    # and what is calculated/declared as we do
    
    def __init__(self, columns, copy=True, with_mean=True, with_std=True):
        
        # scaler is nothing but a StandardScaler object
        # (pass the arguments by keyword; newer versions of sklearn no longer accept them positionally)
        self.scaler = StandardScaler(copy=copy, with_mean=with_mean, with_std=with_std)
        # with some columns 'twist'
        self.columns = columns
        self.mean_ = None
        self.var_ = None
        
    
    # the fit method, which, again, is based on StandardScaler
    
    def fit(self, X, y=None):
        self.scaler.fit(X[self.columns], y)
        self.mean_ = np.mean(X[self.columns])
        self.var_ = np.var(X[self.columns])
        return self
    
    # the transform method which does the actual scaling

    def transform(self, X, y=None, copy=None):
        
        # record the initial order of the columns
        init_col_order = X.columns
        
        # scale all features that you chose when creating the instance of the class
        # (keep the original index so the concat below aligns rows correctly)
        X_scaled = pd.DataFrame(self.scaler.transform(X[self.columns]), columns=self.columns, index=X.index)
        
        # declare a variable containing all information that was not scaled
        X_not_scaled = X.loc[:,~X.columns.isin(self.columns)]
        
        # return a data frame which contains all scaled features and all 'not scaled' features
        # use the original order (that you recorded in the beginning)
        return pd.concat([X_not_scaled, X_scaled], axis=1)[init_col_order]
In [23]:
# check all the columns that we've got
unscaled_inputs.columns.values
Out[23]:
array(['Reason_1', 'Reason_2', 'Reason_3', 'Reason_4', 'Month Value',
       'Transportation Expense', 'Age', 'Body Mass Index', 'Education',
       'Children', 'Pet'], dtype=object)
In [63]:
# choose the columns to scale
# we later augmented this code and commented it out
# columns_to_scale = ['Month Value','Day of the Week', 'Transportation Expense', 'Distance to Work',
#                     'Age', 'Daily Work Load Average', 'Body Mass Index', 'Children', 'Pet']

# instead, select the columns to omit
columns_to_omit = ['Reason_1', 'Reason_2', 'Reason_3', 'Reason_4','Education']
In [26]:
# create the columns to scale, based on the columns to omit
# use list comprehension to iterate over the list
columns_to_scale = [x for x in unscaled_inputs.columns.values if x not in columns_to_omit]
In [27]:
# declare a scaler object, specifying the columns you want to scale
absenteeism_scaler = CustomScaler(columns_to_scale)
In [28]:
# fit the data (calculate mean and standard deviation); they are automatically stored inside the object 
absenteeism_scaler.fit(unscaled_inputs)
Out[28]:
CustomScaler(columns=['Month Value', 'Transportation Expense', 'Age', 'Body Mass Index', 'Children', 'Pet'],
       copy=None, with_mean=None, with_std=None)
In [29]:
# standardize the data, using the transform method
# in the previous cell we fitted the data: we found the internal parameters (means and standard
# deviations) that will be used to transform the data
# transforming applies these parameters to our inputs
# note that when we get new data, we can simply call the scaler again and transform it in exactly the same way
scaled_inputs = absenteeism_scaler.transform(unscaled_inputs)
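To make the comment above concrete: because the scaler stores the training-time means and variances, any future batch of raw inputs can be standardized in exactly the same way. A minimal sketch, where 'Absenteeism_new_data.csv' is a hypothetical file with the same feature columns:

# hypothetical: load fresh, unscaled observations and reuse the fitted scaler
# new_data = pd.read_csv('Absenteeism_new_data.csv')
# new_scaled = absenteeism_scaler.transform(new_data)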
In [30]:
# note that thanks to the CustomScaler, scaled_inputs is still a data frame
# (a plain StandardScaler would have returned an ndarray, since sklearn works with ndarrays)
scaled_inputs
Out[30]:
Reason_1 Reason_2 Reason_3 Reason_4 Month Value Transportation Expense Age Body Mass Index Education Children Pet
0 0 0 0 1 0.030796 1.005844 -0.536062 0.767431 0 0.880469 0.268487
1 0 0 0 0 0.030796 -1.574681 2.130803 1.002633 0 -0.019280 -0.589690
2 0 0 0 1 0.030796 -0.654143 0.248310 1.002633 0 -0.919030 -0.589690
3 1 0 0 0 0.030796 0.854936 0.405184 -0.643782 0 0.880469 -0.589690
4 0 0 0 1 0.030796 1.005844 -0.536062 0.767431 0 0.880469 0.268487
5 0 0 0 1 0.929019 -0.654143 0.248310 1.002633 0 -0.919030 -0.589690
6 0 0 0 1 0.030796 2.092381 -1.320435 0.061825 0 -0.019280 2.843016
7 0 0 0 1 0.030796 0.568211 -0.065439 -0.878984 0 2.679969 -0.589690
8 0 0 1 0 -0.268611 -1.016322 -0.379188 -0.408580 0 0.880469 -0.589690
9 0 0 0 1 0.030796 0.190942 0.091435 0.532229 1 -0.019280 0.268487
10 1 0 0 0 0.030796 0.568211 -0.065439 -0.878984 0 2.679969 -0.589690
11 1 0 0 0 0.030796 0.568211 -0.065439 -0.878984 0 2.679969 -0.589690
12 1 0 0 0 0.030796 0.568211 -0.065439 -0.878984 0 2.679969 -0.589690
13 1 0 0 0 0.030796 -0.654143 0.248310 1.002633 0 -0.919030 -0.589690
14 0 0 0 1 0.030796 -0.654143 0.248310 1.002633 0 -0.919030 -0.589690
15 1 0 0 0 0.030796 0.356940 0.718933 -0.878984 0 -0.919030 -0.589690
16 0 0 0 1 0.030796 -0.654143 0.248310 1.002633 0 -0.919030 -0.589690
17 0 0 1 0 0.030796 -0.654143 0.248310 1.002633 0 -0.919030 -0.589690
18 1 0 0 0 0.030796 -0.503235 -0.536062 -0.408580 0 0.880469 1.126663
19 0 0 0 1 -0.568019 0.387122 1.660180 1.237836 0 0.880469 0.268487
20 1 0 0 0 1.527833 1.624567 -1.320435 -0.408580 1 -0.919030 -0.589690
21 1 0 0 0 -1.166834 -0.654143 0.248310 1.002633 0 -0.919030 -0.589690
22 1 0 0 0 0.929019 2.092381 -1.320435 0.061825 0 -0.019280 2.843016
23 0 0 0 1 0.330204 0.568211 -0.065439 -0.878984 0 2.679969 -0.589690
24 0 0 1 0 0.330204 1.005844 -0.536062 0.767431 0 0.880469 0.268487
25 0 0 0 1 0.330204 2.092381 -1.320435 0.061825 0 -0.019280 2.843016
26 0 0 0 1 -0.867426 1.005844 -0.536062 0.767431 0 0.880469 0.268487
27 0 0 0 1 1.527833 -0.986140 -1.163560 -1.114186 0 -0.919030 -0.589690
28 0 0 1 0 0.330204 1.005844 -0.536062 0.767431 0 0.880469 0.268487
29 0 0 0 1 0.330204 -0.654143 0.248310 1.002633 0 -0.919030 -0.589690
... ... ... ... ... ... ... ... ... ... ... ...
670 0 0 0 1 -0.867426 -1.016322 -0.379188 -0.408580 0 0.880469 -0.589690
671 0 0 1 0 -0.867426 0.040034 -1.320435 -0.643782 0 -0.019280 1.126663
672 1 0 0 0 -0.867426 -1.574681 2.130803 1.002633 0 -0.019280 -0.589690
673 0 0 0 1 -0.867426 -0.654143 -1.006686 -1.819793 1 -0.919030 -0.589690
674 0 0 0 1 0.030796 0.190942 0.091435 0.532229 1 -0.019280 0.268487
675 0 0 1 0 0.629611 0.040034 0.718933 0.297027 1 0.880469 1.126663
676 0 0 0 1 0.629611 0.190942 -0.692937 -0.408580 1 -0.919030 -0.589690
677 1 0 0 0 0.629611 -1.574681 0.091435 0.297027 0 -0.919030 -0.589690
678 0 0 0 1 0.629611 0.190942 1.032682 2.649049 0 -0.019280 -0.589690
679 1 0 0 0 0.929019 -0.654143 -1.006686 -1.819793 1 -0.919030 -0.589690
680 0 0 0 1 0.929019 1.036026 0.562059 -0.408580 0 -0.019280 0.268487
681 1 0 0 0 0.929019 0.040034 0.718933 0.297027 1 0.880469 1.126663
682 0 0 1 0 1.228426 1.171843 1.032682 -0.408580 0 0.880469 0.268487
683 0 0 0 1 1.228426 0.040034 0.718933 0.297027 1 0.880469 1.126663
684 0 0 0 1 1.228426 -0.654143 -1.006686 -1.819793 1 -0.919030 -0.589690
685 0 0 0 1 -0.568019 -1.574681 2.130803 1.002633 0 -0.019280 -0.589690
686 1 0 0 0 -0.568019 -1.574681 2.130803 1.002633 0 -0.019280 -0.589690
687 0 0 0 1 -0.568019 -1.574681 0.091435 0.297027 0 -0.919030 -0.589690
688 0 0 0 0 -0.568019 -1.574681 2.130803 1.002633 0 -0.019280 -0.589690
689 0 0 0 1 -0.568019 -0.654143 -1.006686 -1.819793 1 -0.919030 -0.589690
690 0 0 0 0 -0.568019 2.348925 -0.065439 -1.349389 0 0.880469 2.843016
691 0 1 0 0 -0.568019 -0.654143 0.562059 -1.114186 1 0.880469 -0.589690
692 1 0 0 0 -0.568019 -1.016322 -0.379188 -0.408580 0 0.880469 -0.589690
693 1 0 0 0 -0.568019 0.190942 -0.692937 -0.408580 1 -0.919030 -0.589690
694 0 0 0 1 -0.568019 1.036026 0.562059 -0.408580 0 -0.019280 0.268487
695 1 0 0 0 -0.568019 -0.654143 0.562059 -1.114186 1 0.880469 -0.589690
696 1 0 0 0 -0.568019 0.040034 -1.320435 -0.643782 0 -0.019280 1.126663
697 1 0 0 0 -0.568019 1.624567 -1.320435 -0.408580 1 -0.919030 -0.589690
698 0 0 0 1 -0.568019 0.190942 -0.692937 -0.408580 1 -0.919030 -0.589690
699 0 0 0 1 -0.568019 1.036026 0.562059 -0.408580 0 -0.019280 0.268487

700 rows × 11 columns

In [31]:
# check the shape of the inputs
scaled_inputs.shape
Out[31]:
(700, 11)

Split the data into train & test and shuffle

Import the relevant module

In [32]:
# import train_test_split so we can split our data into train and test
from sklearn.model_selection import train_test_split

Split

In [33]:
# check how this method works
train_test_split(scaled_inputs, targets)
Out[33]:
[     Reason_1  Reason_2  Reason_3  Reason_4  Month Value  \
 407         0         0         0         0    -1.166834   
 473         0         0         0         1     0.030796   
 582         1         0         0         0    -1.765648   
 498         0         0         0         1    -0.568019   
 694         0         0         0         1    -0.568019   
 63          0         0         0         1     0.929019   
 43          0         0         1         0     0.330204   
 286         0         0         0         1     0.629611   
 540         0         0         0         1     1.228426   
 160         1         0         0         0    -1.466241   
 83          0         0         1         0     1.527833   
 148         0         0         0         1    -1.466241   
 691         0         1         0         0    -0.568019   
 342         0         0         0         1    -0.568019   
 249         0         0         1         0     1.228426   
 576         1         0         0         0     1.228426   
 300         0         0         0         0     0.929019   
 668         0         1         0         0    -0.867426   
 346         0         0         0         1     1.527833   
 393         0         0         0         1     0.929019   
 660         1         0         0         0     0.629611   
 333         0         0         0         1     1.228426   
 688         0         0         0         0    -0.568019   
 692         1         0         0         0    -0.568019   
 572         0         0         0         1    -0.867426   
 54          0         0         0         0     0.629611   
 296         1         0         0         0     1.527833   
 20          1         0         0         0     1.527833   
 295         1         0         0         0     1.527833   
 287         1         0         0         0     0.629611   
 ..        ...       ...       ...       ...          ...   
 199         1         0         0         0    -0.867426   
 284         0         0         0         1     0.629611   
 345         1         0         0         0     1.527833   
 504         0         0         0         1     0.629611   
 227         1         0         0         0    -0.268611   
 418         0         0         0         1     1.527833   
 138         1         0         0         0    -1.166834   
 200         0         0         1         0    -0.867426   
 30          0         0         1         0     0.330204   
 336         0         0         0         0     1.228426   
 608         0         0         0         1    -1.466241   
 596         1         0         0         0    -1.466241   
 689         0         0         0         1    -0.568019   
 143         0         0         0         1     1.527833   
 23          0         0         0         1     0.330204   
 191         1         0         0         0    -0.268611   
 559         0         0         0         1     0.330204   
 308         0         0         0         1     0.929019   
 39          0         0         0         1     0.330204   
 205         0         0         0         1    -0.568019   
 6           0         0         0         1     0.030796   
 164         1         0         0         0     0.030796   
 626         0         0         0         1     0.330204   
 621         0         0         0         1    -0.268611   
 508         1         0         0         0    -0.568019   
 283         0         0         0         1     0.629611   
 555         1         0         0         0    -0.568019   
 338         0         0         0         1     1.228426   
 149         0         0         0         1    -1.466241   
 304         0         0         0         1     0.929019   
 
      Transportation Expense       Age  Body Mass Index  Education  Children  \
 407               -1.574681  2.130803         1.002633          0 -0.019280   
 473               -1.574681  2.130803         1.002633          0 -0.019280   
 582                1.005844 -0.536062         0.767431          0  0.880469   
 498               -1.016322 -0.379188        -0.408580          0  0.880469   
 694                1.036026  0.562059        -0.408580          0 -0.019280   
 63                -1.574681  0.091435         0.297027          0 -0.919030   
 43                 0.190942  1.032682         2.649049          0 -0.019280   
 286                1.036026  0.562059        -0.408580          0 -0.019280   
 540                2.092381 -1.320435         0.061825          0 -0.019280   
 160                0.568211 -0.065439        -0.878984          0  2.679969   
 83                -0.654143  0.562059        -1.114186          1  0.880469   
 148               -0.654143 -1.006686        -1.819793          1 -0.919030   
 691               -0.654143  0.562059        -1.114186          1  0.880469   
 342                0.190942  0.091435         0.532229          1 -0.019280   
 249                1.005844 -0.536062         0.767431          0  0.880469   
 576                1.005844  1.973929         2.178644          0 -0.919030   
 300                0.190942  1.032682         2.649049          0 -0.019280   
 668                0.190942 -0.692937        -0.408580          1 -0.919030   
 346               -0.654143  0.248310         1.002633          0 -0.919030   
 393                0.568211 -0.065439        -0.878984          0  2.679969   
 660                2.213108 -0.849811        -0.408580          0  1.780219   
 333               -0.654143  0.248310         1.002633          0 -0.919030   
 688               -1.574681  2.130803         1.002633          0 -0.019280   
 692               -1.016322 -0.379188        -0.408580          0  0.880469   
 572               -0.654143  0.562059        -1.114186          1  0.880469   
 54                 1.005844 -0.536062         0.767431          0  0.880469   
 296               -1.574681  0.091435         0.297027          0 -0.919030   
 20                 1.624567 -1.320435        -0.408580          1 -0.919030   
 295               -0.654143 -1.006686        -1.819793          1 -0.919030   
 287                1.036026  0.562059        -0.408580          0 -0.019280   
 ..                      ...       ...              ...        ...       ...   
 199               -1.016322 -0.379188        -0.408580          0  0.880469   
 284               -1.574681  2.130803         1.002633          0 -0.019280   
 345                0.356940  0.718933        -0.878984          0 -0.919030   
 504               -0.654143 -1.006686        -1.819793          1 -0.919030   
 227                0.356940  0.718933        -0.878984          0 -0.919030   
 418               -0.654143  0.248310         1.002633          0 -0.919030   
 138                0.356940  0.718933        -0.878984          0 -0.919030   
 200                2.348925 -0.065439        -1.349389          0  0.880469   
 30                -0.654143  0.248310         1.002633          0 -0.919030   
 336                2.348925 -0.065439        -1.349389          0  0.880469   
 608               -1.016322 -0.379188        -0.408580          0  0.880469   
 596                0.040034 -1.320435        -0.643782          0 -0.019280   
 689               -0.654143 -1.006686        -1.819793          1 -0.919030   
 143                1.005844 -0.536062         0.767431          0  0.880469   
 23                 0.568211 -0.065439        -0.878984          0  2.679969   
 191               -0.654143  0.248310         1.002633          0 -0.919030   
 559                0.040034 -1.320435        -0.643782          0 -0.019280   
 308               -0.654143 -1.006686        -1.819793          1 -0.919030   
 39                 0.568211 -0.065439        -0.878984          0  2.679969   
 205               -1.016322 -0.379188        -0.408580          0  0.880469   
 6                  2.092381 -1.320435         0.061825          0 -0.019280   
 164               -0.654143  0.562059        -1.114186          1  0.880469   
 626                0.040034 -1.320435        -0.643782          0 -0.019280   
 621                0.387122  1.660180         1.237836          0  0.880469   
 508                0.190942 -0.692937        -0.408580          1 -0.919030   
 283                0.190942  1.032682         2.649049          0 -0.019280   
 555               -0.654143  0.248310         1.002633          0 -0.919030   
 338               -0.654143  0.248310         1.002633          0 -0.919030   
 149               -0.578689 -1.477309        -1.349389          0 -0.919030   
 304                0.190942  1.032682         2.649049          0 -0.019280   
 
           Pet  
 407 -0.589690  
 473 -0.589690  
 582  0.268487  
 498 -0.589690  
 694  0.268487  
 63  -0.589690  
 43  -0.589690  
 286  0.268487  
 540  2.843016  
 160 -0.589690  
 83  -0.589690  
 148 -0.589690  
 691 -0.589690  
 342  0.268487  
 249  0.268487  
 576  1.126663  
 300 -0.589690  
 668 -0.589690  
 346 -0.589690  
 393 -0.589690  
 660 -0.589690  
 333 -0.589690  
 688 -0.589690  
 692 -0.589690  
 572 -0.589690  
 54   0.268487  
 296 -0.589690  
 20  -0.589690  
 295 -0.589690  
 287  0.268487  
 ..        ...  
 199 -0.589690  
 284 -0.589690  
 345 -0.589690  
 504 -0.589690  
 227 -0.589690  
 418 -0.589690  
 138 -0.589690  
 200  2.843016  
 30  -0.589690  
 336  2.843016  
 608 -0.589690  
 596  1.126663  
 689 -0.589690  
 143  0.268487  
 23  -0.589690  
 191 -0.589690  
 559  1.126663  
 308 -0.589690  
 39  -0.589690  
 205 -0.589690  
 6    2.843016  
 164 -0.589690  
 626  1.126663  
 621  0.268487  
 508 -0.589690  
 283 -0.589690  
 555 -0.589690  
 338 -0.589690  
 149 -0.589690  
 304 -0.589690  
 
 [525 rows x 11 columns],
      Reason_1  Reason_2  Reason_3  Reason_4  Month Value  \
 604         0         0         0         1    -1.466241   
 134         0         0         0         1    -1.765648   
 563         0         0         0         1     1.527833   
 615         0         0         0         1    -1.466241   
 224         1         0         0         0    -0.268611   
 231         1         0         0         0     0.330204   
 614         0         0         0         1    -1.466241   
 360         0         0         0         1     0.629611   
 386         0         0         0         1    -1.466241   
 232         0         0         0         1     0.629611   
 126         0         0         0         1    -1.765648   
 221         0         0         0         1    -0.268611   
 419         0         0         0         1    -0.867426   
 642         0         0         0         1    -1.166834   
 322         1         0         0         0     1.228426   
 190         0         0         0         1    -0.867426   
 455         1         0         0         0    -0.268611   
 509         0         0         0         1    -0.268611   
 658         1         0         0         0    -0.568019   
 632         0         0         0         1    -1.166834   
 254         0         0         0         1     0.030796   
 233         1         0         0         0     0.929019   
 76          0         0         0         1     0.929019   
 382         0         0         0         1    -1.466241   
 33          0         0         1         0     0.330204   
 123         0         0         0         1     1.527833   
 178         1         0         0         0    -1.166834   
 538         1         0         0         0     1.228426   
 500         0         0         0         1     0.330204   
 121         0         0         0         1     1.228426   
 ..        ...       ...       ...       ...          ...   
 400         0         0         0         0    -1.166834   
 678         0         0         0         1     0.629611   
 430         0         0         0         1     1.228426   
 341         0         0         0         1    -0.568019   
 193         0         0         0         1     0.330204   
 404         1         0         0         0    -1.166834   
 397         0         0         0         1    -1.166834   
 277         0         0         0         0     0.629611   
 591         0         0         0         1     0.330204   
 476         0         0         0         1     0.030796   
 166         0         0         0         1    -1.166834   
 480         0         0         0         1    -1.166834   
 607         0         0         0         1    -1.466241   
 319         1         0         0         0     0.330204   
 465         0         0         0         1     0.030796   
 128         0         0         0         1     1.527833   
 489         0         0         0         1     0.330204   
 428         0         0         0         1     0.929019   
 466         0         0         0         1     0.030796   
 49          1         0         0         0     0.629611   
 657         0         0         1         0    -0.867426   
 309         0         0         0         1     0.929019   
 198         0         0         1         0    -0.867426   
 332         1         0         0         0     1.228426   
 181         0         0         0         1    -1.166834   
 469         0         0         0         1     0.030796   
 429         0         0         0         1     0.929019   
 443         0         0         0         1    -0.268611   
 352         1         0         0         0     1.527833   
 630         1         0         0         0    -1.166834   
 
      Transportation Expense       Age  Body Mass Index  Education  Children  \
 604               -0.654143 -1.006686        -1.819793          1 -0.919030   
 134               -1.574681  0.091435         0.297027          0 -0.919030   
 563                0.190942  1.032682         2.649049          0 -0.019280   
 615               -0.654143  0.248310         1.002633          0 -0.919030   
 224                0.356940  0.718933        -0.878984          0 -0.919030   
 231               -1.574681  2.130803         1.002633          0 -0.019280   
 614                0.040034 -1.320435        -0.643782          0 -0.019280   
 360               -1.016322 -0.379188        -0.408580          0  0.880469   
 386               -0.654143  0.248310         1.002633          0 -0.919030   
 232                2.348925 -0.065439        -1.349389          0  0.880469   
 126               -1.574681  0.091435         0.297027          0 -0.919030   
 221                2.092381 -1.320435         0.061825          0 -0.019280   
 419               -1.574681  2.130803         1.002633          0 -0.019280   
 642                0.387122  1.660180         1.237836          0  0.880469   
 322                2.348925 -0.065439        -1.349389          0  0.880469   
 190                0.568211 -0.065439        -0.878984          0  2.679969   
 455               -0.654143  0.248310         1.002633          0 -0.919030   
 509                0.356940  0.718933        -0.878984          0 -0.919030   
 658               -0.503235 -0.536062        -0.408580          0  0.880469   
 632               -0.654143  0.248310         1.002633          0 -0.919030   
 254                1.005844 -0.536062         0.767431          0  0.880469   
 233               -0.654143  0.248310         1.002633          0 -0.919030   
 76                 0.040034 -1.320435        -0.643782          0 -0.019280   
 382                0.356940  0.718933        -0.878984          0 -0.919030   
 33                 0.190942  1.817054         1.473038          0 -0.019280   
 123               -1.574681  0.091435         0.297027          0 -0.919030   
 178                0.040034 -1.320435        -0.643782          0 -0.019280   
 538                0.190942 -0.692937        -0.408580          1 -0.919030   
 500               -0.654143 -1.006686        -1.819793          1 -0.919030   
 121               -1.574681  0.091435         0.297027          0 -0.919030   
 ..                      ...       ...              ...        ...       ...   
 400                2.213108 -0.849811        -0.408580          0  1.780219   
 678                0.190942  1.032682         2.649049          0 -0.019280   
 430                2.092381 -1.320435         0.061825          0 -0.019280   
 341               -0.654143  0.248310         1.002633          0 -0.919030   
 193                1.036026  0.562059        -0.408580          0 -0.019280   
 404               -1.574681  2.130803         1.002633          0 -0.019280   
 397                2.348925 -0.065439        -1.349389          0  0.880469   
 277                1.036026 -0.692937        -0.878984          0 -0.919030   
 591               -0.654143 -1.006686        -1.819793          1 -0.919030   
 476                0.190942  1.032682         2.649049          0 -0.019280   
 166                0.568211 -0.065439        -0.878984          0  2.679969   
 480               -0.654143  0.248310         1.002633          0 -0.919030   
 607               -0.654143  0.248310         1.002633          0 -0.919030   
 319               -0.654143  0.248310         1.002633          0 -0.919030   
 465               -1.574681  0.091435         0.297027          0 -0.919030   
 128               -1.574681  0.091435         0.297027          0 -0.919030   
 489                1.624567 -1.320435        -0.408580          1 -0.919030   
 428               -0.654143 -1.006686        -1.819793          1 -0.919030   
 466                0.568211 -0.065439        -0.878984          0  2.679969   
 49                 1.036026  0.562059        -0.408580          0 -0.019280   
 657                0.387122  1.660180         1.237836          0  0.880469   
 309               -1.574681  2.130803         1.002633          0 -0.019280   
 198                1.005844 -0.536062         0.767431          0  0.880469   
 332                0.356940  0.718933        -0.878984          0 -0.919030   
 181                0.040034 -1.320435        -0.643782          0 -0.019280   
 469                0.190942  1.817054         1.473038          0 -0.019280   
 429               -0.654143  0.248310         1.002633          0 -0.919030   
 443               -1.574681  0.091435         0.297027          0 -0.919030   
 352                0.190942  1.032682         2.649049          0 -0.019280   
 630               -0.654143  0.248310         1.002633          0 -0.919030   
 
           Pet  
 604 -0.589690  
 134 -0.589690  
 563 -0.589690  
 615 -0.589690  
 224 -0.589690  
 231 -0.589690  
 614  1.126663  
 360 -0.589690  
 386 -0.589690  
 232  2.843016  
 126 -0.589690  
 221  2.843016  
 419 -0.589690  
 642  0.268487  
 322  2.843016  
 190 -0.589690  
 455 -0.589690  
 509 -0.589690  
 658  1.126663  
 632 -0.589690  
 254  0.268487  
 233 -0.589690  
 76   1.126663  
 382 -0.589690  
 33   3.701192  
 123 -0.589690  
 178  1.126663  
 538 -0.589690  
 500 -0.589690  
 121 -0.589690  
 ..        ...  
 400 -0.589690  
 678 -0.589690  
 430  2.843016  
 341 -0.589690  
 193  0.268487  
 404 -0.589690  
 397  2.843016  
 277 -0.589690  
 591 -0.589690  
 476 -0.589690  
 166 -0.589690  
 480 -0.589690  
 607 -0.589690  
 319 -0.589690  
 465 -0.589690  
 128 -0.589690  
 489 -0.589690  
 428 -0.589690  
 466 -0.589690  
 49   0.268487  
 657  0.268487  
 309 -0.589690  
 198  0.268487  
 332 -0.589690  
 181  1.126663  
 469  3.701192  
 429 -0.589690  
 443 -0.589690  
 352 -0.589690  
 630 -0.589690  
 
 [175 rows x 11 columns],
 array([0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0,
        0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0,
        0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1,
        0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0,
        0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0,
        0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0,
        0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1,
        1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1,
        0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1,
        0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0,
        0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1,
        0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1,
        1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1,
        0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0,
        0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0,
        0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0,
        1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0,
        0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1,
        0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0,
        1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1,
        1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0]),
 array([0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0,
        0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1,
        1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1,
        1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0,
        1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0,
        0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0,
        0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0,
        1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1])]
In [34]:
# declare 4 variables for the split
x_train, x_test, y_train, y_test = train_test_split(scaled_inputs, targets,
                                                    # train_size = 0.8,
                                                    test_size = 0.2, random_state = 20)
In [35]:
# check the shape of the train inputs and targets
print (x_train.shape, y_train.shape)
(560, 11) (560,)
In [36]:
# check the shape of the test inputs and targets
print (x_test.shape, y_test.shape)
(140, 11) (140,)
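Since train_test_split shuffles before splitting, it is worth confirming that the roughly 46% share of 1s survived the split; a quick check on the arrays we just created:

# share of 'excessive absenteeism' cases in each subset
# both numbers should land close to the 0.4557 computed earlier
print(y_train.mean(), y_test.mean())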

Logistic regression with sklearn

In [37]:
# import the LogReg model from sklearn
from sklearn.linear_model import LogisticRegression

# import the 'metrics' module, which includes important metrics we may want to use
from sklearn import metrics

Training the model

In [38]:
# create a logistic regression object
reg = LogisticRegression()
In [39]:
# fit the model on our train inputs
# that is basically the whole training part of the machine learning process
reg.fit(x_train,y_train)
Out[39]:
LogisticRegression(C=1.0, class_weight=None, dual=False, fit_intercept=True,
          intercept_scaling=1, max_iter=100, multi_class='ovr', n_jobs=1,
          penalty='l2', random_state=None, solver='liblinear', tol=0.0001,
          verbose=0, warm_start=False)
In [40]:
# assess the train accuracy of the model
reg.score(x_train,y_train)
Out[40]:
0.775

Manually check the accuracy

In [41]:
# find the outputs (predictions) of our model on the train inputs
model_outputs = reg.predict(x_train)
model_outputs
Out[41]:
array([0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0,
       0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1,
       0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0,
       0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0,
       0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0,
       0, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0,
       0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0,
       1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0,
       0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1,
       1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1,
       1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1,
       0, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1,
       0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0,
       0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0,
       0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1,
       1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1,
       0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0,
       1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0,
       0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0,
       0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0,
       0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0,
       1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0,
       0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0,
       0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1,
       0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0,
       0, 1, 0, 1, 1, 1, 0, 0, 0, 0])
In [42]:
# compare them with the targets
y_train
Out[42]:
array([0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0,
       1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1,
       1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0,
       0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1,
       1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0,
       0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1,
       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1,
       0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0,
       0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1,
       1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0,
       1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0,
       0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0,
       0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0,
       1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0,
       0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1,
       1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1,
       0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0,
       1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0,
       0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1,
       0, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0,
       0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0,
       1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0,
       1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1,
       0, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0,
       0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0,
       0, 0, 0, 1, 1, 1, 1, 0, 1, 0])
In [43]:
# ACTUALLY compare the two variables
model_outputs == y_train
Out[43]:
array([ True,  True,  True,  True,  True,  True,  True,  True,  True,
        True, False,  True, False, False,  True,  True,  True,  True,
       False,  True, False,  True, False, False,  True,  True,  True,
       False,  True,  True,  True,  True,  True,  True,  True,  True,
       False, False, False, False,  True,  True,  True,  True, False,
        True,  True,  True,  True,  True, False,  True,  True,  True,
        True,  True,  True,  True,  True, False,  True,  True,  True,
        True,  True,  True,  True,  True,  True, False,  True,  True,
        True,  True,  True, False,  True,  True,  True,  True,  True,
       False,  True, False,  True,  True, False, False, False,  True,
        True,  True,  True,  True,  True,  True,  True, False,  True,
        True,  True,  True,  True,  True,  True,  True,  True,  True,
        True,  True,  True,  True, False,  True,  True,  True,  True,
       False,  True,  True,  True,  True, False,  True,  True,  True,
        True,  True,  True,  True,  True, False,  True,  True,  True,
        True, False,  True,  True,  True,  True,  True,  True, False,
        True, False,  True, False,  True,  True,  True,  True, False,
       False, False,  True,  True, False,  True,  True,  True,  True,
        True, False,  True, False,  True, False,  True, False, False,
        True,  True,  True,  True,  True,  True,  True,  True,  True,
        True,  True,  True,  True,  True, False,  True,  True,  True,
        True, False,  True,  True,  True,  True,  True,  True,  True,
        True,  True,  True,  True,  True,  True, False,  True, False,
       False,  True,  True,  True,  True,  True,  True,  True, False,
        True, False,  True, False,  True,  True,  True,  True, False,
        True, False, False,  True,  True,  True,  True,  True, False,
       False, False,  True, False,  True,  True,  True, False,  True,
        True,  True,  True,  True,  True,  True, False,  True,  True,
        True,  True,  True,  True,  True,  True, False,  True,  True,
        True, False, False,  True,  True,  True,  True,  True,  True,
       False,  True,  True,  True,  True,  True,  True, False, False,
       False,  True,  True,  True,  True, False,  True, False,  True,
       False,  True,  True,  True,  True,  True, False,  True, False,
       False,  True,  True,  True,  True,  True, False,  True,  True,
        True,  True,  True, False,  True, False,  True,  True,  True,
        True,  True,  True,  True,  True, False,  True,  True, False,
       False,  True,  True,  True,  True,  True,  True,  True, False,
        True,  True,  True, False, False,  True,  True,  True, False,
        True, False,  True,  True,  True,  True,  True,  True,  True,
        True,  True,  True,  True,  True,  True,  True,  True,  True,
       False,  True,  True, False,  True, False,  True,  True,  True,
        True,  True,  True,  True,  True,  True,  True,  True,  True,
        True,  True,  True, False,  True,  True,  True, False,  True,
        True, False,  True, False,  True,  True,  True, False,  True,
        True,  True,  True,  True,  True, False,  True,  True,  True,
        True,  True,  True,  True,  True,  True,  True,  True,  True,
       False,  True,  True, False,  True, False,  True,  True, False,
        True,  True,  True, False,  True,  True, False,  True, False,
        True,  True,  True,  True,  True, False, False,  True,  True,
        True,  True,  True,  True,  True,  True,  True, False,  True,
       False,  True,  True,  True, False, False,  True,  True,  True,
        True, False,  True,  True,  True,  True,  True,  True,  True,
        True,  True, False,  True,  True, False, False,  True,  True,
       False,  True,  True,  True,  True,  True,  True, False, False,
        True,  True, False,  True,  True,  True,  True, False,  True,
        True,  True,  True,  True, False,  True,  True,  True,  True,
        True, False,  True,  True, False,  True,  True,  True,  True,
       False,  True,  True,  True,  True,  True, False,  True,  True,
       False, False, False,  True,  True, False,  True,  True,  True,
       False,  True,  True,  True,  True,  True,  True,  True,  True,
        True, False,  True,  True,  True,  True,  True,  True,  True,
        True,  True, False,  True,  True,  True,  True, False,  True,
       False,  True])
In [44]:
# find out in how many instances we predicted correctly
np.sum((model_outputs==y_train))
Out[44]:
434
In [45]:
# get the total number of instances
model_outputs.shape[0]
Out[45]:
560
In [46]:
# calculate the accuracy of the model
np.sum((model_outputs==y_train)) / model_outputs.shape[0]
Out[46]:
0.775
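The same figure can be obtained in one line with the 'metrics' module imported earlier; a small sketch:

# equivalent accuracy computation via sklearn
# should return 0.775, matching reg.score and the manual calculation above
metrics.accuracy_score(y_train, model_outputs)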

Finding the intercept and coefficients

In [47]:
# get the intercept (bias) of our model
reg.intercept_
Out[47]:
array([-1.43138127])
In [48]:
# get the coefficients (weights) of our model
reg.coef_
Out[48]:
array([[ 2.60237227,  0.84350002,  2.94078723,  0.63723433,  0.00565051,
         0.61953401, -0.17635497,  0.28410321, -0.26372527,  0.35195032,
        -0.27369766]])
In [49]:
# check what were the names of our columns
unscaled_inputs.columns.values
Out[49]:
array(['Reason_1', 'Reason_2', 'Reason_3', 'Reason_4', 'Month Value',
       'Transportation Expense', 'Age', 'Body Mass Index', 'Education',
       'Children', 'Pet'], dtype=object)
In [50]:
# save the names of the columns in an ad-hoc variable
feature_name = unscaled_inputs.columns.values
In [51]:
# use the coefficients from this table (they will be exported later and used in Tableau)
# transpose the model coefficients (reg.coef_) and throw them into a df (a vertical organization, so that they can be
# multiplied by certain matrices later)
summary_table = pd.DataFrame (columns=['Feature name'], data = feature_name)

# add the coefficient values to the summary table
summary_table['Coefficient'] = np.transpose(reg.coef_)

# display the summary table
summary_table
Out[51]:
Feature name Coefficient
0 Reason_1 2.602372
1 Reason_2 0.843500
2 Reason_3 2.940787
3 Reason_4 0.637234
4 Month Value 0.005651
5 Transportation Expense 0.619534
6 Age -0.176355
7 Body Mass Index 0.284103
8 Education -0.263725
9 Children 0.351950
10 Pet -0.273698
In [52]:
# do a little Python trick to move the intercept to the top of the summary table
# move all indices by 1
summary_table.index = summary_table.index + 1

# add the intercept at index 0
summary_table.loc[0] = ['Intercept', reg.intercept_[0]]

# sort the df by index
summary_table = summary_table.sort_index()
summary_table
Out[52]:
Feature name Coefficient
0 Intercept -1.431381
1 Reason_1 2.602372
2 Reason_2 0.843500
3 Reason_3 2.940787
4 Reason_4 0.637234
5 Month Value 0.005651
6 Transportation Expense 0.619534
7 Age -0.176355
8 Body Mass Index 0.284103
9 Education -0.263725
10 Children 0.351950
11 Pet -0.273698

Interpreting the coefficients

In [53]:
# create a new column called 'Odds_ratio', which will show the odds ratio of each feature
summary_table['Odds_ratio'] = np.exp(summary_table.Coefficient)
In [54]:
# display the df
summary_table
Out[54]:
Feature name Coefficient Odds_ratio
0 Intercept -1.431381 0.238979
1 Reason_1 2.602372 13.495716
2 Reason_2 0.843500 2.324489
3 Reason_3 2.940787 18.930743
4 Reason_4 0.637234 1.891243
5 Month Value 0.005651 1.005667
6 Transportation Expense 0.619534 1.858062
7 Age -0.176355 0.838320
8 Body Mass Index 0.284103 1.328570
9 Education -0.263725 0.768185
10 Children 0.351950 1.421838
11 Pet -0.273698 0.760562
In [55]:
# sort the table according to the odds ratio
# note that by default, the sort_values method sorts in ascending order, so we pass ascending=False
summary_table.sort_values('Odds_ratio', ascending=False)
Out[55]:
Feature name Coefficient Odds_ratio
3 Reason_3 2.940787 18.930743
1 Reason_1 2.602372 13.495716
2 Reason_2 0.843500 2.324489
4 Reason_4 0.637234 1.891243
6 Transportation Expense 0.619534 1.858062
10 Children 0.351950 1.421838
8 Body Mass Index 0.284103 1.328570
5 Month Value 0.005651 1.005667
7 Age -0.176355 0.838320
9 Education -0.263725 0.768185
11 Pet -0.273698 0.760562
0 Intercept -1.431381 0.238979
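To read the sorted table: each odds ratio (e raised to the coefficient) is the factor by which the odds of excessive absenteeism are multiplied for a one-unit increase in the feature (one standard deviation for the scaled features, 0 to 1 for the dummies). A worked example with the numbers above:

# Transportation Expense has a coefficient of ~0.6195, so
np.exp(0.619534)
# ~1.86: one standard deviation more in transportation expense multiplies
# the odds of excessive absenteeism by about 1.86 (an ~86% increase)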

Testing the model

In [56]:
# assess the test accuracy of the model
reg.score(x_test,y_test)
Out[56]:
0.7357142857142858
In [57]:
# find the predicted probabilities of each class
# the first column shows the probability that a given observation is a 0, while the second shows the probability that it is a 1
predicted_proba = reg.predict_proba(x_test)

# let's check that out
predicted_proba
Out[57]:
array([[0.75308922, 0.24691078],
       [0.60926091, 0.39073909],
       [0.4859575 , 0.5140425 ],
       [0.7552847 , 0.2447153 ],
       [0.0839675 , 0.9160325 ],
       [0.30192695, 0.69807305],
       [0.30166774, 0.69833226],
       [0.1151045 , 0.8848955 ],
       [0.73775967, 0.26224033],
       [0.75403176, 0.24596824],
       [0.50719215, 0.49280785],
       [0.19719276, 0.80280724],
       [0.06163196, 0.93836804],
       [0.70917025, 0.29082975],
       [0.29280547, 0.70719453],
       [0.5241047 , 0.4758953 ],
       [0.50676929, 0.49323071],
       [0.50888352, 0.49111648],
       [0.367008  , 0.632992  ],
       [0.06355661, 0.93644339],
       [0.73644831, 0.26355169],
       [0.7552847 , 0.2447153 ],
       [0.47457156, 0.52542844],
       [0.47288443, 0.52711557],
       [0.22026535, 0.77973465],
       [0.73808685, 0.26191315],
       [0.51184512, 0.48815488],
       [0.87683579, 0.12316421],
       [0.23445563, 0.76554437],
       [0.7552847 , 0.2447153 ],
       [0.61087074, 0.38912926],
       [0.28414073, 0.71585927],
       [0.29943679, 0.70056321],
       [0.50634641, 0.49365359],
       [0.7552847 , 0.2447153 ],
       [0.40453482, 0.59546518],
       [0.73743223, 0.26256777],
       [0.21439711, 0.78560289],
       [0.56310493, 0.43689507],
       [0.39544424, 0.60455576],
       [0.75559726, 0.24440274],
       [0.50110763, 0.49889237],
       [0.73874043, 0.26125957],
       [0.55298784, 0.44701216],
       [0.19310398, 0.80689602],
       [0.39168937, 0.60831063],
       [0.27751475, 0.72248525],
       [0.75465877, 0.24534123],
       [0.75144366, 0.24855634],
       [0.75590955, 0.24409045],
       [0.49772404, 0.50227596],
       [0.67413797, 0.32586203],
       [0.30192695, 0.69807305],
       [0.75302024, 0.24697976],
       [0.17932441, 0.82067559],
       [0.60845511, 0.39154489],
       [0.09280493, 0.90719507],
       [0.73302148, 0.26697852],
       [0.64208684, 0.35791316],
       [0.64169795, 0.35830205],
       [0.29600002, 0.70399998],
       [0.3008583 , 0.6991417 ],
       [0.73202705, 0.26797295],
       [0.21881596, 0.78118404],
       [0.75380354, 0.24619646],
       [0.75340367, 0.24659633],
       [0.90974125, 0.09025875],
       [0.73710452, 0.26289548],
       [0.2325724 , 0.7674276 ],
       [0.70777255, 0.29222745],
       [0.73939295, 0.26060705],
       [0.64716344, 0.35283656],
       [0.11680764, 0.88319236],
       [0.56393718, 0.43606282],
       [0.406166  , 0.593834  ],
       [0.7552847 , 0.2447153 ],
       [0.23415211, 0.76584789],
       [0.25308247, 0.74691753],
       [0.29723426, 0.70276574],
       [0.36543731, 0.63456269],
       [0.73841377, 0.26158623],
       [0.9123835 , 0.0876165 ],
       [0.73003114, 0.26996886],
       [0.28139582, 0.71860418],
       [0.53366197, 0.46633803],
       [0.87738286, 0.12261714],
       [0.30050256, 0.69949744],
       [0.47288443, 0.52711557],
       [0.7464465 , 0.2535535 ],
       [0.28414073, 0.71585927],
       [0.80468048, 0.19531952],
       [0.86529144, 0.13470856],
       [0.75245951, 0.24754049],
       [0.7527745 , 0.2472255 ],
       [0.75403176, 0.24596824],
       [0.12887064, 0.87112936],
       [0.75245951, 0.24754049],
       [0.27480928, 0.72519072],
       [0.75270546, 0.24729454],
       [0.80308014, 0.19691986],
       [0.40535014, 0.59464986],
       [0.28414073, 0.71585927],
       [0.29900425, 0.70099575],
       [0.29424034, 0.70575966],
       [0.5635211 , 0.4364789 ],
       [0.56643175, 0.43356825],
       [0.75308922, 0.24691078],
       [0.12887064, 0.87112936],
       [0.23944262, 0.76055738],
       [0.86548852, 0.13451148],
       [0.91265361, 0.08734639],
       [0.08800759, 0.91199241],
       [0.34705733, 0.65294267],
       [0.61207652, 0.38792348],
       [0.40575801, 0.59424199],
       [0.3970632 , 0.6029368 ],
       [0.21411229, 0.78588771],
       [0.15452766, 0.84547234],
       [0.43678858, 0.56321142],
       [0.73677655, 0.26322345],
       [0.75371785, 0.24628215],
       [0.8761031 , 0.1238969 ],
       [0.19746072, 0.80253928],
       [0.50237647, 0.49762353],
       [0.75403176, 0.24596824],
       [0.70847189, 0.29152811],
       [0.75559726, 0.24440274],
       [0.86489655, 0.13510345],
       [0.30014706, 0.69985294],
       [0.73644831, 0.26355169],
       [0.39503985, 0.60496015],
       [0.75559726, 0.24440274],
       [0.75465877, 0.24534123],
       [0.7060199 , 0.2939801 ],
       [0.73169505, 0.26830495],
       [0.40902552, 0.59097448],
       [0.50634641, 0.49365359],
       [0.70742251, 0.29257749],
       [0.75270546, 0.24729454],
       [0.56268867, 0.43731133]])
In [58]:
predicted_proba.shape
Out[58]:
(140, 2)
In [59]:
# select ONLY the probabilities referring to 1s
predicted_proba[:,1]
Out[59]:
array([0.24691078, 0.39073909, 0.5140425 , 0.2447153 , 0.9160325 ,
       0.69807305, 0.69833226, 0.8848955 , 0.26224033, 0.24596824,
       0.49280785, 0.80280724, 0.93836804, 0.29082975, 0.70719453,
       0.4758953 , 0.49323071, 0.49111648, 0.632992  , 0.93644339,
       0.26355169, 0.2447153 , 0.52542844, 0.52711557, 0.77973465,
       0.26191315, 0.48815488, 0.12316421, 0.76554437, 0.2447153 ,
       0.38912926, 0.71585927, 0.70056321, 0.49365359, 0.2447153 ,
       0.59546518, 0.26256777, 0.78560289, 0.43689507, 0.60455576,
       0.24440274, 0.49889237, 0.26125957, 0.44701216, 0.80689602,
       0.60831063, 0.72248525, 0.24534123, 0.24855634, 0.24409045,
       0.50227596, 0.32586203, 0.69807305, 0.24697976, 0.82067559,
       0.39154489, 0.90719507, 0.26697852, 0.35791316, 0.35830205,
       0.70399998, 0.6991417 , 0.26797295, 0.78118404, 0.24619646,
       0.24659633, 0.09025875, 0.26289548, 0.7674276 , 0.29222745,
       0.26060705, 0.35283656, 0.88319236, 0.43606282, 0.593834  ,
       0.2447153 , 0.76584789, 0.74691753, 0.70276574, 0.63456269,
       0.26158623, 0.0876165 , 0.26996886, 0.71860418, 0.46633803,
       0.12261714, 0.69949744, 0.52711557, 0.2535535 , 0.71585927,
       0.19531952, 0.13470856, 0.24754049, 0.2472255 , 0.24596824,
       0.87112936, 0.24754049, 0.72519072, 0.24729454, 0.19691986,
       0.59464986, 0.71585927, 0.70099575, 0.70575966, 0.4364789 ,
       0.43356825, 0.24691078, 0.87112936, 0.76055738, 0.13451148,
       0.08734639, 0.91199241, 0.65294267, 0.38792348, 0.59424199,
       0.6029368 , 0.78588771, 0.84547234, 0.56321142, 0.26322345,
       0.24628215, 0.1238969 , 0.80253928, 0.49762353, 0.24596824,
       0.29152811, 0.24440274, 0.13510345, 0.69985294, 0.26355169,
       0.60496015, 0.24440274, 0.24534123, 0.2939801 , 0.26830495,
       0.59097448, 0.49365359, 0.29257749, 0.24729454, 0.43731133])
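These probabilities are what reg.predict() thresholds internally: an observation is classified as 1 whenever its second-column probability exceeds 0.5. A quick sketch to verify that:

# manual classification from the probabilities of class 1
manual_preds = (predicted_proba[:, 1] > 0.5).astype(int)
# expected to return True
np.array_equal(manual_preds, reg.predict(x_test))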

Save the model

In [60]:
# import the relevant module
import pickle
In [61]:
# pickle the model file
with open('model', 'wb') as file:
    pickle.dump(reg, file)
In [62]:
# pickle the scaler file
with open('scaler','wb') as file:
    pickle.dump(absenteeism_scaler, file)
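To use the model later (for instance in a separate deployment module), both files can be read back with pickle.load; a minimal sketch (loaded_reg and loaded_scaler are illustrative names):

# read the saved objects back into memory
with open('model', 'rb') as model_file:
    loaded_reg = pickle.load(model_file)
with open('scaler', 'rb') as scaler_file:
    loaded_scaler = pickle.load(scaler_file)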
In [ ]: