
logistics_regression+softmax_regression.ipynb

Logistic regression using the Iris dataset

import numpy as np
import pandas as pd
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score, precision_score, recall_score, confusion_matrix

# Load the Iris dataset
iris = load_iris()

# Build a binary target from petal width, then split into training and testing sets
X = iris["data"][:, 3:]  # petal width
y = (iris["target"] == 2).astype(np.int64)  # 1 if Iris virginica, else 0
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0)
print(y_train)

[0 1 1 1 1 0 1 0 0 1 1 1 1 0 1 0 0 1 0 0 0 0 1 0 0 1 0 0 0 0 0 1 0 0 0 1 0
0 1 1 1 1 0 0 1 1 0 1 0 1 1 0 0 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 1 0 1 0 0
1 0 1 0 0 1 0 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 1 0 1 0]
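
As a quick sanity check (a sketch, not part of the original notebook), the class counts below confirm that the binary target is imbalanced: only 50 of the 150 samples are Iris virginica, so roughly one third of each split belongs to the positive class.

# Sketch: inspect the class balance of the binary target in each split
print(np.bincount(y_train))  # counts of class 0 and class 1 in the training set
print(np.bincount(y_test))   # counts of class 0 and class 1 in the test set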

# Create a logistic regression model
model = LogisticRegression()

# Train the model
model.fit(X_train, y_train)

LogisticRegression()
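
For intuition, the fitted model is just a sigmoid applied to a linear score of petal width. The short sketch below is not part of the original notebook; it assumes the fitted `model` and `X_test` from the cells above and reproduces `model.predict` by hand.

# Sketch: reproduce the model's predictions from its learned coefficients
z = X_test @ model.coef_.T + model.intercept_   # linear score w·x + b
p = 1.0 / (1.0 + np.exp(-z))                    # sigmoid -> P(virginica | petal width)
manual_pred = (p > 0.5).astype(int).ravel()
print(np.array_equal(manual_pred, model.predict(X_test)))  # expected: True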

# Make predictions on the test set
y_pred = model.predict(X_test)
print(y_pred)

[1 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 1 0 0 1 1 0 0
0 0 0 1 0 1 0 0]

# Calculate the accuracy, precision, recall, and confusion matrix
accuracy = accuracy_score(y_test, y_pred)
precision = precision_score(y_test, y_pred)
recall = recall_score(y_test, y_pred)
cm = confusion_matrix(y_test, y_pred)  # store in `cm` so the imported function is not shadowed

# Print the results
print("Accuracy:", accuracy)
print("Precision:", precision)
print("Recall:", recall)
print("Confusion matrix:\n", cm)

Accuracy: 0.9777777777777777
Precision: 1.0
Recall: 0.9090909090909091
Confusion matrix:
[[34 0]
[ 1 10]]
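
The reported numbers can be read straight off the confusion matrix: with 34 true negatives, 0 false positives, 1 false negative and 10 true positives, precision is 10/10 = 1.0 and recall is 10/11 ≈ 0.909. A small sketch (assuming the `cm` computed above):

# Sketch: derive accuracy, precision and recall directly from the confusion matrix
tn, fp, fn, tp = cm.ravel()
print("Accuracy :", (tp + tn) / (tp + tn + fp + fn))  # 44/45 ≈ 0.978
print("Precision:", tp / (tp + fp))                   # 10/10 = 1.0
print("Recall   :", tp / (tp + fn))                   # 10/11 ≈ 0.909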

Softmax regression on the Iris dataset
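
A quick note on what softmax regression does before the code: instead of a single sigmoid, the model computes one linear score per class and passes the scores through the softmax function, which turns them into probabilities that sum to 1. A minimal sketch of the function itself (illustrative only; scikit-learn handles this internally below):

import numpy as np

# Sketch: softmax turns a vector of per-class scores into probabilities
def softmax(z):
    e = np.exp(z - np.max(z))  # subtract the max for numerical stability
    return e / e.sum()

print(softmax(np.array([2.0, 1.0, 0.1])))  # ~[0.66, 0.24, 0.10]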

import numpy as np
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score

# Load the iris dataset
iris = load_iris()

print(iris)

{'data': array([[5.1, 3.5, 1.4, 0.2],
       [4.9, 3. , 1.4, 0.2],
       [4.7, 3.2, 1.3, 0.2],
       [4.6, 3.1, 1.5, 0.2],
       [5. , 3.6, 1.4, 0.2],
       ...
(output truncated)

# Split the data into training and testing sets
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(iris.data, iris.target, test_size=0.25)

# Create a softmax regression classifier
clf = LogisticRegression(multi_class='multinomial', solver='lbfgs')

# Train the classifier
clf.fit(X_train, y_train)

LogisticRegression(multi_class='multinomial')
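
As a side note (a sketch assuming the fitted `clf` and `X_test` above), `predict_proba` exposes the per-class probabilities the softmax produces; each row sums to 1, and `predict` simply returns the class with the largest probability.

# Sketch: per-class probabilities for a few test samples
proba = clf.predict_proba(X_test[:3])
print(proba)
print(proba.sum(axis=1))  # each row sums to 1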

# Make predictions on the train set
y_pred_train = clf.predict(X_train)

# Make predictions on the test set
y_pred = clf.predict(X_test)

# Calculate the accuracy on the train set
accuracy = accuracy_score(y_train, y_pred_train)
print(accuracy)

0.9910714285714286

# Calculate the accuracy on the test set
accuracy = accuracy_score(y_test, y_pred)

# Print the accuracy for the test set
print("Accuracy:", accuracy)

Accuracy: 0.9473684210526315

from sklearn.metrics import confusion_matrix

# Compute the confusion matrix for the test set
cm = confusion_matrix(y_test, y_pred)

# Print the confusion matrix
print(cm)

[[13 0 0]
[ 0 12 2]
[ 0 0 11]]
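
For a per-class breakdown of the same test predictions, scikit-learn's classification_report is handy (a sketch using the `y_test` and `y_pred` above; the exact numbers depend on the random split):

from sklearn.metrics import classification_report

# Sketch: per-class precision, recall and F1 on the test set
print(classification_report(y_test, y_pred, target_names=iris.target_names))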

from sklearn.metrics import confusion_matrix

# Compute the confusion matrix for the train set
cm = confusion_matrix(y_train, y_pred_train)

# Print the confusion matrix for the train set
print(cm)

[[37 0 0]
[ 0 35 1]
[ 0 0 39]]

# Create a new iris flower: [sepal length, sepal width, petal length, petal width] in cm
new_flower = [5.0, 3.6, 1.3, 0.25]

# Predict the species of the new flower
prediction = clf.predict([new_flower])

# Print the prediction
print("Predicted species:", iris.target_names[prediction])

Predicted species: ['setosa']
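
The class probabilities behind this prediction can be inspected as well (a sketch; the exact values depend on the random split, but setosa should dominate for a flower with such small petals):

# Sketch: class probabilities for the new flower
proba = clf.predict_proba([new_flower])
print(dict(zip(iris.target_names, proba[0].round(3))))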
