Uploaded by vc713186

assignment

advertisement
assignment
July 22, 2023
[1]: import pandas as pd
import numpy as np
from sklearn.datasets import load_iris
import warnings
# NOTE(review): blanket suppression hides real issues (e.g. sklearn's
# DataConversionWarning from the (n, 1) targets below); consider filtering
# specific warning categories instead of all of them.
warnings.filterwarnings("ignore")
[2]: li = load_iris()
feature = pd.DataFrame(li.data, columns = li.feature_names)
label = pd.DataFrame(li.target, columns = ['Target'])
feature.shape
label
[2]:
0
1
2
3
4
..
145
146
147
148
149
Target
0
0
0
0
0
…
2
2
2
2
2
[150 rows x 1 columns]
[3]: from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(feature,label, test_size=0.
↪3, random_state=15)
[4]: print(x_train.shape,x_test.shape,y_train.shape,y_test.shape)
(105, 4) (45, 4) (105, 1) (45, 1)
[5]: from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.svm import SVC
1
[6]: lg = LogisticRegression()
tc = DecisionTreeClassifier()
svc = SVC()
[7]: lg.fit(x_train,y_train)
tc.fit(x_train,y_train)
svc.fit(x_train,y_train)
[7]: SVC()
[8]: # Predictions on the held-out test split, one array per fitted model.
test_predict_lg = lg.predict(x_test)
test_predict_tc = tc.predict(x_test)
test_predict_svc = svc.predict(x_test)
[9]: train_predict_lg = lg.predict(x_train)
train_predict_tc = tc.predict(x_train)
train_predict_lg = svc.predict(x_train)
[10]: from sklearn.metrics import accuracy_score
print("---For Accuracy on test data---")
print("Linear regression = ", accuracy_score(y_test,test_predict_lg))
print("Tree Classifier = ", accuracy_score(y_test,test_predict_tc))
print("SVC = ", accuracy_score(y_test,test_predict_tc))
---For Accuracy on test data---
Linear regression = 1.0
Tree Classifier = 0.9777777777777777
SVC = 0.9777777777777777
[11]: print("---For Accuracy on train data---")
print("Linear regression = ", accuracy_score(y_train,train_predict_lg))
print("Tree Classifier = ", accuracy_score(y_train,train_predict_tc))
print("SVC = ", accuracy_score(y_train,train_predict_tc))
---For Accuracy on train data---
Linear regression = 0.9714285714285714
Tree Classifier = 1.0
SVC = 1.0
[12]: lg.predict([[5,1.5,2.5,1.2]])
[12]: array([1])
[ ]:
2
Download