import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# sklearn.cross_validation was removed in scikit-learn 0.20; use model_selection instead
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance (Python 3 print syntax)
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
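
The script imports a helper module, utilities, that is not included in this post. The sketch below is a guess at minimal versions of utilities.load_data and utilities.plot_classifier: the function names match the calls above, but the file format (comma-separated rows, features first, class label in the last column) and the plotting details are assumptions, not the book's actual helper code.

# utilities.py -- hypothetical minimal helpers matching the calls above
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumed format: comma-separated rows, features first, class label last
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title=''):
    # Shade the classifier's decision regions over a grid covering the data,
    # then overlay the points coloured by their true class
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired, edgecolors='black')
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

To compare kernels, swap in one of the commented-out params dictionaries: the linear kernel draws a straight decision boundary, while 'poly' (degree 3) and 'rbf' can bend the boundary around classes that are not linearly separable, usually at the cost of a longer fit.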