- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
# Third-party and project imports (forum-paste artifacts stripped).
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load the labeled 2-D input data.
# NOTE(review): hard-coded absolute Windows path — adjust for your machine,
# or make it a command-line argument.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
###############################################
# Separate the points into two classes based on the label 'y'
# (assumes labels are 0/1 — TODO confirm against data_multivar.txt).
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled black squares,
# class 1 as hollow squares, so the two classes are distinguishable.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')
- ^3 |+ E: a+ P) B5 T# Train test split and SVM training, ?; W* u) u! d
from sklearn import cross_validation
- }8 f8 X2 c- nfrom sklearn.svm import SVC [# t& S* y/ T4 X& j1 }; `
# V; {1 T9 k4 {7 |9 |8 N
X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)
/ v6 ^8 z* o1 ~9 r* B. g; ^
! y c8 U! H6 t N#params = {'kernel': 'linear'}
* R4 \6 A; u, f6 k#params = {'kernel': 'poly', 'degree': 3}
. I0 {3 J6 ]9 A, Q/ c$ Qparams = {'kernel': 'rbf'}0 ]" A2 N0 S+ n( k+ V
classifier = SVC(**params)
$ o5 a2 X1 ^# f; o Jclassifier.fit(X_train, y_train)' n, A% N$ X, T
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')5 W9 F2 g7 B9 g# F% b
/ ?, Q( N/ p0 ly_test_pred = classifier.predict(X_test)
$ i {6 f6 o1 H. G; R3 iutilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
, }1 U0 M1 ]5 ]9 }& j9 T, v6 a0 I
###############################################
# Evaluate classifier performance.
# Original used Python 2 print statements; single-argument print(...) calls
# below behave identically under both Python 2 and Python 3.

from sklearn.metrics import classification_report

# One display name per distinct class label, e.g. 'Class-0', 'Class-1'.
# sorted() guarantees the names line up with classification_report's
# sorted-label row order regardless of set iteration order.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]

# Report on the training data (optimistic, in-sample estimate).
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

# Report on the held-out test data (generalization estimate).
print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")