- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""SVM classification demo (Python Machine Learning Cookbook, Chapter 3).

Loads a 2-D two-class dataset, visualizes it, trains an SVM classifier,
plots the decision boundaries on the train/test splits, and prints
classification reports for both splits.
"""
import numpy as np
import matplotlib.pyplot as plt

# NOTE(review): sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data (expects utilities.load_data to return features X and
# labels y; labels are 0/1 based on the class separation below).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares = class 0, hollow squares = class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")