import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (sklearn.cross_validation was removed in newer scikit-learn; use model_selection instead)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
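The script imports utilities, the helper module that ships with the Python Machine Learning Cookbook companion code. If that file is not on your path, a minimal stand-in might look like the sketch below; it assumes load_data reads comma-separated rows with the class label in the last column, and that plot_classifier draws the classifier's decision regions over a mesh grid with the data points on top.

# utilities.py -- minimal stand-in sketch (assumed behaviour, not the book's exact code)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Each row: feature values followed by the class label, comma-separated
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    # Build a mesh covering the data range and evaluate the classifier on it
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())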
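The two commented-out params dictionaries are the other kernels the same script can be run with (linear and degree-3 polynomial). A small sketch for comparing all three on the same split, reusing X_train, X_test, y_train and y_test defined above:

from sklearn.svm import SVC
from sklearn.metrics import accuracy_score

# Re-fit the SVM with each kernel from the script and report test accuracy
for params in ({'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}):
    clf = SVC(**params)
    clf.fit(X_train, y_train)
    acc = accuracy_score(y_test, clf.predict(X_test))
    print(params, '-> test accuracy: {:.3f}'.format(acc))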