import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

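plot_classifier also lives in the cookbook's helper module. A rough sketch of how such a decision-boundary plot can be produced by predicting over a mesh grid (the function body, mesh step, and colour maps here are assumptions for illustration, not the book's exact code):

def plot_classifier(classifier, X, y, title):
    # Evaluate the trained classifier on a dense grid covering the data
    # range, then colour the grid by predicted class to show the boundary.
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())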
###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"

# Display all the figures
plt.show()
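Note that the listing is the book's original Python 2 code: the print statements and the sklearn.cross_validation module will not run on Python 3 with scikit-learn 0.20 or newer, where that module was removed in favour of sklearn.model_selection. The equivalent calls with the current API (only the import path and print syntax change; the rest of the script stays the same):

from sklearn.model_selection import train_test_split

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")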