- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data: two feature columns X and a binary label vector y,
# parsed by the project-local utilities.load_data helper.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled black squares,
# class 1 as hollow squares, so the two groups are distinguishable.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with (uncomment one at a time):
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# One human-readable label per distinct class value found in y.
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures created above (plot_classifier + input-data scatter).
plt.show()