import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
# Note: sklearn.cross_validation was removed in newer scikit-learn releases;
# model_selection provides the same train_test_split function.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
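
# --- Not in the original post: a quick, optional way to compare the three
# --- kernel choices listed above instead of editing 'params' by hand.
# --- accuracy_score is a standard sklearn metric; this is only a sketch.
from sklearn.metrics import accuracy_score
for kernel_params in [{'kernel': 'linear'}, {'kernel': 'poly', 'degree': 3}, {'kernel': 'rbf'}]:
    candidate = SVC(**kernel_params).fit(X_train, y_train)
    print(kernel_params, '-> test accuracy:',
          round(accuracy_score(y_test, candidate.predict(X_test)), 3))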

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# target_names must follow the sorted label order used by classification_report
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display the figures created above
plt.show()
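
The script imports a small utilities module that ships with the book's code but is not included in this post. If you don't have it, the sketch below is a minimal stand-in for the two helpers the script calls. It assumes data_multivar.txt holds comma-separated rows with the two features first and the class label last, and the grid-based decision-boundary plot is a generic reconstruction, not the book's exact implementation.

###############################################
# utilities.py -- minimal sketch of the two helpers used above (assumptions noted)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumed format: "x1,x2,label" per line; label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Evaluate the classifier on a dense grid and shade its decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.contourf(xx, yy, mesh_output, cmap=plt.cm.gray, alpha=0.5)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.show()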