- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labelled points from a text file via the project-local `utilities`
module, plots the two classes, fits an RBF-kernel SVC on a 75/25
train/test split, plots the decision boundaries, and prints
classification reports for both splits.
"""
import numpy as np
import matplotlib.pyplot as plt
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data (path kept as in the original source)
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both splits
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|