import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
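The script imports a `utilities` module that ships with the book's repository. For anyone running the snippet without that file, below is a minimal sketch of what the two helpers are assumed to do (the repo's own `plot_classifier` may differ in details such as mesh step size and colouring); the file format assumption is comma-separated feature values with the class label in the last column.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumed format: each line is comma-separated features followed by the label.
    X, y = [], []
    with open(input_file, 'r') as f:
        for line in f:
            values = [float(v) for v in line.strip().split(',')]
            X.append(values[:-1])
            y.append(values[-1])
    return np.array(X), np.array(y)

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Evaluate the classifier on a mesh covering the data range and
    # draw the resulting decision regions with the points on top.
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())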
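The script switches kernels by hand via the commented-out `params` dictionaries. As an illustrative extension (not part of the original script), the three kernels can be compared on the same split by reusing `X_train`, `X_test`, `y_train`, `y_test` from above:

from sklearn.svm import SVC

# Fit each kernel mentioned in the script and report test accuracy.
for params in [{'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}]:
    clf = SVC(**params)
    clf.fit(X_train, y_train)
    print(params, 'test accuracy:', clf.score(X_test, y_test))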