- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
![Rank: 6](static/image/common/star_level3.gif) ![Rank: 6](static/image/common/star_level2.gif)
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt

# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data (two-column feature matrix X and label vector y).
# NOTE(review): hard-coded Windows path — adjust to your local copy of the
# cookbook's Chapter03/data_multivar.txt.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled black squares, class 1 as hollow
# squares (facecolors='None' keeps only the edge).
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with:
# params = {'kernel': 'linear'}
# params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both splits.
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Render all figures created above (no-op in interactive backends,
# required when running as a plain script).
plt.show()
|
|