import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')
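# --- Aside: the script depends on the book's 'utilities' module. ---
# A minimal, hypothetical stand-in for utilities.load_data is sketched
# below, assuming data_multivar.txt is a comma-separated file whose last
# column holds the class label; the repository's real helper may differ.
def load_data_sketch(input_file):
    data = np.loadtxt(input_file, delimiter=',')   # rows: feature_1, feature_2, ..., label
    X, y = data[:, :-1], data[:, -1]               # split features from labels
    return X, y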
###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
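# --- Aside: utilities.plot_classifier also comes from the book's code. ---
# A rough, hypothetical sketch of that kind of decision-boundary plot is
# given below: evaluate the classifier on a dense mesh over the feature
# range, draw the predicted regions, then overlay the data points.
def plot_classifier_sketch(classifier, X, y, title):
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    # Predict a class for every grid point and reshape back to the grid
    Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.pcolormesh(xx, yy, Z, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.title(title)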
###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
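The commented-out `params` lines show the two other kernels covered in the chapter. As a quick way to compare them without editing the script by hand, here is a small sketch that reuses the same train/test split, loops over the three parameter sets, and prints a test-set report for each (it assumes the variables defined in the script above):

# Sketch: fit and report each kernel in turn
for params in [{'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}]:
    clf = SVC(**params)
    clf.fit(X_train, y_train)
    print("\nKernel:", params['kernel'])
    print(classification_report(y_test, clf.predict(X_test), target_names=target_names))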