import numpy as np
import matplotlib.pyplot as plt
import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")
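
# Optional sketch (not part of the original recipe): the commented-out
# 'params' lines above suggest trying the linear and polynomial kernels too.
# One way to compare all three kernels on the same split, using sklearn's
# accuracy_score (an extra import beyond the recipe):
from sklearn.metrics import accuracy_score

for kernel_params in [{'kernel': 'linear'},
                      {'kernel': 'poly', 'degree': 3},
                      {'kernel': 'rbf'}]:
    clf = SVC(**kernel_params)
    clf.fit(X_train, y_train)
    acc = accuracy_score(y_test, clf.predict(X_test))
    print(kernel_params, '-> test accuracy:', round(acc, 3))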
# Display all the plots
plt.show()
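
A note for anyone trying to run this: the script imports a local utilities module that ships with the book's GitHub repository (it provides load_data and plot_classifier). If you don't have that file, the code below is a rough, minimal stand-in, not the book's original; it assumes data_multivar.txt holds comma-separated rows with the two features first and the class label last, and it only draws a basic decision-boundary plot rather than the book's exact styling.

# utilities.py -- a minimal stand-in (a sketch, not the book's original file)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes comma-separated rows: feature1,feature2,label
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title=''):
    # Colour a mesh over the feature range by the predicted class,
    # then overlay the actual data points
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.05),
                         np.arange(y_min, y_max, 0.05))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())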