import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"
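
The listing above follows the cookbook's original Python 2 and pre-0.18 scikit-learn API, where train_test_split lived in sklearn.cross_validation. On Python 3 with a current scikit-learn, the split, train, and report steps would look roughly like the sketch below (a minimal adaptation, assuming the same data_multivar.txt and the book's utilities module are available; the shortened input path is only illustrative):

# Python 3 / current scikit-learn equivalent of the split-and-train step
# (a minimal sketch; the shortened input path is illustrative).
from sklearn.model_selection import train_test_split   # replaces sklearn.cross_validation
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

X, y = utilities.load_data('data_multivar.txt')

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

classifier = SVC(kernel='rbf')
classifier.fit(X_train, y_train)
y_test_pred = classifier.predict(X_test)

target_names = ['Class-' + str(int(i)) for i in set(y)]
print('\n' + '#' * 30)
print('\nClassification report on test dataset\n')
print(classification_report(y_test, y_test_pred, target_names=target_names))
print('#' * 30 + '\n')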
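
The script also depends on the cookbook's companion utilities module for load_data and plot_classifier, which is not reproduced in this post. A minimal stand-in under stated assumptions (comma-separated rows with the class label in the last column, and 2-D features for the decision-boundary plot) could look like this; the book's actual helpers may differ:

# utilities.py -- minimal stand-in for the cookbook's helper module
# (an illustrative assumption, not the book's original code).
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes comma-separated rows: feature columns followed by the class label.
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Evaluate the trained classifier on a dense grid covering the data
    # and shade the resulting decision regions.
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired, edgecolors='black')
    plt.title(title)
    plt.show()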