- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train an SVM classifier on 2-D multivariate data and report performance.

Loads a two-class dataset via the project-local ``utilities`` helper,
visualizes the raw points, fits an ``SVC`` (RBF kernel by default) on a
75/25 train/test split, plots the learned decision boundary on both
splits, and prints classification reports for each.

Reconstructed from a forum-corrupted paste: stray line-noise tokens
removed, Python 2 ``print`` statements ported to Python 3, and the
removed ``sklearn.cross_validation`` module replaced by its modern
equivalent ``sklearn.model_selection``.
"""

import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data
# NOTE(review): hard-coded absolute Windows path — adjust for your machine.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Kernel choice: uncomment one of the alternatives to experiment.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")