import numpy as np
import matplotlib.pyplot as plt
import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (sklearn.cross_validation was removed in newer scikit-learn;
#  train_test_split now lives in sklearn.model_selection)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display the scatter plot and the two decision-boundary plots
plt.show()
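The script also relies on a small utilities module (load_data and plot_classifier) that ships with the book's Chapter03 code and is not included in the post. Below is a minimal sketch of what those two helpers could look like, assuming data_multivar.txt is comma-separated with the class label in the last column and that plot_classifier simply shades the predicted regions on a mesh grid; the real utilities.py from the book's repository may differ in detail.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumed format: comma-separated rows, features first, class label last
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title):
    # Build a mesh grid covering the data range (with a small margin)
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))

    # Predict a class for every grid point and reshape back to the grid
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Shade the predicted regions and overlay the actual points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With those two helpers saved next to the script as utilities.py, the code above should run end to end and print a classification report for both the training and test splits.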