import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data (features in X, class labels in y)
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
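The script above relies on the utilities module that ships with the book's code (the Python-Machine-Learning-Cookbook repo referenced in the file path). If you don't have that file, a minimal stand-in along the following lines should work; it assumes data_multivar.txt is comma-separated, with the feature columns first and the class label in the last column. Save it as utilities.py next to the script.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    """Load a comma-separated text file; every column except the last
    is a feature, the last column is the class label (assumed format)."""
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    """Plot the decision regions of a fitted 2-D classifier together
    with the data points."""
    # Plotting range with a small margin around the data
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0

    # Evaluate the classifier on a dense grid covering that range
    step = 0.01
    xx, yy = np.meshgrid(np.arange(x_min, x_max, step),
                         np.arange(y_min, y_max, step))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Draw the predicted regions and overlay the actual points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())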