import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# train_test_split now lives in sklearn.model_selection
# (the old sklearn.cross_validation module has been removed)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
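For reference, the script depends on the book's utilities module for load_data and plot_classifier. Below is a minimal sketch of what those two helpers might look like, assuming data_multivar.txt is a comma-separated file with the class label in the last column; the module shipped with the repository may be implemented differently.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Read comma-separated rows; the last column is the class label
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title):
    # Build a fine mesh over the 2-D feature space and colour each
    # mesh point by the class the trained classifier predicts for it
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    # Overlay the actual data points, coloured by their true labels
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With helpers along these lines the script runs standalone: plot_classifier simply colours a mesh by the predicted class, which makes the decision boundary of whichever kernel you uncommented (linear, polynomial, or rbf) directly visible on both the training and test plots.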