import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: sklearn.cross_validation was removed in newer scikit-learn releases;
# use sklearn.model_selection instead.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Pick one kernel: linear, polynomial (degree 3), or RBF
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")
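
The script relies on the cookbook's `utilities` module for loading the data and drawing decision boundaries. For anyone running it without the book's repo, here is a minimal stand-in sketch, assuming the data file is comma-separated with the class label in the last column and that `plot_classifier` only needs to shade the decision regions over a mesh grid; the repo's actual helpers may differ in details.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes a comma-separated file whose last column is the class label
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title=''):
    # Mesh covering the data range with a small margin
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))

    # Classify every mesh point and shade the resulting regions
    Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.pcolormesh(xx, yy, Z, cmap=plt.cm.gray, shading='auto')

    # Overlay the actual samples
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.title(title)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.show()

Saved next to the main script as utilities.py, this sketch lets the example run end to end.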