- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
# SVM classification example (Python Machine Learning Cookbook, Ch. 3):
# load 2-D point data, visualize the two classes, train an RBF-kernel SVM,
# and report classification metrics on the train and test splits.
import numpy as np
import matplotlib.pyplot as plt

import utilities  # project-local helper: load_data() and plot_classifier()

# Load input data: utilities.load_data is expected to return (X, y) where
# X holds 2-D feature vectors and y holds binary labels (0/1) — confirmed
# by the class_0/class_1 split and the [:, 0]/[:, 1] scatter indexing below.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
# NOTE(review): no plt.show() appears in this chunk — figures may never be
# displayed when run non-interactively; confirm against the original script.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection.train_test_split is the drop-in replacement.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the original:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# One display name per distinct label value, e.g. 'Class-0', 'Class-1'.
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|