import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split  # replaces the removed sklearn.cross_validation module
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
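The script depends on the book's `utilities` module for `load_data` and `plot_classifier`. If that file from the Python-Machine-Learning-Cookbook repository is not at hand, saving something like the minimal sketch below as `utilities.py` should let the script run; it assumes `data_multivar.txt` holds comma-separated rows with the two features first and the class label last, which is an assumption about the file layout, not something the post guarantees.

# utilities.py -- minimal stand-ins for the book's helpers (a sketch, not the original code)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes each line is "x1,x2,label" separated by commas
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title):
    # Predict on a grid covering the data and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.contourf(xx, yy, mesh_output, cmap=plt.cm.gray, alpha=0.3)
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired, edgecolors='black')
    plt.title(title)
    plt.show()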
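The commented-out `params` lines suggest the linear and degree-3 polynomial kernels as alternatives to RBF. A quick way to compare all three on the same split is sketched below; the loop and the `accuracy_score` metric are additions for illustration, not part of the original script, and it reuses `X_train`, `X_test`, `y_train`, `y_test` from above.

from sklearn.metrics import accuracy_score

# Train one SVC per kernel configuration on the same split and report test accuracy
for params in [{'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}]:
    clf = SVC(**params)
    clf.fit(X_train, y_train)
    acc = accuracy_score(y_test, clf.predict(X_test))
    print(params, '-> test accuracy:', round(acc, 3))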