- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labelled points from a text file via the project-local `utilities`
module, visualises the two classes, fits an RBF-kernel SVC on a 75/25
train/test split, plots the decision boundaries, and prints
classification reports for both splits.
"""

import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (two feature columns + a 0/1 class label per row;
# parsing is delegated to utilities.load_data).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares = class 0, hollow squares = class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Try alternative kernels by swapping which params dict is active.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display all three figures (input data + the two decision-boundary plots).
plt.show()
|
|