import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

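# Hold out 25% of the samples for testing; fixing random_state makes the split
# (and therefore the reported scores) reproducible across runs.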
X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
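# The two commented-out dicts above switch the SVM to a linear or a degree-3
# polynomial kernel; only the RBF kernel is active in this run.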
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"
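One practical note: the script imports a utilities module (load_data and plot_classifier) that ships with the book's code repository and is not reproduced in this post. If you don't have that file, the rough sketch below should be enough to run the example. It assumes data_multivar.txt is comma-separated with the class label in the last column, so treat it as a stand-in rather than the book's actual implementation.

# utilities.py -- minimal stand-in, NOT the book's original file
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumed format: comma-separated rows, class label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Build a mesh that covers the data range with a small margin
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    x_vals, y_vals = np.meshgrid(np.arange(x_min, x_max, 0.01),
                                 np.arange(y_min, y_max, 0.01))

    # Colour every mesh point by its predicted class, then overlay the samples
    mesh_output = classifier.predict(np.c_[x_vals.ravel(), y_vals.ravel()])
    mesh_output = mesh_output.reshape(x_vals.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(x_vals, y_vals, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(x_vals.min(), x_vals.max())
    plt.ylim(y_vals.min(), y_vals.max())
    plt.show()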
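Also worth flagging for anyone on a current environment: the listing is Python 2 code against an old scikit-learn, so the print statements need parentheses under Python 3, and the sklearn.cross_validation module no longer exists (it was removed in scikit-learn 0.20 in favour of sklearn.model_selection). The logic stays the same; only the import path and the print calls need adapting, roughly like this:

from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# ... fit the SVC exactly as above, then:
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")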