import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')
###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display all figures
plt.show()