- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
![Rank: 6](static/image/common/star_level3.gif) ![Rank: 6](static/image/common/star_level2.gif)
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labeled points from a text file via the project-local ``utilities``
module, plots the two classes, fits an ``SVC`` (RBF kernel by default),
visualizes the decision boundary on train/test splits, and prints
classification reports.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data.
# NOTE(review): hard-coded absolute Windows path — adjust for your machine,
# or pass the path in via argv/config.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black',
            edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None',
            edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
# sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

# Fixed random_state keeps the 75/25 split reproducible across runs.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# One human-readable label per distinct class value in y.
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Without this, figures never appear when running as a plain script.
plt.show()
|
|