- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labeled points from a text file via the project-local ``utilities``
helper, visualizes the two classes, fits an SVC, and prints
classification reports for the training and test splits.
(Reconstructed from a forum paste corrupted with line-noise.)
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data. utilities.load_data is assumed to return the feature
# matrix X and the label vector y -- TODO confirm against utilities.py.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled black squares for class 0,
# hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# sklearn.model_selection is the supported replacement with the same
# train_test_split signature.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the original recipe.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|