- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 558
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
# SVM classification demo (Python Machine Learning Cookbook, Chapter 3):
# load 2-D labeled points, visualize the two classes, train an SVC,
# and print classification reports for the train and test splits.
#
# NOTE(review): the original paste was corrupted with forum artifacts;
# this is the reconstructed script, modernized for Python 3 / current sklearn.

import numpy as np
import matplotlib.pyplot as plt

import utilities  # project-local helper: load_data() and plot_classifier()

# Load input data (feature matrix X and 0/1 class labels y).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
# FIX: facecolors must be the string 'none' (lowercase) for an unfilled marker.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black',
            edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none',
            edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# FIX: sklearn.cross_validation was removed in scikit-learn 0.20;
# the same train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Kernel choice — uncomment one alternative to compare decision boundaries.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]

# FIX: Python 2 print statements converted to Python 3 print() calls.
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred,
                            target_names=target_names))
print("#" * 30 + "\n")
|