import numpy as np
import matplotlib.pyplot as plt
import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])
# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')
###############################################
# Train/test split and SVM training
# (sklearn.cross_validation was removed in scikit-learn 0.20; train_test_split now lives in model_selection)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display the figures created above
plt.show()
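The script imports the cookbook's local `utilities` module, which is not included in the post. Below is a minimal stand-in sketch so the listing can run on its own; it assumes `data_multivar.txt` is comma-separated with the class label in the last column, and that `plot_classifier` only needs to shade the decision regions over a mesh grid, which is roughly what the book's helper does.

# utilities.py -- hypothetical stand-in for the cookbook's helper module (not shown in the post)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes a comma-separated file with the class label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title):
    # Evaluate the trained classifier on a mesh grid and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01), np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())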
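Instead of hard-coding one of the three `params` dictionaries, the kernel and its hyperparameters can also be chosen by cross-validated grid search. The sketch below uses scikit-learn's `GridSearchCV` with illustrative parameter values (not values from the book) and reuses `X_train`/`X_test` from the script above.

# Optional: choose the kernel and its hyperparameters by cross-validated grid search
from sklearn.model_selection import GridSearchCV
from sklearn.svm import SVC

param_grid = [
    {'kernel': ['linear'], 'C': [0.1, 1, 10]},
    {'kernel': ['poly'], 'degree': [2, 3], 'C': [0.1, 1, 10]},
    {'kernel': ['rbf'], 'gamma': [0.01, 0.1, 1], 'C': [0.1, 1, 10]},
]
grid = GridSearchCV(SVC(), param_grid, cv=5)
grid.fit(X_train, y_train)
print("Best parameters:", grid.best_params_)
print("Test set accuracy:", grid.score(X_test, y_test))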