- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
# SVM classification demo (Python Machine Learning Cookbook, Chapter 3).
# Loads a 2-D, two-class dataset, visualizes it, trains an SVM, and prints
# classification reports for the training and test splits.
#
# NOTE(review): this block was reconstructed from a forum paste corrupted by
# anti-scrape noise; logic, strings, and parameters follow the recoverable code.

import numpy as np
import matplotlib.pyplot as plt
# sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection is the supported location of train_test_split.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data: utilities.load_data returns the feature matrix X and labels y.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: solid squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels from the book; uncomment one to compare decision boundaries.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")