import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (sklearn.cross_validation was removed in scikit-learn 0.20; use model_selection instead)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
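
The script above depends on a local utilities module from the book's repository that is not included in the post. Below is a minimal sketch of the two helpers it calls, written as stand-ins rather than the book's exact code, and assuming data_multivar.txt holds comma-separated rows with the two features first and the class label in the last column:

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes each line looks like "x1,x2,label"
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Evaluate the classifier on a dense grid to shade its decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())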
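
To compare the three kernels that the script leaves in the commented-out params lines, a small follow-up loop (not part of the original script) can fit and score each one on the same split:

from sklearn.svm import SVC
from sklearn.metrics import accuracy_score

# Try each kernel from the commented-out params lines on the same train/test split
for params in ({'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}):
    clf = SVC(**params).fit(X_train, y_train)
    print(params, '-> test accuracy: %.3f' % accuracy_score(y_test, clf.predict(X_test)))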