import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"
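Note that the script above is written for Python 2 and an older scikit-learn release: sklearn.cross_validation was removed in scikit-learn 0.20, and the print statements are Python 2 syntax. Below is a minimal self-contained sketch of the same split/train/report workflow on Python 3 with a current scikit-learn; it substitutes synthetic data from make_classification for the book's data_multivar.txt and utilities module, so it is an illustration rather than a drop-in replacement.

# Python 3 / scikit-learn >= 0.20 sketch of the same workflow,
# using synthetic data in place of the book's data_multivar.txt.
import numpy as np
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split   # replaces sklearn.cross_validation
from sklearn.svm import SVC
from sklearn.metrics import classification_report

# Synthetic two-class, two-feature data standing in for utilities.load_data(input_file)
X, y = make_classification(n_samples=300, n_features=2, n_redundant=0,
                           n_informative=2, n_clusters_per_class=1,
                           random_state=5)

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

classifier = SVC(kernel='rbf')
classifier.fit(X_train, y_train)
y_test_pred = classifier.predict(X_test)

target_names = ['Class-' + str(int(i)) for i in set(y)]

# print() is a function in Python 3
print("\n" + "#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")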
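The script also imports the book's own utilities module, which is not included in the post. For readers who do not have the book's repository, here is a rough sketch of what compatible load_data and plot_classifier helpers could look like, assuming a comma-separated data file with the class label in the last column and a mesh-grid decision-boundary plot; this is an approximation, not the book's exact code.

# Approximate stand-ins for the book's utilities.load_data / plot_classifier
# (assumptions: comma-separated file, label in the last column, 2-D features).
import numpy as np
import matplotlib.pyplot as plt


def load_data(input_file):
    """Read comma-separated rows; the last column is the class label."""
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y


def plot_classifier(classifier, X, y, title=''):
    """Plot the decision regions of a fitted classifier on 2-D data."""
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0

    # Evaluate the classifier on a dense grid to shade the decision regions
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.show()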