import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')
###############################################
# Train/test split and SVM training
# Note: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
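utilities.plot_classifier also comes from the book's helper module. A minimal sketch of the usual approach (assumed here, not the exact helper) is to predict on a dense mesh grid and draw the resulting decision regions behind the data points:

# Hypothetical sketch of utilities.plot_classifier (not the book's exact helper)
def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Plotting range with a small margin around the data
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0

    # Evaluate the classifier on a dense grid to obtain the decision regions
    step = 0.01
    xx, yy = np.meshgrid(np.arange(x_min, x_max, step), np.arange(y_min, y_max, step))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black', linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())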
###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
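Since the recipe keeps the linear and polynomial kernels commented out, a quick way to compare all three on the same split is a small loop over the parameter dictionaries. This is an optional addition using sklearn.metrics.accuracy_score, not part of the original recipe:

# Optional: compare the three kernel choices on the same train/test split
from sklearn.metrics import accuracy_score

for params in [{'kernel': 'linear'}, {'kernel': 'poly', 'degree': 3}, {'kernel': 'rbf'}]:
    clf = SVC(**params)
    clf.fit(X_train, y_train)
    acc = accuracy_score(y_test, clf.predict(X_test))
    print(params, '-> test accuracy:', round(acc, 3))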