- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
# SVM classification example (Python Machine Learning Cookbook, Chapter 3).
# Loads 2-D labelled points from a text file, plots the two classes,
# trains an RBF-kernel SVC on a 75/25 train/test split, and prints
# classification reports for both splits.

import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data.
# NOTE(review): hard-coded absolute Windows path — adjust for your machine.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black',
            edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None',
            edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# NOTE(review): the garbled tail of the paste likely contained plt.show();
# without it the figures never display when run as a script.
plt.show()