import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
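
Both load_data and the plot_classifier calls used further down come from the book's utilities module (in the same Chapter03 folder). If you don't have that module handy, a minimal stand-in for load_data, assuming data_multivar.txt holds comma-separated rows with the features first and the class label in the last column, would be something like:

# Hypothetical stand-in for utilities.load_data (not the book's exact code):
# assumes comma-separated rows, feature columns first, class label last.
def load_data(input_file):
    X, y = [], []
    with open(input_file, 'r') as f:
        for line in f:
            values = [float(v) for v in line.strip().split(',')]
            X.append(values[:-1])   # feature columns
            y.append(values[-1])    # class label
    return np.array(X), np.array(y)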
###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
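
The two commented-out params lines show the other kernels the chapter tries (linear and degree-3 polynomial). As a quick check, not part of the original script, you could fit each kernel on the same split and compare test accuracy:

# Illustrative kernel comparison on the same train/test split (not from the book's script).
from sklearn.metrics import accuracy_score

for kernel_params in [{'kernel': 'linear'}, {'kernel': 'poly', 'degree': 3}, {'kernel': 'rbf'}]:
    clf = SVC(**kernel_params)
    clf.fit(X_train, y_train)
    acc = accuracy_score(y_test, clf.predict(X_test))
    print(kernel_params['kernel'] + ': ' + str(round(acc, 3)))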

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"

# Display the plots
plt.show()
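
Note that the listing targets Python 2 and an old scikit-learn: sklearn.cross_validation was removed in scikit-learn 0.20 (its contents moved to sklearn.model_selection), and the bare print statements are Python 2 syntax. A sketch of the lines that change on a current setup (Python 3, scikit-learn >= 0.20), with everything else staying the same:

# Modern equivalents of the deprecated pieces (Python 3, scikit-learn >= 0.20).
from sklearn.model_selection import train_test_split

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")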