- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 557
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load the feature matrix X (2-D points) and label vector y from disk.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled black squares, class 1 as
# hollow squares, so the two classes are distinguishable in grayscale.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was deprecated in 0.18 and removed in
# scikit-learn 0.20; train_test_split now lives in model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# NOTE(review): set(y) is unordered; label names assume the iteration
# order matches classification_report's sorted label order — confirm.
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display the figures created above; without this the script exits
# with nothing shown when run non-interactively.
plt.show()