- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""Build an SVM classifier on 2-D multivariate data and report its performance.

Loads a two-class dataset from disk, visualizes the raw points, trains an
RBF-kernel SVC on a 75/25 train/test split, plots the decision boundaries
via the project-local `utilities` helpers, and prints classification reports
for both splits.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities  # project-local: provides load_data() and plot_classifier()

# Load input data (whitespace/comma-separated columns: features..., label)
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection.train_test_split is the drop-in replacement.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# One display label per distinct class value in y
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|