- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt
# NOTE: sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load the two-class sample data: X holds one feature vector per row,
# y the corresponding class labels (0 or 1).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the raw input data: filled black squares for class 0,
# hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both splits; reporting on the
# training set as well exposes over/under-fitting at a glance.

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")