- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
# SVM classification demo (Python ML Cookbook, Ch. 3):
# load 2-D labelled points, visualize the two classes, train an SVM,
# and print classification reports for the train and test splits.
import numpy as np
import matplotlib.pyplot as plt
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities  # project-local helpers: load_data(), plot_classifier()

# Load input data: rows of two feature columns plus an integer class label.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: solid squares = class 0, hollow squares = class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Render all figures (required under non-interactive matplotlib backends).
plt.show()
|
|