- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 558
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
# -*- coding: utf-8 -*-
"""Nonlinear SVM classification demo.

Loads a 2-D, two-class dataset, plots the raw points, trains an RBF-kernel
SVC on a 75/25 train/test split, plots the learned decision boundary on both
splits (via the project-local ``utilities`` helpers), and prints
classification reports for the training and test sets.

NOTE(review): this file was recovered from a forum paste with interleaved
noise characters; the code below is the reconstructed script. Bare Python-2
``print`` statements were rewritten as single-argument ``print(...)`` calls,
which behave identically on Python 2 and 3.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities  # project-local helpers: load_data(), plot_classifier()

# Load input data (two feature columns + integer class label per row).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
# ``sklearn.cross_validation`` was deprecated in 0.18 and removed in 0.20;
# ``train_test_split`` now lives in ``sklearn.model_selection``.
from sklearn import model_selection
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = model_selection.train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the original exercise.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures; without this, non-interactive backends show nothing.
# (The upstream cookbook script ends the same way.)
plt.show()