import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: sklearn.cross_validation was removed in newer scikit-learn versions;
# train_test_split now lives in sklearn.model_selection
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
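The script also depends on a local utilities module that ships with the cookbook repository but is not included in this post. If you only copied the snippet above, a minimal sketch of the two helpers it calls could look like the following. This is an assumption, not the book's exact implementation: the 'x1,x2,label' comma-separated file format, the 0.01 grid step, and the colour maps are my guesses.

# Minimal sketch of the 'utilities' helpers used above (assumed implementation)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes each line of data_multivar.txt is 'x1,x2,label'
    X, y = [], []
    with open(input_file, 'r') as f:
        for line in f:
            values = [float(v) for v in line.split(',')]
            X.append(values[:-1])
            y.append(values[-1])
    return np.array(X), np.array(y)

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Predict over a dense grid covering the data range and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    step = 0.01  # grid resolution (assumed)
    xx, yy = np.meshgrid(np.arange(x_min, x_max, step), np.arange(y_min, y_max, step))
    Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, Z, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With helpers along these lines, the script reproduces the 'Input data', 'Training dataset', and 'Test dataset' figures and prints a classification_report for both splits; swapping the commented-out params dicts lets you compare the linear, polynomial, and RBF kernels on the same data.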