import numpy as np
) n" X; @& r$ ximport matplotlib.pyplot as plt9 \) B! I( l( U: |& q* Q
1 R) H9 _2 ]; p' b
import utilities " j& M- l/ P V6 x: W7 t* m8 ]8 B
+ m* P% L5 I u
# Load input data
% Q9 _. X! }4 K1 @, s) l* i' h! Minput_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
8 p& `0 }! N3 |1 a9 j* Z; p; tX, y = utilities.load_data(input_file)$ @/ @" A# @" J/ E
; d( f- m+ d, W: M0 c2 Z2 V###############################################
- y$ Q: T- g& i/ d6 o) R' ` j# Separate the data into classes based on 'y'
8 P% v- Z' R. \" p7 [) hclass_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
( X+ }2 `0 E+ A! h8 tclass_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])
% s5 q' I( f# n6 i" @
X3 v/ ?9 }2 z/ |# Plot the input data
" B: B+ m- U7 ]. u7 ^% Yplt.figure()
, j- u( H- z5 ~$ J- ~plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')# F' f$ J: N) ]+ J
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s'); ]6 _8 h9 n3 O8 M
plt.title('Input data')3 Q8 K0 |' K: N) U* J3 Y6 Z
& O. v- _# r$ @0 f###############################################* V5 N. A2 [1 ?: g" c
# Train test split and SVM training+ {. Z" z& X0 f( C" k# O
from sklearn import cross_validation
( O, U1 A, ~3 q1 `/ s! f& Dfrom sklearn.svm import SVC% \( L* {5 _2 T
" Q+ G5 A$ x4 O! F1 KX_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)8 C1 Y9 ?0 k0 F2 [8 v2 x
) s, Q' y4 {. f Z& d* T
#params = {'kernel': 'linear'}" V, r9 A1 C: j2 U; I
#params = {'kernel': 'poly', 'degree': 3}6 p; o7 T9 z; U% I) \+ ?, B' Y
params = {'kernel': 'rbf'}, D$ @1 L; Z0 {# U
classifier = SVC(**params)' f0 N7 `- H U: X! q
classifier.fit(X_train, y_train)
* `4 _4 t6 m* I; {& sutilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')
( R* I6 Q% _3 z, t! V" J" U/ c. {
# G( m; E5 M/ r* X7 A: c0 p% o+ Ey_test_pred = classifier.predict(X_test)0 s1 L G: p r8 L8 ~
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
8 Q) b! V9 H J: b/ F3 Y
N6 l5 `8 _- m: l/ g W: T, V \###############################################+ V* I* @- m: Z! V6 h
# Evaluate classifier performance
! d" Z- g2 a% c
+ V0 w) F; \0 a2 {& hfrom sklearn.metrics import classification_report
: Z+ S4 M# h% i7 d+ D* g. y) r7 ?
, [0 U8 B2 y: Dtarget_names = ['Class-' + str(int(i)) for i in set(y)]+ l5 Q+ @5 O. }' L
print "\n" + "#"*30
) t3 I) q0 [8 q! o! Pprint "\nClassifier performance on training dataset\n"% y4 {5 W; H' m% E- O1 e, r
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)1 b. {: U3 b% M$ Q h) d& ~5 U* m
print "#"*30 + "\n"# I9 D$ X5 |) p6 V7 e7 J
0 T; P- N- l6 L# X4 D G gprint "#"*30+ o4 v8 z# G0 R7 e
print "\nClassification report on test dataset\n"
/ z/ F( Q# t7 {9 f7 eprint classification_report(y_test, y_test_pred, target_names=target_names)& k1 X9 [$ Y+ P( @! [) c
print "#"*30 + "\n"
) I% K+ }: j1 \/ ?/ L p- N
! F: l( V" ?7 v, P5 [ |
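The script imports a `utilities` module that ships with the book's repository but is not included in this post. Below is a minimal sketch of the two helpers it calls, assuming data_multivar.txt is a comma-separated text file with the class label in the last column; the repository's actual load_data and plot_classifier may differ in details.

# utilities.py -- minimal sketch, not the book's original implementation.
import numpy as np
import matplotlib.pyplot as plt


def load_data(input_file):
    """Load 'x1,x2,label' rows; return features X and labels y (assumed format)."""
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]


def plot_classifier(classifier, X, y, title=''):
    """Plot the decision regions of a fitted 2-D classifier plus the data points."""
    # Build a mesh grid that covers the data with a small margin
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))

    # Predict a class for every mesh point and reshape back to the grid
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With these two helpers saved as utilities.py next to the main script, the example runs end to end: it plots the raw data, the RBF-kernel SVM's decision regions on the training and test splits, and prints a classification_report for each split.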