import numpy as np
import matplotlib.pyplot as plt

from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data (two features per sample, binary labels)
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (sklearn.cross_validation has been removed; train_test_split now lives in sklearn.model_selection)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Pick one kernel; the other two are kept here for comparison
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
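
The script imports a local utilities module that ships with the book's code and is not included in this post. If you don't have it on hand, a minimal stand-in that matches how load_data and plot_classifier are called above could look like the sketch below; the bodies are my assumption (comma-separated file with the label in the last column, decision regions drawn on a mesh grid), not the book's exact code.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumption: comma-separated values, last column is the class label
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title):
    # Evaluate the classifier on a dense grid and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.title(title)
    plt.show()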
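
The commented-out params lines switch between linear, polynomial, and RBF kernels. If you want a quick way to decide which one to keep instead of toggling comments, a small cross-validation loop (my addition, reusing X_train and y_train from the script above) works:

from sklearn.model_selection import cross_val_score
from sklearn.svm import SVC

# Score each candidate kernel with 5-fold cross-validation on the training split
for params in [{'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}]:
    scores = cross_val_score(SVC(**params), X_train, y_train, cv=5)
    print(params, 'mean accuracy: %.3f' % scores.mean())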