import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"

# Display all figures when running as a standalone script
plt.show()
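The listing above is the book's original Python 2 / older scikit-learn version. On Python 3 with scikit-learn 0.20 or newer, the sklearn.cross_validation module no longer exists and the print statements are syntax errors. A minimal adaptation of just the affected lines, assuming the rest of the script stays unchanged:

# Python 3 / scikit-learn >= 0.20 replacements for the deprecated pieces
from sklearn.model_selection import train_test_split   # replaces sklearn.cross_validation

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# print is a function in Python 3
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")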
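Note that utilities.load_data and utilities.plot_classifier come from the utilities.py file in the book's GitHub repository (same Chapter03 folder), so the script will not run if you only copied this snippet. As a rough guide, a loader like this typically looks like the sketch below; it assumes data_multivar.txt is comma-separated with the class label in the last column, and the actual helper in the repo may differ in details:

import numpy as np

def load_data(input_file):
    # Read a comma-separated text file: every column except the last is a feature,
    # the last column is the class label
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y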
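Similarly, the decision-boundary figures produced by utilities.plot_classifier can be reproduced with the standard mesh-grid pattern. The function below is only a sketch of that pattern, not the repo's exact code:

import numpy as np
import matplotlib.pyplot as plt

def plot_classifier(classifier, X, y, title=''):
    # Define the plotting range with a margin around the data
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0

    # Evaluate the classifier on a dense grid of points
    step = 0.01
    xx, yy = np.meshgrid(np.arange(x_min, x_max, step), np.arange(y_min, y_max, step))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    # Shade the predicted regions and overlay the actual points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black', linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())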