import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")
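The script relies on a local utilities module from the book's code repository (Python-Machine-Learning-Cookbook), which is not reproduced in this post. Below is a minimal sketch of what the two helpers used here, load_data and plot_classifier, typically do; it assumes data_multivar.txt is comma-separated with the class label in the last column, and it is only a stand-in, not the repository's actual implementation.

# Hypothetical stand-in for the book's utilities.py; the real module may differ.
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes comma-separated rows with the class label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Evaluate the classifier on a dense grid to visualize the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, Z, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired,
                edgecolors='black', linewidth=1)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.show()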