import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split  # replaces the removed sklearn.cross_validation module
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Pick one kernel; the other two lines are kept for comparison
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()  # display all figures
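The commented-out params lines switch between linear, polynomial, and RBF kernels by hand. If you would rather let the data choose, one option is a cross-validated grid search over the training split. This is only a sketch, reusing the X_train/y_train from the script above; the parameter grid values are illustrative, not from the book.

# Optional: pick the kernel and its hyperparameters by cross-validated grid search
from sklearn.model_selection import GridSearchCV
from sklearn.svm import SVC

param_grid = [
    {'kernel': ['linear'], 'C': [0.1, 1, 10]},
    {'kernel': ['poly'], 'degree': [2, 3], 'C': [0.1, 1, 10]},
    {'kernel': ['rbf'], 'gamma': ['scale', 0.1, 1], 'C': [0.1, 1, 10]},
]
grid = GridSearchCV(SVC(), param_grid, cv=5)
grid.fit(X_train, y_train)
print("Best parameters:", grid.best_params_)
classifier = grid.best_estimator_

Since GridSearchCV refits the best estimator on the whole training split by default, best_estimator_ can be passed straight to plot_classifier and classification_report in place of the hand-built SVC.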