import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: sklearn.cross_validation was removed; train_test_split now lives in sklearn.model_selection
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display all figures
plt.show()
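
The script depends on a local utilities module that is not included in the post. Below is a minimal sketch of what load_data and plot_classifier might look like, inferred only from how they are called above (the actual module shipped with the book may differ):

# utilities.py -- assumed implementation, not the original file
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes a comma-separated text file whose last column is the class label
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title):
    # Plot the decision regions of a fitted 2-D classifier over a mesh grid
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.contourf(xx, yy, Z, cmap=plt.cm.gray, alpha=0.3)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.title(title)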
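
For reference, the commented-out params lines suggest trying the linear and polynomial kernels as well. A quick sketch for comparing the three kernels on the same split (not part of the original recipe; accuracy comes from SVC.score on the held-out test set):

from sklearn.svm import SVC

for params in [{'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}]:
    clf = SVC(**params)
    clf.fit(X_train, y_train)
    print(params, '-> test accuracy:', round(clf.score(X_test, y_test), 3))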