sleep-analysis

import pandas as pd
import numpy as np
複製代碼
# Load the exported sensor data. low_memory=False makes pandas read the whole
# file in one pass and infer one dtype per column, silencing the DtypeWarning
# about mixed types in columns 3-10 (caused by repeated header rows embedded
# in the CSV; those rows are filtered out further below).
result = pd.read_csv('out.csv', low_memory=False)
result.head()
複製代碼
/anaconda3/envs/py35/lib/python3.5/site-packages/IPython/core/interactiveshell.py:2728: DtypeWarning: Columns (3,4,5,6,7,8,10) have mixed types. Specify dtype option on import or set low_memory=False.
  interactivity=interactivity, compiler=compiler, result=result)
複製代碼
Unnamed: 0 uploadtime data version datatype h l ns s serialNumber t
0 0.0 2018-05-22T03:07:51.263Z {'serialNumber': '"S1DL11XHSHS1"', 't': '25.08... 2 True 59 18 2 62 "S1DL11XHSHS1" 25.0893
1 1.0 2018-05-22T03:07:52.008Z {'serialNumber': '"S1F311DQSHS1"', 't': '26.40... 2 True 50 4 1 61 "S1F311DQSHS1" 26.4033
2 2.0 2018-05-22T03:07:52.059Z {'serialNumber': '"S1DF11MFSHS1"', 't': '17.17... 2 True 70 16 0 67 "S1DF11MFSHS1" 17.1742
3 3.0 2018-05-22T03:07:52.350Z {'serialNumber': '"S1FD13XGSHS1"', 't': '21.24... 2 True 64 0 1 65 "S1FD13XGSHS1" 21.2446
4 4.0 2018-05-22T03:07:54.643Z {'serialNumber': '"S1DL11W4SHS1"', 't': '27.28... 2 True 45 0 1 60 "S1DL11W4SHS1" 27.2828
result = result.drop(result.columns[0], axis = 1)
複製代碼
result_no_nan = result.dropna(subset=['ns'])
複製代碼
result_no_nan = result_no_nan[result_no_nan['ns'] != 'ns']
複製代碼
result_no_nan['ns'] = result_no_nan['ns'].astype(int)
複製代碼
result_no_nan.ns.unique()
複製代碼
array([2, 1, 0, 3])
複製代碼
result_no_nan.ns.value_counts()
複製代碼
0    394296
1    358078
2     16030
3      2155
Name: ns, dtype: int64
複製代碼
# One-hot encode the four sleep-state classes into indicator columns ns0..ns3.
for code in range(4):
    col = 'ns' + str(code)
    result_no_nan[col] = (result_no_nan['ns'] == code).astype(int)
複製代碼
result_no_nan.head(1)
複製代碼
uploadtime data version datatype h l ns s serialNumber t ns0 ns1 ns2 ns3
0 2018-05-22T03:07:51.263Z {'serialNumber': '"S1DL11XHSHS1"', 't': '25.08... 2 True 59 18 2 62 "S1DL11XHSHS1" 25.0893 0 0 1 0
out = result_no_nan[['serialNumber','uploadtime','h','l','s','t','ns0','ns1','ns2','ns3']]
複製代碼
out.head(1)
複製代碼
serialNumber uploadtime h l s t ns0 ns1 ns2 ns3
0 "S1DL11XHSHS1" 2018-05-22T03:07:51.263Z 59 18 62 25.0893 0 0 1 0
target = np.stack([out.ns0.values, out.ns1.values, out.ns2.values, out.ns3.values]).T
複製代碼
input_data = np.stack([out.h.values, out.l.values, out.s.values, out.t.values]).T
複製代碼
np.shape(input_data), np.shape(target)
複製代碼
((770559, 4), (770559, 4))
複製代碼
import tensorflow as tf
複製代碼
/anaconda3/envs/py35/lib/python3.5/importlib/_bootstrap.py:222: RuntimeWarning: compiletime version 3.6 of module 'tensorflow.python.framework.fast_tensor_util' does not match runtime version 3.5
  return f(*args, **kwds)
/anaconda3/envs/py35/lib/python3.5/site-packages/h5py/__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.
  from ._conv import register_converters as _register_converters
複製代碼
# Graph inputs: 4 sensor features per sample, 4-way one-hot label.
# tf.float32 is what the string alias "float" resolves to.
x = tf.placeholder(tf.float32, shape=[None, 4])
y = tf.placeholder(tf.float32, shape=[None, 4])
複製代碼
reg = tf.contrib.layers.l2_regularizer(scale=0.1)
複製代碼
# First hidden layer: 4 features -> 32 ReLU units, L2-regularized weights.
fc_1 = tf.contrib.layers.fully_connected(
    inputs=x,
    num_outputs=32,
    weights_initializer=tf.truncated_normal_initializer(stddev=0.1),
    weights_regularizer=reg,
    activation_fn=tf.nn.relu,
)
複製代碼
np.shape(fc_1)
複製代碼
TensorShape([Dimension(None), Dimension(32)])
複製代碼
# Second hidden layer: 32 -> 32 ReLU units, same initializer and L2 penalty.
fc_2 = tf.contrib.layers.fully_connected(
    inputs=fc_1,
    num_outputs=32,
    weights_initializer=tf.truncated_normal_initializer(stddev=0.1),
    weights_regularizer=reg,
    activation_fn=tf.nn.relu,
)
複製代碼
np.shape(fc_2)
複製代碼
TensorShape([Dimension(None), Dimension(32)])
複製代碼
# Dropout on the second hidden layer; keep_prob is fed at run time
# (0.5 while training, 1.0 when evaluating).
keep_prob = tf.placeholder(tf.float32)
fc2_drop = tf.nn.dropout(fc_2, keep_prob)
複製代碼
pred = tf.contrib.layers.fully_connected(fc2_drop, 4, activation_fn=tf.nn.softmax)
複製代碼
np.shape(pred), np.shape(target)
複製代碼
(TensorShape([Dimension(None), Dimension(4)]), (770559, 4))
複製代碼
# Collect every trainable variable and build the total L2 penalty term.
weights = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
reg_ws = tf.contrib.layers.apply_regularization(reg, weights)
複製代碼
loss = -tf.reduce_sum(y * tf.log(tf.clip_by_value(pred, 0.0001, 1))) + tf.reduce_sum(reg_ws)
複製代碼
# A prediction is correct when its arg-max class matches the label's.
correct_pred = tf.equal(tf.argmax(pred, axis=1), tf.argmax(y, axis=1))
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
複製代碼
train_step = tf.train.AdamOptimizer(0.001).minimize(loss)
複製代碼
# Start a session, initialize all variables, and set up history buffers
# for the periodic loss/accuracy snapshots taken during training.
sess = tf.Session()
sess.run(tf.global_variables_initializer())
loss_train = []
train_acc = []
複製代碼
# Full-batch training: every step feeds the entire dataset, so the gradient
# is independent of row order. The original code re-permuted all ~770k rows
# on every iteration, which changed nothing the optimizer saw and made each
# step needlessly expensive — the shuffle is dropped here. Every 50 steps,
# loss and accuracy are snapshotted with dropout disabled (keep_prob=1);
# both tensors are fetched in ONE sess.run instead of two full forward passes.
# NOTE(review): with ~770k rows, mini-batch SGD would be far cheaper per step
# (the discarded permutation suggests that was the original intent) — consider
# sampling batches of e.g. 1024 rows instead of feeding the whole set.
for i in range(4000):
    sess.run(train_step, feed_dict={x: input_data, y: target, keep_prob: 0.5})
    if i % 50 == 0:
        loss_temp, train_acc_temp = sess.run(
            [loss, accuracy],
            feed_dict={x: input_data, y: target, keep_prob: 1})
        loss_train.append(loss_temp)
        train_acc.append(train_acc_temp)
        print(loss_temp, train_acc_temp)
複製代碼
1375709.8 0.5048309
609597.56 0.7247907
541404.9 0.73670805
496564.28 0.7462751
487508.44 0.7478636
482841.1 0.7487253
480346.75 0.7482607
477108.9 0.7484346
475905.06 0.7484696
474179.06 0.74926645
472259.4 0.7497012
471763.56 0.74967396
471339.75 0.7500412
470828.5 0.75031114
471458.1 0.7515453
470603.12 0.7517828
469874.47 0.751653
469248.5 0.75303257
468931.7 0.7540331
468528.1 0.7539851
468503.97 0.75422126
468131.6 0.75447303
468123.9 0.7547144
468139.78 0.75418496
467144.9 0.7546249
467890.4 0.7549545
467048.97 0.7551518
466745.1 0.75575006
466457.34 0.75609916
465929.1 0.75655985
465421.4 0.75609136
465065.8 0.75649625
466193.6 0.7567532
465614.88 0.7571672
464973.97 0.75710493
465103.9 0.7571659
464903.06 0.7569699
465279.66 0.7574021
463968.34 0.7576798
464071.3 0.75737613
464002.7 0.7574748
464098.28 0.7576383
463965.44 0.75771487
463142.53 0.7580419
463134.94 0.75802374
463714.1 0.757332
463228.44 0.7578615
462894.66 0.758134
463095.1 0.7582041
462459.34 0.7581211
462745.3 0.7582807
462511.56 0.7579199
461954.0 0.758317
461555.75 0.7583235
461758.25 0.7583313
461913.78 0.7582119
461906.06 0.7583274
462053.12 0.75818336
461215.22 0.7583352
461650.03 0.75818205
461360.84 0.75823134
460985.78 0.7580925
461389.03 0.75806135
461056.66 0.7584001
460863.2 0.7579446
460948.8 0.7576357
460820.84 0.75813794
460422.53 0.75801337
460513.97 0.75804317
460623.8 0.75829107
460722.16 0.75801337
460025.47 0.7582729
460292.94 0.7576668
460127.25 0.7580146
460008.84 0.75840396
460206.84 0.7582664
459823.66 0.7583209
460277.3 0.757881
459591.34 0.75830925
459873.66 0.7581042
複製代碼
import matplotlib.pyplot as plt
複製代碼
# Training-loss curve: one point per 50-step snapshot.
plt.title('train loss')
plt.plot(loss_train, 'k-')
plt.show()
複製代碼

# Training-accuracy curve: one point per 50-step snapshot.
plt.title('train_acc')
plt.plot(train_acc, 'b-', label='train_acc')
plt.legend()
plt.show()
複製代碼

本站公眾號
   歡迎關注本站公眾號,獲取更多信息