Restructured full repo
990
python-mock/PeakDetectionAnalysis.ipynb
Normal file
File diff suppressed because one or more lines are too long
30
python-mock/log.py
Normal file
@@ -0,0 +1,30 @@
import matplotlib.pyplot as plt
import socket
import struct
import numpy as np
import datetime

device = '192.168.178.79'


def network_receive():
    int_size = 4
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((device, 123))
    # header: element count and session start time, each a native-order int32
    number_of_elements = struct.unpack('i', s.recv(int_size, socket.MSG_WAITALL))[0]
    session_start_time = struct.unpack('i', s.recv(int_size, socket.MSG_WAITALL))[0]
    dtime = datetime.datetime.fromtimestamp(session_start_time)
    print("Number of elements {}, session start {}".format(number_of_elements, dtime))
    # payload: number_of_elements int32 samples
    data_buffer = struct.unpack('{}i'.format(number_of_elements), s.recv(int_size * number_of_elements, socket.MSG_WAITALL))
    s.close()
    return np.array(data_buffer, dtype=np.int32)


data = network_receive()
plt.plot(data)
plt.show()
print(data)

#data = []
#while True:
#    value = network_receive()
#    data.append(value)
#    print(value, min(data), max(data))
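For local testing without the device at 192.168.178.79, a minimal mock sender that speaks the same framing as the receiver above (an int32 element count, an int32 Unix start time, then the int32 payload, all in native byte order) could look like this sketch. The ramp payload and the choice to bind port 123 (which typically requires elevated privileges) are assumptions, not part of the repo:

import socket
import struct
import time

samples = list(range(100))  # hypothetical payload: a simple ramp

srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.bind(('0.0.0.0', 123))  # matches the port log.py connects to; ports < 1024 need privileges
srv.listen(1)
conn, _ = srv.accept()
conn.sendall(struct.pack('i', len(samples)))                      # header: number of elements
conn.sendall(struct.pack('i', int(time.time())))                  # header: session start time
conn.sendall(struct.pack('{}i'.format(len(samples)), *samples))   # payload: int32 samples
conn.close()
srv.close()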
95
python-mock/measurement_session.py
Normal file
@@ -0,0 +1,95 @@
import numpy as np
import msgpack


def _decoding_hook(code, data):
    # ext type codes observed from the firmware all carry int16 arrays
    if code in (-47, 47, -51):
        return np.frombuffer(data, dtype=np.int16)
    print(code)
    return 'unknown ext'


def _encoding_hook(obj):
    if isinstance(obj, np.ndarray) and obj.dtype == np.int16:
        buffer = memoryview(obj).tobytes()
        assert len(buffer) / 2 == len(obj)  # int16: two bytes per element
        return msgpack.ExtType(47, buffer)
    raise TypeError("Cannot pack: %s of type %s" % (obj, str(type(obj))))


def deserialize(stream):
    return msgpack.unpackb(stream, ext_hook=_decoding_hook, raw=False)


def serialize(obj):
    return msgpack.packb(obj, default=_encoding_hook, use_bin_type=True)


def load_session_from_file(file):
    with open(file, 'rb') as f:
        file_contents = f.read()
    return deserialize(file_contents)


def plot_session(session):
    import matplotlib.pyplot as plt
    y = session['values']
    if 'timestamps' in session:
        # sparse sessions store timestamps in tenths of a second
        interval = 10
        t = session['timestamps'] / interval
        time_range_seconds = t[-1] - t[0]
        description = f"Sparse session, {len(t)} data points, {time_range_seconds / 60} minutes"
        plt.plot(session['timestamps'], session['values'], 'x-')
    else:
        # dense sessions are equidistant; 'interval' is in milliseconds
        measurement_interval = session.get('interval', 100)
        t = np.arange(len(y)) * measurement_interval / 1000
        description = f"Dense session, {len(t)} data points, {t[-1] / 60} minutes"
        plt.plot(t, y)
    plt.title(description)


def prune_overflown_session(session, max_elements=8 * 1024):
    session = session.copy()
    session['values'] = session['values'][:max_elements]
    if 'timestamps' in session:
        session['timestamps'] = session['timestamps'][:max_elements]

    return session


def prune(session, beginning=10, end=10):
    session = session.copy()
    session['values'] = session['values'][beginning:-end]
    session['timestamps'] = session['timestamps'][beginning:-end]
    session['startIndex'] += beginning
    return session


#def extend(session, value, beginning, end):
#    session = session.copy()
#    session['values'] = np.concatenate((np.ones([beginning]) * value, session['values'], np.ones([end]) * value))
#    # what to put in time stamps?
#    return session


def start_at_index(session, index):
    session = session.copy()
    to_remove = index - session['startIndex']
    if to_remove > 0:
        session['values'] = session['values'][to_remove:]
        if 'timestamps' in session:
            session['timestamps'] = session['timestamps'][to_remove:]
    return session


def test_serialization_deserialization(session_file):
    session = load_session_from_file(session_file)
    serialized = serialize(session)
    deserialized = deserialize(serialized)
    np.testing.assert_equal(deserialized['values'], session['values'])


if __name__ == '__main__':
    test_serialization_deserialization('../example_sessions/04.st')
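A typical workflow with this module, sketched under the assumption that the example session file exists and is a sparse session (i.e. it carries 'timestamps' and 'startIndex' keys, which prune requires):

import matplotlib.pyplot as plt

session = load_session_from_file('../example_sessions/04.st')
session = prune_overflown_session(session)      # cap at 8 * 1024 samples
session = prune(session, beginning=10, end=10)  # drop noisy edge samples
plot_session(session)
plt.show()

# round-trip check: serialize followed by deserialize preserves the values
np.testing.assert_equal(deserialize(serialize(session))['values'], session['values'])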
16
python-mock/mock.py
Normal file
@@ -0,0 +1,16 @@
import os
from flask import Flask, Response, request
from .measurement_session import load_session_from_file, serialize, start_at_index, prune_overflown_session

app = Flask(__name__)
SESSION_DIR = '../example_sessions'


@app.route("/api/sessionhistory/<session_id>", methods=['GET'])
def session_history(session_id):
    # 'startIndex' lets a client resume a partially transferred session
    start_index = request.args.get('startIndex', 0, type=int)
    file_name = os.path.join(SESSION_DIR, session_id) + '.st'
    session = load_session_from_file(file_name)
    session = prune_overflown_session(session)
    session = start_at_index(session, start_index)
    return Response(serialize(session), mimetype="application/x-msgpack")
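Once the mock server is running, a client could fetch a session as in the sketch below; the host/port and the session id '04' are assumptions, while the route and the startIndex query parameter come from the handler above:

import requests
from measurement_session import deserialize  # assumes the python-mock directory is on sys.path

r = requests.get('http://127.0.0.1:5000/api/sessionhistory/04',
                 params={'startIndex': 0})
session = deserialize(r.content)  # msgpack payload with the ext-type int16 arrays
print(len(session['values']))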
195
python-mock/peak_detection.py
Normal file
@@ -0,0 +1,195 @@
import numpy as np

__all__ = ['PeakDetectorZScore', 'PeakDetectorSimple', 'detect_peaks']


def detect_peaks(values, detector):
    for v in values:
        detector.add(v)
    return np.array(detector.peaks, dtype=int)


class PeakDetectorSimple:
    def __init__(self, threshold):
        self.peaks = []
        self._queue = []
        self.threshold = threshold
        self._counter = 0
        self._last_min = 0

    def add(self, value):
        self._queue.append(value)
        if len(self._queue) > 3:
            self._queue.pop(0)
        if len(self._queue) != 3:
            return
        last, current, following = self._queue
        is_maximum = current > following and current > last
        # a local maximum counts as a peak only if it rises far enough above the last minimum
        if is_maximum and (current - self._last_min) > self.threshold:
            self.peaks.append(self._counter + 1)
            self._last_min = current
        self._last_min = min(self._last_min, current)
        self._counter += 1


class PeakDetectorZScore:
    def __init__(self, lag, threshold, influence):
        self._filter = ZScoreFilter(lag, threshold, influence)
        self.peaks = []
        self._counter = 0
        self._previous_signal = 0
        self._max = None
        self._max_index = None

        # debug
        self.up_down_signal = []

    def add(self, value):
        signal = self._filter.add(value)
        if signal is not None:
            self.up_down_signal.append(signal)

            rising_flank = self._previous_signal != 1 and signal == 1
            falling_flank = self._previous_signal == 1 and signal != 1
            if rising_flank:
                self._max = -1
            if signal == 1 and self._max is not None and value > self._max:
                self._max = value
                self._max_index = self._counter
            if falling_flank:
                # the peak is the largest sample seen while the signal was up
                self.peaks.append(self._max_index)

            self._previous_signal = signal
            self._counter += 1


class StatisticsQueue:
    """Fixed-size queue that maintains a running mean and variance."""

    def __init__(self, size):
        self._queue = []
        self._queue_sum = 0
        self._queue_sum_sq = 0
        self._size = size

    def add(self, value):
        self._queue.append(value)

        self._queue_sum += value
        self._queue_sum_sq += value ** 2
        if len(self._queue) > self._size:
            removed = self._queue.pop(0)
            self._queue_sum -= removed
            self._queue_sum_sq -= removed ** 2

    @property
    def avg(self):
        return self._queue_sum / len(self._queue)

    @property
    def variance(self):
        # Var(X) = E[X^2] - E[X]^2
        exp_sq = self._queue_sum_sq / len(self._queue)
        return exp_sq - self.avg ** 2

    @property
    def std_deviation(self):
        return np.sqrt(self.variance)

    @property
    def filled(self):
        return len(self._queue) == self._size


class ZScoreFilter:
    def __init__(self, lag, threshold, influence):
        self._threshold = threshold
        self._influence = influence
        self._stat_queue = StatisticsQueue(lag)
        self._last_value = None

        # debug
        self.filtered = []
        self.means = []
        self.upper_bounds = []

    def add(self, value):
        sq = self._stat_queue
        if not sq.filled:
            # warm-up: fill the statistics window before emitting signals
            sq.add(value)
            self._last_value = value
            return None
        else:
            avg = sq.avg
            self.means.append(avg)
            self.upper_bounds.append(avg + self._threshold * sq.std_deviation)
            if abs(value - avg) > self._threshold * sq.std_deviation:
                signal = 1 if value > avg else -1
                # dampen the outlier before it enters the statistics window
                filtered = self._influence * value + (1 - self._influence) * self._last_value
                sq.add(filtered)
                self._last_value = filtered
                self.filtered.append(filtered)
                return signal
            else:
                sq.add(value)
                self._last_value = value
                self.filtered.append(value)
                return 0


def peak_detection_z_score(y, lag, threshold, influence):
    # reference implementation that operates on the whole array at once
    signals = np.zeros(len(y))
    filtered_y = np.array(y, dtype=float)  # float copy so damped values are not truncated
    avg_filter = [0] * len(y)
    std_filter = [0] * len(y)
    avg_filter[lag - 1] = np.mean(y[0:lag])
    std_filter[lag - 1] = np.std(y[0:lag])
    for i in range(lag, len(y)):
        if abs(y[i] - avg_filter[i - 1]) > threshold * std_filter[i - 1]:
            if y[i] > avg_filter[i - 1]:
                signals[i] = 1
            else:
                signals[i] = -1

            filtered_y[i] = influence * y[i] + (1 - influence) * filtered_y[i - 1]
        else:
            signals[i] = 0
            filtered_y[i] = y[i]
        avg_filter[i] = np.mean(filtered_y[(i - lag + 1):i + 1])
        std_filter[i] = np.std(filtered_y[(i - lag + 1):i + 1])

    return dict(signals=np.asarray(signals),
                avgFilter=np.asarray(avg_filter),
                stdFilter=np.asarray(std_filter))


def test_zscore():
    from measurement_session import load_session_from_file, prune_overflown_session, prune
    import matplotlib.pyplot as plt

    session_file = '../example_sessions/06.st'
    session = load_session_from_file(session_file)
    session = prune_overflown_session(session)
    session = prune(session, 10, 50)

    lag = 8
    peak_detector_zscore = PeakDetectorZScore(lag=lag, threshold=2, influence=0)
    peaks = detect_peaks(session['values'], peak_detector_zscore)
    # the filter emits nothing during the first `lag` samples; pad so lengths match the data
    up_down = np.array([0] * lag + peak_detector_zscore.up_down_signal)
    up_down[up_down < 0] = -10
    up_down[up_down > 0] = 10000
    avgs = np.array([0] * lag + peak_detector_zscore._filter.means)
    filtered = np.array([0] * lag + peak_detector_zscore._filter.filtered)

    plt.figure()
    plt.plot(session['timestamps'], session['values'], 'x-', label='data')
    plt.plot(session['timestamps'], filtered, 'x', label='filtered')
    plt.plot(session['timestamps'], up_down, '-', label='up_down')
    plt.plot(session['timestamps'], avgs, '-', label='avg')

    # plt.plot(session['timestamps'][peaks+8], session['values'][peaks+8], 'o',
    #          label=f"Simple {peak_detector_simple.threshold}")
    plt.title("Peak detection")
    plt.legend()
    plt.show()


if __name__ == '__main__':
    test_zscore()
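Beyond the session-based test above, the detector can be exercised on synthetic data. A minimal sketch, assuming a Gaussian baseline with two injected spikes; note that reported peak indices are relative to the end of the warm-up window, so `lag` is added back for plotting against the raw signal:

rng = np.random.default_rng(0)  # hypothetical test signal
values = rng.normal(0, 1, 200)
values[60] += 15
values[140] += 15

lag = 30
detector = PeakDetectorZScore(lag=lag, threshold=3, influence=0)
peaks = detect_peaks(values, detector)
print(peaks + lag)  # should land at (or right next to) indices 60 and 140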
50
python-mock/plot.py
Normal file
@@ -0,0 +1,50 @@
import matplotlib.pyplot as plt
import numpy as np
from msgpack.fallback import unpackb  # pure-Python unpacker; msgpack.unpackb works as well
import requests


def ext_hook(code, data):
    #if code == -47:
    if code == -51:
        return np.frombuffer(data, dtype=np.int16)
    else:
        print(code)
        return 'unknown ext'


def decode(stream):
    return unpackb(stream, ext_hook=ext_hook, raw=False)


def from_file(file_name):
    with open(file_name, 'rb') as f:
        file_contents = f.read()
    print("File size", len(file_contents))
    res = decode(file_contents)
    print(res)
    plt.plot(res['values'], 'x-')
    plt.show()
    return res


def from_network(url="http://swimtrainer/api/session", start_index=0):
    r = requests.get("{}?startIndex={}".format(url, start_index))
    res = decode(r.content)
    print(res)
    plt.plot(res['timestamps'], res['values'], 'x-')
    plt.show()


def analyze(data, max_size=8 * 1024):
    time_stamps = data['timestamps'][:max_size]
    values = data['values'][:max_size]
    # timestamps are in tenths of a second
    time_range_seconds = (time_stamps[-1] - time_stamps[0]) / 10
    dense_time_range_seconds = max_size / 10
    print("Time range {}, dense {} [minutes]".format(time_range_seconds / 60, dense_time_range_seconds / 60))


#from_network()
res = from_file('1589394292.st')
#analyze(res)
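from_network can also be pointed at the Flask mock from mock.py instead of the real device; the local address below is an assumption:

#from_network('http://127.0.0.1:5000/api/sessionhistory/04', start_index=0)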