The customer is developing a product to be worn on the human wrist and needs to understand the worst-case shock and vibration conditions the product would endure in order to develop and qualify it. The project will acquire data across a range of sports and activities, summarize the severity of each, and derive vibration (PSD) and shock (SRS) test levels that envelop the measured environments.
Data was acquired with enDAQ's S4-R500D40, which is small enough to be worn on the wrist as shown while still carrying sensors powerful enough to give meaningful data. The device includes a 500g piezoresistive accelerometer, sampled here at 20,000 Hz. It also captured rotation and orientation data, but analysis of those channels was out of scope.
Note the orientation of the axes in the image:
This table provides an overview of the maximum values seen in the tests using the resultant of all three axes. It also provides the total amount of data acquired for each sport.
import pandas as pd
# Folder of CSV summaries exported by the analysis cells later in this notebook
download_dir = r"C:\Users\shanly\Desktop\Wrist-Tests"+"\\"
sports_summary = pd.read_csv(download_dir+'sports-summary.csv').set_index('Tag Name')
sports_summary
Tag Name | Recording Length (minutes) | RMS Acceleration (g) | Peak Acceleration (g) | Peak Pseudo Velocity (in/s) | RMS Rotation (deg/s)
---|---|---|---|---|---
Baseball (Catch) | 7.39 | 2.67 | 45.22 | 620.19 | 316.85
Running | 1.16 | 2.11 | 7.97 | 603.46 | 399.65
Football | 7.13 | 1.16 | 81.08 | 456.69 | 305.08
Frisbee | 4.76 | 1.01 | 41.66 | 435.20 | 196.95
Golf | 1.01 | 0.79 | 10.22 | 347.51 | 252.44
Volleyball | 7.59 | 1.25 | 70.91 | 347.39 | 242.07
Basketball | 8.04 | 1.10 | 27.17 | 303.44 | 260.40
Tennis | 10.18 | 0.78 | 35.89 | 246.03 | 206.90
Hockey | 0.30 | 1.18 | 19.68 | 244.11 | NaN
Rock Climbing | 5.44 | 0.62 | 9.02 | 231.47 | 147.26
Fishing | 1.01 | 0.61 | 3.69 | 186.73 | 145.34
Bicycle | 1.91 | 1.04 | 18.29 | 125.76 | 71.35
Walk | 7.36 | 0.33 | 4.08 | 114.48 | 175.23
Hammering | 1.31 | 0.76 | 6.76 | 108.21 | 200.92
Rowing | 1.28 | 0.48 | 1.98 | 58.95 | 79.90
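Here "resultant" means the vector magnitude of the three axes at each sample. As a minimal sketch (df_xyz is a hypothetical DataFrame holding the triaxial acceleration in g):

import numpy as np
# Hypothetical triaxial DataFrame with columns 'X', 'Y', 'Z'
resultant = np.sqrt(df_xyz['X']**2 + df_xyz['Y']**2 + df_xyz['Z']**2)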
import plotly.express as px
psd_r = pd.read_csv(download_dir+'psd-by-sport-Resultant.csv').set_index('Frequency (Hz)')
px.line(psd_r,labels={"value": "Acceleration (g^2/Hz)"},log_x=True,log_y=True,
        title='Power Spectral Density Max Per Sport - Resultant').show()
psd_max = pd.read_csv(download_dir+'psd-max.csv').set_index('Frequency (Hz)')
fig = px.line(psd_max,labels={"value": "Acceleration (g^2/Hz)"},log_x=True,log_y=True,
title='Acceleration Power Spectral Density MAX by Axis')
psd_standard = pd.DataFrame({'Frequency (Hz)':[1, 3, 10, 100, 120, 140, 260, 500],
'Acceleration (g^2/Hz)':[.1, 2, .4, .004, .006, .001, .0005, 0.00005]})
display(psd_standard)
psd_standard.to_csv(path_or_buf=download_dir+'psd-max-standard.csv',index=False)
import plotly.graph_objects as go
fig.add_trace(go.Scatter(x=psd_standard['Frequency (Hz)'],y=psd_standard['Acceleration (g^2/Hz)'],name='Test Standard',mode='lines'))
fig.show()
Frequency (Hz) | Acceleration (g^2/Hz)
---|---
1 | 0.10000
3 | 2.00000
10 | 0.40000
100 | 0.00400
120 | 0.00600
140 | 0.00100
260 | 0.00050
500 | 0.00005
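As a sanity check on the proposed level, the overall g RMS implied by these breakpoints can be computed by integrating the PSD with log-log (constant dB/octave) interpolation between points, the convention used in MIL-STD-810-style specs. A short sketch (not from the original notebook) that reproduces the ~3.4 g RMS figure quoted below:

import numpy as np
f = psd_standard['Frequency (Hz)'].to_numpy(dtype=float)
p = psd_standard['Acceleration (g^2/Hz)'].to_numpy(dtype=float)
area = 0.0
for i in range(len(f) - 1):
    m = np.log(p[i+1]/p[i]) / np.log(f[i+1]/f[i])  # log-log slope of this segment
    if np.isclose(m, -1.0):
        area += p[i] * f[i] * np.log(f[i+1]/f[i])  # special case: slope of exactly -1
    else:
        area += p[i] * f[i] / (m + 1) * ((f[i+1]/f[i])**(m + 1) - 1)
print(round(np.sqrt(area), 2))  # ~3.4 g RMS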
accel = pd.read_csv(download_dir+'accel-from-psd-standard.csv').set_index('Time (s)')
fig = px.line(accel,title='Time History of Acceleration that Satisfies PSD Standard (3.4 g RMS)',
labels={"value": 'Acceleration (g)'})
fig.update_layout(showlegend=False)
fig.show()
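The time history above is loaded from a pre-generated CSV; the generation code isn't shown. A minimal sketch of one common approach, shaping white noise in the frequency domain with random phase (the sample rate and duration are assumptions, and f and p are the breakpoint arrays from the RMS check above):

import numpy as np
fs, duration = 2000, 60.0                      # assumed: fs is over 2x the 500 Hz upper breakpoint
n = int(fs * duration)
freqs = np.fft.rfftfreq(n, d=1/fs)
# Interpolate the target PSD log-log between breakpoints; zero it outside the band
psd_t = np.exp(np.interp(np.log(np.maximum(freqs, f[0])), np.log(f), np.log(p)))
psd_t[(freqs < f[0]) | (freqs > f[-1])] = 0.0
# |X_k| = sqrt(PSD_k * fs * n / 2) yields the desired one-sided PSD; phase is random
X = np.sqrt(psd_t * fs * n / 2) * np.exp(2j * np.pi * np.random.rand(freqs.size))
x = np.fft.irfft(X, n=n)                       # Gaussian acceleration time history in g
print(round(x.std(), 2))                       # ~3.4 g RMS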
pvss_r = pd.read_csv(download_dir+'pvss-by-sport-Resultant.csv').set_index('Natural Frequency (Hz)')
px.line(pvss_r,labels={"value": 'Peak Pseudo Velocity (in/s)'},log_x=True,log_y=True,
title='Peak Pseudo Velocity by Sport - Resultant').show()
pvss = pd.read_csv(download_dir+'pvss-max.csv').set_index('Natural Frequency (Hz)')
fig = px.line(pvss,labels={"value": 'Peak Pseudo Velocity (in/s)'},log_x=True,log_y=True,
title='Max Peak Pseudo Velocity')
import numpy as np
srs_spec = pd.read_csv(download_dir+'half-srs.csv').set_index('Natural Frequency (Hz)')
# Pseudo velocity from peak acceleration: PV = A * 386.22 / (2*pi*fn), with fn in Hz
srs_spec['Peak PV'] = srs_spec['Peak Acceleration (g)']/(2*np.pi*srs_spec.index/386.2205)
fig.add_trace(go.Scatter(x=srs_spec.index,y=srs_spec['Peak PV'],name='Half Sine Pulse',mode='lines'))
fig.show()
srs = pd.read_csv(download_dir+'srs-max.csv').set_index('Natural Frequency (Hz)')
fig = px.line(srs,labels={"value": 'Peak Acceleration Response (g)'},log_x=True,log_y=True,
title='Max Shock Response Spectrum')
fig.add_trace(go.Scatter(x=srs_spec.index,y=srs_spec['Peak Acceleration (g)'],name='Half Sine Pulse',mode='lines'))
fig.show()
The SRS above can be fed into most shock test systems, but below is a simple half-sine pulse that will also satisfy the SRS.
# 123.2 g, 22.5 ms half-sine pulse that envelops the measured SRS
Time = np.linspace(0, 0.025, 1000)
Amplitude = 123.2   # g
Period = 0.0225     # s
Function = Amplitude * np.sin(np.pi * Time / Period)
Function[Time > Period] = 0.   # zero out everything after the pulse ends
fig = px.line(x=Time,y=Function,labels={"y": "Acceleration (g)","x": "Time (s)"},
              title='Half Sine Pulse to Satisfy SRS and Reproduce Environment')
fig.show()
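One quick check (a standard shock-analysis rule of thumb, not from the original notebook): the low-frequency pseudo velocity plateau of a half-sine pulse equals its velocity change, delta-V = 2AT/pi. For this pulse that works out to roughly 682 in/s, comfortably above the ~620 in/s peak measured during baseball.

delta_v = 2 * Amplitude * 386.2205 * Period / np.pi  # g -> in/s^2, integrated over the half sine
print(round(delta_v))  # ~682 in/s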
import requests
parameters = {
"x-api-key": 'pfVEvgFedy8WGUUL8315Z5p3JaSmP5Rv9pSYpZUB'
}
domain = "https://p377cock71.execute-api.us-west-2.amazonaws.com/staging" #staging
domain = "https://qvthkmtukh.execute-api.us-west-2.amazonaws.com/master" #production
response = requests.get(domain+"/api/v1/account/info", headers=parameters)
print(response.json())
{'id': '2ae0a897-3822-4ada-9d5a-75f574eb2f7d', 'email': 'success@endaq.com'}
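When scripting against the API it's worth failing fast on a bad key or endpoint before paging through files; a small addition (not in the original):

response.raise_for_status()  # raises an HTTPError on any 4xx/5xx response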
from datetime import datetime, timedelta
import numpy as np
import pandas as pd
response = requests.get(domain+"/api/v1/files?limit=100&attributes=all", headers=parameters)
df = pd.DataFrame(response.json()['data'])
# Add human-readable datetimes (the API returns epoch seconds in UTC; shift -4 h to local time)
df['Date Recorded'] = pd.to_datetime(df['recording_ts'], unit='s') - timedelta(hours=4)
df['Date Uploaded'] = pd.to_datetime(df['created_ts'], unit='s') - timedelta(hours=4)
df['Date Modified'] = pd.to_datetime(df['modified_ts'], unit='s') - timedelta(hours=4)
#Print
df
(Output: a 77-row × 17-column DataFrame, one row per uploaded recording, with columns for attributes, tags, id, file_name, file_size, recording_length, the raw recording/created/modified timestamps, and the three added human-readable date columns. File names include Bolted-1619050177.IDE, Taped-1619050176.IDE, and the DAQ11005_* wrist recordings.)
# Count the files and total recording time carrying each tag
tags = df.tags.explode().unique()
num = []
recording_length = []
for i in range(len(tags)):
    df_tag = df[df.tags.apply(lambda x: tags[i] in x)]
    num.append(len(df_tag))
    recording_length.append(df_tag.recording_length.sum()/60)  # seconds -> minutes
tags_df = pd.DataFrame(tags,columns=['Tag Name'])
tags_df['Count'] = num
tags_df['Recording Length (minutes)'] = recording_length
tags_df.sort_values(by=['Count'],ascending=False)
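The same summary can be built in one pass with explode and groupby; an equivalent sketch (not the notebook's code):

tags_alt = (df.explode('tags')
              .groupby('tags')
              .agg(Count=('id', 'size'), minutes=('recording_length', 'sum')))
tags_alt['Recording Length (minutes)'] = tags_alt.pop('minutes') / 60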
Tag Name | Count | Recording Length (minutes)
---|---|---
Vibration Severity: High | 77 | 67.064268
Wrist | 75 | 65.890129
Shock Severity: High | 60 | 50.625636
Acceleration Severity: High | 54 | 47.118691
Acceleration Severity: Medium | 18 | 17.116668
Tennis | 11 | 10.184723
Shock Severity: Low | 9 | 5.147713
Baseball (Catch) | 9 | 7.391575
Football | 8 | 7.134405
Volleyball | 8 | 7.585721
Basketball | 8 | 8.044704
Rock Climbing | 7 | 5.440895
Shock Severity: Medium | 6 | 10.116780
Acceleration Severity: Low | 5 | 2.828909
Frisbee | 5 | 4.761549
Bicycle | 4 | 1.913641
Running | 3 | 1.162036
Rowing | 3 | 1.280309
Hammering | 3 | 1.313230
Fishing | 2 | 1.008256
Shock Severity: Very Low | 2 | 1.174139
Frequency_Response | 2 | 1.174139
Golf | 2 | 1.008266
Walk | 1 | 7.356675
Hockey | 1 | 0.304144
tags = ['Walk', 'Football', 'Running',
'Rock Climbing', 'Frisbee', 'Basketball', 'Volleyball',
'Baseball (Catch)', 'Tennis','Bicycle',
'Rowing','Hammering','Fishing','Golf','Hockey']
download_dir = r"C:\Users\shanly\Desktop\Wrist-Tests"+"\\"
sports_df = tags_df[tags_df['Tag Name'].isin(tags)].set_index('Tag Name').sort_values(by=['Recording Length (minutes)'],ascending=False)
sports_df
Tag Name | Count | Recording Length (minutes)
---|---|---
Tennis | 11 | 10.184723
Basketball | 8 | 8.044704
Volleyball | 8 | 7.585721
Baseball (Catch) | 9 | 7.391575
Walk | 1 | 7.356675
Football | 8 | 7.134405
Rock Climbing | 7 | 5.440895
Frisbee | 5 | 4.761549
Bicycle | 4 | 1.913641
Hammering | 3 | 1.313230
Rowing | 3 | 1.280309
Running | 3 | 1.162036
Golf | 2 | 1.008266
Fishing | 2 | 1.008256
Hockey | 1 | 0.304144
import os
for tag in tags:
    print(tag)
    os.makedirs(download_dir+tag, exist_ok=True)  # one folder per tag
    ids = df[df.tags.apply(lambda x: tag in x)].id.values
    for file_id in ids:
        response = requests.get(domain+"/api/v1/files/download/"+file_id, headers=parameters)
        r = requests.get(response.json()['url'])  # the response contains a pre-signed download URL
        print("   Downloading: "+response.json()['file_name'])
        with open(download_dir+tag+"\\"+response.json()['file_name'], 'wb') as f:
            f.write(r.content)
Bicycle
   Downloading: DAQ11005_000108-1618518058.IDE
   Downloading: DAQ11005_000109-1618518058.IDE
   Downloading: DAQ11005_000110-1618518057.IDE
   Downloading: DAQ11005_000111-1618518056.IDE
Rowing
   Downloading: DAQ11005_000117-1618934732.IDE
   Downloading: DAQ11005_000116-1618934731.IDE
   Downloading: DAQ11005_000115-1618934731.IDE
Hammering
   Downloading: DAQ11005_000114-1618934612.IDE
   Downloading: DAQ11005_000112-1618934612.IDE
   Downloading: DAQ11005_000113-1618934611.IDE
Fishing
   Downloading: DAQ11005_000080-1618345158.IDE
   Downloading: DAQ11005_000078-1618345157.IDE
Golf
   Downloading: DAQ11005_000075-1618345008.IDE
   Downloading: DAQ11005_000076-1618345007.IDE
Hockey
   Downloading: DAQ10636_000007-1618411804.IDE
from os import listdir
from os.path import isfile, join
import bsvp
# Configure the batch analysis: 2 Hz high-pass on acceleration, 1 Hz-wide PSD bins,
# PVSS starting at 2 Hz with 6 points per octave, summary metrics, and windows around peak events
builder = (
    bsvp.GetDataBuilder(preferred_chs=[], accel_highpass_cutoff=2)
    .add_psd(freq_bin_width=1)
    .add_pvss(init_freq=2, bins_per_octave=6)
    .add_metrics()
    .add_peaks(margin_len=1000)
)
all_data = []
for tag in tags:
    mypath = download_dir+tag+"\\"
    onlyfiles = [f for f in listdir(mypath) if isfile(join(mypath, f))]
    filenames = [mypath + sub for sub in onlyfiles]
    data = builder.aggregate_data(filenames)  # dict of DataFrames: "metrics", "psd", "pvss", "peaks"
    all_data.append(data)
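If you're reproducing this analysis today, the same pipeline lives in enDAQ's open-source endaq package (pip install endaq), which superseded bsvp; to the best of my knowledge the builder API carried over essentially unchanged:

import endaq
builder = (
    endaq.batch.GetDataBuilder(preferred_chs=[], accel_highpass_cutoff=2)
    .add_psd(freq_bin_width=1)
    .add_pvss(init_freq=2, bins_per_octave=6)
    .add_metrics()
    .add_peaks(margin_len=1000)
)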
(Console output: one "processing C:\Users\shanly\Desktop\Wrist-Tests\<Sport>\<file>.IDE..." line per file, followed by "aggregating data... done!" for each sport. One scipy warning appeared on a short Walk file: "UserWarning: nperseg = 3995 is greater than input length = 2238, using nperseg = 2238".)
metrics_interest = ["RMS Acceleration","Peak Absolute Acceleration",
                    "Peak Pseudo Velocity Shock Spectrum","RMS Angular Velocity"]
metrics_names = ['RMS Acceleration (g)','Peak Acceleration (g)','Peak Pseudo Velocity (in/s)','RMS Rotation (deg/s)']
for met in metrics_names:
    sports_df[met] = 0
# Take the worst case (max across all of a sport's files) for each metric
for i in range(len(all_data)):
    metrics = all_data[i]["metrics"]
    for j in range(len(metrics_interest)):
        sports_df.loc[tags[i],metrics_names[j]] = metrics[metrics.calculation==metrics_interest[j]].value.max()
sports_df['Peak Pseudo Velocity (in/s)'] = sports_df['Peak Pseudo Velocity (in/s)']/25.4  # mm/s -> in/s
sports_summary = sports_df.drop(['Count'],axis=1).round(2).sort_values(by=['Peak Pseudo Velocity (in/s)'],ascending=False)
sports_summary.reset_index().to_csv(path_or_buf=download_dir+'sports-summary.csv',index=False)
sports_summary
Tag Name | Recording Length (minutes) | RMS Acceleration (g) | Peak Acceleration (g) | Peak Pseudo Velocity (in/s) | RMS Rotation (deg/s)
---|---|---|---|---|---
Baseball (Catch) | 7.39 | 2.67 | 45.22 | 620.19 | 316.85
Running | 1.16 | 2.11 | 7.97 | 603.46 | 399.65
Football | 7.13 | 1.16 | 81.08 | 456.69 | 305.08
Frisbee | 4.76 | 1.01 | 41.66 | 435.20 | 196.95
Golf | 1.01 | 0.79 | 10.22 | 347.51 | 252.44
Volleyball | 7.59 | 1.25 | 70.91 | 347.39 | 242.07
Basketball | 8.04 | 1.10 | 27.17 | 303.44 | 260.40
Tennis | 10.18 | 0.78 | 35.89 | 246.03 | 206.90
Hockey | 0.30 | 1.18 | 19.68 | 244.11 | NaN
Rock Climbing | 5.44 | 0.62 | 9.02 | 231.47 | 147.26
Fishing | 1.01 | 0.61 | 3.69 | 186.73 | 145.34
Bicycle | 1.91 | 1.04 | 18.29 | 125.76 | 71.35
Walk | 7.36 | 0.33 | 4.08 | 114.48 | 175.23
Hammering | 1.31 | 0.76 | 6.76 | 108.21 | 200.92
Rowing | 1.28 | 0.48 | 1.98 | 58.95 | 79.90
import plotly.graph_objects as go
from plotly.subplots import make_subplots
fig = make_subplots(rows=3, cols=1,
shared_xaxes=False,
vertical_spacing=0.1,
subplot_titles = ['RMS Acceleration (g)','Peak Acceleration (g)','Peak Pseudo Velocity (in/s)'])
# Empirical CDF: returns a function mapping v -> fraction of samples <= v
def ecdf(x):
    x = np.sort(x)
    def result(v):
        return np.searchsorted(x, v, side='right') / x.size
    return result
for i in range(len(all_data)):
metrics = all_data[i]["metrics"]
metrics.columns = ['filename', 'calculation', "axis", 'value', 'serial number','start time']
mets = metrics[(metrics.calculation=="RMS Acceleration") & (metrics.axis=="Resultant")]
fig.add_trace(go.Scatter(
x=np.unique(mets.value),
y=ecdf(mets.value)(np.unique(mets.value)),
name=tags[i],
mode='lines'
),row=1,col=1)
mets = metrics[(metrics.calculation=="Peak Absolute Acceleration") & (metrics.axis=="Resultant")]
fig.add_trace(go.Scatter(
x=np.unique(mets.value),
y=ecdf(mets.value)(np.unique(mets.value)),
name=tags[i],
mode='lines'
),row=2,col=1)
mets = metrics[(metrics.calculation=="Peak Pseudo Velocity Shock Spectrum") & (metrics.axis=="Resultant")]
fig.add_trace(go.Scatter(
x=np.unique(mets.value/25.4),
y=ecdf(mets.value/25.4)(np.unique(mets.value/25.4)),
name=tags[i],
mode='lines'
),row=3,col=1)
fig.update_layout(height=1000)
fig.write_html(download_dir+'metrics.html',full_html=False,include_plotlyjs='cdn')
fig.show()
import plotly.express as px
def style_pvss(fig,title,savename):
fig.update_xaxes(type="log")
fig.update_yaxes(type="log")
fig.update_layout(
xaxis_title="Natural Frequency (Hz)",
yaxis_title="Peak Pseudo Velocity (in/s)",
title=title
)
fig.write_html(savename,full_html=False,include_plotlyjs='cdn')
def Plot_PVSS(axis):
fig = go.Figure()
fig2 = go.Figure()
df_pvss = pd.DataFrame(columns=tags)
colors = px.colors.qualitative.Dark24
for i in range(1,len(all_data)): #ignore walking
pvss = all_data[i]["pvss"]
pv = pvss[pvss.axis==axis]
fig.add_trace(go.Scatter(
x=pv.frequency,
y=pv.value/25.4,
text=pv.filename,
mode='markers',
name=tags[i],
line={'color':colors[i]}
))
freqs = pv.frequency.drop_duplicates().to_list()
df_pvss['Natural Frequency (Hz)'] = freqs
maxes = []
for freq in freqs:
maxes.append(pv[pv.frequency==freq].value.max()/25.4)
df_pvss[tags[i]] = maxes
fig2.add_trace(go.Scatter(
x=freqs,
y=maxes,
mode='lines',
name=tags[i],
line={'color':colors[i]}
))
df_pvss.to_csv(path_or_buf=download_dir+'pvss-by-sport-'+axis+'.csv',index=False)
style_pvss(fig,'Peak Pseudo Velocity Shock Spectrum of All Files - '+axis+' Axis',
download_dir+'pvss-all-files-'+axis+'.html')
style_pvss(fig2,'Peak Pseudo Velocity Shock Spectrum Max Per Sport - '+axis+' Axis',
download_dir+'pvss-max-'+axis+'.html')
fig.show()
fig2.show()
axes = ['Resultant','Z','Y','X']
for axis in axes:
Plot_PVSS(axis)
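The per-frequency max loops inside Plot_PVSS (and Plot_PSD below) can be collapsed into a groupby; an equivalent sketch, where pv stands for the per-sport, per-axis slice from inside the loop:

# Envelope: max value at each natural frequency, converted mm/s -> in/s
maxes = pv.groupby('frequency')['value'].max() / 25.4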
pvss = pd.DataFrame()
for i in range(len(all_data)):
pvss = pd.concat([pvss,all_data[i]["pvss"]],axis=0)
pvss.frequency = pvss.frequency.round(2)
pvss_max = pd.DataFrame(columns=['Natural Frequency (Hz)','X','Y','Z','Resultant'])
for axis in axes:
pv = pvss[pvss.axis==axis]
freqs = pv.frequency.drop_duplicates().to_list()
maxes = []
for freq in freqs:
maxes.append(pv[pv.frequency==freq].value.max()/25.4)
pvss_max['Natural Frequency (Hz)'] = freqs
pvss_max[axis]=maxes
pvss_max = pvss_max.set_index('Natural Frequency (Hz)')
pvss_max.reset_index().to_csv(path_or_buf=download_dir+'pvss-max.csv',index=False)
fig = px.line(pvss_max,
labels={
"value": "Peak Pseudo Velocity (in/s)",
},
log_x=True,
log_y=True,
title='Peak Pseudo Velocity Shock Spectrum MAX by Axis')
fig.write_html(download_dir+'pvss-max.html',full_html=False,include_plotlyjs='cdn')
fig.show()
srs = pvss_max.copy()
# Convert pseudo velocity (in/s) back to peak acceleration (g): A = 2*pi*fn*PV/386.22
for axis in axes:
    srs[axis] = pvss_max[axis]*2*np.pi*pvss_max.index/386.2205
srs.reset_index().to_csv(path_or_buf=download_dir+'srs-max.csv',index=False)
fig = px.line(srs,
labels={
"value": "Peak Acceleration (g)",
},
log_x=True,
log_y=True,
title='Shock Response Spectrum MAX by Axis')
fig.write_html(download_dir+'srs-max.html',full_html=False,include_plotlyjs='cdn')
fig.show()
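For reference, the conversion applied in the cells above between pseudo velocity and the equivalent peak acceleration at natural frequency $f_n$ is:

$$A\,[\mathrm{g}] = \frac{2\pi f_n \cdot PV\,[\mathrm{in/s}]}{386.22\,\mathrm{(in/s^2)\,per\,g}}$$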
def style_psd(fig,title,savename):
fig.update_xaxes(type="log")
fig.update_yaxes(type="log")
fig.update_layout(
xaxis_title="Frequency (Hz)",
yaxis_title="Acceleration (g^2/Hz)",
title=title
)
fig.write_html(savename,full_html=False,include_plotlyjs='cdn')
def Plot_PSD(axis,max_frequency):
fig = go.Figure()
fig2 = go.Figure()
df_psd = pd.DataFrame(columns=tags)
colors = px.colors.qualitative.Dark24
for i in range(len(all_data)):
psds = all_data[i]["psd"]
psd = psds[(psds.axis==axis) & (psds.frequency<max_frequency)].copy()
psd.frequency = psd.frequency.round(0)
fig.add_trace(go.Scatter(
x=psd.frequency,
y=psd.value,
text=psd.filename,
mode='markers',
name=tags[i],
line={'color':colors[i]}
))
freqs = psd.frequency.drop_duplicates().to_list()
df_psd['Frequency (Hz)'] = freqs
maxes = []
for freq in freqs:
maxes.append(psd[psd.frequency==freq].value.max())
df_psd[tags[i]] = maxes
fig2.add_trace(go.Scatter(
x=freqs,
y=maxes,
mode='lines',
name=tags[i],
line={'color':colors[i]}
))
df_psd.to_csv(path_or_buf=download_dir+'psd-by-sport-'+axis+'.csv',index=False)
    style_psd(fig,'Power Spectral Density of All Files - '+axis+' Axis',
              download_dir+'psd-all-files-'+axis+'.html')
    style_psd(fig2,'Power Spectral Density Max Per Sport - '+axis+' Axis',
              download_dir+'psd-max-'+axis+'.html')
#fig.show()
fig2.show()
axes = ['Resultant','Z','Y','X']
for axis in axes:
Plot_PSD(axis,500)
psd = pd.DataFrame()
for i in range(len(all_data)):
psd = pd.concat([psd,all_data[i]["psd"]],axis=0)
psd.frequency = psd.frequency.round(0)
psd = psd[psd.frequency<500].copy()
psd_max = pd.DataFrame(columns=['Frequency (Hz)','X','Y','Z','Resultant'])
for axis in axes:
psd_df = psd[psd.axis==axis]
freqs = psd_df.frequency.drop_duplicates().to_list()
maxes = []
for freq in freqs:
maxes.append(psd_df[psd_df.frequency==freq].value.max())
psd_max['Frequency (Hz)'] = freqs
psd_max[axis]=maxes
psd_max = psd_max.set_index('Frequency (Hz)')
psd_max.reset_index().to_csv(path_or_buf=download_dir+'psd-max.csv',index=False)
fig = px.line(psd_max,
labels={
"value": "Acceleration (g^2/Hz)",
},
log_x=True,
log_y=True,
title='Acceleration Power Spectral Density MAX by Axis')
fig.write_html(download_dir+'psd-max.html',full_html=False,include_plotlyjs='cdn')
fig.show()
# Convert peak-window offsets from timedelta to seconds (plotly doesn't handle timedeltas well)
for i in range(len(all_data)):
    peaks = all_data[i]["peaks"]
    peaks["peak offset"] = peaks["peak offset"].dt.total_seconds()
def Plot_PEAKS(axis):
fig = go.Figure()
colors = px.colors.qualitative.Dark24
for i in range(len(all_data)):
peaks = all_data[i]["peaks"]
peak = peaks[peaks.axis==axis]
fig.add_trace(go.Scatter(
x=peak['peak offset'],
y=peak.value,
text=peak.filename,
mode='lines',
name=tags[i],
line={'color':colors[i]}
))
fig.update_layout(
xaxis_title="Time (s)",
yaxis_title="Acceleration (g)",
title='Peak Shock Event of All Files - '+axis+' Axis'
)
    fig.write_html(download_dir+'peak-all-files-'+axis+'.html',full_html=False,include_plotlyjs='cdn')
for axis in axes:
Plot_PEAKS(axis)