Static Offset per Device #331

Merged (3 commits) on Jul 16, 2024
17 changes: 14 additions & 3 deletions bcipy/acquisition/devices.py
@@ -16,6 +16,7 @@
'float32', 'double64', 'string', 'int32', 'int16', 'int8'
]
DEFAULT_DEVICE_TYPE = 'EEG'
DEFAULT_STATIC_OFFSET = 0.1

log = logging.getLogger(__name__)

@@ -85,6 +86,12 @@ class DeviceSpec:
see https://labstreaminglayer.readthedocs.io/projects/liblsl/ref/enums.html
excluded_from_analysis - list of channels (label) to exclude from analysis.
status - recording status
static_offset - Specifies the static trigger offset (in seconds) used to align
triggers properly with EEG data from LSL. The system includes built-in
offset correction, but there is still a hardware-limited offset between EEG
and trigger timing values for which the system does not account. The correct
value may be different for each computer, and must be determined on a
case-by-case basis. Default: 0.1
"""

def __init__(self,
@@ -95,7 +102,8 @@ def __init__(self,
description: Optional[str] = None,
excluded_from_analysis: Optional[List[str]] = None,
data_type: str = 'float32',
status: DeviceStatus = DeviceStatus.ACTIVE):
status: DeviceStatus = DeviceStatus.ACTIVE,
static_offset: float = DEFAULT_STATIC_OFFSET):

assert sample_rate >= 0, "Sample rate can't be negative."
assert data_type in SUPPORTED_DATA_TYPES
@@ -109,6 +117,7 @@ def __init__(self,
self.excluded_from_analysis = excluded_from_analysis or []
self._validate_excluded_channels()
self.status = status
self.static_offset = static_offset

@property
def channel_count(self) -> int:
@@ -152,7 +161,8 @@ def to_dict(self) -> dict:
'sample_rate': self.sample_rate,
'description': self.description,
'excluded_from_analysis': self.excluded_from_analysis,
'status': str(self.status)
'status': str(self.status),
'static_offset': self.static_offset
}

def __str__(self):
@@ -188,7 +198,8 @@ def make_device_spec(config: dict) -> DeviceSpec:
description=config['description'],
excluded_from_analysis=config.get(
'excluded_from_analysis', []),
status=DeviceStatus.from_str(config.get('status', default_status)))
status=DeviceStatus.from_str(config.get('status', default_status)),
static_offset=config.get('static_offset', DEFAULT_STATIC_OFFSET))


def load(config_path: Path = Path(DEFAULT_CONFIG), replace: bool = False) -> Dict[str, DeviceSpec]:
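
A minimal usage sketch of the new field (assuming the `DeviceSpec` constructor shown above; the device name and channel list are made up): a spec accepts a custom `static_offset`, falls back to `DEFAULT_STATIC_OFFSET` otherwise, and includes the value in `to_dict()`.

```python
# Sketch only: exercises the static_offset parameter added in this file.
# 'ExampleDevice' and its channels are hypothetical.
from bcipy.acquisition.devices import DEFAULT_STATIC_OFFSET, DeviceSpec

custom = DeviceSpec(name='ExampleDevice',
                    channels=['C1', 'C2', 'C3'],
                    sample_rate=256.0,
                    static_offset=0.2)
default = DeviceSpec(name='ExampleDevice',
                     channels=['C1', 'C2', 'C3'],
                     sample_rate=256.0)

assert custom.static_offset == 0.2
assert default.static_offset == DEFAULT_STATIC_OFFSET  # 0.1
assert custom.to_dict()['static_offset'] == 0.2        # serialized with the spec
```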
12 changes: 8 additions & 4 deletions bcipy/acquisition/multimodal.py
@@ -131,7 +131,9 @@ def get_data_by_device(

Parameters
----------
start - start time (acquisition clock) of data window
start - start time (acquisition clock) of data window; NOTE: the
actual start time will be adjusted by the static_offset
configured for each device.
seconds - duration of data to return for each device
content_types - specifies which devices to include; if not
specified, data for all types is returned.
@@ -144,19 +146,21 @@
for content_type in content_types:
name = content_type.name
client = self.get_client(content_type)

adjusted_start = start + client.device_spec.static_offset
if client.device_spec.sample_rate > 0:
count = round(seconds * client.device_spec.sample_rate)
log.info(f'Need {count} records for processing {name} data')
output[content_type] = client.get_data(start=start,
output[content_type] = client.get_data(start=adjusted_start,
limit=count)
data_count = len(output[content_type])
if strict and data_count < count:
msg = f'Needed {count} {name} records but received {data_count}'
raise InsufficientDataException(msg)
else:
# Markers have an IRREGULAR_RATE.
output[content_type] = client.get_data(start=start,
end=start + seconds)
output[content_type] = client.get_data(start=adjusted_start,
end=adjusted_start + seconds)
return output

def cleanup(self):
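
The offset arithmetic above, spelled out with illustrative numbers (these mirror the updated test expectations further down, not any real device):

```python
# Illustrative numbers only: how the query window shifts per device.
start, seconds = 100.0, 5.0

# EEG device with static_offset = 0.1 and a 300 Hz sample rate:
adjusted_start = start + 0.1          # 100.1
count = round(seconds * 300)          # 1500 records requested from 100.1 onward

# Marker device (IRREGULAR_RATE) with static_offset = 0.2:
marker_start = start + 0.2            # 100.2
marker_end = marker_start + seconds   # 105.2
```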
7 changes: 5 additions & 2 deletions bcipy/acquisition/tests/test_client_manager.py
@@ -15,6 +15,7 @@ def setUp(self):
self.eeg_device_mock.content_type = 'EEG'
self.eeg_device_mock.sample_rate = 300
self.eeg_device_mock.is_active = True
self.eeg_device_mock.static_offset = 0.1

self.eeg_client_mock = Mock()
self.eeg_client_mock.device_spec = self.eeg_device_mock
@@ -26,6 +27,7 @@ def setUp(self):
self.gaze_device_mock.content_type = 'Eyetracker'
self.gaze_device_mock.sample_rate = 60
self.gaze_device_mock.is_active = False
self.gaze_device_mock.static_offset = 0.0
Review comment (Member): It is currently assumed 0.0 for my analyses as well, but I will think of a way to estimate the correct value.

self.gaze_client_mock = Mock()
self.gaze_client_mock.device_spec = self.gaze_device_mock

@@ -115,6 +117,7 @@ def test_get_data_by_device(self):
switch_device_mock.name = 'Test-switch-2000'
switch_device_mock.content_type = 'Markers'
switch_device_mock.sample_rate = 0.0
switch_device_mock.static_offset = 0.2

switch_client_mock = Mock()
switch_client_mock.device_spec = switch_device_mock
@@ -141,11 +144,11 @@
ContentType.MARKERS
])

self.eeg_client_mock.get_data.assert_called_once_with(start=100,
self.eeg_client_mock.get_data.assert_called_once_with(start=100.1,
limit=1500)
self.gaze_client_mock.get_data.assert_called_once_with(start=100,
limit=300)
switch_client_mock.get_data.assert_called_with(start=100, end=105)
switch_client_mock.get_data.assert_called_with(start=100.2, end=105.2)

self.assertTrue(ContentType.EEG in results)
self.assertTrue(ContentType.EYETRACKER in results)
49 changes: 45 additions & 4 deletions bcipy/acquisition/tests/test_devices.py
@@ -57,6 +57,7 @@ def test_load_from_config(self):
self.assertEqual(channels, spec.channels)
self.assertEqual(devices.DeviceStatus.ACTIVE, spec.status)
self.assertTrue(spec.is_active)
self.assertEqual(spec.static_offset, devices.DEFAULT_STATIC_OFFSET)

self.assertEqual(spec, devices.preconfigured_device('DSI-VR300'))
shutil.rmtree(temp_dir)
@@ -222,9 +223,22 @@ def test_device_spec_to_dict(self):
"""DeviceSpec should be able to be converted to a dictionary."""
device_name = 'TestDevice'
channels = ['C1', 'C2', 'C3']
expected_channel_output = [{'label': 'C1', 'name': 'C1', 'type': None, 'units': None},
{'label': 'C2', 'name': 'C2', 'type': None, 'units': None},
{'label': 'C3', 'name': 'C3', 'type': None, 'units': None}]
expected_channel_output = [{
'label': 'C1',
'name': 'C1',
'type': None,
'units': None
}, {
'label': 'C2',
'name': 'C2',
'type': None,
'units': None
}, {
'label': 'C3',
'name': 'C3',
'type': None,
'units': None
}]
sample_rate = 256.0
content_type = 'EEG'
spec = devices.DeviceSpec(name=device_name,
@@ -238,6 +252,8 @@ def test_device_spec_to_dict(self):
self.assertEqual(expected_channel_output, spec_dict['channels'])
self.assertEqual(sample_rate, spec_dict['sample_rate'])
self.assertEqual('passive', spec_dict['status'])
self.assertEqual(spec_dict['static_offset'],
devices.DEFAULT_STATIC_OFFSET)

def test_load_status(self):
"""Should be able to load a list of supported devices from a
@@ -259,7 +275,32 @@ def test_load_status(self):

devices.load(config_path, replace=True)
supported = devices.preconfigured_devices()
self.assertEqual(devices.DeviceStatus.PASSIVE, supported['MyDevice'].status)
self.assertEqual(devices.DeviceStatus.PASSIVE,
supported['MyDevice'].status)
shutil.rmtree(temp_dir)

def test_load_static_offset(self):
"""Loaded device should support using a custom static offset."""

# create a config file in a temp location.
temp_dir = tempfile.mkdtemp()
offset = 0.2
my_devices = [
dict(name="MyDevice",
content_type="EEG",
description="My Device",
channels=["a", "b", "c"],
sample_rate=100.0,
status=str(devices.DeviceStatus.PASSIVE),
static_offset=offset)
]
config_path = Path(temp_dir, 'my_devices.json')
with open(config_path, 'w', encoding=DEFAULT_ENCODING) as config_file:
json.dump(my_devices, config_file)

devices.load(config_path, replace=True)
supported = devices.preconfigured_devices()
self.assertEqual(supported['MyDevice'].static_offset, offset)
shutil.rmtree(temp_dir)

def test_device_status(self):
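
Besides loading a devices.json file as in test_load_static_offset, a spec with a custom offset can be registered programmatically; a short sketch (devices.register is used the same way in the test_convert.py changes below, and the device name here is a placeholder):

```python
# Sketch: register a device spec directly instead of via a JSON config.
import bcipy.acquisition.devices as devices

devices.register(
    devices.DeviceSpec(name='MyDevice',
                       channels=['a', 'b', 'c'],
                       sample_rate=100.0,
                       static_offset=0.2))
assert devices.preconfigured_device('MyDevice').static_offset == 0.2
```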
6 changes: 4 additions & 2 deletions bcipy/helpers/convert.py
@@ -4,14 +4,15 @@
import os
import tarfile
from pathlib import Path
from typing import Dict, List, Tuple, Optional, Union
from typing import Dict, List, Optional, Tuple, Union

import mne
import numpy as np
from mne.io import RawArray
from pyedflib import FILETYPE_BDFPLUS, FILETYPE_EDFPLUS, EdfWriter
from tqdm import tqdm

from bcipy.acquisition.devices import preconfigured_device
from bcipy.config import (DEFAULT_PARAMETER_FILENAME, RAW_DATA_FILENAME,
TRIGGER_FILENAME)
from bcipy.helpers.load import load_json_parameters, load_raw_data
@@ -177,6 +178,7 @@ def pyedf_convert(data_dir: str,
value_cast=True)
data = load_raw_data(str(Path(data_dir, f'{RAW_DATA_FILENAME}.csv')))
fs = data.sample_rate
device_spec = preconfigured_device(data.daq_type)
if pre_filter:
default_transform = get_default_transform(
sample_rate_hz=data.sample_rate,
@@ -194,7 +196,7 @@
else:
raw_data, _ = data.by_channel()
durations = trigger_durations(params) if use_event_durations else {}
static_offset = params['static_trigger_offset']
static_offset = device_spec.static_offset
logger.info(f'Static offset: {static_offset}')

trigger_type, trigger_timing, trigger_label = trigger_decoder(
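
The net effect of this change, sketched with a hypothetical data directory and assuming the device named in the raw data's daq_type is already registered: the EDF/BDF conversion now resolves the offset from the device spec rather than from the static_trigger_offset parameter.

```python
# Sketch (hypothetical path): the static offset now comes from the device spec.
from pathlib import Path

from bcipy.acquisition.devices import preconfigured_device
from bcipy.helpers.load import load_raw_data

data = load_raw_data(str(Path('my_session', 'raw_data.csv')))  # hypothetical session dir
device_spec = preconfigured_device(data.daq_type)
static_offset = device_spec.static_offset  # replaces params['static_trigger_offset']
```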
11 changes: 7 additions & 4 deletions bcipy/helpers/demo/demo_visualization.py
@@ -44,7 +44,8 @@
if not path:
path = load_experimental_data()

parameters = load_json_parameters(f'{path}/{DEFAULT_PARAMETER_FILENAME}', value_cast=True)
parameters = load_json_parameters(f'{path}/{DEFAULT_PARAMETER_FILENAME}',
value_cast=True)

# extract all relevant parameters
trial_window = parameters.get("trial_window", (0, 0.5))
@@ -59,12 +60,16 @@
filter_high = parameters.get("filter_high")
filter_low = parameters.get("filter_low")
filter_order = parameters.get("filter_order")
static_offset = parameters.get("static_trigger_offset")

raw_data = load_raw_data(Path(path, f'{RAW_DATA_FILENAME}.csv'))
channels = raw_data.channels
type_amp = raw_data.daq_type
sample_rate = raw_data.sample_rate

devices.load(Path(path, DEFAULT_DEVICE_SPEC_FILENAME))
device_spec = devices.preconfigured_device(raw_data.daq_type)
static_offset = device_spec.static_offset

# setup filtering
default_transform = get_default_transform(
sample_rate_hz=sample_rate,
@@ -82,8 +87,6 @@
)
labels = [0 if label == 'nontarget' else 1 for label in trigger_targetness]

devices.load(Path(path, DEFAULT_DEVICE_SPEC_FILENAME))
device_spec = devices.preconfigured_device(raw_data.daq_type)
channel_map = analysis_channels(channels, device_spec)

save_path = None if not args.save else path
7 changes: 3 additions & 4 deletions bcipy/helpers/task.py
@@ -180,7 +180,6 @@ def get_data_for_decision(inquiry_timing: List[Tuple[str, float]],
def get_device_data_for_decision(
inquiry_timing: List[Tuple[str, float]],
daq: ClientManager,
offset: float = 0.0,
prestim: float = 0.0,
poststim: float = 0.0) -> Dict[ContentType, List[Record]]:
"""Queries the acquisition client manager for a slice of data from each
@@ -206,13 +205,13 @@
_, last_stim_time = inquiry_timing[-1]

# adjust for offsets
time1 = first_stim_time + offset - prestim
time2 = last_stim_time + offset
time1 = first_stim_time - prestim
time2 = last_stim_time

if time2 < time1:
raise InsufficientDataException(
f'Invalid data query [{time1}-{time2}] with parameters:'
f'[inquiry={inquiry_timing}, offset={offset}, prestim={prestim}, poststim={poststim}]'
f'[inquiry={inquiry_timing}, prestim={prestim}, poststim={poststim}]'
)

data = daq.get_data_by_device(start=time1,
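
For callers of this helper the only visible change is that the offset no longer needs to be threaded through; each device's static_offset is applied inside ClientManager.get_data_by_device. A sketch with made-up inquiry timings:

```python
# Sketch (hypothetical timings): no offset argument; the per-device
# static_offset is applied downstream by the client manager.
inquiry_timing = [('+', 10.0), ('A', 10.5), ('B', 11.0)]
prestim = 0.5

first_stim_time = inquiry_timing[0][1]
last_stim_time = inquiry_timing[-1][1]
time1 = first_stim_time - prestim   # 9.5
time2 = last_stim_time              # 11.0
# data = daq.get_data_by_device(start=time1, ...)
```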
8 changes: 0 additions & 8 deletions bcipy/helpers/tests/resources/mock_session/parameters.json
@@ -58,14 +58,6 @@
],
"type": "str"
},
"static_trigger_offset": {
"value": ".1",
"section": "bci_config",
"readableName": "Static Trigger Offset",
"helpTip": "Specifies the static trigger offset (in seconds) used to align triggers properly with EEG data from LSL. The system includes built-in offset correction, but there is still a hardware-limited offset between EEG and trigger timing values for which the system does not account. The default value of 0.1 has been verified for OHSU hardware. The correct value may be different for other computers, and must be determined on a case-by-case basis. Default: .1",
"recommended_values": "",
"type": "float"
},
"k_folds": {
"value": "10",
"section": "signal_config",
17 changes: 9 additions & 8 deletions bcipy/helpers/tests/test_convert.py
@@ -11,6 +11,7 @@
from mockito import any as any_value
from mockito import mock, unstub, verify, verifyNoMoreInteractions, when

import bcipy.acquisition.devices as devices
from bcipy.config import (DEFAULT_ENCODING, DEFAULT_PARAMETER_FILENAME,
RAW_DATA_FILENAME, TRIGGER_FILENAME)
from bcipy.helpers import convert
@@ -33,8 +34,7 @@ def create_bcipy_session_artifacts(
'filter_high': 30,
'filter_order': 5,
'notch_filter_frequency': 60,
'down_sampling_rate': 3,
'static_trigger_offset': 0.0
'down_sampling_rate': 3
},
) -> Tuple[str, RawData, Parameters]:
"""Write BciPy session artifacts to a temporary directory.
@@ -44,7 +44,9 @@
trg_data = MOCK_TRIGGER_DATA
if isinstance(channels, int):
channels = [f'ch{i}' for i in range(channels)]
data = sample_data(ch_names=channels, sample_rate=sample_rate, rows=samples)
data = sample_data(ch_names=channels, daq_type='SampleDevice', sample_rate=sample_rate, rows=samples)
devices.register(devices.DeviceSpec('SampleDevice', channels=channels, sample_rate=sample_rate))

with open(Path(write_dir, TRIGGER_FILENAME), 'w', encoding=DEFAULT_ENCODING) as trg_file:
trg_file.write(trg_data)

@@ -57,7 +59,6 @@
time_prompt=0.5,
time_flash=0.5,
# define filter settings
static_trigger_offset=filter_settings['static_trigger_offset'],
down_sampling_rate=filter_settings['down_sampling_rate'],
notch_filter_frequency=filter_settings['notch_filter_frequency'],
filter_high=filter_settings['filter_high'],
@@ -348,8 +349,7 @@ def setUp(self) -> None:
'filter_high': 30,
'filter_order': 5,
'notch_filter_frequency': 60,
'down_sampling_rate': 3,
'static_trigger_offset': 0.0
'down_sampling_rate': 3
}
create_bcipy_session_artifacts(
self.temp_dir,
@@ -476,11 +476,12 @@ def setUp(self):
'filter_high': 30,
'filter_order': 5,
'notch_filter_frequency': 60,
'down_sampling_rate': 3,
'static_trigger_offset': 0.0
'down_sampling_rate': 3
}
self.channels = ['timestamp', 'O1', 'O2', 'Pz']
self.raw_data = RawData('SampleDevice', self.sample_rate, self.channels)
devices.register(devices.DeviceSpec('SampleDevice', channels=self.channels, sample_rate=self.sample_rate))

# generate 100 random samples of data
for _ in range(0, 100):
channel_data = gen_random_data(low=-1000,