"""Utils for easy database selection."""

import inspect

import moabb.datasets as db
from moabb.datasets.base import BaseDataset

# Registry of every concrete dataset class exposed by ``moabb.datasets``.
# Built once at import time by scanning the package for BaseDataset subclasses.
dataset_list = []
for ds in inspect.getmembers(db, inspect.isclass):
    if issubclass(ds[1], BaseDataset):
        # ds is a (name, class) pair from getmembers; keep the class.
        # NOTE(review): this line was missing in the scraped source and has
        # been restored — without it the loop body is empty (syntax error).
        dataset_list.append(ds[1])
def find_intersecting_channels(datasets, verbose=False):
    """Return the EEG channels shared by all given datasets.

    Each dataset's first subject/session/run is inspected; channel names are
    upper-cased and any name containing "EEG" is discarded (a heuristic for
    poorly labeled datasets). Datasets with no recognizable EEG channels are
    skipped entirely.

    Parameters
    ----------
    datasets : list
        Dataset instances exposing ``get_data`` (moabb ``BaseDataset``-like).
    verbose : bool
        If True, print the channels found for each dataset.

    Returns
    -------
    allchans : list of str
        Channel names common to all kept datasets, with a trailing 'Z'
        normalized back to lowercase 'z' (e.g. "CZ" -> "Cz").
    keep_datasets : list
        The datasets that contributed at least one recognizable EEG channel.
    """
    allchans = set()
    dset_chans = []
    keep_datasets = []
    for d in datasets:
        print("Searching dataset: {:s}".format(type(d).__name__))
        s1 = d.get_data([1])[1]
        sess1 = s1[list(s1.keys())[0]]
        raw = sess1[list(sess1.keys())[0]]
        raw.pick_types(eeg=True)
        processed = []
        # BUG FIX: the original iterated over the literal list ["ch_names"],
        # so every dataset appeared to have the single channel "CH_NAMES".
        # Iterate over the recording's actual channel names instead.
        for ch in raw.info["ch_names"]:
            ch = ch.upper()
            if ch.find("EEG") == -1:
                # TODO: less hacky way of finding poorly labeled datasets
                processed.append(ch)
        allchans.update(processed)
        if len(processed) > 0:
            if verbose:
                print("Found EEG channels: {}".format(processed))
            dset_chans.append(processed)
            keep_datasets.append(d)
        else:
            print(
                "Dataset {:s} has no recognizable EEG channels".format(type(d).__name__)
            )  # noqa
    # With zero kept datasets this is a no-op and allchans stays empty.
    allchans.intersection_update(*dset_chans)
    allchans = [s.replace("Z", "z") for s in allchans]
    return allchans, keep_datasets
def _download_all(update_path=True, verbose=None):
    """Download all data.

    This function is mainly used to generate the data cache.

    Parameters
    ----------
    update_path : bool
        Forwarded to each dataset's ``download``. BUG FIX: the original
        hard-coded ``update_path=True``, silently ignoring this parameter;
        default behavior is unchanged.
    verbose : bool | str | int | None
        Forwarded to each dataset's ``download``.
    """
    # iterate over every registered dataset class and trigger its download
    for ds in dataset_list:
        ds().download(update_path=update_path, verbose=verbose, accept=True)