Skip to content

Commit

Permalink
massive overhaul of pyodide in the app
Browse files Browse the repository at this point in the history
Co-authored-by: Dano Morrison <[email protected]>
  • Loading branch information
teonbrooks and jdpigeon committed Aug 3, 2019
1 parent bc7b6ef commit b917a67
Show file tree
Hide file tree
Showing 5 changed files with 103 additions and 125 deletions.
60 changes: 19 additions & 41 deletions app/epics/pyodideEpics.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,11 @@ import {
mergeMap,
tap,
pluck,
ignoreElements,
filter
} from "rxjs/operators";
import { getWorkspaceDir } from "../utils/filesystem/storage";
import { languagePluginLoader } from "../utils/pyodide/pyodide";
import { parseSingleQuoteJSON } from "../utils/pyodide/functions"
import { readFiles } from "../utils/filesystem/read";
import {
LAUNCH,
LOAD_EPOCHS,
Expand All @@ -22,8 +22,8 @@ import {
loadERP
} from "../actions/pyodideActions";
import {
test,
imports,
loadPackages,
utils,
loadCSV,
loadCleanedEpochs,
filterIIR,
Expand All @@ -34,7 +34,7 @@ import {
plotPSD,
plotERP,
plotTopoMap,
saveEpochs
saveEpochs,
} from "../utils/pyodide/commands";
import {
EMOTIV_CHANNELS,
Expand Down Expand Up @@ -96,49 +96,27 @@ const setPyodideStatus = payload => ({
type: SET_PYODIDE_STATUS
});

// ---------------------------------------------------------------------
// Action creators for the RECEIVE_* message types. Each one simply wraps
// the given payload with the corresponding action type constant.
// NOTE(review): the type names mirror Jupyter-style messaging
// (execute_reply / execute_result / display_data / stream) — confirm
// against the reducer that consumes them.

const receiveExecuteReply = payload => {
  return { payload, type: RECEIVE_EXECUTE_REPLY };
};

const receiveExecuteResult = payload => {
  return { payload, type: RECEIVE_EXECUTE_RESULT };
};

const receiveDisplayData = payload => {
  return { payload, type: RECEIVE_DISPLAY_DATA };
};

const receiveStream = payload => {
  return { payload, type: RECEIVE_STREAM };
};

// -------------------------------------------------------------------------
// Epics

const launchEpic = action$ =>
action$.ofType(LAUNCH).pipe(
tap(() => console.log("launching")),
mergeMap(async () => {
await languagePluginLoader;
console.log("loaded language plugin");
// using window.pyodide instead of pyodide to get linter to stop yelling ;)
await window.pyodide.loadPackage(["mne"]);
console.log("loaded mne package");
}),
mergeMap(loadPackages),
mergeMap(utils),
map(() => setPyodideStatus(PYODIDE_STATUS.LOADED))
);

const loadEpochsEpic = (action$, state$) =>
action$.ofType(LOAD_EPOCHS).pipe(
pluck("payload"),
filter(filePathsArray => filePathsArray.length >= 1),
map(filePathsArray => loadCSV(filePathsArray)),
map(() => filterIIR(1, 30)),
map(() =>
tap(files => console.log('files:', files)),
map((filePathsArray => readFiles(filePathsArray))),
tap(csvArray => console.log('csvs:', csvArray)),
mergeMap(csvArray => loadCSV(csvArray)),
mergeMap(() => filterIIR(1, 30)),
mergeMap(() =>
epochEvents(
{
[state$.value.experiment.params.stimulus1.title]: EVENTS.STIMULUS_1,
Expand All @@ -148,11 +126,10 @@ const loadEpochsEpic = (action$, state$) =>
0.8
)
),
map(epochEventsCommand => epochEventsCommand),
map(() => getEpochsInfo(PYODIDE_VARIABLE_NAMES.RAW_EPOCHS))
);

const loadCleanedEpochsEpic = (action$, state$) =>
const loadCleanedEpochsEpic = (action$) =>
action$.ofType(LOAD_CLEANED_EPOCHS).pipe(
pluck("payload"),
filter(filePathsArray => filePathsArray.length >= 1),
Expand All @@ -178,12 +155,13 @@ const cleanEpochsEpic = (action$, state$) =>
map(() => getEpochsInfo(PYODIDE_VARIABLE_NAMES.RAW_EPOCHS))
);

const getEpochsInfoEpic = (action$, state$) =>
const getEpochsInfoEpic = (action$) =>
action$.ofType(GET_EPOCHS_INFO).pipe(
pluck("payload"),
map(variableName => requestEpochsInfo(variableName)),
map(epochInfoString =>
parseSingleQuoteJSON(epochInfoString).map(infoObj => ({
tap(payload => console.log('payload: ', payload)),
mergeMap(requestEpochsInfo),
map(epochInfoArray =>
epochInfoArray.map(infoObj => ({
name: Object.keys(infoObj)[0],
value: infoObj[Object.keys(infoObj)[0]]
}))
Expand Down
18 changes: 18 additions & 0 deletions app/utils/filesystem/read.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
const fs = require("fs");

// Read every file in `filePathsArray` synchronously as UTF-8 text.
// Returns an array of file contents in the same order as the input paths;
// an empty input array yields an empty result.
// NOTE(review): fs.readFileSync throws on a missing or unreadable path —
// callers must be prepared to handle that.
export const readFiles = (filePathsArray) =>
  filePathsArray.map(filePath => fs.readFileSync(filePath, "utf8"));



// -------------------------------------------
// Helper methods

const formatFilePath = (filePath: string) =>
`"${filePath.replace(/\\/g, "/")}"`;
92 changes: 38 additions & 54 deletions app/utils/pyodide/commands.js
Original file line number Diff line number Diff line change
@@ -1,80 +1,69 @@
import * as path from "path";
import { readFileSync } from "fs";
import { languagePluginLoader } from "./pyodide";


let pyodide;
// -----------------------------
// Imports and Utility functions

// Smoke test for the Pyodide runtime: loads the "mne" package, then runs a
// short Python script that builds a synthetic 8-channel Raw object and
// saves it to "test_brainwaves.fif".
// NOTE(review): this helper appears to be removed in this diff — confirm
// no remaining callers before relying on it.
export const test = async () => {
// Fetch and initialize the MNE package inside the Pyodide interpreter.
await window.pyodide.loadPackage(["mne"]);

const mneCommands = [
`import numpy as np`,
`import mne`,
`data = np.repeat(np.atleast_2d(np.arange(1000)), 8, axis=0)`,
`info = mne.create_info(8, 250)`,
`raw = mne.io.RawArray(data=data, info=info)`,
`raw.save("test_brainwaves.fif")`
];
// Join with "; " so the whole script executes as a single Python line.
await window.pyodide.runPython(mneCommands.join("; "));
};

export const loadPackages = async () => window.pyodide.loadPackage(["mne"]);

export const imports = () =>
pyodide.runPython(
readFileSync(path.join(__dirname, "/utils/pyodide/pyimport.py"), "utf8")
);
export const loadPackages = async () => {
await languagePluginLoader;
console.log("loaded language plugin");
// using window.pyodide instead of pyodide to get linter to stop yelling ;)
await window.pyodide.loadPackage(["matplotlib", "mne", "pandas"]);
await window.pyodide.runPython("import js");
console.log("loaded mne package");

export const utils = () =>
pyodide.runPython(
}

// Load the helper definitions from utils.py and execute them inside the
// Pyodide interpreter so later commands (load_data, get_epochs_info, …)
// are defined in the Python global namespace.
export const utils = async () => {
  const utilsSource = readFileSync(
    path.join(__dirname, "/utils/pyodide/utils.py"),
    "utf8"
  );
  return window.pyodide.runPython(utilsSource);
};

export const loadCSV = (filePathArray: Array<string>) =>
[
`files = [${filePathArray.map(filePath => formatFilePath(filePath))}]`,
`replace_ch_names = None`,
`raw = load_data(files, replace_ch_names)`
].join("\n");
export const loadCSV = async (csvArray: Array<any>) => {
window.csvArray = csvArray;
// TODO: Pass attached variable name as parameter to load_data
await window.pyodide.runPython(
`raw = load_data()`)
}

// ---------------------------
// MNE-Related Data Processing
export const loadCleanedEpochs = (filePathArray: Array<string>) =>
export const loadCleanedEpochs = (epocsArray: Array<any>) =>
[
`files = [${filePathArray.map(filePath => formatFilePath(filePath))}]`,
`clean_epochs = concatenate_epochs([read_epochs(file) for file in files])`,
`conditions = OrderedDict({key: [value] for (key, value) in clean_epochs.event_id.items()})`
].join("\n");

// NOTE: this command includes a ';' to prevent returning data
export const filterIIR = (lowCutoff: number, highCutoff: number) =>
`raw.filter(${lowCutoff}, ${highCutoff}, method='iir');`;
export const filterIIR = async (lowCutoff: number, highCutoff: number) =>
window.pyodide.runPython(`raw.filter(${lowCutoff}, ${highCutoff}, method='iir');`)

export const epochEvents = (
export const epochEvents = async (
eventIDs: { [string]: number },
tmin: number,
tmax: number,
reject?: Array<string> | string = "None"
) => {
const command = [
`event_id = ${JSON.stringify(eventIDs)}`,
`tmin=${tmin}`,
`tmax=${tmax}`,
`baseline= (tmin, tmax)`,
`picks = None`,
`reject = ${reject}`,
"events = find_events(raw)",
`raw_epochs = Epochs(raw, events=events, event_id=event_id,
) => window.pyodide.runPython([
`event_id = ${JSON.stringify(eventIDs)}`,
`tmin=${tmin}`,
`tmax=${tmax}`,
`baseline= (tmin, tmax)`,
`picks = None`,
`reject = ${reject}`,
"events = find_events(raw)",
`raw_epochs = Epochs(raw, events=events, event_id=event_id,
tmin=tmin, tmax=tmax, baseline=baseline, reject=reject, preload=True,
verbose=False, picks=picks)`,
`conditions = OrderedDict({key: [value] for (key, value) in raw_epochs.event_id.items()})`
].join("\n");
return command;
};
`conditions = OrderedDict({key: [value] for (key, value) in raw_epochs.event_id.items()})`
].join("\n"))

export const requestEpochsInfo = (variableName: string) =>
`get_epochs_info(${variableName})`;
export const requestEpochsInfo = async (variableName: string) => {
const pyodideReturn = await window.pyodide.runPython(`get_epochs_info(${variableName})`);
return pyodideReturn
}

export const requestChannelInfo = () =>
`[ch for ch in clean_epochs.ch_names if ch != 'Marker']`;
Expand Down Expand Up @@ -104,8 +93,3 @@ export const saveEpochs = (workspaceDir: string, subject: string) =>
)
)})`;

// -------------------------------------------
// Helper methods

const formatFilePath = (filePath: string) =>
`"${filePath.replace(/\\/g, "/")}"`;
15 changes: 0 additions & 15 deletions app/utils/pyodide/pyimport.py
Original file line number Diff line number Diff line change
@@ -1,16 +1 @@
from time import time, strftime, gmtime
import os
from collections import OrderedDict
from glob import glob

import numpy as np
import pandas as pd # maybe we can remove this dependency
import seaborn as sns
from matplotlib import pyplot as plt

from mne import (Epochs, RawArray, concatenate_raws, concatenate_epochs,
create_info, find_events, read_epochs, set_eeg_reference)
from mne.channels import read_montage


plt.style.use(fivethirtyeight)
43 changes: 28 additions & 15 deletions app/utils/pyodide/utils.py
Original file line number Diff line number Diff line change
@@ -1,25 +1,32 @@
from glob import glob
import os
from time import time, strftime, gmtime
from collections import OrderedDict
from mne import create_info, concatenate_raws, viz
from mne.io import RawArray
from mne.channels import read_montage
import pandas as pd

import numpy as np
import seaborn as sns
from matplotlib import pyplot as plt
import pandas as pd # maybe we can remove this dependency
# import seaborn as sns

from mne import (Epochs, concatenate_raws, concatenate_epochs, create_info,
find_events, read_epochs, set_eeg_reference, viz)
from mne.io import RawArray
from io import StringIO
from mne.channels import read_montage


# plt.style.use(fivethirtyeight)

sns.set_context('talk')
sns.set_style('white')
# sns.set_context('talk')
# sns.set_style('white')


def load_data(fnames, sfreq=128., replace_ch_names=None):
def load_data(sfreq=128., replace_ch_names=None):
"""Load CSV files from the /data directory into a RawArray object.
Parameters
----------
fnames : list
CSV filepaths from which to load data
sfreq : float
EEG sampling frequency
replace_ch_names : dict | None
Expand All @@ -31,12 +38,13 @@ def load_data(fnames, sfreq=128., replace_ch_names=None):
raw : an instance of mne.io.RawArray
The loaded data.
"""

## js is loaded in loadPackages
## TODO: Received attached variable name
raw = []
print(fnames)
for fname in fnames:
for csv in js.csvArray:
string_io = StringIO(csv)
# read the file
data = pd.read_csv(fname, index_col=0)
data = pd.read_csv(string_io, index_col=0)

data = data.dropna()

Expand Down Expand Up @@ -77,7 +85,11 @@ def load_data(fnames, sfreq=128., replace_ch_names=None):


def plot_topo(epochs, conditions=OrderedDict()):
palette = sns.color_palette("hls", len(conditions) + 1)
# palette = sns.color_palette("hls", len(conditions) + 1)
# temp hack, just pull in the color palette from seaborn
palette = [(0.85999999999999999, 0.37119999999999997, 0.33999999999999997),
(0.33999999999999997, 0.85999999999999999, 0.37119999999999997),
(0.37119999999999997, 0.33999999999999997, 0.85999999999999999)]
evokeds = [epochs[name].average() for name in (conditions)]

evoked_topo = viz.plot_evoked_topo(
Expand Down Expand Up @@ -189,6 +201,7 @@ def plot_conditions(epochs, ch_ind=0, conditions=OrderedDict(), ci=97.5,
return fig, ax

def get_epochs_info(epochs):
print('Get Epochs Info:')
return [*[{x: len(epochs[x])} for x in epochs.event_id],
{"Drop Percentage": round((1 - len(epochs.events) /
len(epochs.drop_log)) * 100, 2)},
Expand Down

0 comments on commit b917a67

Please sign in to comment.