Use latest dandi dev branch #499

Merged 15 commits on Nov 8, 2023
Changes from 14 commits
1 change: 1 addition & 0 deletions environments/environment-Windows.yml
@@ -18,5 +18,6 @@ dependencies:
 - flask-cors === 3.0.10
 - flask_restx == 1.1.0
 - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@main#neuroconv[full]
+- dandi @ git+https://github.com/dandi/dandi-cli.git@master
 - pytest == 7.2.2
 - pytest-cov == 4.1.0
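
Note: the new dandi entry is a pip direct reference (a git+https URL pinned to the master branch), so this environment installs dandi from the development branch rather than a released version. As a quick sanity check after building the environment (a minimal sketch; the exact version string varies per commit):

    # Report which dandi build the environment resolved; a git install from
    # master typically carries a dev/post suffix rather than a bare release tag.
    from importlib.metadata import version
    print(version("dandi"))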
8 changes: 8 additions & 0 deletions nwb-guide.spec
@@ -11,6 +11,12 @@ datas = [('./paths.config.json', '.'), ('./package.json', '.')]
 binaries = []
 hiddenimports = ['scipy._distributor_init', 'scipy._lib.messagestream', 'scipy._lib._ccallback', 'scipy._lib._testutils', 'email_validator']
 datas += collect_data_files('jsonschema_specifications')
+tmp_ret = collect_all('dandi')
+datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]
+tmp_ret = collect_all('keyrings')
+datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]
+tmp_ret = collect_all('unittest')
+datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]
 tmp_ret = collect_all('nwbinspector')
 datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]
 tmp_ret = collect_all('neuroconv')
@@ -19,6 +25,8 @@ tmp_ret = collect_all('pynwb')
 datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]
 tmp_ret = collect_all('hdmf')
 datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]
+tmp_ret = collect_all('hdmf_zarr')
+datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]
 tmp_ret = collect_all('ndx_dandi_icephys')
 datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]
 tmp_ret = collect_all('ci_info')
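
Each collect_all call returns a (datas, binaries, hiddenimports) tuple that PyInstaller folds into the bundle; the new calls make sure dandi and its keyring/unittest dependencies ship with the frozen app. The repeated three-statement blocks are equivalent to the loop below (a sketch for readability only; the generated spec keeps the statements inline):

    # Collect data files, binaries, and hidden imports for every package that
    # PyInstaller's static analysis cannot fully discover on its own.
    from PyInstaller.utils.hooks import collect_all

    datas, binaries, hiddenimports = [], [], []
    for package in ("dandi", "keyrings", "unittest", "nwbinspector", "neuroconv",
                    "pynwb", "hdmf", "hdmf_zarr", "ndx_dandi_icephys", "ci_info"):
        pkg_datas, pkg_binaries, pkg_hiddenimports = collect_all(package)
        datas += pkg_datas
        binaries += pkg_binaries
        hiddenimports += pkg_hiddenimports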
2 changes: 1 addition & 1 deletion package.json
@@ -20,7 +20,7 @@
     "build:mac": "npm run build && npm run build:flask && npm run build:electron:mac",
     "build:linux": "npm run build && npm run build:flask && npm run build:electron:linux",
     "build:flask": "python -m PyInstaller nwb-guide.spec --log-level DEBUG --clean --noconfirm --distpath ./build/flask",
-    "build:flask:spec:base": "pyi-makespec --name nwb-guide --onedir --collect-data jsonschema_specifications --collect-all nwbinspector --collect-all neuroconv --collect-all pynwb --collect-all hdmf --collect-all ndx_dandi_icephys --collect-all ci_info --hidden-import scipy._distributor_init --hidden-import scipy._lib.messagestream --hidden-import scipy._lib._ccallback --hidden-import scipy._lib._testutils --hidden-import email_validator ./pyflask/app.py",
+    "build:flask:spec:base": "pyi-makespec --name nwb-guide --onedir --collect-data jsonschema_specifications --collect-all dandi --collect-all keyrings --collect-all unittest --collect-all nwbinspector --collect-all neuroconv --collect-all pynwb --collect-all hdmf --collect-all hdmf_zarr --collect-all ndx_dandi_icephys --collect-all ci_info --hidden-import scipy._distributor_init --hidden-import scipy._lib.messagestream --hidden-import scipy._lib._ccallback --hidden-import scipy._lib._testutils --hidden-import email_validator ./pyflask/app.py",
     "build:flask:spec": "npm run build:flask:spec:base && python prepare_pyinstaller_spec.py",
     "build:electron:win": "electron-builder build --win --publish never",
     "build:electron:mac": "electron-builder build --mac --publish never",
8 changes: 4 additions & 4 deletions pyflask/apis/neuroconv.py
@@ -17,7 +17,7 @@
     inspect_nwb_file,
     inspect_nwb_folder,
     inspect_multiple_filesystem_objects,
-    upload_to_dandi,
+    upload_project_to_dandi,
     upload_folder_to_dandi,
     upload_multiple_filesystem_objects_to_dandi,
 )
@@ -123,19 +123,19 @@ def post(self):
 
 
 @neuroconv_api.route("/upload/project")
-class Upload(Resource):
+class UploadProject(Resource):
     @neuroconv_api.doc(responses={200: "Success", 400: "Bad Request", 500: "Internal server error"})
     def post(self):
         try:
-            return upload_to_dandi(**neuroconv_api.payload)
+            return upload_project_to_dandi(**neuroconv_api.payload)
 
         except Exception as e:
             if notBadRequestException(e):
                 neuroconv_api.abort(500, str(e))
 
 
 @neuroconv_api.route("/upload/folder")
-class Upload(Resource):
+class UploadFolder(Resource):
     @neuroconv_api.doc(responses={200: "Success", 400: "Bad Request", 500: "Internal server error"})
     def post(self):
         try:
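
The renames avoid a shadowing pitfall: the module previously defined two classes named Upload, and a second definition of the same name rebinds it, leaving only the most recent class reachable by name. A hedged illustration of the failure mode (not the project's code):

    class Upload:  # stands in for the /upload/project resource
        route = "/upload/project"

    class Upload:  # stands in for /upload/folder; silently rebinds the name
        route = "/upload/folder"

    print(Upload.route)  # "/upload/folder" -- the first class is shadowed

flask-restx also derives default endpoint names from the Resource class name, so unique class names keep the auto-generated endpoints and Swagger entries distinct.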
2 changes: 1 addition & 1 deletion pyflask/manageNeuroconv/__init__.py
@@ -6,7 +6,7 @@
     get_metadata_schema,
     convert_to_nwb,
     validate_metadata,
-    upload_to_dandi,
+    upload_project_to_dandi,
     upload_folder_to_dandi,
     upload_multiple_filesystem_objects_to_dandi,
     listen_to_neuroconv_events,
6 changes: 2 additions & 4 deletions pyflask/manageNeuroconv/manage_neuroconv.py
@@ -169,8 +169,6 @@ def get_all_converter_info() -> dict:
 
     return {name: derive_interface_info(converter) for name, converter in module_to_dict(converters).items()}
 
-    return output
-
 
 def get_all_interface_info() -> dict:
     """Format an information structure to be used for selecting interfaces based on modality and technique."""
@@ -470,7 +468,7 @@ def update_conversion_progress(**kwargs):
 
 
 def upload_multiple_filesystem_objects_to_dandi(**kwargs):
-    tmp_folder_path = aggregate_symlinks_in_new_directory(kwargs["filesystem_paths"], "upload")
+    tmp_folder_path = _aggregate_symlinks_in_new_directory(kwargs["filesystem_paths"], "upload")
     innerKwargs = {**kwargs}
     del innerKwargs["filesystem_paths"]
     innerKwargs["nwb_folder_path"] = tmp_folder_path
@@ -502,7 +500,7 @@ def upload_folder_to_dandi(
 )
 
 
-def upload_to_dandi(
+def upload_project_to_dandi(
     dandiset_id: str,
     api_key: str,
     project: Optional[str] = None,
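
Besides removing the unreachable "return output" after the dict return, this hunk renames the symlink helper with a leading underscore to mark it module-private, and updates the call site to match. Judging from that call site, the helper gathers arbitrary filesystem paths into one temporary directory of symlinks so the batch can be handed on as a single nwb_folder_path. A hypothetical re-creation of that behavior (the real implementation may differ):

    from pathlib import Path
    from tempfile import mkdtemp

    def _aggregate_symlinks_in_new_directory(filesystem_paths, prefix):
        # Collect scattered files/folders into a fresh directory of symlinks
        # so downstream upload code can treat them as one folder.
        tmp_folder_path = Path(mkdtemp(prefix=prefix))
        for source in map(Path, filesystem_paths):
            link = tmp_folder_path / source.name
            link.symlink_to(source, target_is_directory=source.is_dir())
        return tmp_folder_path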
6 changes: 3 additions & 3 deletions schemas/json/dandi/upload.json
@@ -7,15 +7,15 @@
     "number_of_jobs": {
       "type": "integer",
       "title": "Job Count",
-      "description": "The number of files to upload in parallel. A value of <code>-1</code> uses all available processes",
-      "default": 1,
+      "description": "The number of files to upload in parallel. A value of <code>-1</code> uses all available processes.<br><small><b>Note:</b> If you encounter an error for any selector value, particularly a Segmentation Fault error, try a value of <code>1</code> to run the jobs without parallelization.</small>",
+      "default": -1,
       "min": -1
     },
     "number_of_threads": {
       "type": "integer",
       "title": "Threads per Job",
       "description": "The number of threads to handle each file. A value of <code>-1</code> uses all available threads per process.",
-      "default": 1,
+      "default": -1,
       "min": -1
     },
     "cleanup": {
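
Flipping both defaults from 1 to -1 makes fully parallel upload the out-of-the-box behavior, with the expanded description steering users back to 1 if parallelization misbehaves. The -1 sentinel conventionally means "use everything available"; a hypothetical resolution on the consumer side (the actual upload code may differ):

    import os

    def resolve_worker_count(requested: int) -> int:
        # Map the schema's -1 sentinel to the machine's CPU count.
        if requested == -1:
            return os.cpu_count() or 1  # cpu_count() may return None
        return requested

    print(resolve_worker_count(-1))  # e.g. 8 on an 8-core machine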