Enable partial override for code analyzer #1296

Merged · 8 commits · Mar 25, 2024
Changes from all commits
5 changes: 0 additions & 5 deletions .github/dependabot.yml
@@ -25,11 +25,6 @@ updates:
     schedule:
       interval: "weekly"

-  - package-ecosystem: "npm"
-    directory: "/packages/notebook-containerizer"
-    schedule:
-      interval: "weekly"
-
   - package-ecosystem: "npm"
   - directory: "/packages/notebook-search"
     schedule:
4 changes: 0 additions & 4 deletions Makefile
@@ -24,7 +24,6 @@ install-backend: build-backend
 build-frontend: jlpm-install
 	npx lerna run build --scope @jupyter_vre/chart-customs
 	npx lerna run build --scope @jupyter_vre/core
-	npx lerna run build --scope @jupyter_vre/notebook-containerizer
 	npx lerna run build --scope @jupyter_vre/notebook-search
 	npx lerna run build --scope @jupyter_vre/dataset-search
 	npx lerna run build --scope @jupyter_vre/components
@@ -59,7 +58,6 @@ install-ui:
 	$(call INSTALL_LAB_EXTENSION,notebook-search)
 	$(call INSTALL_LAB_EXTENSION,dataset-search)
 	$(call INSTALL_LAB_EXTENSION,core)
-	$(call INSTALL_LAB_EXTENSION,notebook-containerizer)
 	$(call INSTALL_LAB_EXTENSION,chart-customs)
 	$(call INSTALL_LAB_EXTENSION,components)
 	$(call INSTALL_LAB_EXTENSION,experiment-manager)
@@ -70,7 +68,6 @@ link-ui:
 	$(call LINK_LAB_EXTENSION,notebook-search)
 	$(call LINK_LAB_EXTENSION,dataset-search)
 	$(call LINK_LAB_EXTENSION,core)
-	$(call LINK_LAB_EXTENSION,notebook-containerizer)
 	$(call LINK_LAB_EXTENSION,chart-customs)
 	$(call LINK_LAB_EXTENSION,components)
 	$(call LINK_LAB_EXTENSION,experiment-manager)
@@ -80,7 +77,6 @@ link-ui:
 dist-ui: build-frontend
 	mkdir -p dist
 	$(call PACKAGE_LAB_EXTENSION,core)
-	$(call PACKAGE_LAB_EXTENSION,notebook-containerizer)
 	$(call PACKAGE_LAB_EXTENSION,chart-customs)
 	$(call PACKAGE_LAB_EXTENSION,components)
 	$(call PACKAGE_LAB_EXTENSION,experiment-manager)
5 changes: 2 additions & 3 deletions docker/vanilla/dev.Dockerfile
@@ -41,7 +41,6 @@ COPY packages/chart-customs/package.json packages/chart-customs/
 COPY packages/components/package.json packages/components/
 COPY packages/core/package.json packages/core/
 COPY packages/experiment-manager/package.json packages/experiment-manager/
-COPY packages/notebook-containerizer/package.json packages/notebook-containerizer/
 COPY packages/notebook-search/package.json packages/notebook-search/
 COPY packages/vre-menu/package.json packages/vre-menu/
 COPY packages/vre-panel/package.json packages/vre-panel/
@@ -69,11 +68,11 @@ RUN jupyter serverextension enable --py jupyterlab_vre --user
 WORKDIR /live/ts
 COPY --chown=$NB_USER:users packages/ packages/
 COPY --chown=$NB_USER:users tsconfig-base.json .
-RUN extensions="chart-customs core notebook-containerizer notebook-search components experiment-manager vre-panel vre-menu"; \
+RUN extensions="chart-customs core notebook-search components experiment-manager vre-panel vre-menu"; \
    for ext in $extensions; do \
    npx lerna run build --scope "@jupyter_vre/$ext"; \
    done
-RUN extensions="chart-customs core notebook-containerizer notebook-search components experiment-manager vre-panel vre-menu"; \
+RUN extensions="chart-customs core notebook-search components experiment-manager vre-panel vre-menu"; \
    for ext in $extensions; do \
    jupyter labextension link --no-build "packages/$ext"; \
    done
3 changes: 0 additions & 3 deletions jupyterlab_vre/__init__.py
@@ -7,7 +7,6 @@
 from .dataset_search.handlers import DatasetSearchHandler, DatasetDownloadHandler
 from .experiment_manager.handlers import ExportWorkflowHandler, ExecuteWorkflowHandler
 from .handlers import CatalogGetAllHandler
-from .notebook_containerizer.handlers import NotebookExtractorHandler
 from .notebook_search.handlers import NotebookSearchHandler, NotebookSearchRatingHandler, NotebookDownloadHandler, \
     NotebookSeachHistoryHandler, NotebookSourceHandler
 from .registries.handlers import RegistriesHandler
@@ -34,8 +33,6 @@ def load_jupyter_server_extension(lab_app):
         (
             url_path_join(lab_app.web_app.settings['base_url'], r'/vre/notebooksearchrating'), NotebookSearchRatingHandler),
         (url_path_join(lab_app.web_app.settings['base_url'], r'/vre/containerizer/extract'), ExtractorHandler),
-        (
-            url_path_join(lab_app.web_app.settings['base_url'], r'/vre/nbcontainerizer/extract'), NotebookExtractorHandler),
         (url_path_join(lab_app.web_app.settings['base_url'], r'/vre/containerizer/types'), TypesHandler),
         (url_path_join(lab_app.web_app.settings['base_url'], r'/vre/containerizer/baseimage'), BaseImageHandler),
         (url_path_join(lab_app.web_app.settings['base_url'], r'/vre/containerizer/addcell'), CellsHandler),
100 changes: 47 additions & 53 deletions jupyterlab_vre/component_containerizer/handlers.py
@@ -29,9 +29,10 @@
 from jupyterlab_vre.database.cell import Cell
 from jupyterlab_vre.services.containerizer.Rcontainerizer import Rcontainerizer
 from jupyterlab_vre.services.converter.converter import ConverterReactFlowChart
-from jupyterlab_vre.services.extractor.headerextractor import HeaderExtractor
+from jupyterlab_vre.services.extractor.extractor import DummyExtractor
 from jupyterlab_vre.services.extractor.pyextractor import PyExtractor
 from jupyterlab_vre.services.extractor.rextractor import RExtractor
+from jupyterlab_vre.services.extractor.headerextractor import HeaderExtractor

 logger = logging.getLogger(__name__)

@@ -124,27 +125,33 @@ async def post(self, *args, **kwargs):

         source = notebook.cells[cell_index].source

-        # extractor based on the cell header
-        try:
-            extractor = HeaderExtractor(notebook, source)
-        except jsonschema.ValidationError as e:
-            self.set_status(400, f"Invalid cell header")
-            self.write(
-                {
-                    'message': f"Error in cell header: {e}",
-                    'reason': None,
-                    'traceback': traceback.format_exception(e),
-                }
-            )
-            self.flush()
-            return
+        if notebook.cells[cell_index].cell_type != 'code':
+            # dummy extractor for non-code cells (e.g. markdown)
+            extractor = DummyExtractor(notebook, source)
+        else:
+            # extractor based on the cell header
+            try:
+                extractor = HeaderExtractor(notebook, source)
+            except jsonschema.ValidationError as e:
+                self.set_status(400, f"Invalid cell header")
+                self.write(
+                    {
+                        'message': f"Error in cell header: {e}",
+                        'reason': None,
+                        'traceback': traceback.format_exception(e),
+                    }
+                )
+                self.flush()
+                return

-        # extractor based on the kernel (if cell header is not defined)
-        if not extractor.enabled():
-            if kernel == "IRkernel":
-                extractor = RExtractor(notebook)
-            else:
-                extractor = PyExtractor(notebook)
+        # Extractor based on code analysis. Used if the cell has no header,
+        # or if some values are not specified in the header
+        if not extractor.is_complete():
+            if kernel == "IRkernel":
+                code_extractor = RExtractor(notebook, source)
+            else:
+                code_extractor = PyExtractor(notebook, source)
+            extractor.add_missing_values(code_extractor)

         extracted_nb = extract_cell_by_index(notebook, cell_index)
         if kernel == "IRkernel":
@@ -159,29 +166,13 @@ async def post(self, *args, **kwargs):
         if 'JUPYTERHUB_USER' in os.environ:
             title += '-' + slugify(os.environ['JUPYTERHUB_USER'])

-        ins = {}
-        outs = {}
-        params = {}
-        confs = []
-        dependencies = []
-
-        # Check if cell is code. If cell is for example markdown we get execution from 'extractor.infer_cell_inputs(
-        # source)'
-        if notebook.cells[cell_index].cell_type == 'code':
-            ins = extractor.infer_cell_inputs(source)
-            outs = extractor.infer_cell_outputs(source)
-
-            confs = extractor.extract_cell_conf_ref(source)
-            dependencies = extractor.infer_cell_dependencies(source, confs)
-
         # If any of these change, we create a new cell in the catalog.
         # This matches the cell properties saved in workflows.
         cell_identity_dict = {
             'title': title,
-            'params': params,
-            'inputs': ins,
-            'outputs': outs,
-            'deps': sorted(dependencies, key=lambda x: x['name']),
+            'params': extractor.params,
+            'inputs': extractor.ins,
+            'outputs': extractor.outs,
         }
         cell_identity_str = json.dumps(cell_identity_dict, sort_keys=True)
         node_id = hashlib.sha1(cell_identity_str.encode()).hexdigest()[:7]
@@ -191,27 +182,26 @@ async def post(self, *args, **kwargs):
             title=title,
             task_name=slugify(title.lower()),
             original_source=source,
-            inputs=ins,
-            outputs=outs,
-            params=params,
-            confs=confs,
-            dependencies=dependencies,
+            inputs=extractor.ins,
+            outputs=extractor.outs,
+            params={},
+            confs=extractor.confs,
+            dependencies=extractor.dependencies,
             container_source="",
             kernel=kernel,
             notebook_dict=extracted_nb.dict()
         )
-        if notebook.cells[cell_index].cell_type == 'code':
-            cell.integrate_configuration()
-            params = extractor.extract_cell_params(cell.original_source)
-            cell.add_params(params)
-            cell.add_param_values(params)
+        cell.integrate_configuration()
+        extractor.params = extractor.extract_cell_params(cell.original_source)
+        cell.add_params(extractor.params)
+        cell.add_param_values(extractor.params)

         node = ConverterReactFlowChart.get_node(
             node_id,
             title,
-            set(ins),
-            set(outs),
-            params,
+            set(extractor.ins),
+            set(extractor.outs),
+            extractor.params,
         )

         chart = {
@@ -300,6 +290,7 @@ def wait_for_github_api_resources():
sleep(remaining_time + 1)
rate_limit = github.get_rate_limit()


def find_job(
wf_id=None,
wf_creation_utc=None,
@@ -340,6 +331,7 @@ def find_job(
return job
return None


def wait_for_job(
wf_id=None,
wf_creation_utc=None,
@@ -381,12 +373,14 @@ def wait_for_job(
return job
sleep(5)


def write_cell_to_file(current_cell):
Path('/tmp/workflow_cells/cells').mkdir(parents=True, exist_ok=True)
with open('/tmp/workflow_cells/cells/' + current_cell.task_name + '.json', 'w') as f:
f.write(current_cell.toJSON())
f.close()


class CellsHandler(APIHandler, Catalog):
logger = logging.getLogger(__name__)

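Note: the extractor classes that implement this behaviour live under jupyterlab_vre/services/extractor/ and are not part of the diff shown in this excerpt. The sketch below is only an illustration of how a partial-override interface along these lines could work. The attribute names (ins, outs, params, confs, dependencies) and the method names (is_complete, add_missing_values, DummyExtractor) are taken from the handler diff above; everything else, including the method bodies, is an assumption and not the project's actual implementation.

# Minimal sketch only, assuming a base Extractor that tracks the fields the
# handler reads back out. Method bodies are illustrative, not the real code.
class Extractor:
    def __init__(self, notebook, source):
        self.notebook = notebook
        self.source = source
        # Values the containerizer handler reads from the extractor.
        self.ins = {}
        self.outs = {}
        self.params = {}
        self.confs = []
        self.dependencies = []

    def is_complete(self):
        # A cell header may define only some of the cell properties; anything
        # left empty can still be filled in by code analysis.
        return all([self.ins, self.outs, self.params,
                    self.confs, self.dependencies])

    def add_missing_values(self, other):
        # Partial override: values already set (e.g. from the cell header) win,
        # and the code-analysis extractor only supplies what is still missing.
        if not self.ins:
            self.ins = other.ins
        if not self.outs:
            self.outs = other.outs
        if not self.params:
            self.params = other.params
        if not self.confs:
            self.confs = other.confs
        if not self.dependencies:
            self.dependencies = other.dependencies


class DummyExtractor(Extractor):
    # For non-code cells (e.g. markdown) there is nothing to analyze, so the
    # empty defaults are treated as final.
    def is_complete(self):
        return True

Under these assumptions, the flow in the handler above would be: build the header-based extractor first, then call add_missing_values() with a PyExtractor or RExtractor, so header values take precedence and code analysis acts only as a fallback for whatever the header left unspecified.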
Empty file.
102 changes: 0 additions & 102 deletions jupyterlab_vre/notebook_containerizer/handlers.py

This file was deleted.
