diff --git a/.happy/terraform/envs/dev/providers.tf b/.happy/terraform/envs/dev/providers.tf index 8085efb00..091fdaf57 100644 --- a/.happy/terraform/envs/dev/providers.tf +++ b/.happy/terraform/envs/dev/providers.tf @@ -1,5 +1,5 @@ provider aws { - version = "~> 3.39.0" + version = "~> 4.18.0" region = "us-west-2" assume_role { role_arn = "arn:aws:iam::${var.aws_account_id}:role/${var.aws_role}" diff --git a/.happy/terraform/envs/prod/providers.tf b/.happy/terraform/envs/prod/providers.tf index 8085efb00..091fdaf57 100644 --- a/.happy/terraform/envs/prod/providers.tf +++ b/.happy/terraform/envs/prod/providers.tf @@ -1,5 +1,5 @@ provider aws { - version = "~> 3.39.0" + version = "~> 4.18.0" region = "us-west-2" assume_role { role_arn = "arn:aws:iam::${var.aws_account_id}:role/${var.aws_role}" diff --git a/.happy/terraform/envs/staging/providers.tf b/.happy/terraform/envs/staging/providers.tf index 8085efb00..091fdaf57 100644 --- a/.happy/terraform/envs/staging/providers.tf +++ b/.happy/terraform/envs/staging/providers.tf @@ -1,5 +1,5 @@ provider aws { - version = "~> 3.39.0" + version = "~> 4.18.0" region = "us-west-2" assume_role { role_arn = "arn:aws:iam::${var.aws_account_id}:role/${var.aws_role}" diff --git a/.happy/terraform/modules/ecs-stack/main.tf b/.happy/terraform/modules/ecs-stack/main.tf index 92d722ce7..ce49886f8 100644 --- a/.happy/terraform/modules/ecs-stack/main.tf +++ b/.happy/terraform/modules/ecs-stack/main.tf @@ -16,12 +16,15 @@ locals { frontend_cmd = [] backend_cmd = [] + plugins_cmd = [] + failure_cmd = ["get_plugin_manifest.failure_handler"] security_groups = local.secret["security_groups"] zone = local.secret["zone_id"] cluster = local.secret["cluster_arn"] frontend_image_repo = local.secret["ecrs"]["frontend"]["url"] backend_image_repo = local.secret["ecrs"]["backend"]["url"] + plugins_image_repo = local.secret["ecrs"]["plugins"]["url"] external_dns = local.secret["external_zone_name"] internal_dns = local.secret["internal_zone_name"] 
rest_api_id = local.secret["api_gateway"]["rest_api_id"] @@ -48,6 +51,8 @@ locals { frontend_url = var.frontend_url != "" ? var.frontend_url: try(join("", ["https://", module.frontend_dns.dns_prefix, ".", local.external_dns]), var.frontend_url) backend_function_name = "${local.custom_stack_name}-backend" + plugins_function_name = "${local.custom_stack_name}-plugins" + failure_function_name = "${local.custom_stack_name}-failure" } module frontend_dns { @@ -118,6 +123,54 @@ module backend_lambda { timeout = 300 } +module plugins_lambda { + source = "../lambda-container" + function_name = local.plugins_function_name + image = "${local.plugins_image_repo}:${local.image_tag}" + cmd = local.plugins_cmd + tags = var.tags + + vpc_config = { + subnet_ids = local.cloud_env.private_subnets + security_group_ids = local.security_groups + } + + environment = { + "BUCKET" = local.data_bucket_name + "BUCKET_PATH" = var.env == "dev" ? local.custom_stack_name : "" + } + + log_retention_in_days = 14 + timeout = 900 + memory_size = 10240 + ephemeral_storage_size = 10240 + maximum_retry_attempts = 0 + create_async_event_config = true + destination_on_failure = module.failure_lambda.function_arn +} + +module failure_lambda { + source = "../lambda-container" + function_name = local.failure_function_name + image = "${local.plugins_image_repo}:${local.image_tag}" + cmd = local.failure_cmd + tags = var.tags + + vpc_config = { + subnet_ids = local.cloud_env.private_subnets + security_group_ids = local.security_groups + } + + environment = { + "BUCKET" = local.data_bucket_name + "BUCKET_PATH" = var.env == "dev" ? 
local.custom_stack_name : "" + } + + log_retention_in_days = 14 + timeout = 900 + maximum_retry_attempts = 0 +} + module api_gateway_proxy_stage { source = "../api-gateway-proxy-stage" lambda_function_name = local.backend_function_name @@ -137,6 +190,27 @@ resource "aws_cloudwatch_event_rule" "update_rule" { tags = var.tags } +resource "aws_lambda_permission" "allow_bucket" { + statement_id = "AllowExecutionFromS3Bucket" + action = "lambda:InvokeFunction" + function_name = module.plugins_lambda.function_arn + principal = "s3.amazonaws.com" + source_arn = local.data_bucket_arn +} + +resource "aws_s3_bucket_notification" "plugins_notification" { + bucket = local.data_bucket_name + + lambda_function { + lambda_function_arn = module.plugins_lambda.function_arn + events = ["s3:ObjectCreated:*"] + filter_prefix = var.env == "dev" ? local.custom_stack_name : "" + filter_suffix = ".yaml" + } + + depends_on = [aws_lambda_permission.allow_bucket] +} + resource "aws_cloudwatch_event_target" "update_target" { rule = aws_cloudwatch_event_rule.update_rule.name arn = module.backend_lambda.function_arn @@ -184,12 +258,57 @@ data aws_iam_policy_document backend_policy { } } +data aws_iam_policy_document plugins_policy { + statement { + actions = [ + "s3:PutObject", + "s3:GetObject", + "s3:DeleteObject", + ] + + resources = ["${local.data_bucket_arn}/*"] + } + + statement { + actions = [ + "lambda:InvokeFunction", + "lambda:InvokeAsync", + ] + + resources = [module.failure_lambda.function_arn] + } +} + +data aws_iam_policy_document failure_policy { + statement { + actions = [ + "s3:PutObject", + "s3:GetObject", + "s3:DeleteObject", + ] + + resources = ["${local.data_bucket_arn}/*"] + } +} + resource aws_iam_role_policy policy { name = "${local.custom_stack_name}-${var.env}-policy" role = module.backend_lambda.role_name policy = data.aws_iam_policy_document.backend_policy.json } +resource aws_iam_role_policy plugins_lambda_policy { + name = 
"${local.custom_stack_name}-${var.env}-plugins-lambda-policy" + role = module.plugins_lambda.role_name + policy = data.aws_iam_policy_document.plugins_policy.json +} + +resource aws_iam_role_policy failure_lambda_policy { + name = "${local.custom_stack_name}-${var.env}-failure-lambda-policy" + role = module.failure_lambda.role_name + policy = data.aws_iam_policy_document.failure_policy.json +} + resource aws_acm_certificate cert { domain_name = "${module.frontend_dns.dns_prefix}.${local.external_dns}" validation_method = "DNS" diff --git a/.happy/terraform/modules/lambda-container/main.tf b/.happy/terraform/modules/lambda-container/main.tf index ff8da08a3..7bb15a0a1 100644 --- a/.happy/terraform/modules/lambda-container/main.tf +++ b/.happy/terraform/modules/lambda-container/main.tf @@ -1,6 +1,6 @@ module lambda { source = "terraform-aws-modules/lambda/aws" - version = "2.0.0" + version = "3.2.1" publish = var.provisioned_lambda == -1 ? false : true function_name = var.function_name @@ -16,6 +16,8 @@ module lambda { vpc_security_group_ids = var.vpc_config == null ? 
null : var.vpc_config.security_group_ids memory_size = var.memory_size + ephemeral_storage_size = var.ephemeral_storage_size + maximum_retry_attempts = var.maximum_retry_attempts kms_key_arn = var.kms_key_arn role_name = var.function_name role_path = var.lambda_role_path @@ -24,6 +26,10 @@ module lambda { attach_network_policy = true reserved_concurrent_executions = var.reserved_concurrent_executions allowed_triggers = var.allowed_triggers + destination_on_failure = var.destination_on_failure + create_async_event_config = var.create_async_event_config + + hash_extra = random_string.random_md5.result } resource "aws_lambda_provisioned_concurrency_config" "provisioned" { @@ -35,4 +41,10 @@ resource "aws_lambda_provisioned_concurrency_config" "provisioned" { lifecycle { create_before_destroy = true } +} + +# create random md5 so that the lambda would be refreshed +resource "random_string" "random_md5" { + length = 64 + special = false } \ No newline at end of file diff --git a/.happy/terraform/modules/lambda-container/variables.tf b/.happy/terraform/modules/lambda-container/variables.tf index 060309060..f2c7945dc 100644 --- a/.happy/terraform/modules/lambda-container/variables.tf +++ b/.happy/terraform/modules/lambda-container/variables.tf @@ -26,6 +26,11 @@ variable log_retention_in_days { default = null } +variable maximum_retry_attempts { + type = number + default = 2 +} + variable function_description { type = string description = "Description for lambda function." @@ -72,6 +77,12 @@ variable memory_size { default = 128 } +variable ephemeral_storage_size { + type = number + description = "Amount of ephemeral storage (/tmp) in MB your Lambda Function can use at runtime." 
+ default = 512 +} + variable allowed_triggers { description = "Map of allowed triggers to create Lambda permissions" type = map(any) @@ -94,4 +105,16 @@ variable cmd { variable description { type = string default = "" +} + +variable create_async_event_config { + type = bool + description = "Controls whether async event configuration for Lambda Function/Alias should be created" + default = false +} + +variable destination_on_failure { + type = string + description = "Amazon Resource Name (ARN) of the destination resource for failed asynchronous invocations" + default = null } \ No newline at end of file diff --git a/backend/api/app.py b/backend/api/app.py index f9235d738..32b64df20 100644 --- a/backend/api/app.py +++ b/backend/api/app.py @@ -1,4 +1,5 @@ import os + from apig_wsgi import make_lambda_handler from werkzeug.middleware.dispatcher import DispatcherMiddleware from flask import Flask, Response, jsonify, render_template diff --git a/backend/api/model.py b/backend/api/model.py index 6cb0e83d5..9abb17a8f 100644 --- a/backend/api/model.py +++ b/backend/api/model.py @@ -1,28 +1,27 @@ from concurrent import futures from datetime import datetime -from typing import Tuple, Dict, List +from typing import Tuple, Dict, List, Callable from zipfile import ZipFile from io import BytesIO from collections import defaultdict - from utils.conda import get_conda_forge_package from utils.github import get_github_metadata, get_artifact from utils.pypi import query_pypi, get_plugin_pypi_metadata -from api.s3 import get_cache, cache -from utils.utils import render_description, send_alert, get_attribute, get_category_mapping +from api.s3 import get_cache, cache, is_npe2_plugin +from utils.utils import render_description, send_alert, get_attribute, get_category_mapping, parse_manifest from utils.datadog import report_metrics from api.zulip import notify_new_packages index_subset = {'name', 'summary', 'description_text', 'description_content_type', 'authors', 'license', 
'python_version', 'operating_system', 'release_date', 'version', 'first_released', - 'development_status', 'category', 'conda'} + 'development_status', 'category', 'display_name', 'plugin_types', 'reader_file_extensions', + 'writer_file_extensions', 'writer_save_layers', 'npe2', 'error_message', 'conda'} def get_public_plugins() -> Dict[str, str]: """ Get the dictionary of public plugins and versions. - :return: dict of public plugins and their versions """ public_plugins = get_cache('cache/public-plugins.json') @@ -35,7 +34,6 @@ def get_public_plugins() -> Dict[str, str]: def get_hidden_plugins() -> Dict[str, str]: """ Get the dictionary of hidden plugins and versions. - :return: dict of hidden plugins and their versions """ hidden_plugins = get_cache('cache/hidden-plugins.json') @@ -48,7 +46,6 @@ def get_hidden_plugins() -> Dict[str, str]: def get_valid_plugins() -> Dict[str, str]: """ Get the dictionary of valid plugins and versions. - :return: dict of valid plugins and their versions """ return {**get_hidden_plugins(), **get_public_plugins()} @@ -56,8 +53,7 @@ def get_valid_plugins() -> Dict[str, str]: def get_plugin(plugin: str, version: str = None) -> dict: """ - Get plugin metadata for a particular plugin, get latest if version is None. - + Get plugin and manifest metadata for a particular plugin, get latest if version is None. 
:param plugin: name of the plugin to get :param version: version of the plugin :return: plugin metadata dictionary @@ -67,29 +63,40 @@ plugins = get_valid_plugins() if plugin not in plugins: return {} elif version is None: version = plugins[plugin] - plugin = get_cache(f'cache/{plugin}/{version}.json') - if plugin: - return plugin + plugin_metadata = get_cache(f'cache/{plugin}/{version}.json') + manifest_metadata = get_frontend_manifest_metadata(plugin, version) + if plugin_metadata: plugin_metadata.update(manifest_metadata) + if plugin_metadata: + return plugin_metadata else: return {} +def get_frontend_manifest_metadata(plugin, version): + # load manifest from yaml (triggering build) + raw_metadata = get_manifest(plugin, version) + if 'process_count' in raw_metadata: + raw_metadata = None + interpreted_metadata = parse_manifest(raw_metadata) + interpreted_metadata['npe2'] = is_npe2_plugin(plugin, version) + return interpreted_metadata + + def get_manifest(plugin: str, version: str = None) -> dict: """ Get plugin manifest file for a particular plugin, get latest if version is None. - :param plugin: name of the plugin to get :param version: version of the plugin manifest - :return: plugin manifest dictionary + :return: plugin manifest dictionary. """ plugins = get_valid_plugins() if plugin not in plugins: return {} elif version is None: version = plugins[plugin] - plugin = get_cache(f'cache/{plugin}/{version}.yaml', 'yaml') - if plugin: - return plugin + plugin_metadata = get_cache(f'cache/{plugin}/{version}.yaml', 'yaml') + if plugin_metadata: + return plugin_metadata else: cache({"process_count": 0}, f'cache/{plugin}/{version}.yaml', format='yaml') return {"process_count": 0} @@ -98,7 +105,6 @@ def get_index() -> dict: """ Get the index page related metadata for all plugins. 
- :return: dict for index page metadata """ index = get_cache('cache/index.json') @@ -111,7 +117,6 @@ def get_index() -> dict: def slice_metadata_to_index_columns(plugins_metadata: List[dict]) -> List[dict]: """ slice index to only include specified indexing related columns. - :param plugins_metadata: plugin metadata dictionary :return: sliced dict metadata for the plugin """ @@ -122,7 +127,6 @@ def slice_metadata_to_index_columns(plugins_metadata: List[dict]) -> List[dict]: def get_excluded_plugins() -> Dict[str, str]: """ Get the excluded plugins. - :return: dict for excluded plugins and their exclusion status """ excluded_plugins = get_cache('excluded_plugins.json') @@ -132,10 +136,14 @@ def get_excluded_plugins() -> Dict[str, str]: return {} +def build_manifest_metadata(plugin: str, version: str) -> Tuple[str, dict]: + metadata = get_frontend_manifest_metadata(plugin, version) + return plugin, metadata + + def build_plugin_metadata(plugin: str, version: str) -> Tuple[str, dict]: """ Build plugin metadata from multiple sources, reuse cached ones if available. 
- :return: dict for aggregated plugin metadata """ cached_plugin = get_cache(f'cache/{plugin}/{version}.json') @@ -161,10 +169,8 @@ def build_plugin_metadata(plugin: str, version: str) -> Tuple[str, dict]: metadata['category'] = categories metadata['category_hierarchy'] = category_hierarchy del metadata['labels'] - if 'conda' not in metadata: metadata['conda'] = get_conda_forge_package(plugin) - cache(metadata, f'cache/{plugin}/{version}.json') return plugin, metadata @@ -179,9 +185,11 @@ def update_cache(): - cache/{plugin}/{version}.json (skip if exists) """ plugins = query_pypi() - plugins_metadata = get_plugin_metadata_async(plugins) + plugins_metadata = get_plugin_metadata_async(plugins, build_plugin_metadata) + manifest_metadata = get_plugin_metadata_async(plugins, build_manifest_metadata) + for plugin in plugins: + plugins_metadata[plugin].update(manifest_metadata[plugin]) excluded_plugins = get_updated_plugin_exclusion(plugins_metadata) - visibility_plugins = {"public": {}, "hidden": {}} for plugin, version in plugins.items(): visibility = plugins_metadata[plugin].get('visibility', 'public') @@ -235,16 +243,17 @@ def get_updated_plugin_exclusion(plugins_metadata): return excluded_plugins -def get_plugin_metadata_async(plugins: Dict[str, str]) -> dict: +def get_plugin_metadata_async(plugins: Dict[str, str], metadata_builder: Callable) -> dict: """ Query plugin metadata async. 
:param plugins: plugin name and versions to query + :param metadata_builder: function to read and parse metadata files :return: plugin metadata list """ plugins_metadata = {} with futures.ThreadPoolExecutor(max_workers=32) as executor: - plugin_futures = [executor.submit(build_plugin_metadata, k, v) + plugin_futures = [executor.submit(metadata_builder, k, v) for k, v in plugins.items()] for future in futures.as_completed(plugin_futures): plugins_metadata[future.result()[0]] = (future.result()[1]) diff --git a/backend/api/s3.py b/backend/api/s3.py index 866fb087d..552beed31 100644 --- a/backend/api/s3.py +++ b/backend/api/s3.py @@ -65,3 +65,22 @@ def cache(content: Union[dict, list, IO[bytes]], key: str, mime: str = None, for yaml.dump(content).encode('utf8')) as stream: s3_client.upload_fileobj(Fileobj=stream, Bucket=bucket, Key=os.path.join(bucket_path, key), ExtraArgs=extra_args) + + +def is_npe2_plugin(plugin, version): + """ + Return True if the plugin is npe2, False otherwise. + + :param plugin: name of plugin + :param version: version of plugin + :return: True if npe2, False otherwise + """ + key = f'cache/{plugin}/{version}.yaml' + try: + manifest_body = s3_client.get_object(Bucket=bucket, Key=os.path.join(bucket_path, key))['Body'] + manifest_body_str = manifest_body.read().decode('utf-8') + if manifest_body_str.startswith('#npe2'): + return True + except ClientError: + print(f"Not cached: {key}") + return False diff --git a/backend/api/templates/swagger.yml b/backend/api/templates/swagger.yml index d0c8ae679..4afec99b3 100644 --- a/backend/api/templates/swagger.yml +++ b/backend/api/templates/swagger.yml @@ -147,7 +147,7 @@ paths: 200: description: The return yaml object is the plugin's manifest. 
content: - application/x-yaml: + application/yaml: schema: $ref: '#/components/schemas/Manifest' 404: diff --git a/backend/preview/_tests/test_preview_meta.py b/backend/preview/_tests/test_preview_meta.py index 1476cc060..5557ed4b8 100644 --- a/backend/preview/_tests/test_preview_meta.py +++ b/backend/preview/_tests/test_preview_meta.py @@ -5,8 +5,7 @@ import requests import dateutil.parser from datetime import datetime - -from ..preview import clone_repo, build_dist, parse_meta, get_plugin_preview, get_pypi_date_meta, populate_source_code_url +from preview.preview import clone_repo, build_dist, parse_meta, get_plugin_preview, get_pypi_date_meta, populate_source_code_url code_plugin_url = "https://github.com/chanzuckerberg/napari-demo" hub_plugin_url = "https://api.napari-hub.org/plugins/napari-demo" diff --git a/backend/preview/preview.py b/backend/preview/preview.py index 88e11e3de..94887271b 100644 --- a/backend/preview/preview.py +++ b/backend/preview/preview.py @@ -9,7 +9,8 @@ import os import json import requests -from utils.utils import get_category_mapping +import yaml +from utils.utils import get_category_mapping, parse_manifest from utils.github import github_pattern, get_github_metadata, get_github_repo_url from utils.pypi import get_plugin_pypi_metadata @@ -72,6 +73,8 @@ def get_plugin_preview(repo_pth: str, dest_dir: str, is_local: bool = False, bra meta.update(github_metadata) # get release date and first released get_pypi_date_meta(meta) + manifest_attributes = get_manifest_attributes(meta['name'], repo_pth) + meta.update(manifest_attributes) # write json with open(os.path.join(dest_dir, "preview_meta.json"), "w") as f: @@ -262,6 +265,7 @@ def get_pypi_date_meta(meta): meta["release_date"] = release_date meta["first_released"] = first_released + def populate_source_code_url(meta): """Pattern match project_site as GitHub URL when source code url missing @@ -271,3 +275,36 @@ def populate_source_code_url(meta): match = 
github_pattern.match(meta["project_site"]) if match: meta["code_repository"] = match.group(0) + +def discover_manifest(plugin_name): + # to avoid depending on npe2 in the backend, we delay this import to runtime + # this dependency will be satisfied by the preview action + # see https://github.com/chanzuckerberg/napari-hub-preview-action + from npe2 import PluginManager + pm = PluginManager() + pm.discover(include_npe1=False) + is_npe2 = True + try: + manifest_dict = yaml.load(pm.get_manifest(plugin_name).yaml(), Loader=yaml.Loader) + except KeyError: + pm.discover(include_npe1=True) + is_npe2 = False + # forcing lazy discovery to run + my_widgs = list(pm.iter_widgets()) + manifest_dict = yaml.load(pm.get_manifest(plugin_name).yaml(), Loader=yaml.Loader) + return manifest_dict, is_npe2 + + +def get_manifest_attributes(plugin_name, repo_pth): + """ + Try to install plugin and discover manifest values. If install + or manifest discovery fails, return default empty values. + """ + try: + manifest, is_npe2 = discover_manifest(plugin_name) + except Exception as e: + manifest = None + is_npe2 = False + manifest_attributes = parse_manifest(manifest) + manifest_attributes['npe2'] = is_npe2 + return manifest_attributes diff --git a/backend/requirements.txt b/backend/requirements.txt index 82ca89982..7f19efaca 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -14,4 +14,4 @@ wheel pkginfo datadog-lambda build -pyOpenSSL \ No newline at end of file +pyOpenSSL diff --git a/backend/utils/utils.py b/backend/utils/utils.py index 14213f6ed..5dea22ec7 100644 --- a/backend/utils/utils.py +++ b/backend/utils/utils.py @@ -1,7 +1,6 @@ import os -from typing import List, Dict - import requests +from typing import List, Dict, Optional from bs4 import BeautifulSoup from markdown import markdown from requests import HTTPError @@ -80,7 +79,7 @@ def reformat_ssh_key_to_pem_bytes(ssh_key_str: str) -> bytes: :param ssh_key_str: utf-8 string without header and 
footer for the github app rsa private key :return: pem formatted private key in bytes with header and footer """ - chunked = '\n'.join(ssh_key_str[i:i+64] for i in range(0, len(ssh_key_str), 64)) + chunked = '\n'.join(ssh_key_str[i:i + 64] for i in range(0, len(ssh_key_str), 64)) return f"-----BEGIN RSA PRIVATE KEY-----\n{chunked}\n-----END RSA PRIVATE KEY-----\n".encode("utf-8") @@ -124,3 +123,50 @@ def get_category_mapping(category: str, mappings: Dict[str, List]) -> List[Dict] return [] else: return mappings[category] + + +def parse_manifest(manifest: Optional[dict] = None): + """ + Convert raw manifest into dictionary of npe2 attributes. + :param manifest: raw manifest + """ + manifest_attributes = { + 'display_name': '', + 'plugin_types': [], + 'reader_file_extensions': [], + 'writer_file_extensions': [], + 'writer_save_layers': [], + } + if manifest is None: + return manifest_attributes + manifest_attributes['display_name'] = manifest.get('display_name', '') + if 'contributions' in manifest: + manifest_contributions = manifest.get('contributions', dict()) + if 'readers' in manifest_contributions: + readers = manifest_contributions['readers'] + manifest_attributes['plugin_types'].append('reader') + reader_file_extensions = set() + for reader in readers: + filename_patterns = reader.get('filename_patterns', []) + for ext in filename_patterns: + reader_file_extensions.add(ext) + manifest_attributes['reader_file_extensions'] = list(reader_file_extensions) + if 'writers' in manifest_contributions: + writers = manifest_contributions['writers'] + manifest_attributes['plugin_types'].append('writer') + writer_file_extensions = set() + writer_save_layers = set() + for writer in writers: + filename_extensions = writer.get('filename_extensions', []) + layer_types = writer.get('layer_types', []) + for ext in filename_extensions: + writer_file_extensions.add(ext) + for ext in layer_types: + writer_save_layers.add(ext) + manifest_attributes['writer_file_extensions'] = 
list(writer_file_extensions) + manifest_attributes['writer_save_layers'] = list(writer_save_layers) + if 'themes' in manifest_contributions: + manifest_attributes['plugin_types'].append('theme') + if 'widgets' in manifest_contributions: + manifest_attributes['plugin_types'].append('widget') + return manifest_attributes diff --git a/docker-compose.yml b/docker-compose.yml index b6b481a02..f7a2c3125 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -77,6 +77,38 @@ services: naparinet: aliases: - backend.naparinet.local + plugins: + image: '${DOCKER_REPO}napari-hub-dev-plugins' + platform: linux/amd64 + build: + context: plugins + cache_from: + - '${DOCKER_REPO}napari-hub-dev-plugins:${STACK_NAME}' + args: + - BUILDKIT_INLINE_CACHE=1 + - HAPPY_COMMIT + - HAPPY_BRANCH + - HAPPY_TAG + restart: always + depends_on: + - localstack + ports: + - '5000:5000' + environment: + - PYTHONUNBUFFERED=1 + - AWS_DEFAULT_REGION=us-west-2 + - AWS_ACCESS_KEY_ID=test + - AWS_SECRET_ACCESS_KEY=test + - BOTO_ENDPOINT_URL=http://localstack:4566 + - BUCKET=imaging-test-napari-hub + # This entrypoint only used for local dev; in cloud we deploy as a Lambda container + entrypoint: gunicorn --worker-class gevent --workers 4 --bind 0.0.0.0:5000 napari:app --max-requests 10000 --timeout 5 --keep-alive 5 --log-level info --reload + volumes: + - ./plugins:/var/task + networks: + naparinet: + aliases: + - plugins.naparinet.local networks: naparinet: volumes: diff --git a/plugins/Dockerfile b/plugins/Dockerfile new file mode 100644 index 000000000..63c77f850 --- /dev/null +++ b/plugins/Dockerfile @@ -0,0 +1,11 @@ +FROM public.ecr.aws/lambda/python:3.8 + +ENV NPE2_NOCACHE=1 + +RUN ["yum", "install", "-y", "mesa-libGL"] + +COPY requirements.txt . +RUN ["pip", "install", "-r", "requirements.txt"] + +COPY . . 
+CMD ["get_plugin_manifest.generate_manifest"] diff --git a/plugins/__init__.py b/plugins/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/plugins/get_plugin_manifest.py b/plugins/get_plugin_manifest.py new file mode 100644 index 000000000..f1ea5e143 --- /dev/null +++ b/plugins/get_plugin_manifest.py @@ -0,0 +1,87 @@ +import urllib.parse +import boto3 +import yaml +import subprocess +import sys +from npe2 import PluginManager + +s3 = boto3.client('s3') + + +def discover_manifest(plugin_name): + """ + Discovers manifest via npe2 library and fetches metadata related to plugin's manifest file. + """ + pm = PluginManager() + pm.discover(include_npe1=False) + is_npe2 = True + try: + manifest = pm.get_manifest(plugin_name) + except KeyError: + pm.discover(include_npe1=True) + is_npe2 = False + # forcing lazy discovery to run + pm.index_npe1_adapters() + manifest = pm.get_manifest(plugin_name) + return manifest, is_npe2 + + +def generate_manifest(event, context): + """ + Inspects the yaml file of the plugin to retrieve the value of process_count. If the value of process_count + is in the yaml file and it is less than max_failure_tries, then the method attempts to pip install the plugin + with its version, calls discover_manifest to return manifest and is_npe2, then write to designated location on s3. 
+ """ + max_failure_tries = 2 + bucket = event['Records'][0]['s3']['bucket']['name'] + key = urllib.parse.unquote_plus(event['Records'][0]['s3']['object']['key'], encoding='utf-8') + response = s3.get_object(Bucket=bucket, Key=key) + myBody = response["Body"] + myYaml = yaml.safe_load(myBody) + s3_client = boto3.client('s3') + s3_body = '' + if 'process_count' not in myYaml or myYaml['process_count'] >= max_failure_tries: + return + try: + splitPath = str(key).split("/") + plugin = splitPath[-2] + version = splitPath[-1][:-5] + command = [sys.executable, "-m", "pip", "install", f'{plugin}=={version}', "--target=/tmp/" + plugin] + p = subprocess.Popen(command, stdout=subprocess.PIPE) + while p.poll() is None: + l = p.stdout.readline() # This blocks until it receives a newline. + sys.path.insert(0, "/tmp/" + plugin) + manifest, is_npe2 = discover_manifest(plugin) + body = '#npe2' if is_npe2 else "#npe1" + s3_body = body + "\n" + manifest.yaml() + except Exception as e: + str_e = str(e).replace('"', "") + str_e = str_e.replace("'", "") + s3_body = 'process_count: ' + str(myYaml['process_count']) + '\n' + 'error_message: ' + f"'{str_e}'" + raise e + finally: + response = s3_client.delete_object( + Bucket=bucket, + Key=key + ) + s3_client.put_object(Body=s3_body, Bucket=bucket, Key=key) + + +def failure_handler(event, context): + """ + Inspects the yaml file of the plugin, and if process_count is in the yaml file, then the method + increments the value of process_count in the yaml file, then write to designated location on s3. 
+ """ + yaml_path = event['requestPayload']['Records'][0]['s3']['object']['key'] + bucket = event['requestPayload']['Records'][0]['s3']['bucket']['name'] + response = s3.get_object(Bucket=bucket, Key=yaml_path) + myBody = response["Body"] + myYaml = yaml.safe_load(myBody) + s3_client = boto3.client('s3') + if 'process_count' in myYaml: + response = s3_client.delete_object( + Bucket=bucket, + Key=yaml_path + ) + myYaml['process_count'] += 1 + s3_client.put_object(Body=yaml.dump(myYaml), Bucket=bucket, Key=yaml_path) diff --git a/plugins/requirements.txt b/plugins/requirements.txt new file mode 100644 index 000000000..b8cfa7872 --- /dev/null +++ b/plugins/requirements.txt @@ -0,0 +1,7 @@ +boto3 +requests +pyyaml +napari +npe2 +numpy +pyqt5 \ No newline at end of file