COST-1165: Add Priority worker/queue. #2741

Merged · 5 commits · Mar 22, 2021
58 changes: 58 additions & 0 deletions docker-compose.yml
@@ -432,3 +432,61 @@ services:
    image: prom/pushgateway
    ports:
      - 9091:9091

  priority-worker:
    container_name: priority_worker
    hostname: priority-worker-1
    image: koku_base
    working_dir: /koku/koku
    entrypoint: ['watchmedo', 'auto-restart', '--directory=./', '--pattern=*.py', '--recursive', '--', 'celery', '-A', 'koku', 'worker', '-l', 'info', '-Q', 'priority_delete,priority_update']
    environment:
      - AWS_ACCESS_KEY_ID
      - AWS_SECRET_ACCESS_KEY
      - DATABASE_SERVICE_NAME=POSTGRES_SQL
      - DATABASE_ENGINE=postgresql
      - DATABASE_NAME=${DATABASE_NAME-postgres}
      - POSTGRES_SQL_SERVICE_HOST=db
      - POSTGRES_SQL_SERVICE_PORT=5432
      - DATABASE_USER=${DATABASE_USER-postgres}
      - DATABASE_PASSWORD=${DATABASE_PASSWORD-postgres}
      - RABBITMQ_HOST=${RABBITMQ_HOST-koku-rabbit}
      - RABBITMQ_PORT=5672
      - USE_RABBIT=${USE_RABBIT}
      - DEVELOPMENT=${DEVELOPMENT-True}
      - LOG_LEVEL=INFO
      - DJANGO_SETTINGS_MODULE=koku.settings
      - MASU_SECRET_KEY=abc
      - prometheus_multiproc_dir=/tmp
      - PROMETHEUS_PUSHGATEWAY=${PROMETHEUS_PUSHGATEWAY-pushgateway:9091}
      - ENABLE_S3_ARCHIVING=${ENABLE_S3_ARCHIVING-False}
      - ENABLE_PARQUET_PROCESSING=${ENABLE_PARQUET_PROCESSING-False}
      - S3_BUCKET_NAME=${S3_BUCKET_NAME-koku-bucket}
      - S3_BUCKET_PATH=${S3_BUCKET_PATH-data_archive}
      - S3_ENDPOINT
      - S3_ACCESS_KEY
      - S3_SECRET
      - PVC_DIR=${PVC_DIR-/testing/pvc_dir}
      - GOOGLE_APPLICATION_CREDENTIALS=${GOOGLE_APPLICATION_CREDENTIALS}
      - KOKU_CELERY_ENABLE_SENTRY
      - KOKU_CELERY_SENTRY_DSN
      - KOKU_SENTRY_ENVIRONMENT
      - DEMO_ACCOUNTS
      - INITIAL_INGEST_OVERRIDE=${INITIAL_INGEST_OVERRIDE-False}
      - INITIAL_INGEST_NUM_MONTHS=${INITIAL_INGEST_NUM_MONTHS-2}
      - AUTO_DATA_INGEST=${AUTO_DATA_INGEST-True}
      - REPORT_PROCESSING_BATCH_SIZE=${REPORT_PROCESSING_BATCH_SIZE-100000}
      - REPORT_PROCESSING_TIMEOUT_HOURS=${REPORT_PROCESSING_TIMEOUT_HOURS-2}
      - PRESTO_HOST=${PRESTO_HOST-presto}
      - PRESTO_PORT=${PRESTO_PORT-8080}
      - DATE_OVERRIDE
      - MASU_DEBUG
    volumes:
      - '.:/koku'
    privileged: true
    links:
      - koku-rabbit
    depends_on:
      - koku-base
      - koku-rabbit
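
Note: the `-Q priority_delete,priority_update` flag subscribes this worker to exactly those two queues, so only tasks routed there are picked up. A minimal sketch (not part of the PR; the broker URL and names are assumptions for a local compose setup) for confirming the subscription from a Python shell:

```python
# Ask running workers which queues they consume; the priority worker should
# report both priority queues. active_queues() returns None if no workers reply.
from celery import Celery

app = Celery("koku", broker="amqp://guest:guest@localhost:5672//")  # assumed local broker

replies = app.control.inspect().active_queues() or {}
for worker, queues in replies.items():
    print(worker, sorted(q["name"] for q in queues))
```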
7 changes: 3 additions & 4 deletions koku/cost_models/cost_model_manager.py
@@ -27,7 +27,6 @@
from cost_models.models import CostModelMap
from masu.processor.tasks import OCP_QUEUE
from masu.processor.tasks import refresh_materialized_views
-from masu.processor.tasks import REFRESH_MATERIALIZED_VIEWS_QUEUE
from masu.processor.tasks import update_cost_model_costs
from masu.processor.tasks import UPDATE_COST_MODEL_COSTS_QUEUE

@@ -98,14 +97,14 @@ def update_provider_uuids(self, provider_uuids):
        except Provider.DoesNotExist:
            LOG.info(f"Provider {provider_uuid} does not exist. Skipping cost-model update.")
        else:
-           queue = OCP_QUEUE if provider.type == Provider.PROVIDER_OCP else None
+           queue_choice = OCP_QUEUE if provider.type == Provider.PROVIDER_OCP else UPDATE_COST_MODEL_COSTS_QUEUE
            schema_name = provider.customer.schema_name
            chain(
                update_cost_model_costs.s(schema_name, provider.uuid, start_date, end_date).set(
-                   queue=queue or UPDATE_COST_MODEL_COSTS_QUEUE
+                   queue=queue_choice
                ),
                refresh_materialized_views.si(schema_name, provider.type, provider_uuid=provider.uuid).set(
-                   queue=queue or REFRESH_MATERIALIZED_VIEWS_QUEUE
+                   queue=queue_choice
                ),
            ).apply_async()

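For context on the pattern above: `.s()` creates a signature that receives the previous task's result, `.si()` creates an immutable signature that ignores it, and `.set(queue=...)` routes each link at call time. A self-contained sketch with placeholder tasks (all names illustrative, not koku code):

```python
# Illustrative chain with per-link queue overrides, mirroring the PR's pattern.
from celery import Celery, chain

app = Celery("demo", broker="memory://")

@app.task
def update_costs(schema):
    return f"updated {schema}"

@app.task
def refresh_views(schema):
    return f"refreshed {schema}"

# refresh_views.si(...) is immutable, so it ignores update_costs's return value
# and runs with only its own arguments, as refresh_materialized_views does above.
chain(
    update_costs.s("acct10001").set(queue="priority_update"),
    refresh_views.si("acct10001").set(queue="priority_update"),
).apply_async()
```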
3 changes: 2 additions & 1 deletion koku/masu/processor/tasks.py
@@ -69,8 +69,9 @@
REFRESH_MATERIALIZED_VIEWS_QUEUE = "reporting"
REMOVE_EXPIRED_DATA_QUEUE = "remove_expired"
SUMMARIZE_REPORTS_QUEUE = "process"
-UPDATE_COST_MODEL_COSTS_QUEUE = "reporting"
+UPDATE_COST_MODEL_COSTS_QUEUE = "priority_update"
UPDATE_SUMMARY_TABLES_QUEUE = "reporting"
+DELETE_SOURCE_QUEUE = "priority_delete"


def record_all_manifest_files(manifest_id, report_files):
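Since these queue names are plain module-level constants, repointing `UPDATE_COST_MODEL_COSTS_QUEUE` at `"priority_update"` reroutes every task and call site that imports it. Celery creates unknown queues on demand by default (`task_create_missing_queues` is `True` unless overridden), so no broker-side changes should be needed. A hedged sketch of the binding, with placeholder names:

```python
# Sketch: the queue kwarg is read when the task is registered, so changing the
# constant reroutes the task without touching any call sites.
from celery import Celery

app = Celery("demo", broker="memory://")

UPDATE_COST_MODEL_COSTS_QUEUE = "priority_update"

@app.task(queue=UPDATE_COST_MODEL_COSTS_QUEUE)
def update_cost_model_costs(schema_name, provider_uuid):
    ...  # placeholder body; the real task lives in masu.processor.tasks
```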
3 changes: 2 additions & 1 deletion koku/sources/tasks.py
@@ -18,13 +18,14 @@
import logging

from koku import celery_app
+from masu.processor.tasks import DELETE_SOURCE_QUEUE
from sources.sources_provider_coordinator import SourcesProviderCoordinator
from sources.storage import load_providers_to_delete

LOG = logging.getLogger(__name__)


-@celery_app.task(name="sources.tasks.delete_source", queue="remove_expired")
+@celery_app.task(name="sources.tasks.delete_source", queue=DELETE_SOURCE_QUEUE)
def delete_source(source_id, auth_header, koku_uuid):
    """Delete Provider and Source."""
    LOG.info(f"Deleting Provider {koku_uuid} for Source ID: {source_id}")
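
Worth noting: the decorator's `queue=` only sets the task's default route; a caller can still override it per invocation, much like the `.set(queue=...)` overrides in `cost_model_manager.py` above. A hedged example with placeholder argument values:

```python
# Explicit per-call routing overrides the queue set in the decorator.
delete_source.apply_async(
    args=(1, "fake-auth-header", "abc-123-uuid"),  # placeholder values
    queue="priority_delete",  # the value DELETE_SOURCE_QUEUE resolves to here
)
```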