From 4a14dc876d5d00b3b32b8e31563a744a7befda63 Mon Sep 17 00:00:00 2001 From: Matthew Fala Date: Thu, 17 Mar 2022 00:06:37 +0000 Subject: [PATCH] load_tests: add tcp load tests Signed-off-by: Matthew Fala --- .gitignore | 1 + load_tests/.gitignore | 3 + .../kinesis_s3_firehose/app.py | 89 ++++---- load_tests/load_test.py | 207 +++++++++++++----- .../logger/{ => stdout_logger}/Dockerfile | 0 load_tests/logger/stdout_logger/fluent.conf | 0 .../{ => stdout_logger}/log_generator.c | 0 load_tests/logger/tcp_logger/.gitignore | 2 + load_tests/logger/tcp_logger/Dockerfile | 6 + load_tests/logger/tcp_logger/README.md | 47 ++++ load_tests/logger/tcp_logger/build-clean.sh | 4 + load_tests/logger/tcp_logger/build.sh | 3 + load_tests/logger/tcp_logger/fluent.conf | 51 +++++ load_tests/logger/tcp_logger/pom.xml | 81 +++++++ load_tests/logger/tcp_logger/run.sh | 1 + .../src/main/java/com/mycompany/app/App.java | 59 +++++ .../src/main/java/com/mycompany/app/run.txt | 5 + .../tcp_logger/src/main/resources/log4j2.xml | 24 ++ .../test/java/com/mycompany/app/AppTest.java | 20 ++ .../tcp_logger/src/test/log4j.properties | 8 + load_tests/setup_test_environment.sh | 7 +- load_tests/task_definitions/cloudwatch.json | 29 ++- load_tests/task_definitions/firehose.json | 29 ++- load_tests/task_definitions/kinesis.json | 29 ++- load_tests/task_definitions/s3.json | 29 ++- 25 files changed, 618 insertions(+), 116 deletions(-) create mode 100644 load_tests/.gitignore rename load_tests/logger/{ => stdout_logger}/Dockerfile (100%) create mode 100644 load_tests/logger/stdout_logger/fluent.conf rename load_tests/logger/{ => stdout_logger}/log_generator.c (100%) create mode 100644 load_tests/logger/tcp_logger/.gitignore create mode 100644 load_tests/logger/tcp_logger/Dockerfile create mode 100644 load_tests/logger/tcp_logger/README.md create mode 100755 load_tests/logger/tcp_logger/build-clean.sh create mode 100755 load_tests/logger/tcp_logger/build.sh create mode 100644 
load_tests/logger/tcp_logger/fluent.conf create mode 100644 load_tests/logger/tcp_logger/pom.xml create mode 100755 load_tests/logger/tcp_logger/run.sh create mode 100644 load_tests/logger/tcp_logger/src/main/java/com/mycompany/app/App.java create mode 100644 load_tests/logger/tcp_logger/src/main/java/com/mycompany/app/run.txt create mode 100644 load_tests/logger/tcp_logger/src/main/resources/log4j2.xml create mode 100644 load_tests/logger/tcp_logger/src/test/java/com/mycompany/app/AppTest.java create mode 100644 load_tests/logger/tcp_logger/src/test/log4j.properties diff --git a/.gitignore b/.gitignore index 8c0580437..a7347b7c4 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ bin integ/out +.venv diff --git a/load_tests/.gitignore b/load_tests/.gitignore new file mode 100644 index 000000000..bb216362f --- /dev/null +++ b/load_tests/.gitignore @@ -0,0 +1,3 @@ +create_testing_resources/cdk.out +create_testing_resources/kinesis_s3_firehose/cdk.out +task_definitions/*_*m.json \ No newline at end of file diff --git a/load_tests/create_testing_resources/kinesis_s3_firehose/app.py b/load_tests/create_testing_resources/kinesis_s3_firehose/app.py index 770abba22..cf01553b8 100644 --- a/load_tests/create_testing_resources/kinesis_s3_firehose/app.py +++ b/load_tests/create_testing_resources/kinesis_s3_firehose/app.py @@ -1,3 +1,4 @@ +from logging import captureWarnings import os import json from aws_cdk import ( @@ -8,6 +9,7 @@ core, ) +DESTINATION_LIST = ["", "std-"] # "" is the destination tag for logs coming from non-stdstream input THROUGHPUT_LIST = json.loads(os.environ['THROUGHPUT_LIST']) PLATFORM = os.environ['PLATFORM'].lower() PREFIX= os.environ['PREFIX'] @@ -30,49 +32,52 @@ def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None: ) names = locals() - for throughput in THROUGHPUT_LIST: - # Data streams and related delivery streams for kinesis test - names[PLATFORM+'_kinesis_stream_'+throughput] = kinesis.Stream(self, 
PLATFORM+'KinesisStream'+throughput, - stream_name=PREFIX+PLATFORM+'-kinesisStream-'+throughput, - shard_count=100) - kinesis_policy = iam.Policy(self, 'kinesisPolicyfor'+throughput, - statements=[iam.PolicyStatement(actions=['kinesis:*'], resources=[names.get(PLATFORM+'_kinesis_stream_'+throughput).stream_arn])], - roles=[firehose_role], - ) - names[PLATFORM+'_kinesis_test_delivery_stream_'+throughput] = firehose.CfnDeliveryStream( - self, PLATFORM+'KinesisTestDeliveryStream'+throughput, - delivery_stream_name=PREFIX+PLATFORM+'-kinesisTest-deliveryStream-'+throughput, - delivery_stream_type='KinesisStreamAsSource', - kinesis_stream_source_configuration=firehose.CfnDeliveryStream.KinesisStreamSourceConfigurationProperty( - kinesis_stream_arn=names.get(PLATFORM+'_kinesis_stream_'+throughput).stream_arn, - role_arn=firehose_role.role_arn - ), - s3_destination_configuration=firehose.CfnDeliveryStream.S3DestinationConfigurationProperty( - bucket_arn=bucket.bucket_arn, - buffering_hints=firehose.CfnDeliveryStream.BufferingHintsProperty( - interval_in_seconds=60, - size_in_m_bs=50 - ), - compression_format='UNCOMPRESSED', - role_arn=firehose_role.role_arn, - prefix=f'kinesis-test/{PLATFORM}/{throughput}/' - )) - names.get(PLATFORM+'_kinesis_test_delivery_stream_'+throughput).add_depends_on(kinesis_policy.node.default_child) - # Delivery streams for firehose test - names[PLATFORM+'_firehose_test_delivery_stream_'+throughput] = firehose.CfnDeliveryStream( - self, PLATFORM+'FirehoseTestDeliveryStream'+throughput, - delivery_stream_name=PREFIX+PLATFORM+'-firehoseTest-deliveryStream-'+throughput, - delivery_stream_type='DirectPut', - s3_destination_configuration=firehose.CfnDeliveryStream.S3DestinationConfigurationProperty( - bucket_arn=bucket.bucket_arn, - buffering_hints=firehose.CfnDeliveryStream.BufferingHintsProperty( - interval_in_seconds=60, - size_in_m_bs=50 + for destination in DESTINATION_LIST: + for throughput in THROUGHPUT_LIST: + caps_identifier = 
destination.capitalize().replace("-", "") + throughput.capitalize() + identifier = destination + throughput + # Data streams and related delivery streams for kinesis test + names[PLATFORM+'_kinesis_stream_'+identifier] = kinesis.Stream(self, PLATFORM+'KinesisStream'+caps_identifier, + stream_name=PREFIX+PLATFORM+'-kinesisStream-'+identifier, + shard_count=100) + kinesis_policy = iam.Policy(self, 'kinesisPolicyfor'+identifier, + statements=[iam.PolicyStatement(actions=['kinesis:*'], resources=[names.get(PLATFORM+'_kinesis_stream_'+identifier).stream_arn])], + roles=[firehose_role], + ) + names[PLATFORM+'_kinesis_test_delivery_stream_'+identifier] = firehose.CfnDeliveryStream( + self, PLATFORM+'KinesisTestDeliveryStream'+caps_identifier, + delivery_stream_name=PREFIX+PLATFORM+'-kinesisTest-deliveryStream-'+identifier, + delivery_stream_type='KinesisStreamAsSource', + kinesis_stream_source_configuration=firehose.CfnDeliveryStream.KinesisStreamSourceConfigurationProperty( + kinesis_stream_arn=names.get(PLATFORM+'_kinesis_stream_'+identifier).stream_arn, + role_arn=firehose_role.role_arn ), - compression_format='UNCOMPRESSED', - role_arn=firehose_role.role_arn, - prefix=f'firehose-test/{PLATFORM}/{throughput}/' - )) + s3_destination_configuration=firehose.CfnDeliveryStream.S3DestinationConfigurationProperty( + bucket_arn=bucket.bucket_arn, + buffering_hints=firehose.CfnDeliveryStream.BufferingHintsProperty( + interval_in_seconds=60, + size_in_m_bs=50 + ), + compression_format='UNCOMPRESSED', + role_arn=firehose_role.role_arn, + prefix=f'kinesis-test/{PLATFORM}/{identifier}/' + )) + names.get(PLATFORM+'_kinesis_test_delivery_stream_'+identifier).add_depends_on(kinesis_policy.node.default_child) + # Delivery streams for firehose test + names[PLATFORM+'_firehose_test_delivery_stream_'+identifier] = firehose.CfnDeliveryStream( + self, PLATFORM+'FirehoseTestDeliveryStream'+caps_identifier, + delivery_stream_name=PREFIX+PLATFORM+'-firehoseTest-deliveryStream-'+identifier, + 
delivery_stream_type='DirectPut', + s3_destination_configuration=firehose.CfnDeliveryStream.S3DestinationConfigurationProperty( + bucket_arn=bucket.bucket_arn, + buffering_hints=firehose.CfnDeliveryStream.BufferingHintsProperty( + interval_in_seconds=60, + size_in_m_bs=50 + ), + compression_format='UNCOMPRESSED', + role_arn=firehose_role.role_arn, + prefix=f'firehose-test/{PLATFORM}/{identifier}/' + )) # Add stack outputs core.CfnOutput(self, 'S3BucketName', diff --git a/load_tests/load_test.py b/load_tests/load_test.py index 1ee8fa6a4..386b23785 100644 --- a/load_tests/load_test.py +++ b/load_tests/load_test.py @@ -6,6 +6,7 @@ import subprocess from datetime import datetime, timezone +IS_TASK_DEFINITION_PRINTED = False PLATFORM = os.environ['PLATFORM'].lower() OUTPUT_PLUGIN = os.environ['OUTPUT_PLUGIN'].lower() TESTING_RESOURCES_STACK_NAME = os.environ['TESTING_RESOURCES_STACK_NAME'] @@ -19,6 +20,20 @@ else: THROUGHPUT_LIST = json.loads(os.environ['THROUGHPUT_LIST']) +# Input Logger Data +INPUT_LOGGERS = [ + { + "name": "stdstream", + "logger_image": "075490442118.dkr.ecr.us-west-2.amazonaws.com/load-test-fluent-bit-app-image:latest", + "fluent_config_file_path": "./load_tests/logger/stdout_logger/fluent.conf" + }, + { + "name": "tcp", + "logger_image": "826489191740.dkr.ecr.us-west-2.amazonaws.com/amazon/tcp-logger:latest", + "fluent_config_file_path": "./load_tests/logger/tcp_logger/fluent.conf" + }, +] + # Return the approximate log delay for each ecs load test # Estimate log delay = task_stop_time - task_start_time - logger_image_run_time def get_log_delay(log_delay_epoch_time): @@ -42,7 +57,7 @@ def check_app_exit_code(response): sys.exit('[TEST_FAILURE] Error occured to get task container list') for container in containers: if container['name'] == 'app' and container['exitCode'] != 0: - sys.exit('[TEST_FAILURE] Logger failed to generate all logs with exit code: ', container['exitCode']) + sys.exit('[TEST_FAILURE] Logger failed to generate all logs with exit 
code: ' + str(container['exitCode'])) # Return the total number of input records for each load test def calculate_total_input_number(throughput): @@ -51,17 +66,39 @@ def calculate_total_input_number(throughput): # 1. Configure task definition for each load test based on existing templates # 2. Register generated task definition -def generate_task_definition(throughput): +def generate_task_definition(throughput, input_logger, s3_fluent_config_arn): + if not hasattr(generate_task_definition, "counter"): + generate_task_definition.counter = 0 # it doesn't exist yet, so initialize it + generate_task_definition.counter += 1 + destination_identifier = get_destination_identifier(throughput, input_logger) + destination_identifier_firelens = get_destination_identifier_firelens(throughput, input_logger) task_definition_dict = { + + # App Container Environment Variables + '$APP_IMAGE': input_logger['logger_image'], + '$LOGGER_RUN_TIME_IN_SECOND': str(LOGGER_RUN_TIME_IN_SECOND), + + # Firelens Container Environment Variables + '$FLUENT_BIT_IMAGE': os.environ['FLUENT_BIT_IMAGE'], + '$INPUT_NAME': input_logger['name'], + '$LOGGER_PORT': "4560", + '$FLUENT_CONFIG_S3_FILE_ARN': s3_fluent_config_arn, + '$OUTPUT_PLUGIN': OUTPUT_PLUGIN, + + # General Environment Variables + '$FIRELENS_DESTINATION_IDENTIFIER': destination_identifier_firelens, + '$DESTINATION_IDENTIFIER': destination_identifier, '$THROUGHPUT': throughput, + + # Task Environment Variables '$TASK_ROLE_ARN': os.environ['LOAD_TEST_TASK_ROLE_ARN'], '$TASK_EXECUTION_ROLE_ARN': os.environ['LOAD_TEST_TASK_EXECUTION_ROLE_ARN'], - '$FLUENT_BIT_IMAGE': os.environ['FLUENT_BIT_IMAGE'], + + # Plugin Specific Environment Variables '$APP_IMAGE': os.environ['ECS_APP_IMAGE'], - '$LOGGER_RUN_TIME_IN_SECOND': str(LOGGER_RUN_TIME_IN_SECOND), 'cloudwatch': {'$CW_LOG_GROUP_NAME': os.environ['CW_LOG_GROUP_NAME']}, - 'firehose': {'$DELIVERY_STREAM': os.environ[f'FIREHOSE_TEST_{throughput}']}, - 'kinesis': {'$STREAM': 
os.environ[f'KINESIS_TEST_{throughput}']}, + 'firehose': {'$DELIVERY_STREAM_PREFIX': f'{PREFIX}{PLATFORM}-firehoseTest-deliveryStream'}, + 'kinesis': {'$STREAM_PREFIX': f'{PREFIX}{PLATFORM}-kinesisStream'}, 's3': {'$S3_BUCKET_NAME': os.environ['S3_BUCKET_NAME']}, } fin = open(f'./load_tests/task_definitions/{OUTPUT_PLUGIN}.json', 'r') @@ -76,7 +113,8 @@ def generate_task_definition(throughput): fout.write(data) fout.close() fin.close() - os.system(f'aws ecs register-task-definition --cli-input-json file://load_tests/task_definitions/{OUTPUT_PLUGIN}_{throughput}.json') + + os.system(f'aws ecs register-task-definition --cli-input-json file://load_tests/task_definitions/{OUTPUT_PLUGIN}_{throughput}.json {(">/dev/null", "")[IS_TASK_DEFINITION_PRINTED]}') # With multiple codebuild projects running parallel, # Testing resources only needs to be created once @@ -118,55 +156,105 @@ def run_ecs_tests(): ecs_cluster_name = os.environ['ECS_CLUSTER_NAME'] client = boto3.client('ecs') waiter = client.get_waiter('tasks_stopped') - processes = set() names = locals() - # Run ecs tasks and store task arns - for throughput in THROUGHPUT_LIST: - os.environ['THROUGHPUT'] = throughput - generate_task_definition(throughput) - response = client.run_task( + # Run ecs tests once per input logger type + for input_logger in INPUT_LOGGERS: + processes = set() + + # Delete corresponding testing data for a fresh start + delete_testing_data() + + # S3 Fluent Bit extra config data + s3_fluent_config_arn = publish_fluent_config_s3(input_logger) + + # Run ecs tasks and store task arns + for throughput in THROUGHPUT_LIST: + os.environ['THROUGHPUT'] = throughput + generate_task_definition(throughput, input_logger, s3_fluent_config_arn) + response = client.run_task( + cluster=ecs_cluster_name, + launchType='EC2', + taskDefinition=f'{PREFIX}{OUTPUT_PLUGIN}-{throughput}' + ) + names[f'{OUTPUT_PLUGIN}_{throughput}_task_arn'] = response['tasks'][0]['taskArn'] + + # Validation input type banner + 
print(f'\nValidation results for input type: {input_logger["name"]}') + + # Wait until task stops and start validation + for throughput in THROUGHPUT_LIST: + waiter.wait( cluster=ecs_cluster_name, - launchType='EC2', - taskDefinition=f'{PREFIX}{OUTPUT_PLUGIN}-{throughput}' - ) - names[f'{OUTPUT_PLUGIN}_{throughput}_task_arn'] = response['tasks'][0]['taskArn'] + tasks=[ + names[f'{OUTPUT_PLUGIN}_{throughput}_task_arn'], + ], + WaiterConfig={ + 'MaxAttempts': 600 + } + ) + response = client.describe_tasks( + cluster=ecs_cluster_name, + tasks=[ + names[f'{OUTPUT_PLUGIN}_{throughput}_task_arn'], + ] + ) + check_app_exit_code(response) + input_record = calculate_total_input_number(throughput) + start_time = response['tasks'][0]['startedAt'] + stop_time = response['tasks'][0]['stoppedAt'] + log_delay = get_log_delay(parse_time(stop_time)-parse_time(start_time)-LOGGER_RUN_TIME_IN_SECOND) + set_buffer(parse_time(stop_time)) + log_delay = get_log_delay(response) + set_buffer(response) + # Validate logs + os.environ['LOG_SOURCE_NAME'] = input_logger["name"] + os.environ['LOG_SOURCE_IMAGE'] = input_logger["logger_image"] + destination_identifier = get_destination_identifier(throughput, input_logger) + if OUTPUT_PLUGIN == 'cloudwatch': + os.environ['LOG_PREFIX'] = destination_identifier + os.environ['DESTINATION'] = 'cloudwatch' + else: + os.environ['LOG_PREFIX'] = f'{OUTPUT_PLUGIN}-test/ecs/' + destination_identifier + '/' + os.environ['DESTINATION'] = 's3' + processes.add(subprocess.Popen(['go', 'run', './load_tests/validation/validate.go', input_record, log_delay])) + + # Wait until all subprocesses for validation completed + for p in processes: + p.wait() - # Wait until task stops and start validation - for throughput in THROUGHPUT_LIST: - waiter.wait( - cluster=ecs_cluster_name, - tasks=[ - names[f'{OUTPUT_PLUGIN}_{throughput}_task_arn'], - ], - WaiterConfig={ - 'MaxAttempts': 600 - } - ) - response = client.describe_tasks( - cluster=ecs_cluster_name, - tasks=[ - 
names[f'{OUTPUT_PLUGIN}_{throughput}_task_arn'], - ] +# Returns s3 arn +def publish_fluent_config_s3(input_logger): + bucket_name = os.environ['S3_BUCKET_NAME'] + s3 = boto3.client('s3') + s3.upload_file( + input_logger["fluent_config_file_path"], + bucket_name, + f'{OUTPUT_PLUGIN}-test/{PLATFORM}/fluent-{input_logger["name"]}.conf', + ) + return f'arn:aws:s3:::{bucket_name}/{OUTPUT_PLUGIN}-test/{PLATFORM}/fluent-{input_logger["name"]}.conf' + +# The following method is used to clear data between +# testing batches +def delete_testing_data(): + # All testing data related to the plugin option will be deleted + if OUTPUT_PLUGIN == 'cloudwatch': + # Delete associated cloudwatch log streams + client = boto3.client('logs') + response = client.describe_log_streams( + logGroupName=os.environ['CW_LOG_GROUP_NAME'] ) - check_app_exit_code(response) - input_record = calculate_total_input_number(throughput) - start_time = response['tasks'][0]['startedAt'] - stop_time = response['tasks'][0]['stoppedAt'] - log_delay = get_log_delay(parse_time(stop_time)-parse_time(start_time)-LOGGER_RUN_TIME_IN_SECOND) - set_buffer(parse_time(stop_time)) - # Validate logs - if OUTPUT_PLUGIN == 'cloudwatch': - os.environ['LOG_PREFIX'] = throughput - os.environ['DESTINATION'] = 'cloudwatch' - else: - os.environ['LOG_PREFIX'] = f'{OUTPUT_PLUGIN}-test/ecs/{throughput}/' - os.environ['DESTINATION'] = 's3' - processes.add(subprocess.Popen(['go', 'run', './load_tests/validation/validate.go', input_record, log_delay])) - - # Wait until all subprocesses for validation completed - for p in processes: - p.wait() + for stream in response["logStreams"]: + client.delete_log_stream( + logGroupName=os.environ['CW_LOG_GROUP_NAME'], + logStreamName=stream["logStreamName"] + ) + else: + # Delete associated s3 bucket objects + s3 = boto3.resource('s3') + bucket = s3.Bucket(os.environ['S3_BUCKET_NAME']) + s3_objects = bucket.objects.filter(Prefix=f'{OUTPUT_PLUGIN}-test/{PLATFORM}/') + s3_objects.delete() def 
generate_daemonset_config(throughput): daemonset_config_dict = { @@ -230,6 +318,21 @@ def delete_testing_resources(): os.system('kubectl delete namespace load-test-fluent-bit-eks-ns') os.system(f'eksctl scale nodegroup --cluster={EKS_CLUSTER_NAME} --nodes=0 ng') +def get_destination_identifier(throughput, input_logger): + # Destination identifier + # [log source] ----- (stdout) -> std-{{throughput}}/... + # \___ (tcp ) -> {{throughput}}/... + # + # All inputs should have throughput as destination identifier + # except stdstream + destination_identifier = throughput + if (input_logger['name'] == 'stdstream'): + destination_identifier = 'std-' + throughput + return destination_identifier + +def get_destination_identifier_firelens(throughput, input_logger): + return 'std-' + throughput + if sys.argv[1] == 'create_testing_resources': create_testing_resources() elif sys.argv[1] == 'ECS': @@ -239,4 +342,4 @@ def delete_testing_resources(): elif sys.argv[1] == 'delete_testing_resources': # testing resources only need to be deleted once if OUTPUT_PLUGIN == 'cloudwatch': - delete_testing_resources() \ No newline at end of file + delete_testing_resources() diff --git a/load_tests/logger/Dockerfile b/load_tests/logger/stdout_logger/Dockerfile similarity index 100% rename from load_tests/logger/Dockerfile rename to load_tests/logger/stdout_logger/Dockerfile diff --git a/load_tests/logger/stdout_logger/fluent.conf b/load_tests/logger/stdout_logger/fluent.conf new file mode 100644 index 000000000..e69de29bb diff --git a/load_tests/logger/log_generator.c b/load_tests/logger/stdout_logger/log_generator.c similarity index 100% rename from load_tests/logger/log_generator.c rename to load_tests/logger/stdout_logger/log_generator.c diff --git a/load_tests/logger/tcp_logger/.gitignore b/load_tests/logger/tcp_logger/.gitignore new file mode 100644 index 000000000..6f3b7938f --- /dev/null +++ b/load_tests/logger/tcp_logger/.gitignore @@ -0,0 +1,2 @@ +.attach_* +target/ \ No newline at 
end of file diff --git a/load_tests/logger/tcp_logger/Dockerfile b/load_tests/logger/tcp_logger/Dockerfile new file mode 100644 index 000000000..4a0b33ade --- /dev/null +++ b/load_tests/logger/tcp_logger/Dockerfile @@ -0,0 +1,6 @@ +FROM openjdk:16-alpine3.13 + +WORKDIR /jars +COPY /target/my-app-1.0-SNAPSHOT-jar-with-dependencies.jar /jars/my-app-1.0-SNAPSHOT-jar-with-dependencies.jar + +ENTRYPOINT ["java", "-jar", "/jars/my-app-1.0-SNAPSHOT-jar-with-dependencies.jar"] diff --git a/load_tests/logger/tcp_logger/README.md b/load_tests/logger/tcp_logger/README.md new file mode 100644 index 000000000..aed6500c3 --- /dev/null +++ b/load_tests/logger/tcp_logger/README.md @@ -0,0 +1,47 @@ +### Log4j TCP Appender Test Code + +Many customers use the Log4j TCP Appender with Fluent Bit and ECS FireLens: https://github.com/aws-samples/amazon-ecs-firelens-examples/tree/mainline/examples/fluent-bit/ecs-log-collection#tutorial-3-using-log4j-with-tcp + +This directory contains some example log4j code that emits logs at a configurable rate. This can be used to stress/perf/debug test Fluent Bit's TCP input. + +#### Prerequisites +1. Install Maven & Java. Here's a link for how to do so on Amazon Linux 2 https://docs.aws.amazon.com/neptune/latest/userguide/iam-auth-connect-prerq.html + +#### Instructions to run +``` +# Build +cd tcp_logger +sudo ./build.sh + +# Configure +export ITERATION=25m +export TIME=10 +export LOGGER_PORT=4560 +export LOGGER_DEST_ADDR=127.0.0.1 + +# Run +./run.sh +``` + +Or concisely +``` +sudo ./build.sh; ./run.sh +``` + +#### Instructions to compile & run docker image locally +``` +cd tcp_logger +sudo ./build.sh +docker build -t amazon/tcp-logger:latest . 
+docker run --add-host=host.docker.internal:host-gateway --net=host amazon/tcp-logger:latest +``` + +#### Pushing to ECR +``` +ecs-cli push amazon/tcp-logger:latest +``` + +#### Debugging TCP Logger +``` +export DEBUG_TCP_LOGGER=true +``` \ No newline at end of file diff --git a/load_tests/logger/tcp_logger/build-clean.sh b/load_tests/logger/tcp_logger/build-clean.sh new file mode 100755 index 000000000..e43c13617 --- /dev/null +++ b/load_tests/logger/tcp_logger/build-clean.sh @@ -0,0 +1,4 @@ +mvn clean +mvn compile +mvn assembly:single +mvn package diff --git a/load_tests/logger/tcp_logger/build.sh b/load_tests/logger/tcp_logger/build.sh new file mode 100755 index 000000000..388836ac0 --- /dev/null +++ b/load_tests/logger/tcp_logger/build.sh @@ -0,0 +1,3 @@ +mvn compile +mvn assembly:single +mvn package diff --git a/load_tests/logger/tcp_logger/fluent.conf b/load_tests/logger/tcp_logger/fluent.conf new file mode 100644 index 000000000..35d6f1536 --- /dev/null +++ b/load_tests/logger/tcp_logger/fluent.conf @@ -0,0 +1,51 @@ +[SERVICE] + Grace 30 + Log_Level debug + +[INPUT] + # TAG used for routing in OUTPUT. 
Don't parse JSON for performance + Name tcp + Listen 0.0.0.0 + Port ${LOGGER_PORT} + Tag ${OUTPUT_PLUGIN} + Format none + +[OUTPUT] + Name s3 + Match s3 + region us-west-2 + bucket ${S3_BUCKET_NAME} + total_file_size 50M + upload_timeout 1m + use_put_object On + s3_key_format /s3-test/ecs/${DESTINATION_IDENTIFIER}/$TAG/%Y/%m/%d/%H/%M/%S + auto_retry_requests true + workers 1 + +[OUTPUT] + Name kinesis_streams + Match kinesis + region us-west-2 + stream ${STREAM_PREFIX}-${DESTINATION_IDENTIFIER} + auto_retry_requests true + workers 5 + +[OUTPUT] + Name kinesis_firehose + Match firehose + region us-west-2 + delivery_stream ${DELIVERY_STREAM_PREFIX}-${DESTINATION_IDENTIFIER} + auto_retry_requests true + workers 5 + +[OUTPUT] + Name cloudwatch_logs + Match cloudwatch + log_stream_name ${THROUGHPUT} + log_group_name ${CW_LOG_GROUP_NAME} + auto_create_group true + region us-west-2 + log_key log + auto_retry_requests true + workers 1 + retry_limit 5 diff --git a/load_tests/logger/tcp_logger/pom.xml b/load_tests/logger/tcp_logger/pom.xml new file mode 100644 index 000000000..328b116d6 --- /dev/null +++ b/load_tests/logger/tcp_logger/pom.xml @@ -0,0 +1,81 @@ + + + + 4.0.0 + + com.mycompany.app + my-app + 1.0-SNAPSHOT + + my-app + + http://www.example.com + + + UTF-8 + 1.7 + 1.7 + + + + + junit + junit + 4.11 + test + + + org.apache.logging.log4j + log4j-api + 2.17.1 + + + org.apache.logging.log4j + log4j-core + 2.17.1 + + + + log4j + log4j + 1.2.17 + + + + + + + + maven-assembly-plugin + + + + com.mycompany.app.App + + + + jar-with-dependencies + + + + + + + org.apache.maven.plugins + maven-jar-plugin + 3.1.0 + + + + true + + com.mycompany.app.App + + + + + + + + diff --git a/load_tests/logger/tcp_logger/run.sh b/load_tests/logger/tcp_logger/run.sh new file mode 100755 index 000000000..19486fa0b --- /dev/null +++ b/load_tests/logger/tcp_logger/run.sh @@ -0,0 +1 @@ +java -cp target/my-app-1.0-SNAPSHOT-jar-with-dependencies.jar com.mycompany.app.App \ No newline at end of 
file diff --git a/load_tests/logger/tcp_logger/src/main/java/com/mycompany/app/App.java b/load_tests/logger/tcp_logger/src/main/java/com/mycompany/app/App.java new file mode 100644 index 000000000..6c5c63bc6 --- /dev/null +++ b/load_tests/logger/tcp_logger/src/main/java/com/mycompany/app/App.java @@ -0,0 +1,59 @@ +package com.mycompany.app; + +import org.apache.logging.log4j.LogManager; +import java.util.concurrent.TimeUnit; +import org.apache.logging.log4j.Logger; + +public class App +{ + private static final Logger logger; + private static int TIME; + private static int ITERATION; + private static String ONE_KB_TEXT; + + public static void main(final String[] args) throws InterruptedException { + + String tmp = System.getenv("TIME"); + if (tmp != null) { + try { + App.TIME = Integer.parseInt(tmp); + } + catch (NumberFormatException e) { + e.printStackTrace(); + } + } + tmp = System.getenv("ITERATION"); + if (tmp != null) { + try { + App.ITERATION = Integer.parseInt(tmp.replace("m", "")) * 1000; + } + catch (NumberFormatException e) { + e.printStackTrace(); + } + } + + if (System.getenv("DEBUG_TCP_LOGGER") != null && System.getenv("DEBUG_TCP_LOGGER").equals("true")) { + System.out.println("Starting Load Test. Iteration " + App.ITERATION + ". On port: " + System.getenv("LOGGER_PORT") + ". 
Time: " + App.TIME); + } + + final ClassLoader loader = App.class.getClassLoader(); + final long testStartTime = System.currentTimeMillis(); + long testExpectedTime = System.currentTimeMillis(); + for (int i = 0; i < App.TIME; ++i) { + final long batchStartTime = System.currentTimeMillis(); + for (int k = 0; k < App.ITERATION; ++k) { + App.logger.info("" + (10000000 + i*App.ITERATION + k) + "_" + batchStartTime + "_" + App.ONE_KB_TEXT); + } + testExpectedTime += 1000L; + final long deltaTime = testExpectedTime - System.currentTimeMillis(); + TimeUnit.MILLISECONDS.sleep(deltaTime); + } + } + + static { + logger = LogManager.getLogger((Class)App.class); + App.TIME = 10; + App.ITERATION = 1; + App.ONE_KB_TEXT = "RUDQEWDDKBVMHPYVOAHGADVQGRHGCNRDCTLUWQCBFBKFGZHTGEUKFXWNCKXPRWBSVJGHEARMDQGVVRFPVCIBYEORHYPUTQJKUMNZJXIYLDCJUHABJIXFPUNJQDORGPKWFLQZXIGVGCWTZCVWGBFSGVXGEITYKNTWCYZDOAZFOTXDOFRPECXBSCSORSUUNUJZEJZPTODHBXVMOETBRFGNWNZHGINVNYZPKKSFLZHLSSDHFGLTHZEKICPGNYSCTAIHARDDYIJHKLMAOIDLEKRXMFNVJOJVDFYKNVIQKCIGTRFWKJRHQSFDWWKTJNMNKFBOMBMZMRCOHPUFZEPTQTZBLBDBZPJJXRYDFSOWKDVZLZYWSJYFTCKQJFPQOMCWQHKLNHUGWWVBGTRLLVUHTPHTKNBSRUNNOIFGIJPBHPCKYXNGDCQYJEWFFKRRTHJDUBEZPJIXMAOLZQDZQAYEUZFRLTLTXNGAVAGZZDUERZWTJVDTXPKOIRTCKTFOFJAXVFLNKPBYOIYVPHUYBRZZORCEMMAUTZIAUSXVDTKHSUIRTSYWQMYZBMUGSATXPNESEVQMUKHYZFWSLHJDNYUQWOKDUTUKPRXBLIYGSCFGBGXATINMMCWNWBGJTLZTPKGBTPWTHQPUHDJITWPCJLGZFNZTCIEWWVTREFCTPVOUADQCRQCBRHNHDKGQIXHIWGGDGAAFYZRODKFTKQATAUDOMZTSQUYZHGNJOBSUJDHESPBOIJCGXPEZMMQJNFTYBJEYXPZAZICZJKEZKCZEUMZTTSQEHADOVMCDMDEBUJAPKIAEYQEWIYZSAYAWAGFSTBJYCUFZHMJMLCTVTZWGCPDAURQYSXVICLVWKPAOMVTQTESYFPTMNMSNZPUXMDJRDKHDRAIRYELEXRJUAMOLZVWNHGNVFETVUDZEIDJRPSHMXAZDZXDCXMUJTPDTDUHBAZGPIQOUNUHMVLCZCSUUHGTE"; + } +} diff --git a/load_tests/logger/tcp_logger/src/main/java/com/mycompany/app/run.txt b/load_tests/logger/tcp_logger/src/main/java/com/mycompany/app/run.txt new file mode 100644 index 000000000..ca3a7eac1 --- /dev/null +++ b/load_tests/logger/tcp_logger/src/main/java/com/mycompany/app/run.txt @@ 
-0,0 +1,5 @@ +docker run -it -e "LOGGER_THREADS=1" -e "LOGGER_ITERATIONS=1" -e "LOG_SIZE_BYTES=1" -e "LOGGER_SLEEP_MS=1000" -e "LOGGER_PORT=5171" -e "INFINITE=true" log4j:latest + +docker run -d --network host -e "LOGGER_THREADS=50" -e "LOGGER_ITERATIONS=1" -e "LOG_SIZE_BYTES=1000" -e "LOGGER_SLEEP_MS=10000" -e "LOGGER_PORT=5471" -e "INFINITE=true" log4j:latest + +docker run -d --network host -e "LOGGER_THREADS=50" -e "LOGGER_ITERATIONS=1" -e "LOG_SIZE_BYTES=25000" -e "LOGGER_SLEEP_MS=10000" -e "LOGGER_PORT=5472" -e "INFINITE=true" log4j:latest \ No newline at end of file diff --git a/load_tests/logger/tcp_logger/src/main/resources/log4j2.xml b/load_tests/logger/tcp_logger/src/main/resources/log4j2.xml new file mode 100644 index 000000000..90db1bc22 --- /dev/null +++ b/load_tests/logger/tcp_logger/src/main/resources/log4j2.xml @@ -0,0 +1,24 @@ + + + + + + + + + %m%n + + + + + + + + + + \ No newline at end of file diff --git a/load_tests/logger/tcp_logger/src/test/java/com/mycompany/app/AppTest.java b/load_tests/logger/tcp_logger/src/test/java/com/mycompany/app/AppTest.java new file mode 100644 index 000000000..81ac34538 --- /dev/null +++ b/load_tests/logger/tcp_logger/src/test/java/com/mycompany/app/AppTest.java @@ -0,0 +1,20 @@ +package com.mycompany.app; + +import static org.junit.Assert.assertTrue; + +import org.junit.Test; + +/** + * Unit test for simple App. 
+ */ +public class AppTest +{ + /** + * Rigorous Test :-) + */ + @Test + public void shouldAnswerWithTrue() + { + assertTrue( true ); + } +} diff --git a/load_tests/logger/tcp_logger/src/test/log4j.properties b/load_tests/logger/tcp_logger/src/test/log4j.properties new file mode 100644 index 000000000..2ea9fa920 --- /dev/null +++ b/load_tests/logger/tcp_logger/src/test/log4j.properties @@ -0,0 +1,8 @@ +# Root logger option +log4j.rootLogger=DEBUG, stdout + +# Redirect log messages to console +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.Target=System.out +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n \ No newline at end of file diff --git a/load_tests/setup_test_environment.sh b/load_tests/setup_test_environment.sh index 405b4c8f9..29fcab744 100755 --- a/load_tests/setup_test_environment.sh +++ b/load_tests/setup_test_environment.sh @@ -13,12 +13,7 @@ fi stackOutputs=$(aws cloudformation describe-stacks --stack-name ${LOG_STORAGE_STACK_NAME} --output text --query 'Stacks[0].Outputs[*].OutputValue') read -r -a outputArray <<< "$stackOutputs" export S3_BUCKET_NAME="${outputArray[0]}" -# Set load tests related data streams and delivery streams as environment variables. 
These resources are predefined and created in stack load-test-fluent-bit-log-storage -ThroughputArray=("1m" "2m" "3m" "20m" "25m" "30m") -for i in "${ThroughputArray[@]}"; do - export KINESIS_TEST_${i}="${PREFIX}ecs-kinesisStream-${i}" - export FIREHOSE_TEST_${i}="${PREFIX}ecs-firehoseTest-deliveryStream-${i}" -done + # Set necessary images as env vars export FLUENT_BIT_IMAGE="${AWS_ACCOUNT}.dkr.ecr.${AWS_REGION}.amazonaws.com/amazon/aws-for-fluent-bit-test:latest" export ECS_APP_IMAGE="906394416424.dkr.ecr.us-west-2.amazonaws.com/load-test-fluent-bit-ecs-app-image:latest" diff --git a/load_tests/task_definitions/cloudwatch.json b/load_tests/task_definitions/cloudwatch.json index 30b5c5348..46fc70e15 100644 --- a/load_tests/task_definitions/cloudwatch.json +++ b/load_tests/task_definitions/cloudwatch.json @@ -2,13 +2,30 @@ "family": "load-test-fluent-bit-cloudwatch-$THROUGHPUT", "taskRoleArn": "$TASK_ROLE_ARN", "executionRoleArn": "$TASK_EXECUTION_ROLE_ARN", + "networkMode": "bridge", "containerDefinitions": [ { "essential": true, "image": "$FLUENT_BIT_IMAGE", "name": "log_router", + "environment": [ + { "name": "OUTPUT_PLUGIN", "value": "$OUTPUT_PLUGIN" }, + { "name": "LOGGER_PORT", "value": "$LOGGER_PORT" }, + { "name": "THROUGHPUT", "value": "$THROUGHPUT" }, + { "name": "INPUT_NAME", "value": "$INPUT_NAME" }, + + { "name": "S3_BUCKET_NAME", "value": "$S3_BUCKET_NAME" }, + { "name": "DESTINATION_IDENTIFIER", "value": "$DESTINATION_IDENTIFIER" }, + { "name": "STREAM_PREFIX", "value": "$STREAM_PREFIX" }, + { "name": "DELIVERY_STREAM_PREFIX", "value": "$DELIVERY_STREAM_PREFIX" }, + { "name": "CW_LOG_GROUP_NAME", "value": "$CW_LOG_GROUP_NAME" } + ], "firelensConfiguration": { - "type": "fluentbit" + "type": "fluentbit", + "options": { + "config-file-type": "s3", + "config-file-value": "$FLUENT_CONFIG_S3_FILE_ARN" + } }, "logConfiguration": { "logDriver": "awslogs", @@ -19,16 +36,20 @@ "awslogs-stream-prefix": "ecs-load-test" } }, - "cpu": 1024, + "cpu": 512, 
"memoryReservation": 50 }, { "essential": true, "image": "$APP_IMAGE", "name": "app", + "links": ["log_router"], "environment" : [ { "name" : "ITERATION", "value" : "$THROUGHPUT" }, - { "name" : "TIME", "value" : "$LOGGER_RUN_TIME_IN_SECOND" } + { "name" : "TIME", "value" : "$LOGGER_RUN_TIME_IN_SECOND" }, + { "name" : "LOGGER_PORT", "value": "$LOGGER_PORT" }, + { "name" : "LOGGER_DEST_ADDR", "value": "log_router" }, + { "name" : "DEBUG_TCP_LOGGER", "value": "false" } ], "logConfiguration": { "logDriver":"awsfirelens", @@ -38,7 +59,7 @@ "log_key": "log", "log_group_name": "$CW_LOG_GROUP_NAME", "auto_create_group": "true", - "log_stream_name": "$THROUGHPUT", + "log_stream_name": "$FIRELENS_DESTINATION_IDENTIFIER", "log-driver-buffer-limit": "536870911", "auto_retry_requests": "true", "workers": "1", diff --git a/load_tests/task_definitions/firehose.json b/load_tests/task_definitions/firehose.json index f863eee36..8841411a9 100644 --- a/load_tests/task_definitions/firehose.json +++ b/load_tests/task_definitions/firehose.json @@ -2,13 +2,30 @@ "family": "load-test-fluent-bit-firehose-$THROUGHPUT", "taskRoleArn": "$TASK_ROLE_ARN", "executionRoleArn": "$TASK_EXECUTION_ROLE_ARN", + "networkMode": "bridge", "containerDefinitions": [ { "essential": true, "image": "$FLUENT_BIT_IMAGE", "name": "log_router", + "environment": [ + { "name": "OUTPUT_PLUGIN", "value": "$OUTPUT_PLUGIN" }, + { "name": "LOGGER_PORT", "value": "$LOGGER_PORT" }, + { "name": "THROUGHPUT", "value": "$THROUGHPUT" }, + { "name": "INPUT_NAME", "value": "$INPUT_NAME" }, + + { "name": "S3_BUCKET_NAME", "value": "$S3_BUCKET_NAME" }, + { "name": "DESTINATION_IDENTIFIER", "value": "$DESTINATION_IDENTIFIER" }, + { "name": "STREAM_PREFIX", "value": "$STREAM_PREFIX" }, + { "name": "DELIVERY_STREAM_PREFIX", "value": "$DELIVERY_STREAM_PREFIX" }, + { "name": "CW_LOG_GROUP_NAME", "value": "$CW_LOG_GROUP_NAME" } + ], "firelensConfiguration": { - "type": "fluentbit" + "type": "fluentbit", + "options": { + 
"config-file-type": "s3", + "config-file-value": "$FLUENT_CONFIG_S3_FILE_ARN" + } }, "logConfiguration": { "logDriver": "awslogs", @@ -19,23 +36,27 @@ "awslogs-stream-prefix": "ecs-load-test" } }, - "cpu": 1024, + "cpu": 512, "memoryReservation": 50 }, { "essential": true, "image": "$APP_IMAGE", "name": "app", + "links": ["log_router"], "environment" : [ { "name" : "ITERATION", "value" : "$THROUGHPUT" }, - { "name" : "TIME", "value" : "$LOGGER_RUN_TIME_IN_SECOND" } + { "name" : "TIME", "value" : "$LOGGER_RUN_TIME_IN_SECOND" }, + { "name" : "LOGGER_PORT", "value": "$LOGGER_PORT" }, + { "name" : "LOGGER_DEST_ADDR", "value": "log_router" }, + { "name" : "DEBUG_TCP_LOGGER", "value": "false" } ], "logConfiguration": { "logDriver":"awsfirelens", "options": { "Name": "kinesis_firehose", "region": "us-west-2", - "delivery_stream": "$DELIVERY_STREAM", + "delivery_stream": "$DELIVERY_STREAM_PREFIX-$FIRELENS_DESTINATION_IDENTIFIER", "log-driver-buffer-limit": "536870911", "auto_retry_requests": "true", "workers": "5" diff --git a/load_tests/task_definitions/kinesis.json b/load_tests/task_definitions/kinesis.json index e27cfef68..0a28be3f4 100644 --- a/load_tests/task_definitions/kinesis.json +++ b/load_tests/task_definitions/kinesis.json @@ -2,13 +2,30 @@ "family": "load-test-fluent-bit-kinesis-$THROUGHPUT", "taskRoleArn": "$TASK_ROLE_ARN", "executionRoleArn": "$TASK_EXECUTION_ROLE_ARN", + "networkMode": "bridge", "containerDefinitions": [ { "essential": true, "image": "$FLUENT_BIT_IMAGE", "name": "log_router", + "environment": [ + { "name": "OUTPUT_PLUGIN", "value": "$OUTPUT_PLUGIN" }, + { "name": "LOGGER_PORT", "value": "$LOGGER_PORT" }, + { "name": "THROUGHPUT", "value": "$THROUGHPUT" }, + { "name": "INPUT_NAME", "value": "$INPUT_NAME" }, + + { "name": "S3_BUCKET_NAME", "value": "$S3_BUCKET_NAME" }, + { "name": "DESTINATION_IDENTIFIER", "value": "$DESTINATION_IDENTIFIER" }, + { "name": "STREAM_PREFIX", "value": "$STREAM_PREFIX" }, + { "name": "DELIVERY_STREAM_PREFIX", 
"value": "$DELIVERY_STREAM_PREFIX" }, + { "name": "CW_LOG_GROUP_NAME", "value": "$CW_LOG_GROUP_NAME" } + ], "firelensConfiguration": { - "type": "fluentbit" + "type": "fluentbit", + "options": { + "config-file-type": "s3", + "config-file-value": "$FLUENT_CONFIG_S3_FILE_ARN" + } }, "logConfiguration": { "logDriver": "awslogs", @@ -19,23 +36,27 @@ "awslogs-stream-prefix": "ecs-load-test" } }, - "cpu": 1024, + "cpu": 512, "memoryReservation": 50 }, { "essential": true, "image": "$APP_IMAGE", + "links": ["log_router"], "name": "app", "environment" : [ { "name" : "ITERATION", "value" : "$THROUGHPUT" }, - { "name" : "TIME", "value" : "$LOGGER_RUN_TIME_IN_SECOND" } + { "name" : "TIME", "value" : "$LOGGER_RUN_TIME_IN_SECOND" }, + { "name" : "LOGGER_PORT", "value": "$LOGGER_PORT" }, + { "name" : "LOGGER_DEST_ADDR", "value": "log_router" }, + { "name" : "DEBUG_TCP_LOGGER", "value": "false" } ], "logConfiguration": { "logDriver":"awsfirelens", "options": { "Name": "kinesis_streams", "region": "us-west-2", - "stream": "$STREAM", + "stream": "$STREAM_PREFIX-$FIRELENS_DESTINATION_IDENTIFIER", "log-driver-buffer-limit": "536870911", "auto_retry_requests": "true", "workers": "5" diff --git a/load_tests/task_definitions/s3.json b/load_tests/task_definitions/s3.json index cf9b6d6b4..08119dd74 100644 --- a/load_tests/task_definitions/s3.json +++ b/load_tests/task_definitions/s3.json @@ -2,13 +2,30 @@ "family": "load-test-fluent-bit-s3-$THROUGHPUT", "taskRoleArn": "$TASK_ROLE_ARN", "executionRoleArn": "$TASK_EXECUTION_ROLE_ARN", + "networkMode": "bridge", "containerDefinitions": [ { "essential": true, "image": "$FLUENT_BIT_IMAGE", "name": "log_router", + "environment": [ + { "name": "OUTPUT_PLUGIN", "value": "$OUTPUT_PLUGIN" }, + { "name": "LOGGER_PORT", "value": "$LOGGER_PORT" }, + { "name": "THROUGHPUT", "value": "$THROUGHPUT" }, + { "name": "INPUT_NAME", "value": "$INPUT_NAME" }, + + { "name": "S3_BUCKET_NAME", "value": "$S3_BUCKET_NAME" }, + { "name": "DESTINATION_IDENTIFIER", 
"value": "$DESTINATION_IDENTIFIER" }, + { "name": "STREAM_PREFIX", "value": "$STREAM_PREFIX" }, + { "name": "DELIVERY_STREAM_PREFIX", "value": "$DELIVERY_STREAM_PREFIX" }, + { "name": "CW_LOG_GROUP_NAME", "value": "$CW_LOG_GROUP_NAME" } + ], "firelensConfiguration": { - "type": "fluentbit" + "type": "fluentbit", + "options": { + "config-file-type": "s3", + "config-file-value": "$FLUENT_CONFIG_S3_FILE_ARN" + } }, "logConfiguration": { "logDriver": "awslogs", @@ -19,16 +36,20 @@ "awslogs-stream-prefix": "ecs-load-test" } }, - "cpu": 1024, + "cpu": 512, "memoryReservation": 50 }, { "essential": true, "image": "$APP_IMAGE", "name": "app", + "links": ["log_router"], "environment" : [ { "name" : "ITERATION", "value" : "$THROUGHPUT" }, - { "name" : "TIME", "value" : "$LOGGER_RUN_TIME_IN_SECOND" } + { "name" : "TIME", "value" : "$LOGGER_RUN_TIME_IN_SECOND" }, + { "name" : "LOGGER_PORT", "value": "$LOGGER_PORT" }, + { "name" : "LOGGER_DEST_ADDR", "value": "log_router" }, + { "name" : "DEBUG_TCP_LOGGER", "value": "false" } ], "logConfiguration": { "logDriver":"awsfirelens", @@ -39,7 +60,7 @@ "total_file_size": "50M", "upload_timeout": "1m", "use_put_object": "On", - "s3_key_format": "/s3-test/ecs/$THROUGHPUT/$TAG/%Y/%m/%d/%H/%M/%S", + "s3_key_format": "/s3-test/ecs/$FIRELENS_DESTINATION_IDENTIFIER/$TAG/%Y/%m/%d/%H/%M/%S", "log-driver-buffer-limit": "536870911", "auto_retry_requests": "true", "workers": "1"