Skip to content

Commit

Permalink
fix(glue): s3 path specified in --spark-event-logs-path needs to end with slash (#29357)
Browse files Browse the repository at this point in the history

### Issue # (if applicable)

Closes #29356.

### Reason for this change

Currently, the S3 path specified in `--spark-event-logs-path` does not end with a slash when only a bucket is provided and no prefix is given. This parameter causes errors when viewing the event log through the Spark UI / Spark history server.

### Description of changes

Add a trailing slash to the S3 path when it does not already end with one.

### Description of how you validated changes

Completed unit tests and an integration test.

### Checklist
- [x] My code adheres to the [CONTRIBUTING GUIDE](https://github.com/aws/aws-cdk/blob/main/CONTRIBUTING.md) and [DESIGN GUIDELINES](https://github.com/aws/aws-cdk/blob/main/docs/DESIGN_GUIDELINES.md)

----

*By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license*
  • Loading branch information
moomindani authored Mar 19, 2024
1 parent f0d1d67 commit 4ff3565
Show file tree
Hide file tree
Showing 7 changed files with 420 additions and 11 deletions.
2 changes: 1 addition & 1 deletion packages/@aws-cdk/aws-glue-alpha/lib/job.ts
Original file line number Diff line number Diff line change
Expand Up @@ -826,7 +826,7 @@ export class Job extends JobBase {
bucket.grantReadWrite(role, this.cleanPrefixForGrant(props.prefix));
const args = {
'--enable-spark-ui': 'true',
'--spark-event-logs-path': bucket.s3UrlForObject(props.prefix),
'--spark-event-logs-path': bucket.s3UrlForObject(props.prefix).replace(/\/?$/, '/'), // path will always end with a slash
};

return {
Expand Down

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
Expand Up @@ -162,7 +162,8 @@
"s3://",
{
"Ref": "EtlJob20SparkUIBucketFD07FBD8"
}
},
"/"
]
]
},
Expand Down Expand Up @@ -267,6 +268,43 @@
"Properties": {
"PolicyDocument": {
"Statement": [
{
"Action": [
"s3:Abort*",
"s3:DeleteObject*",
"s3:GetBucket*",
"s3:GetObject*",
"s3:List*",
"s3:PutObject",
"s3:PutObjectLegalHold",
"s3:PutObjectRetention",
"s3:PutObjectTagging",
"s3:PutObjectVersionTagging"
],
"Effect": "Allow",
"Resource": [
{
"Fn::GetAtt": [
"StreamingJob20SparkUIBucket92EF3706",
"Arn"
]
},
{
"Fn::Join": [
"",
[
{
"Fn::GetAtt": [
"StreamingJob20SparkUIBucket92EF3706",
"Arn"
]
},
"/*"
]
]
}
]
},
{
"Action": [
"s3:GetBucket*",
Expand Down Expand Up @@ -319,6 +357,11 @@
]
}
},
"StreamingJob20SparkUIBucket92EF3706": {
"Type": "AWS::S3::Bucket",
"UpdateReplacePolicy": "Retain",
"DeletionPolicy": "Retain"
},
"StreamingJob20355B58C7": {
"Type": "AWS::Glue::Job",
"Properties": {
Expand All @@ -340,6 +383,19 @@
},
"DefaultArguments": {
"--job-language": "python",
"--enable-spark-ui": "true",
"--spark-event-logs-path": {
"Fn::Join": [
"",
[
"s3://",
{
"Ref": "StreamingJob20SparkUIBucket92EF3706"
},
"/"
]
]
},
"arg1": "value1",
"arg2": "value2"
},
Expand Down Expand Up @@ -520,7 +576,8 @@
"s3://",
{
"Ref": "EtlJob30SparkUIBucket9D789346"
}
},
"/"
]
]
},
Expand Down Expand Up @@ -625,6 +682,43 @@
"Properties": {
"PolicyDocument": {
"Statement": [
{
"Action": [
"s3:Abort*",
"s3:DeleteObject*",
"s3:GetBucket*",
"s3:GetObject*",
"s3:List*",
"s3:PutObject",
"s3:PutObjectLegalHold",
"s3:PutObjectRetention",
"s3:PutObjectTagging",
"s3:PutObjectVersionTagging"
],
"Effect": "Allow",
"Resource": [
{
"Fn::GetAtt": [
"StreamingJob30SparkUIBucketEFBF52D3",
"Arn"
]
},
{
"Fn::Join": [
"",
[
{
"Fn::GetAtt": [
"StreamingJob30SparkUIBucketEFBF52D3",
"Arn"
]
},
"/*"
]
]
}
]
},
{
"Action": [
"s3:GetBucket*",
Expand Down Expand Up @@ -677,6 +771,11 @@
]
}
},
"StreamingJob30SparkUIBucketEFBF52D3": {
"Type": "AWS::S3::Bucket",
"UpdateReplacePolicy": "Retain",
"DeletionPolicy": "Retain"
},
"StreamingJob30E005FBEB": {
"Type": "AWS::Glue::Job",
"Properties": {
Expand All @@ -698,6 +797,19 @@
},
"DefaultArguments": {
"--job-language": "python",
"--enable-spark-ui": "true",
"--spark-event-logs-path": {
"Fn::Join": [
"",
[
"s3://",
{
"Ref": "StreamingJob30SparkUIBucketEFBF52D3"
},
"/"
]
]
},
"arg1": "value1",
"arg2": "value2"
},
Expand Down Expand Up @@ -878,7 +990,8 @@
"s3://",
{
"Ref": "EtlJob40SparkUIBucket02F50B0D"
}
},
"/"
]
]
},
Expand Down Expand Up @@ -983,6 +1096,43 @@
"Properties": {
"PolicyDocument": {
"Statement": [
{
"Action": [
"s3:Abort*",
"s3:DeleteObject*",
"s3:GetBucket*",
"s3:GetObject*",
"s3:List*",
"s3:PutObject",
"s3:PutObjectLegalHold",
"s3:PutObjectRetention",
"s3:PutObjectTagging",
"s3:PutObjectVersionTagging"
],
"Effect": "Allow",
"Resource": [
{
"Fn::GetAtt": [
"StreamingJob40SparkUIBucketA97E24C6",
"Arn"
]
},
{
"Fn::Join": [
"",
[
{
"Fn::GetAtt": [
"StreamingJob40SparkUIBucketA97E24C6",
"Arn"
]
},
"/*"
]
]
}
]
},
{
"Action": [
"s3:GetBucket*",
Expand Down Expand Up @@ -1035,6 +1185,11 @@
]
}
},
"StreamingJob40SparkUIBucketA97E24C6": {
"Type": "AWS::S3::Bucket",
"UpdateReplacePolicy": "Retain",
"DeletionPolicy": "Retain"
},
"StreamingJob40E284A782": {
"Type": "AWS::Glue::Job",
"Properties": {
Expand All @@ -1056,6 +1211,19 @@
},
"DefaultArguments": {
"--job-language": "python",
"--enable-spark-ui": "true",
"--spark-event-logs-path": {
"Fn::Join": [
"",
[
"s3://",
{
"Ref": "StreamingJob40SparkUIBucketA97E24C6"
},
"/"
]
]
},
"arg1": "value1",
"arg2": "value2"
},
Expand Down

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading

0 comments on commit 4ff3565

Please sign in to comment.