From 01cbb55e2c19fee32e9783108c3aafb2baeaedff Mon Sep 17 00:00:00 2001
From: Rishav Dhar <19497993+rdhar@users.noreply.github.com>
Date: Mon, 21 Oct 2024 01:43:42 +0100
Subject: [PATCH] feat: major version update (#327)
* update workflow examples
Signed-off-by: Rishav Dhar <19497993+rdhar@users.noreply.github.com>
* test md
Signed-off-by: Rishav Dhar <19497993+rdhar@users.noreply.github.com>
* tidy
Signed-off-by: Rishav Dhar <19497993+rdhar@users.noreply.github.com>
* line breaks
Signed-off-by: Rishav Dhar <19497993+rdhar@users.noreply.github.com>
* more line breaks
Signed-off-by: Rishav Dhar <19497993+rdhar@users.noreply.github.com>
* another one
Signed-off-by: Rishav Dhar <19497993+rdhar@users.noreply.github.com>
* weird
Signed-off-by: Rishav Dhar <19497993+rdhar@users.noreply.github.com>
* more
Signed-off-by: Rishav Dhar <19497993+rdhar@users.noreply.github.com>
* consolidate action.yaml
Signed-off-by: Rishav Dhar <19497993+rdhar@users.noreply.github.com>
* relocate docs
Signed-off-by: Rishav Dhar <19497993+rdhar@users.noreply.github.com>
* fix relative links
Signed-off-by: Rishav Dhar <19497993+rdhar@users.noreply.github.com>
* caching example
Signed-off-by: Rishav Dhar <19497993+rdhar@users.noreply.github.com>
---------
Signed-off-by: Rishav Dhar <19497993+rdhar@users.noreply.github.com>
---
{docs => .github}/assets/comment.png | Bin
{docs => .github}/assets/revisions.png | Bin
.github/examples/pr_push_auth.yaml | 13 +-
docs/README.md => README.md | 43 +-
docs/SECURITY.md => SECURITY.md | 0
action.js | 607 -------------------------
action.yaml | 565 +++++++++++++++++++++++
action.yml | 390 ----------------
8 files changed, 611 insertions(+), 1007 deletions(-)
rename {docs => .github}/assets/comment.png (100%)
rename {docs => .github}/assets/revisions.png (100%)
rename docs/README.md => README.md (88%)
rename docs/SECURITY.md => SECURITY.md (100%)
delete mode 100644 action.js
create mode 100644 action.yaml
delete mode 100644 action.yml
diff --git a/docs/assets/comment.png b/.github/assets/comment.png
similarity index 100%
rename from docs/assets/comment.png
rename to .github/assets/comment.png
diff --git a/docs/assets/revisions.png b/.github/assets/revisions.png
similarity index 100%
rename from docs/assets/revisions.png
rename to .github/assets/revisions.png
diff --git a/.github/examples/pr_push_auth.yaml b/.github/examples/pr_push_auth.yaml
index 5fb01631..cb32c8c9 100644
--- a/.github/examples/pr_push_auth.yaml
+++ b/.github/examples/pr_push_auth.yaml
@@ -1,5 +1,5 @@
---
-name: Trigger on pull_request (plan) and push (apply) events with Terraform and AWS authentication.
+name: Trigger on pull_request (plan) and push (apply) events with Terraform, AWS authentication and caching.
on:
pull_request:
@@ -27,6 +27,17 @@ jobs:
aws-region: us-east-1
role-to-assume: ${{ secrets.AWS_ROLE }}
+ - name: Create cache
+ run: |
+ mkdir --parents $HOME/.terraform.d/plugin-cache
+ echo "TF_PLUGIN_CACHE_DIR=$HOME/.terraform.d/plugin-cache" >> $GITHUB_ENV
+
+ - name: Cache TF
+ uses: actions/cache@v4
+ with:
+ path: ~/.terraform.d/plugin-cache
+ key: cache-tf-${{ runner.os }}-${{ hashFiles('path/to/directory/.terraform.lock.hcl') }}
+
- name: Setup TF
uses: hashicorp/setup-terraform@v3
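
For context, the caching steps introduced in this example sit alongside the existing setup step roughly as follows. This is a minimal sketch of the consumer job, assuming a standard checkout step and the same `path/to/directory` placeholder for the lock file; the job scaffolding is illustrative, not part of the patch.

```yaml
jobs:
  provision:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      # Point the provider plugin cache at a stable directory.
      - name: Create cache
        run: |
          mkdir --parents $HOME/.terraform.d/plugin-cache
          echo "TF_PLUGIN_CACHE_DIR=$HOME/.terraform.d/plugin-cache" >> $GITHUB_ENV

      # Restore and save plugins keyed on the dependency lock file.
      - name: Cache TF
        uses: actions/cache@v4
        with:
          path: ~/.terraform.d/plugin-cache
          key: cache-tf-${{ runner.os }}-${{ hashFiles('path/to/directory/.terraform.lock.hcl') }}

      - name: Setup TF
        uses: hashicorp/setup-terraform@v3
```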
diff --git a/docs/README.md b/README.md
similarity index 88%
rename from docs/README.md
rename to README.md
index db013c11..e2a2f603 100644
--- a/docs/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
[![Terraform Compatible](https://img.shields.io/badge/Terraform-Compatible-844FBA?logo=terraform&logoColor=white)](https://github.com/hashicorp/setup-terraform "Terraform Compatible.")
[![OpenTofu Compatible](https://img.shields.io/badge/OpenTofu-Compatible-FFDA18?logo=opentofu&logoColor=white)](https://github.com/opentofu/setup-opentofu "OpenTofu Compatible.")
*
-[![GitHub license](https://img.shields.io/github/license/devsectop/tf-via-pr?logo=apache&label=License)](../LICENSE.txt "Apache License 2.0.")
+[![GitHub license](https://img.shields.io/github/license/devsectop/tf-via-pr?logo=apache&label=License)](LICENSE.txt "Apache License 2.0.")
[![GitHub release tag](https://img.shields.io/github/v/release/devsectop/tf-via-pr?logo=semanticrelease&label=Release)](https://github.com/devsectop/tf-via-pr/releases "View all releases.")
*
[![GitHub repository stargazers](https://img.shields.io/github/stars/devsectop/tf-via-pr)](https://github.com/devsectop/tf-via-pr "Become a stargazer.")
@@ -10,7 +10,7 @@
-[![PR comment of plan output with "Diff of changes" section expanded.](assets/comment.png)](https://github.com/devsectop/tf-via-pr/blob/main/docs/comment.png?raw=true "View full-size image.")
+[![PR comment of plan output with "Diff of changes" section expanded.](/.github/assets/comment.png)](https://github.com/devsectop/tf-via-pr/blob/main/.github/assets/comment.png?raw=true "View full-size image.")
+
## Usage
@@ -76,15 +77,31 @@ jobs:
>
> - Pin your workflow version to a specific release tag or SHA to harden your CI/CD pipeline [security](#security) against supply chain attacks.
> - Environment variables can be passed in for cloud platform authentication (e.g., [configure-aws-credentials](https://github.com/aws-actions/configure-aws-credentials "Configuring AWS credentials for use in GitHub Actions.") for short-lived credentials).
+
### Where to find more examples?
-The functional workflow examples below showcase common use cases, while a comprehensive list of inputs is also [documented](#parameters).
+The following workflows showcase common use cases, while a comprehensive list of inputs is [documented](#parameters) below.
-- [Trigger](/.github/examples/pr_push_auth.yaml) on `pull_request` (plan) and `push` (apply) events with Terraform and AWS **authentication**.
-- [Trigger](/.github/examples/pr_merge_matrix.yaml) on `pull_request` (plan) and `merge_group` (apply) events with OpenTofu in **matrix** strategy.
-- [Trigger](/.github/examples/pr_self_hosted.yaml) on `pull_request` (plan or apply) event event with Terraform and OpenTofu on **self-hosted** runner.
-- [Trigger](/.github/examples/schedule_refresh.yaml) on `schedule` (cron) event with "fmt" and "validate" checks to identify **configuration drift**.
+
+- [Run](/.github/examples/pr_push_auth.yaml) on `pull_request` (plan) and `push` (apply) events with Terraform, AWS **authentication** and **caching**.
+- [Run](/.github/examples/pr_merge_matrix.yaml) on `pull_request` (plan) and `merge_group` (apply) events with OpenTofu in **matrix** strategy.
+- [Run](/.github/examples/pr_self_hosted.yaml) on `pull_request` (plan or apply) event with Terraform and OpenTofu on **self-hosted** runner.
+- [Run](/.github/examples/schedule_refresh.yaml) on `schedule` (cron) event with "fmt" and "validate" checks to identify **configuration drift**.
+
### How does encryption work?
@@ -97,6 +114,7 @@ unzip
openssl enc -aes-256-ctr -pbkdf2 -salt -in -out tf.plan.decrypted -pass pass:"" -d
show tf.plan.decrypted
```
+
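
Expanded into a runnable sequence, the decryption flow looks roughly like this; the archive name, plan filename, and `PASSPHRASE` variable are assumed placeholders rather than values fixed by the action.

```bash
# Minimal sketch: decrypt a downloaded, encrypted plan artifact locally.
unzip tf.plan.zip
openssl enc -aes-256-ctr -pbkdf2 -salt \
  -in tf.plan -out tf.plan.decrypted \
  -pass pass:"$PASSPHRASE" -d
terraform show tf.plan.decrypted
```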
## Parameters
@@ -115,11 +133,13 @@ unzip
| UI | `comment-pr` | PR comment by: `update` existing comment, `recreate` and delete previous one, or `none`. Default: `update` |
| UI | `label-pr` | Add a PR label with the command input. Default: `true` |
| UI | `hide-args` | Hide comma-separated arguments from the command input. Default: `detailed-exitcode,lock,out,var` |
+
The default behavior of `comment-pr` is to update the existing PR comment with the latest plan output, making it easy to track changes over time through the comment's revision history.
-[![PR comment revision history comparing plan and apply outputs.](assets/revisions.png)](https://github.com/devsectop/tf-via-pr/blob/main/docs/revisions.png?raw=true "View full-size image.")
+[![PR comment revision history comparing plan and apply outputs.](/.github/assets/revisions.png)](https://github.com/devsectop/tf-via-pr/blob/main/.github/assets/revisions.png?raw=true "View full-size image.")
+
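
For example, switching from the default update behavior to recreating the comment on every run is a single input change in the calling step; a minimal sketch, where the version pin is a placeholder to be replaced with a release tag or SHA.

```yaml
- uses: devsectop/tf-via-pr@vX.Y.Z # placeholder; pin to a release tag or SHA
  with:
    command: plan
    comment-pr: recreate
```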
### Inputs - Arguments
@@ -171,6 +191,7 @@ unzip
| `arg-workspace` | `-workspace` |
| `arg-write` | `-write` |
+
### Outputs
@@ -180,11 +201,14 @@ unzip
| `comment-id` | ID of the PR comment. |
| `exitcode` | Exit code of the last TF command. |
| `identifier` | Unique name of the workflow run and artifact. |
+
## Security
View [security policy and reporting instructions](SECURITY.md).
+
+
## Changelog
View [all notable changes](https://github.com/devsectop/tf-via-pr/releases "Releases.") to this project in [Keep a Changelog](https://keepachangelog.com "Keep a Changelog.") format, which adheres to [Semantic Versioning](https://semver.org "Semantic Versioning.").
@@ -197,9 +221,10 @@ View [all notable changes](https://github.com/devsectop/tf-via-pr/releases "Rele
> - [Raise an issue](https://github.com/devsectop/tf-via-pr/issues "Raise an issue.") to propose changes or report unexpected behavior.
> - [Open a discussion](https://github.com/devsectop/tf-via-pr/discussions "Open a discussion.") to discuss broader topics or questions.
> - [Become a stargazer](https://github.com/devsectop/tf-via-pr/stargazers "Become a stargazer.") if you find this project useful.
+
## License
-- This project is licensed under the permissive [Apache License 2.0](../LICENSE.txt "Apache License 2.0.").
+- This project is licensed under the permissive [Apache License 2.0](LICENSE.txt "Apache License 2.0.").
- All works herein are my own, shared of my own volition, and [contributors](https://github.com/devsectop/tf-via-pr/graphs/contributors "Contributors.").
- Copyright 2022-2024 [Rishav Dhar](https://github.com/rdhar "Rishav Dhar's GitHub profile.") — All wrongs reserved.
diff --git a/docs/SECURITY.md b/SECURITY.md
similarity index 100%
rename from docs/SECURITY.md
rename to SECURITY.md
diff --git a/action.js b/action.js
deleted file mode 100644
index e5224f64..00000000
--- a/action.js
+++ /dev/null
@@ -1,607 +0,0 @@
-module.exports = async ({ context, core, exec, github }) => {
- // Set character limits to fit within GitHub comments.
- const result_concise_limit = 48e3;
- const result_outline_limit = 12e3;
- const fmt_result_limit = 6e3;
-
- // Get PR number from event trigger for unique identifier.
- let pr_number;
- if (context.eventName === "push") {
- const { data: list_prs_of_commit } =
- await github.rest.repos.listPullRequestsAssociatedWithCommit({
- commit_sha: context.sha,
- owner: context.repo.owner,
- repo: context.repo.repo,
- });
- const pr =
- list_prs_of_commit.find((pr) => {
- return context.payload.ref === `refs/heads/${pr.head.ref}`;
- }) || list_prs_of_commit[0];
- pr_number = pr.number;
- } else if (context.eventName === "merge_group") {
- pr_number = parseInt(context.ref.split("/pr-")[1]);
- } else {
- pr_number = context.issue.number || 0;
- }
-
- // Check for Tofu CLI path.
- process.env.tf_tool = process.env.TOFU_CLI_PATH ? "tofu" : process.env.tf_tool;
-
- // Unique identifier of the TF run for later retrieval.
- const tf_identifier = [
- process.env.tf_tool,
- pr_number,
- process.env.arg_chdir,
- process.env.arg_workspace_alt,
- process.env.arg_backend_config,
- process.env.arg_var_file,
- process.env.arg_destroy,
- process.env.arg_out,
- ]
- .filter((arg) => arg)
- .map((arg) => String(arg).replace(/[^a-zA-Z0-9]/g, "-"))
- .join(".");
- core.setOutput("identifier", tf_identifier);
-
- // Capture TF command input and outputs.
- let cli_input, cli_result, fmt_result, result_concise, result_outline, result_summary;
-
- const data_handler = (data) => {
- cli_result += data.toString();
-
- // Filter result to drop state refresh information.
- result_concise = cli_result
- .split("\n")
- .filter(
- (line) =>
- !/(: Creating...|: Creation complete after|: Destroying...|: Destruction complete after|: Modifications complete after|: Modifying...|: Provisioning with|: Read complete after|: Reading...|: Refreshing state...|: Still creating...|: Still destroying...|: Still modifying...|: Still reading...|. This may take a few moments...)/.test(
- line
- )
- )
- .join("\n");
- if (result_concise?.length >= result_concise_limit) {
- result_concise = result_concise.substring(0, result_concise_limit) + "…";
- }
- core.setOutput("last_result", result_concise);
-
- // Capture result summary.
- result_summary =
- cli_result
- .split("\n")
- .reverse()
- .find((line) => /^(No changes|Error:|Apply|Plan:)/.test(line)) || "View details…";
- core.setOutput("summary", result_summary);
- };
-
- const options = {
- listeners: {
- stdout: data_handler,
- stderr: data_handler,
- },
- ignoreReturnCode: true,
- };
-
- // Function to execute TF commands.
- const exec_tf = async (input_arguments, input_header, input_header_slice) => {
- const arguments = input_arguments.filter((arg) => arg);
- const header = input_header.filter((arg) => arg);
- cli_input = header.concat(arguments.slice(input_header_slice)).join(" ");
- cli_result = "";
- core.setOutput("header", cli_input);
- const exitcode = await exec.exec(process.env.tf_tool, arguments, options);
- if (exitcode === 1) {
- core.setFailed(`Process failed with exit code ${exitcode}`);
- }
- };
-
- try {
- // TF init if not cached.
- if (!/^true$/i.test(process.env.cache_hit)) {
- await exec_tf(
- [
- process.env.arg_chdir,
- "init",
- process.env.arg_backend_config,
- process.env.arg_var_file,
- process.env.arg_var,
- process.env.arg_backend,
- process.env.arg_cloud,
- process.env.arg_force_copy,
- process.env.arg_from_module,
- process.env.arg_get,
- process.env.arg_lock_timeout,
- process.env.arg_lock,
- process.env.arg_lockfile,
- process.env.arg_migrate_state,
- process.env.arg_plugin_dir,
- process.env.arg_reconfigure,
- process.env.arg_test_directory,
- process.env.arg_upgrade,
- ],
- [
- "init",
- process.env.arg_chdir,
- process.env.arg_workspace_alt,
- process.env.arg_destroy,
- ],
- 2
- );
- }
-
- // Select or create TF workspace.
- if (process.env.arg_workspace) {
- await exec_tf(
- [
- process.env.arg_chdir,
- "workspace",
- "select",
- process.env.arg_or_create,
- process.env.arg_workspace,
- ],
- [
- "select",
- process.env.arg_chdir,
- process.env.arg_workspace_alt,
- process.env.arg_or_create,
- process.env.arg_backend_config,
- process.env.arg_var_file,
- process.env.arg_destroy,
- ],
- 5
- );
- }
-
- // TF validate.
- if (/^true$/i.test(process.env.validate_enable)) {
- await exec_tf(
- [
- process.env.arg_chdir,
- "validate",
- process.env.arg_json,
- process.env.arg_no_tests,
- process.env.arg_test_directory,
- ],
- [
- "validate",
- process.env.arg_chdir,
- process.env.arg_workspace_alt,
- process.env.arg_backend_config,
- process.env.arg_var_file,
- process.env.arg_destroy,
- ],
- 2
- );
- }
-
- // TF fmt.
- if (process.env.arg_command === "plan" && /^true$/i.test(process.env.fmt_enable)) {
- await exec_tf(
- [
- process.env.arg_chdir,
- "fmt",
- process.env.arg_list,
- process.env.arg_write,
- process.env.arg_diff,
- process.env.arg_check,
- process.env.arg_recursive,
- ],
- [
- "fmt",
- process.env.arg_chdir,
- process.env.arg_workspace_alt,
- process.env.arg_backend_config,
- process.env.arg_var_file,
- process.env.arg_destroy,
- ],
- 2
- );
-
- fmt_result = cli_result;
- if (fmt_result?.length >= fmt_result_limit) {
- fmt_result = cli_result.substring(0, fmt_result_limit) + "…";
- }
- core.setOutput("fmt_result", fmt_result);
- }
-
- // Add PR label of the TF command specified.
- if (pr_number && /^true$/i.test(process.env.label_pr)) {
- await github.rest.issues.addLabels({
- issue_number: pr_number,
- labels: [`tf:${process.env.arg_command}`],
- owner: context.repo.owner,
- repo: context.repo.repo,
- });
- await github.rest.issues.updateLabel({
- color: "5C4EE5",
- description: `Pull requests that ${process.env.arg_command} TF code`,
- name: `tf:${process.env.arg_command}`,
- owner: context.repo.owner,
- repo: context.repo.repo,
- });
- }
-
- // TF plan.
- if (process.env.arg_command === "plan") {
- await exec_tf(
- [
- process.env.arg_chdir,
- "plan",
- process.env.arg_out,
- process.env.arg_var_file,
- process.env.arg_destroy,
- process.env.arg_compact_warnings,
- process.env.arg_concise,
- process.env.arg_detailed_exitcode,
- process.env.arg_generate_config_out,
- process.env.arg_json,
- process.env.arg_lock_timeout,
- process.env.arg_lock,
- process.env.arg_parallelism,
- process.env.arg_refresh_only,
- process.env.arg_refresh,
- process.env.arg_replace,
- process.env.arg_target,
- process.env.arg_var,
- ],
- [
- "plan",
- process.env.arg_chdir,
- process.env.arg_workspace_alt,
- process.env.arg_backend_config,
- ],
- 3
- );
-
- result_outline = cli_result
- .split("\n")
- .filter((line) => line.startsWith(" # "))
- .map((line) => {
- const diff_line = line.slice(4);
- if (diff_line.includes(" created")) return "+ " + diff_line;
- if (diff_line.includes(" destroyed")) return "- " + diff_line;
- if (diff_line.includes(" updated") || diff_line.includes(" replaced"))
- return "! " + diff_line;
- return "# " + diff_line;
- })
- .join("\n");
- if (result_outline?.length >= result_outline_limit) {
- result_outline = result_outline.substring(0, result_outline_limit) + "…";
- }
- core.setOutput("outline", result_outline);
- }
-
- // TF apply.
- if (process.env.arg_command === "apply") {
- // Download the TF plan file if not auto-approved.
- if (!/^true$/i.test(process.env.arg_auto_approve_raw)) {
- // TF plan anew for later comparison if plan_parity is enabled.
- if (/^true$/i.test(process.env.plan_parity)) {
- await exec_tf(
- [
- process.env.arg_chdir,
- "plan",
- process.env.arg_out + ".new",
- process.env.arg_var_file,
- process.env.arg_destroy,
- process.env.arg_compact_warnings,
- process.env.arg_concise,
- process.env.arg_detailed_exitcode,
- process.env.arg_generate_config_out,
- process.env.arg_json,
- process.env.arg_lock_timeout,
- process.env.arg_lock,
- process.env.arg_parallelism,
- process.env.arg_refresh_only,
- process.env.arg_refresh,
- process.env.arg_replace,
- process.env.arg_target,
- process.env.arg_var,
- ],
- [
- "plan",
- process.env.arg_chdir,
- process.env.arg_workspace_alt,
- process.env.arg_backend_config,
- ],
- 3
- );
- }
-
- process.env.arg_auto_approve = process.env.arg_out.replace(/^-out=/, "");
- process.env.arg_var_file = process.env.arg_var = "";
-
- // List artifacts for the TF identifier.
- const { data: list_artifacts } = await github.rest.actions.listArtifactsForRepo({
- name: tf_identifier,
- owner: context.repo.owner,
- repo: context.repo.repo,
- });
-
- // Get the latest TF plan artifact download URL.
- const download_artifact = await github.rest.actions.downloadArtifact({
- archive_format: "zip",
- artifact_id: list_artifacts.artifacts[0].id,
- owner: context.repo.owner,
- repo: context.repo.repo,
- });
-
- // Download and unzip the TF plan artifact.
- await exec.exec("/bin/bash", [
- "-c",
- `curl --no-progress-meter --location "${download_artifact.url}" --output "${tf_identifier}"`,
- ]);
- await exec.exec("/bin/bash", [
- "-c",
- `unzip "${tf_identifier}" -d "${process.env.arg_chdir.replace(/^-chdir=/, "")}"`,
- ]);
-
- // Decrypt the TF plan file if encrypted.
- if (process.env.encrypt_passphrase) {
- const working_directory = [
- process.env.arg_chdir.replace(/^-chdir=/, ""),
- process.env.arg_out.replace(/^-out=/, ""),
- ].join("/");
- let temp_file = "";
-
- await exec.exec("/bin/bash", ["-c", "mktemp"], {
- listeners: {
- stdout: (data) => {
- temp_file += data.toString().trim();
- },
- },
- });
- await exec.exec("/bin/bash", [
- "-c",
- `printf %s "${process.env.encrypt_passphrase}" > "${temp_file}"`,
- ]);
- await exec.exec("/bin/bash", [
- "-c",
- `openssl enc -aes-256-ctr -pbkdf2 -salt -in "${working_directory}" -out "${working_directory}.decrypted" -pass file:"${temp_file}" -d`,
- ]);
- await exec.exec("/bin/bash", [
- "-c",
- `mv ${working_directory}.decrypted ${working_directory}`,
- ]);
- }
-
- // Generate an outline of the TF plan.
- await exec_tf(
- [process.env.arg_chdir, "show", process.env.arg_out.replace(/^-out=/, "")],
- [
- "show",
- process.env.arg_chdir,
- process.env.arg_workspace_alt,
- process.env.arg_backend_config,
- process.env.arg_var_file,
- process.env.arg_destroy,
- ],
- 2
- );
- result_outline = cli_result
- .split("\n")
- .filter((line) => line.startsWith(" # "))
- .map((line) => {
- const diff_line = line.slice(4);
- if (diff_line.includes(" created")) return "+ " + diff_line;
- if (diff_line.includes(" destroyed")) return "- " + diff_line;
- if (diff_line.includes(" updated") || diff_line.includes(" replaced"))
- return "! " + diff_line;
- return "# " + diff_line;
- })
- .join("\n");
-
- // Compare normalized output of the old TF plan with the new one.
- // If they match, then replace the old TF plan with the new one to avoid stale apply.
- // Otherwise, proceed with the stale apply.
- if (/^true$/i.test(process.env.plan_parity)) {
- await exec_tf(
- [process.env.arg_chdir, "show", process.env.arg_out.replace(/^-out=/, "") + ".new"],
- [
- "show",
- process.env.arg_chdir,
- process.env.arg_workspace_alt,
- process.env.arg_backend_config,
- process.env.arg_var_file,
- process.env.arg_destroy,
- ],
- 2
- );
-
- const result_outline_old = result_outline.split("\n").sort().join("\n");
- const result_outline_new = cli_result
- .split("\n")
- .filter((line) => line.startsWith(" # "))
- .map((line) => {
- const diff_line = line.slice(4);
- if (diff_line.includes(" created")) return "+ " + diff_line;
- if (diff_line.includes(" destroyed")) return "- " + diff_line;
- if (diff_line.includes(" updated") || diff_line.includes(" replaced"))
- return "! " + diff_line;
- return "# " + diff_line;
- })
- .sort()
- .join("\n");
-
- if (result_outline_old === result_outline_new) {
- await exec.exec("/bin/bash", [
- "-c",
- `mv ${process.env.arg_chdir.replace(/^-chdir=/, "")}/${process.env.arg_out.replace(
- /^-out=/,
- ""
- )}.new ${process.env.arg_chdir.replace(/^-chdir=/, "")}/${process.env.arg_out.replace(
- /^-out=/,
- ""
- )}`,
- ]);
- }
- }
-
- if (result_outline?.length >= result_outline_limit) {
- result_outline = result_outline.substring(0, result_outline_limit) + "…";
- }
- core.setOutput("outline", result_outline);
- }
-
- await exec_tf(
- [
- process.env.arg_chdir,
- "apply",
- process.env.arg_var_file,
- process.env.arg_destroy,
- process.env.arg_backup,
- process.env.arg_compact_warnings,
- process.env.arg_json,
- process.env.arg_lock_timeout,
- process.env.arg_lock,
- process.env.arg_parallelism,
- process.env.arg_refresh_only,
- process.env.arg_refresh,
- process.env.arg_replace,
- process.env.arg_state_out,
- process.env.arg_state,
- process.env.arg_target,
- process.env.arg_var,
- process.env.arg_auto_approve,
- ],
- [
- "apply",
- process.env.arg_chdir,
- process.env.arg_workspace_alt,
- process.env.arg_backend_config,
- ],
- 2
- );
- }
- } finally {
- // Resolve the job URL for the footer, accounting for matrix strategy.
- const { data: workflow_run } = await github.rest.actions.listJobsForWorkflowRunAttempt({
- attempt_number: process.env.GITHUB_RUN_ATTEMPT,
- owner: context.repo.owner,
- repo: context.repo.repo,
- run_id: context.runId,
- per_page: 100,
- });
- const check_id =
- workflow_run.jobs.find((job) =>
- process.env.MATRIX !== "null"
- ? job.name.includes(Object.values(JSON.parse(process.env.MATRIX)).join(", "))
- : job.name.toLowerCase() === context.job
- )?.id || workflow_run.jobs[0].id;
- core.setOutput("check_id", check_id);
- const check_url = workflow_run.jobs.find((job) => job.id === check_id).html_url;
-
- // Update the check status with TF output summary.
- await github.rest.checks.update({
- check_run_id: check_id,
- output: {
- summary: result_summary,
- title: result_summary,
- },
- owner: context.repo.owner,
- repo: context.repo.repo,
- });
-
- // Render the TF fmt command output.
- const output_fmt =
- process.env.arg_command === "plan" &&
- /^true$/i.test(process.env.fmt_enable) &&
- fmt_result?.length
- ? `Format diff check.
-
-\`\`\`diff
-${fmt_result}
-\`\`\`
- `
- : "";
-
- // Render the TF plan outline.
- const output_outline = result_outline?.length
- ? `Outline of changes.
-
-\`\`\`diff
-${result_outline}
-\`\`\`
- `
- : "";
-
- // Render the TF output body.
- const output_body = `
-
-\`\`\`fish
-${cli_input}
-\`\`\`
-
-${output_fmt}
-
-${output_outline}
-
-${result_summary}
-
-###### ${context.workflow} by @${context.actor} via [${context.eventName}](${check_url}) at ${context.payload.pull_request?.updated_at ||
- context.payload.head_commit?.timestamp ||
- context.payload.merge_group?.head_commit.timestamp
- }.
-
-
-\`\`\`hcl
-${result_concise}
-\`\`\`
-
-
-`;
-
- // Present the TF output body in workflow summary.
- core.summary.addRaw(output_body);
- core.summary.write();
-
- // Add or update PR comment with TF output.
- if (pr_number && /^true$/i.test(process.env.comment_pr)) {
- // Check if the PR contains a bot comment with the TF identifier.
- const { data: list_comments } = await github.rest.issues.listComments({
- issue_number: pr_number,
- owner: context.repo.owner,
- repo: context.repo.repo,
- per_page: 100,
- });
- const bot_comment = list_comments.find((comment) => {
- return comment.user.type === "Bot" && comment.body.includes(``);
- });
-
- // If a bot comment exists with a matching TF identifier, then either edit
- // it to reflect the latest TF output or create a new comment and delete
- // the existing one. Otherwise create a new comment.
- if (bot_comment) {
- if (/^true$/i.test(process.env.update_comment)) {
- const { data: pr_comment } = await github.rest.issues.updateComment({
- body: output_body,
- comment_id: bot_comment.id,
- owner: context.repo.owner,
- repo: context.repo.repo,
- });
- core.setOutput("comment_id", pr_comment.id);
- } else {
- await github.rest.issues.deleteComment({
- comment_id: bot_comment.id,
- owner: context.repo.owner,
- repo: context.repo.repo,
- });
- const { data: pr_comment } = await github.rest.issues.createComment({
- body: output_body,
- issue_number: pr_number,
- owner: context.repo.owner,
- repo: context.repo.repo,
- });
- core.setOutput("comment_id", pr_comment.id);
- }
- } else {
- const { data: pr_comment } = await github.rest.issues.createComment({
- body: output_body,
- issue_number: pr_number,
- owner: context.repo.owner,
- repo: context.repo.repo,
- });
- core.setOutput("comment_id", pr_comment.id);
- }
- }
- }
-};
diff --git a/action.yaml b/action.yaml
new file mode 100644
index 00000000..349804e6
--- /dev/null
+++ b/action.yaml
@@ -0,0 +1,565 @@
+---
+name: Terraform/OpenTofu via Pull Request
+author: Rishav Dhar (@rdhar)
+description: Plan and apply Terraform/OpenTofu via PR automation, using best practices for secure and scalable IaC workflows.
+
+runs:
+ using: composite
+ steps:
+ - shell: bash
+ run: |
+ # Check for required tools.
+ which gh > /dev/null 2>&1 || { echo "Please install GitHub CLI before running this action as it is required for interacting with GitHub."; exit 1; }
+ which jq > /dev/null 2>&1 || { echo "Please install jq before running this action as it is required for processing JSON outputs."; exit 1; }
+ which ${{ inputs.tool }} > /dev/null 2>&1 || { echo "Please install ${{ inputs.tool }} before running this action as it is required for provisioning TF code."; exit 1; }
+ if [[ "${{ inputs.plan-encrypt }}" ]]; then which openssl > /dev/null 2>&1 || { echo "Please install openssl before running this action as it is required for plan file encryption."; exit 1; }; fi
+ if [[ "${{ inputs.plan-parity }}" ]]; then which diff > /dev/null 2>&1 || { echo "Please install diff before running this action as it is required for comparing plan file parity."; exit 1; }; fi
+
+ - id: arg
+ shell: bash
+ run: |
+ # Populate variables.
+ # Environment variables.
+ echo GH_API="X-GitHub-Api-Version:2022-11-28" >> $GITHUB_ENV
+ echo GH_TOKEN="${{ inputs.token }}" >> $GITHUB_ENV
+ echo TF_CLI_ARGS=$([[ -n "${{ env.TF_CLI_ARGS }}" ]] && echo "${{ env.TF_CLI_ARGS }} -no-color" || echo "-no-color") >> $GITHUB_ENV
+ echo TF_IN_AUTOMATION="true" >> $GITHUB_ENV
+ echo TF_INPUT="false" >> $GITHUB_ENV
+
+ # CLI arguments.
+ echo arg-auto-approve=$([[ -n "${{ inputs.arg-auto-approve }}" ]] && echo " -auto-approve" || echo "") >> $GITHUB_OUTPUT
+ echo arg-backend-config=$([[ -n "${{ inputs.arg-backend-config }}" ]] && echo " -backend-config='${{ inputs.arg-backend-config }}'" || echo "") >> $GITHUB_OUTPUT
+ echo arg-backend=$([[ -n "${{ inputs.arg-backend }}" ]] && echo " -backend=${{ inputs.arg-backend }}" || echo "") >> $GITHUB_OUTPUT
+ echo arg-backup=$([[ -n "${{ inputs.arg-backup }}" ]] && echo " -backup=${{ inputs.arg-backup }}" || echo "") >> $GITHUB_OUTPUT
+ echo arg-chdir=$([[ -n "${{ inputs.arg-chdir || inputs.working-directory }}" ]] && echo " -chdir='${{ inputs.arg-chdir || inputs.working-directory }}'" || echo "") >> $GITHUB_OUTPUT
+ echo arg-check=$([[ -n "${{ inputs.arg-check }}" ]] && echo " -check" || echo "") >> $GITHUB_OUTPUT
+ echo arg-compact-warnings=$([[ -n "${{ inputs.arg-compact-warnings }}" ]] && echo " -compact-warnings" || echo "") >> $GITHUB_OUTPUT
+ echo arg-concise=$([[ -n "${{ inputs.arg-concise }}" ]] && echo " -concise" || echo "") >> $GITHUB_OUTPUT
+ echo arg-destroy=$([[ -n "${{ inputs.arg-destroy }}" ]] && echo " -destroy" || echo "") >> $GITHUB_OUTPUT
+ echo arg-detailed-exitcode=$([[ -n "${{ inputs.arg-detailed-exitcode }}" ]] && echo " -detailed-exitcode" || echo "") >> $GITHUB_OUTPUT
+ echo arg-diff=$([[ -n "${{ inputs.arg-diff }}" ]] && echo " -diff" || echo "") >> $GITHUB_OUTPUT
+ echo arg-force-copy=$([[ -n "${{ inputs.arg-force-copy }}" ]] && echo " -force-copy" || echo "") >> $GITHUB_OUTPUT
+ echo arg-from-module=$([[ -n "${{ inputs.arg-from-module }}" ]] && echo " -from-module='${{ inputs.arg-from-module }}'" || echo "") >> $GITHUB_OUTPUT
+ echo arg-generate-config-out=$([[ -n "${{ inputs.arg-generate-config-out }}" ]] && echo " -generate-config-out='${{ inputs.arg-generate-config-out }}'" || echo "") >> $GITHUB_OUTPUT
+ echo arg-get=$([[ -n "${{ inputs.arg-get }}" ]] && echo " -get=${{ inputs.arg-get }}" || echo "") >> $GITHUB_OUTPUT
+ echo arg-list=$([[ -n "${{ inputs.arg-list }}" ]] && echo " -list=${{ inputs.arg-list }}" || echo "") >> $GITHUB_OUTPUT
+ echo arg-lock-timeout=$([[ -n "${{ inputs.arg-lock-timeout }}" ]] && echo " -lock-timeout=${{ inputs.arg-lock-timeout }}" || echo "") >> $GITHUB_OUTPUT
+ echo arg-lock=$([[ -n "${{ inputs.arg-lock }}" ]] && echo " -lock=${{ inputs.arg-lock }}" || echo "") >> $GITHUB_OUTPUT
+ echo arg-lockfile=$([[ -n "${{ inputs.arg-lockfile }}" ]] && echo " -lockfile=${{ inputs.arg-lockfile }}" || echo "") >> $GITHUB_OUTPUT
+ echo arg-migrate-state=$([[ -n "${{ inputs.arg-migrate-state }}" ]] && echo " -migrate-state" || echo "") >> $GITHUB_OUTPUT
+ echo arg-no-tests=$([[ -n "${{ inputs.arg-no-tests }}" ]] && echo " -no-tests" || echo "") >> $GITHUB_OUTPUT
+ echo arg-or-create=$([[ -n "${{ inputs.arg-or-create }}" ]] && echo " -or-create=${{ inputs.arg-or-create }}" || echo "") >> $GITHUB_OUTPUT
+ echo arg-parallelism=$([[ -n "${{ inputs.arg-parallelism }}" ]] && echo " -parallelism=${{ inputs.arg-parallelism }}" || echo "") >> $GITHUB_OUTPUT
+ echo arg-plugin-dir=$([[ -n "${{ inputs.arg-plugin-dir }}" ]] && echo " -plugin-dir='${{ inputs.arg-plugin-dir }}'" || echo "") >> $GITHUB_OUTPUT
+ echo arg-reconfigure=$([[ -n "${{ inputs.arg-reconfigure }}" ]] && echo " -reconfigure" || echo "") >> $GITHUB_OUTPUT
+ echo arg-recursive=$([[ -n "${{ inputs.arg-recursive }}" ]] && echo " -recursive" || echo "") >> $GITHUB_OUTPUT
+ echo arg-refresh-only=$([[ -n "${{ inputs.arg-refresh-only }}" ]] && echo " -refresh-only" || echo "") >> $GITHUB_OUTPUT
+ echo arg-refresh=$([[ -n "${{ inputs.arg-refresh }}" ]] && echo " -refresh=${{ inputs.arg-refresh }}" || echo "") >> $GITHUB_OUTPUT
+ echo arg-replace=$([[ -n "${{ inputs.arg-replace }}" ]] && echo " -replace='${{ inputs.arg-replace }}'" | sed "s/,/' -replace='/g" || echo "") >> $GITHUB_OUTPUT
+ echo arg-state-out=$([[ -n "${{ inputs.arg-state-out }}" ]] && echo " -state-out='${{ inputs.arg-state-out }}'" || echo "") >> $GITHUB_OUTPUT
+ echo arg-state=$([[ -n "${{ inputs.arg-state }}" ]] && echo " -state=${{ inputs.arg-state }}" || echo "") >> $GITHUB_OUTPUT
+ echo arg-target=$([[ -n "${{ inputs.arg-target }}" ]] && echo " -target='${{ inputs.arg-target }}'" | sed "s/,/' -target='/g" || echo "") >> $GITHUB_OUTPUT
+ echo arg-test-directory=$([[ -n "${{ inputs.arg-test-directory }}" ]] && echo " -test-directory='${{ inputs.arg-test-directory }}'" || echo "") >> $GITHUB_OUTPUT
+ echo arg-upgrade=$([[ -n "${{ inputs.arg-upgrade }}" ]] && echo " -upgrade" || echo "") >> $GITHUB_OUTPUT
+ echo arg-var-file=$([[ -n "${{ inputs.arg-var-file }}" ]] && echo " -var-file='${{ inputs.arg-var-file }}'" || echo "") >> $GITHUB_OUTPUT
+ echo arg-var=$([[ -n "${{ inputs.arg-var }}" ]] && echo " -var='${{ inputs.arg-var }}'" | sed "s/,/' -var='/g" || echo "") >> $GITHUB_OUTPUT
+ echo arg-write=$([[ -n "${{ inputs.arg-write }}" ]] && echo " -write=${{ inputs.arg-write }}" || echo "") >> $GITHUB_OUTPUT
+
+ - id: identifier
+ env:
+ GH_MATRIX: ${{ toJSON(matrix) }}
+ shell: bash
+ run: |
+ # Unique identifier.
+ # Get PR number using different query methods for push, merge_group, and pull_request events.
+ if [[ "$GITHUB_EVENT_NAME" == "push" ]]; then
+ # List PRs associated with the commit, then get the PR number from the head ref or the latest PR.
+ associated_prs=$(gh api /repos/${GITHUB_REPOSITORY}/commits/${GITHUB_SHA}/pulls --header "$GH_API" --method GET --field per_page=100)
+ pr_number=$(echo "$associated_prs" | jq -r '(.[] | select(.head.ref == env.GITHUB_REF_NAME) | .number) // .[0].number')
+ elif [[ "$GITHUB_EVENT_NAME" == "merge_group" ]]; then
+ # Get the PR number by parsing the ref name.
+ pr_number=$(echo "${GITHUB_REF_NAME}" | sed -n 's/.*pr-\([0-9]*\)-.*/\1/p')
+ else
+ # Get the PR number from the event payload, or fall back to 0.
+ pr_number=${{ github.event.number || 0 }}
+ fi
+ echo "pr=$pr_number" >> $GITHUB_OUTPUT
+
+ # Generate identifier for the workflow run.
+ identifier="${{ inputs.tool }} $pr_number ${{ inputs.workspace }}${{ steps.arg.outputs.arg-chdir }}${{ steps.arg.outputs.arg-backend-config }}${{ steps.arg.outputs.arg-var-file }}${{ steps.arg.outputs.arg-destroy }}.tf.plan"
+ identifier=$(echo "$identifier" | sed 's/[^a-zA-Z0-9]/./g')
+ echo "name=$identifier" >> $GITHUB_OUTPUT
+
+ # List jobs from the current workflow run.
+ workflow_run=$(gh api /repos/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}/attempts/${GITHUB_RUN_ATTEMPT}/jobs --header "$GH_API" --method GET --field per_page=100)
+
+ # Get the current job ID from the workflow run using different query methods for matrix and regular jobs.
+ if [[ "$GH_MATRIX" == "null" ]]; then
+ # For regular jobs, get the ID of the job with the same name as $GITHUB_JOB (lowercase and '-' or '_' replaced with ' ').
+ # Otherwise, get the ID of the first job in the list as a fallback.
+ job_id=$(echo "$workflow_run" | jq -r '(.jobs[] | select((.name | ascii_downcase | gsub("-|_"; " ")) == (env.GITHUB_JOB | ascii_downcase | gsub("-|_"; " "))) | .id) // .jobs[0].id' | head -n 1)
+ else
+ # For matrix jobs, join the matrix values into a single comma-separated string and get the ID of the job whose name contains it.
+ matrix=$(echo "$GH_MATRIX" | jq -r 'to_entries | map(.value) | join(", ")')
+ job_id=$(echo "$workflow_run" | jq -r --arg matrix "$matrix" '.jobs[] | select(.name | contains($matrix)) | .id')
+ fi
+ echo "job=$job_id" >> $GITHUB_OUTPUT
+
+ # Get the step number that has status "in_progress" from the current job.
+ workflow_step=$(echo "$workflow_run" | jq -r --arg job_id "$job_id" '.jobs[] | select(.id == ($job_id | tonumber)) | .steps[] | select(.status == "in_progress") | .number')
+ echo "step=$workflow_step" >> $GITHUB_OUTPUT
+
+ - id: initialize
+ shell: bash
+ run: |
+ # TF initialize.
+ trap 'exit_code="$?"; echo "exit_code=$exit_code" >> "$GITHUB_OUTPUT"' EXIT
+ args="${{ steps.arg.outputs.arg-backend-config }}${{ steps.arg.outputs.arg-backend }}${{ steps.arg.outputs.arg-var-file }}${{ steps.arg.outputs.arg-var }}${{ steps.arg.outputs.arg-force-copy }}${{ steps.arg.outputs.arg-from-module }}${{ steps.arg.outputs.arg-get }}${{ steps.arg.outputs.arg-lock-timeout }}${{ steps.arg.outputs.arg-lock }}${{ steps.arg.outputs.arg-lockfile }}${{ steps.arg.outputs.arg-migrate-state }}${{ steps.arg.outputs.arg-plugin-dir }}${{ steps.arg.outputs.arg-reconfigure }}${{ steps.arg.outputs.arg-test-directory }}${{ steps.arg.outputs.arg-upgrade }}"
+ echo "${{ inputs.tool }} init${{ steps.arg.outputs.arg-chdir }}${args}" | sed 's/ -/\n -/g' > tf.command.txt
+ ${{ inputs.tool }}${{ steps.arg.outputs.arg-chdir }} init${args} > >(tee -a tf.console.txt) 2> tf.console.txt
+
+ - id: workspace
+ if: ${{ inputs.arg-workspace != '' }}
+ shell: bash
+ run: |
+ # TF workspace select.
+ trap 'exit_code="$?"; echo "exit_code=$exit_code" >> "$GITHUB_OUTPUT"' EXIT
+ args="${{ steps.arg.outputs.arg-or-create }} ${{ inputs.arg-workspace }}"
+ echo "${{ inputs.tool }} workspace select${{ steps.arg.outputs.arg-chdir }}${args}" | sed 's/ -/\n -/g' > tf.command.txt
+ ${{ inputs.tool }}${{ steps.arg.outputs.arg-chdir }} workspace select${args} > >(tee -a tf.console.txt) 2> tf.console.txt
+
+ - id: validate
+ if: ${{ inputs.validate == 'true' }}
+ shell: bash
+ run: |
+ # TF validate.
+ trap 'exit_code="$?"; echo "exit_code=$exit_code" >> "$GITHUB_OUTPUT"' EXIT
+ args="${{ steps.arg.outputs.arg-var-file }}${{ steps.arg.outputs.arg-var }}${{ steps.arg.outputs.arg-no-tests }}${{ steps.arg.outputs.arg-test-directory }}"
+ echo "${{ inputs.tool }} validate${{ steps.arg.outputs.arg-chdir }}${args}" | sed 's/ -/\n -/g' > tf.command.txt
+ ${{ inputs.tool }}${{ steps.arg.outputs.arg-chdir }} validate${args} > >(tee -a tf.console.txt) 2> tf.console.txt
+
+ - id: format
+ if: ${{ inputs.format == 'true' }}
+ shell: bash
+ run: |
+ # TF format.
+ trap 'exit_code="$?"; echo "exit_code=$exit_code" >> "$GITHUB_OUTPUT"' EXIT
+ args="${{ steps.arg.outputs.arg-check }}${{ steps.arg.outputs.arg-diff }}${{ steps.arg.outputs.arg-list }}${{ steps.arg.outputs.arg-recursive }}${{ steps.arg.outputs.arg-write }}"
+ echo "${{ inputs.tool }} fmt${{ steps.arg.outputs.arg-chdir }}${args}" | sed 's/ -/\n -/g' > tf.command.txt
+ ${{ inputs.tool }}${{ steps.arg.outputs.arg-chdir }} fmt${args} > >(tee -a tf.console.txt) 2> tf.console.txt
+
+ - if: ${{ inputs.label-pr == 'true' && steps.identifier.outputs.pr != 0 }}
+ continue-on-error: true
+ shell: bash
+ run: |
+ # Label PR.
+ # If the label does not exist, create it before adding it to the PR in the format 'tf:${{ inputs.command }}'.
+ gh api /repos/${GITHUB_REPOSITORY}/labels/tf:${{ inputs.command }} --header "$GH_API" --method GET || \
+ gh api /repos/${GITHUB_REPOSITORY}/labels --header "$GH_API" --method POST --field "name=tf:${{ inputs.command }}" --field "description=Pull requests that ${{ inputs.command }} TF code." --field "color=5C4EE5"
+ gh api /repos/${GITHUB_REPOSITORY}/issues/${{ steps.identifier.outputs.pr }}/labels --header "$GH_API" --method POST --field "labels[]=tf:${{ inputs.command }}"
+
+ - id: plan
+ if: ${{ inputs.command == 'plan' }}
+ shell: bash
+ run: |
+ # TF plan.
+ trap 'exit_code="$?"; echo "exit_code=$exit_code" >> "$GITHUB_OUTPUT"; if [[ "$exit_code" == "2" ]]; then exit 0; fi' EXIT
+ args="${{ steps.arg.outputs.arg-destroy }}${{ steps.arg.outputs.arg-var-file }}${{ steps.arg.outputs.arg-var }}${{ steps.arg.outputs.arg-compact-warnings }}${{ steps.arg.outputs.arg-concise }}${{ steps.arg.outputs.arg-detailed-exitcode }}${{ steps.arg.outputs.arg-generate-config-out }}${{ steps.arg.outputs.arg-lock-timeout }}${{ steps.arg.outputs.arg-lock }}${{ steps.arg.outputs.arg-parallelism }}${{ steps.arg.outputs.arg-refresh-only }}${{ steps.arg.outputs.arg-refresh }}${{ steps.arg.outputs.arg-replace }}${{ steps.arg.outputs.arg-target }} -out=tf.plan"
+ echo "${{ inputs.tool }} plan${{ steps.arg.outputs.arg-chdir }}${args}" | sed 's/ -/\n -/g' > tf.command.txt
+ ${{ inputs.tool }}${{ steps.arg.outputs.arg-chdir }} plan${args} > >(tee -a tf.console.txt) 2> tf.console.txt
+
+ - id: download
+ if: ${{ inputs.command == 'apply' && inputs.arg-auto-approve != 'true' }}
+ shell: bash
+ run: |
+ # Download plan file.
+ # Get the artifact ID of the latest matching plan files for download.
+ artifact_id=$(gh api /repos/${GITHUB_REPOSITORY}/actions/artifacts --header "$GH_API" --method GET --field "name=${{ steps.identifier.outputs.name }}" --jq '.artifacts[0].id')
+ gh api /repos/${GITHUB_REPOSITORY}/actions/artifacts/${artifact_id}/zip --header "$GH_API" --method GET > "${{ steps.identifier.outputs.name }}.zip"
+
+ # Unzip the plan file to the working directory, then clean up the zip file.
+ unzip "${{ steps.identifier.outputs.name }}.zip" -d "${{ inputs.arg-chdir || inputs.working-directory }}"
+ rm -f "${{ steps.identifier.outputs.name }}.zip"
+
+ - if: ${{ inputs.plan-encrypt != '' && steps.download.outcome == 'success' }}
+ env:
+ pass: ${{ inputs.plan-encrypt }}
+ path: ${{ format('{0}{1}tf.plan', inputs.arg-chdir || inputs.working-directory, (inputs.arg-chdir || inputs.working-directory) && '/' || '') }}
+ shell: bash
+ run: |
+ # Decrypt plan file.
+ temp_file=$(mktemp)
+ printf "%s" "$pass" > "$temp_file"
+ openssl enc -aes-256-ctr -pbkdf2 -salt -in "$path" -out "$path.decrypted" -pass file:"$temp_file" -d
+ mv "$path.decrypted" "$path"
+
+ - if: ${{ steps.plan.outcome == 'success' || steps.download.outcome == 'success' }}
+ shell: bash
+ run: |
+ # TF show.
+ ${{ inputs.tool }}${{ steps.arg.outputs.arg-chdir }} show${{ steps.arg.outputs.arg-var-file }}${{ steps.arg.outputs.arg-var }} tf.plan > tf.console.txt
+
+ # Diff of changes.
+ # Filter lines starting with " # " and save to tf.diff.txt, then prepend diff-specific symbols based on specific keywords.
+ grep -E '^ # ' tf.console.txt | sed -e 's/^ # \(.* be created\)/+ \1/' -e 's/^ # \(.* be destroyed\)/- \1/' -e 's/^ # \(.* be updated\|.* be replaced\)/! \1/' -e 's/^ # \(.* be read\)/~ \1/' -e 's/^ # \(.*\)/# \1/' > tf.diff.txt || true
+
+ - if: ${{ inputs.plan-encrypt != '' && steps.plan.outcome == 'success' }}
+ env:
+ pass: ${{ inputs.plan-encrypt }}
+ path: ${{ format('{0}{1}tf.plan', inputs.arg-chdir || inputs.working-directory, (inputs.arg-chdir || inputs.working-directory) && '/' || '') }}
+ shell: bash
+ run: |
+ # Encrypt plan file.
+ temp_file=$(mktemp)
+ printf "%s" "$pass" > "$temp_file"
+ openssl enc -aes-256-ctr -pbkdf2 -salt -in "$path" -out "$path.encrypted" -pass file:"$temp_file"
+ mv "$path.encrypted" "$path"
+
+ - if: ${{ inputs.command == 'plan' }}
+ uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
+ with:
+ name: ${{ steps.identifier.outputs.name }}
+ path: ${{ format('{0}{1}tf.plan', inputs.arg-chdir || inputs.working-directory, (inputs.arg-chdir || inputs.working-directory) && '/' || '') }}
+ overwrite: true
+
+ - if: ${{ inputs.plan-parity == 'true' && steps.download.outcome == 'success' }}
+ shell: bash
+ run: |
+ # TF plan parity.
+ # Generate a new plan file, then compare it with the previous one.
+ # Both plan files are normalized by sorting and removing empty lines.
+ ${{ inputs.tool }}${{ steps.arg.outputs.arg-chdir }} plan${{ steps.arg.outputs.arg-destroy }}${{ steps.arg.outputs.arg-var-file }}${{ steps.arg.outputs.arg-var }}${{ steps.arg.outputs.arg-compact-warnings }}${{ steps.arg.outputs.arg-concise }}${{ steps.arg.outputs.arg-detailed-exitcode }}${{ steps.arg.outputs.arg-generate-config-out }}${{ steps.arg.outputs.arg-lock-timeout }}${{ steps.arg.outputs.arg-lock }}${{ steps.arg.outputs.arg-parallelism }}${{ steps.arg.outputs.arg-refresh-only }}${{ steps.arg.outputs.arg-refresh }}${{ steps.arg.outputs.arg-replace }}${{ steps.arg.outputs.arg-target }} -out=tf.plan.parity > /dev/null
+ ${{ inputs.tool }}${{ steps.arg.outputs.arg-chdir }} show${{ steps.arg.outputs.arg-var-file }}${{ steps.arg.outputs.arg-var }} tf.plan.parity | sort | sed '/^$/d' > tf.plan.new
+ cat tf.console.txt | sort | sed '/^$/d' > tf.plan.old
+
+ # If both plan files are identical, then replace the old plan file with the new one to prevent stale apply.
+ diff --brief tf.plan.new tf.plan.old > /dev/null && mv "${{ format('{0}{1}tf.plan.parity', inputs.arg-chdir || inputs.working-directory, (inputs.arg-chdir || inputs.working-directory) && '/' || '') }}" "${{ format('{0}{1}tf.plan', inputs.arg-chdir || inputs.working-directory, (inputs.arg-chdir || inputs.working-directory) && '/' || '') }}"
+ rm -f tf.plan.new tf.plan.old "${{ format('{0}{1}tf.plan.parity', inputs.arg-chdir || inputs.working-directory, (inputs.arg-chdir || inputs.working-directory) && '/' || '') }}"
+
+ - id: apply
+ if: ${{ inputs.command == 'apply' }}
+ shell: bash
+ run: |
+ # TF apply.
+ trap 'exit_code="$?"; echo "exit_code=$exit_code" >> "$GITHUB_OUTPUT"' EXIT
+ # If ${{ inputs.arg-auto-approve }} is true, then pass in variables; otherwise pass in the plan file without variables.
+ if [[ "${{ inputs.arg-auto-approve }}" == "true" ]]; then
+ plan="${{ steps.arg.outputs.arg-auto-approve }}"
+ var_file="${{ steps.arg.outputs.arg-var-file }}"
+ var="${{ steps.arg.outputs.arg-var }}"
+ else
+ plan=" tf.plan"
+ var_file=""
+ var=""
+ fi
+ args="${{ steps.arg.outputs.arg-destroy }}${var_file}${var}${{ steps.arg.outputs.arg-backup }}${{ steps.arg.outputs.arg-compact-warnings }}${{ steps.arg.outputs.arg-concise }}${{ steps.arg.outputs.arg-lock-timeout }}${{ steps.arg.outputs.arg-lock }}${{ steps.arg.outputs.arg-parallelism }}${{ steps.arg.outputs.arg-refresh-only }}${{ steps.arg.outputs.arg-refresh }}${{ steps.arg.outputs.arg-replace }}${{ steps.arg.outputs.arg-state-out }}${{ steps.arg.outputs.arg-state }}${{ steps.arg.outputs.arg-target }}${plan}"
+ echo "${{ inputs.tool }} apply${{ steps.arg.outputs.arg-chdir }}${args}" | sed 's/ -/\n -/g' > tf.command.txt
+ ${{ inputs.tool }}${{ steps.arg.outputs.arg-chdir }} apply${args} > >(tee -a tf.console.txt) 2> tf.console.txt
+
+ - id: post
+ if: ${{ !cancelled() && steps.identifier.outcome == 'success' }}
+ shell: bash
+ run: |
+ # Post output.
+ # Parse the tf.command.txt file.
+ command=$(cat tf.command.txt)
+
+ # Remove each comma-delimited argument from the command.
+ IFS=',' read -ra args <<< "${{ inputs.hide-args }}"
+ for arg in "${args[@]}"; do
+ command=$(echo "$command" | grep -v "^ -${arg}\b")
+ done
+ command=$(echo "$command" | tr -d '\n')
+
+ # Parse the tf.console.txt file, truncated for character limit.
+ console=$(grep -v '\.\.\.$' tf.console.txt | head -c 42000)
+ summary=$(cat tf.console.txt | tac | grep -m 1 -E '^(Apply complete!|No changes.|Error:|Plan:)' | tac || echo "View output.")
+
+ # Add summary to the job status.
+ check_run=$(gh api /repos/${GITHUB_REPOSITORY}/check-runs/${{ steps.identifier.outputs.job }} --header "$GH_API" --method PATCH --field "output[title]=${summary}" --field "output[summary]=${summary}")
+
+ # Extract the check run URL and ID from the response.
+ check_url=$(echo "$check_run" | jq -r '.html_url')
+ echo "check_id=$(echo "$check_run" | jq -r '.id')" >> $GITHUB_OUTPUT
+
+ # If tf.diff.txt exists, display it within a diff block, truncated for character limit.
+ if [[ -s tf.diff.txt ]]; then
+ diff="
+ Diff of changes.
+
+ \`\`\`diff
+ $(head -c 24000 tf.diff.txt)
+ \`\`\`
+ "
+ else
+ diff=""
+ fi
+
+ body=$(cat << EOTF_VIA_PR
+ \`\`\`fish
+ ${command}
+ \`\`\`
+
+ ${diff}
+
+ ${summary}
+
+
+ ###### By @${GITHUB_TRIGGERING_ACTOR} at ${{ github.event.pull_request.updated_at || github.event.head_commit.timestamp || github.event.merge_group.head_commit.timestamp }} [(view log)](${check_url}#step:${{ steps.identifier.outputs.step }}:1).
+
+
+ \`\`\`hcl
+ ${console}
+ \`\`\`
+
+
+
+
+ EOTF_VIA_PR
+ )
+
+ # Post output to job summary.
+ echo "$body" >> $GITHUB_STEP_SUMMARY
+
+ # Post PR comment per ${{ inputs.comment-pr }} and if the PR number is not 0.
+ if [[ "${{ inputs.comment-pr }}" != "none" && "${{ steps.identifier.outputs.pr }}" != "0" ]]; then
+ # Check if the PR contains a bot comment with the same identifier.
+ list_comments=$(gh api /repos/${GITHUB_REPOSITORY}/issues/${{ steps.identifier.outputs.pr }}/comments --header "$GH_API" --method GET --field per_page=100)
+ bot_comment=$(echo "$list_comments" | jq -r --arg identifier "${{ steps.identifier.outputs.name }}" '.[] | select(.user.type == "Bot") | select(.body | contains($identifier)) | .id' | head -n 1)
+
+ if [[ -n "$bot_comment" ]]; then
+ if [[ "${{ inputs.comment-pr }}" == "recreate" ]]; then
+ # Delete previous comment before posting a new one.
+ gh api /repos/${GITHUB_REPOSITORY}/issues/comments/${bot_comment} --header "$GH_API" --method DELETE
+ pr_comment=$(gh api /repos/${GITHUB_REPOSITORY}/issues/${{ steps.identifier.outputs.pr }}/comments --header "$GH_API" --method POST --field "body=${body}")
+ echo "comment_id=$(echo "$pr_comment" | jq -r '.id')" >> $GITHUB_OUTPUT
+ elif [[ "${{ inputs.comment-pr }}" == "update" ]]; then
+ # Update existing comment.
+ pr_comment=$(gh api /repos/${GITHUB_REPOSITORY}/issues/comments/${bot_comment} --header "$GH_API" --method PATCH --field "body=${body}")
+ echo "comment_id=$(echo "$pr_comment" | jq -r '.id')" >> $GITHUB_OUTPUT
+ fi
+ else
+ # Post new comment.
+ pr_comment=$(gh api /repos/${GITHUB_REPOSITORY}/issues/${{ steps.identifier.outputs.pr }}/comments --header "$GH_API" --method POST --field "body=${body}")
+ echo "comment_id=$(echo "$pr_comment" | jq -r '.id')" >> $GITHUB_OUTPUT
+ fi
+ fi
+
+ # Clean up files.
+ rm -f tf.command.txt tf.console.txt tf.diff.txt "${{ format('{0}{1}tf.plan', inputs.arg-chdir || inputs.working-directory, (inputs.arg-chdir || inputs.working-directory) && '/' || '') }}"
+
+outputs:
+ check-id:
+ description: "ID of the check run."
+ value: ${{ steps.post.outputs.check_id }}
+ comment-id:
+ description: "ID of the PR comment."
+ value: ${{ steps.post.outputs.comment_id }}
+ exitcode:
+ description: "Exit code of the last TF command."
+ value: ${{ steps.apply.outputs.exit_code || steps.plan.outputs.exit_code || steps.format.outputs.exit_code || steps.validate.outputs.exit_code || steps.workspace.outputs.exit_code || steps.initialize.outputs.exit_code }}
+ identifier:
+ description: "Unique name of the workflow run and artifact."
+ value: ${{ steps.identifier.outputs.name }}
+
+inputs:
+ # Action parameters.
+ command:
+ default: "plan"
+ description: "Command to run between: `plan` or `apply` (e.g., `plan`)."
+ required: false
+ comment-pr:
+ default: "update"
+ description: "PR comment by: `update` existing comment, `recreate` and delete previous one, or `none` (e.g., `update`)."
+ required: false
+ format:
+ default: "false"
+ description: "Check format of TF code (e.g., `false`)."
+ required: false
+ hide-args:
+ default: "detailed-exitcode,lock,out,var"
+ description: "Hide comma-separated arguments from the command input (e.g., `detailed-exitcode,lock,out,var`)."
+ required: false
+ label-pr:
+ default: "true"
+ description: "Add a PR label with the command input (e.g., `true`)."
+ required: false
+ plan-encrypt:
+ default: ""
+ description: "Encrypt plan file artifact with the given input (e.g., `secrets.PASSPHRASE`)."
+ required: false
+ plan-parity:
+ default: "false"
+ description: "Compare the plan file with a newly-generated one to prevent stale apply (e.g., `false`)."
+ required: false
+ token:
+ default: ${{ github.token }}
+ description: "Specify a GitHub token (e.g., `secrets.GITHUB_TOKEN`)."
+ required: false
+ tool:
+ default: "terraform"
+ description: "Choose the tool to provision TF code (e.g., `tofu`)."
+ required: false
+ validate:
+ default: "false"
+ description: "Check validation of TF code (e.g., `false`)."
+ required: false
+ working-directory:
+ default: ""
+ description: "Specify the working directory of TF code, alias of `arg-chdir` (e.g., `stacks/dev`)."
+ required: false
+
+ # CLI arguments.
+ arg-auto-approve:
+ default: ""
+ description: "auto-approve"
+ required: false
+ arg-backend-config:
+ default: ""
+ description: "backend-config"
+ required: false
+ arg-backend:
+ default: ""
+ description: "backend"
+ required: false
+ arg-backup:
+ default: ""
+ description: "backup"
+ required: false
+ arg-chdir:
+ default: ""
+ description: "chdir"
+ required: false
+ arg-check:
+ default: "true"
+ description: "check"
+ required: false
+ arg-compact-warnings:
+ default: ""
+ description: "compact-warnings"
+ required: false
+ arg-concise:
+ default: ""
+ description: "concise"
+ required: false
+ arg-destroy:
+ default: ""
+ description: "destroy"
+ required: false
+ arg-detailed-exitcode:
+ default: "true"
+ description: "detailed-exitcode"
+ required: false
+ arg-diff:
+ default: "true"
+ description: "diff"
+ required: false
+ arg-force-copy:
+ default: ""
+ description: "force-copy"
+ required: false
+ arg-from-module:
+ default: ""
+ description: "from-module"
+ required: false
+ arg-generate-config-out:
+ default: ""
+ description: "generate-config-out"
+ required: false
+ arg-get:
+ default: ""
+ description: "get"
+ required: false
+ arg-list:
+ default: ""
+ description: "list"
+ required: false
+ arg-lock-timeout:
+ default: ""
+ description: "lock-timeout"
+ required: false
+ arg-lock:
+ default: ""
+ description: "lock"
+ required: false
+ arg-lockfile:
+ default: ""
+ description: "lockfile"
+ required: false
+ arg-migrate-state:
+ default: ""
+ description: "migrate-state"
+ required: false
+ arg-no-tests:
+ default: ""
+ description: "no-tests"
+ required: false
+ arg-or-create:
+ default: "true"
+ description: "or-create"
+ required: false
+ arg-parallelism:
+ default: ""
+ description: "parallelism"
+ required: false
+ arg-plugin-dir:
+ default: ""
+ description: "plugin-dir"
+ required: false
+ arg-reconfigure:
+ default: ""
+ description: "reconfigure"
+ required: false
+ arg-recursive:
+ default: "true"
+ description: "recursive"
+ required: false
+ arg-refresh-only:
+ default: ""
+ description: "refresh-only"
+ required: false
+ arg-refresh:
+ default: ""
+ description: "refresh"
+ required: false
+ arg-replace:
+ default: ""
+ description: "replace"
+ required: false
+ arg-state-out:
+ default: ""
+ description: "state-out"
+ required: false
+ arg-state:
+ default: ""
+ description: "state"
+ required: false
+ arg-target:
+ default: ""
+ description: "target"
+ required: false
+ arg-test-directory:
+ default: ""
+ description: "test-directory"
+ required: false
+ arg-upgrade:
+ default: ""
+ description: "upgrade"
+ required: false
+ arg-var-file:
+ default: ""
+ description: "var-file"
+ required: false
+ arg-var:
+ default: ""
+ description: "var"
+ required: false
+ arg-workspace:
+ default: ""
+ description: "workspace"
+ required: false
+ arg-write:
+ default: ""
+ description: "write"
+ required: false
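
Taken as a whole, a calling workflow wires up the new composite action roughly as follows. This is a hedged sketch: the trigger, permissions, version pin, and directory/var-file paths are illustrative assumptions, not part of the patch.

```yaml
name: Provision TF via PR
on:
  pull_request:

permissions:
  actions: read        # download plan file artifacts
  checks: write        # update the check run summary
  contents: read
  pull-requests: write # post the PR comment and label

jobs:
  tf:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: hashicorp/setup-terraform@v3

      - uses: devsectop/tf-via-pr@vX.Y.Z # placeholder; pin to a release tag or SHA
        with:
          command: plan
          working-directory: stacks/dev
          arg-var-file: env/dev.tfvars
          plan-encrypt: ${{ secrets.PASSPHRASE }}
```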
diff --git a/action.yml b/action.yml
deleted file mode 100644
index 52413b3c..00000000
--- a/action.yml
+++ /dev/null
@@ -1,390 +0,0 @@
-name: Terraform/OpenTofu via Pull Request
-author: Rishav Dhar (@rdhar)
-description: GitHub Action to plan and apply Terraform/OpenTofu via PR automation, using best practices for secure and scalable infrastructure-as-code (IaC) workflows.
-
-branding:
- color: purple
- icon: package
-
-inputs:
- # Input parameters.
- cache_plugins:
- description: Boolean flag to cache TF plugins for faster workflow runs (requires .terraform.lock.hcl file).
- required: false
- default: "false"
- comment_pr:
- description: Boolean flag to add PR comment of TF command output.
- required: false
- default: "true"
- encrypt_passphrase:
- description: String passphrase to encrypt the TF plan file.
- required: false
- default: ""
- fmt_enable:
- description: Boolean flag to enable TF fmt command and display diff of changes.
- required: false
- default: "true"
- label_pr:
- description: Boolean flag to add PR label of TF command to run.
- required: false
- default: "true"
- plan_parity:
- description: Boolean flag to compare the TF plan file with a newly-generated one to prevent stale apply.
- required: false
- default: "false"
- tenv_arch:
- description: String architecture of the tenv tool to install and use.
- required: false
- default: "amd64"
- tenv_version:
- description: String version tag of the tenv tool to install and use.
- required: false
- default: ""
- tf_tool:
- description: String name of the TF tool to use and override default assumption from wrapper environment variable.
- required: false
- default: "terraform"
- tf_version:
- description: String version constraint of the TF tool to install and use.
- required: false
- default: ""
- update_comment:
- description: Boolean flag to update existing PR comment instead of creating a new comment and deleting the old one.
- required: false
- default: "false"
- validate_enable:
- description: Boolean flag to enable TF validate command check.
- required: false
- default: "false"
-
- # TF arguments.
- arg_auto_approve:
- description: Boolean flag to toggle skipping of interactive approval of plan before applying.
- required: false
- default: ""
- arg_backend:
- description: Boolean flag to toggle TF backend initialization.
- required: false
- default: ""
- arg_backend_config:
- description: Comma-separated string list of file path(s) to the backend configuration.
- required: false
- default: ""
- arg_backup:
- description: Boolean flag to toggle backup of the existing state file before modifying.
- required: false
- default: ""
- arg_chdir:
- description: String path to the working directory where the TF command should be run.
- required: false
- default: ""
- arg_check:
- description: Boolean flag to toggle checking of file formatting with appropriate exit code.
- required: false
- default: ""
- arg_cloud:
- description: Boolean flag to toggle TF backend initialization.
- required: false
- default: ""
- arg_command:
- description: String name of the TF command to run (either 'plan' or 'apply').
- required: false
- default: "plan"
- arg_compact_warnings:
- description: Boolean flag to toggle compact output for warnings.
- required: false
- default: ""
- arg_concise:
- description: Boolean flag to toggle skipping of refresh log lines.
- required: false
- default: ""
- arg_destroy:
- description: Boolean flag to toggle destruction of all managed objects.
- required: false
- default: ""
- arg_detailed_exitcode:
- description: String to set the detailed exit code mode.
- required: false
- default: ""
- arg_diff:
- description: Boolean flag to toggle display diff of formatting changes.
- required: false
- default: "true"
- arg_force_copy:
- description: Boolean flag to toggle suppression of prompts about copying state data.
- required: false
- default: ""
- arg_from_module:
- description: String path to copy contents from the given module source into the target directory.
- required: false
- default: ""
- arg_generate_config_out:
- description: String path to write the generated configuration.
- required: false
- default: ""
- arg_get:
- description: Boolean flag to toggle downloading of modules for the configuration.
- required: false
- default: ""
- arg_ignore_remote_version:
- description: Boolean flag to toggle checking if the local and remote TF versions use compatible state representations.
- required: false
- default: ""
- arg_json:
- description: Boolean flag to toggle JSON output format.
- required: false
- default: ""
- arg_list:
- description: Boolean flag to toggle listing of files whose formatting differs.
- required: false
- default: "false"
- arg_lock:
- description: Boolean flag to toggle state locking during state operations.
- required: false
- default: ""
- arg_lock_timeout:
- description: String duration to retry a state lock.
- required: false
- default: ""
- arg_lockfile:
- description: String to set dependency lockfile mode.
- required: false
- default: ""
- arg_migrate_state:
- description: Boolean flag to toggle reconfiguration of the backend, attempting to migrate any existing state.
- required: false
- default: ""
- arg_no_tests:
- description: Boolean flag to toggle validation of test files.
- required: false
- default: ""
- arg_or_create:
- description: Boolean flag to toggle workspace creation if it doesn't exist.
- required: false
- default: ""
- arg_out:
- description: String path to write the generated plan.
- required: false
- default: "tfplan"
- arg_parallelism:
- description: String number to limit the number of concurrent operations.
- required: false
- default: ""
- arg_plugin_dir:
- description: Comma-separated string list of directory path(s) containing plugin binaries.
- required: false
- default: ""
- arg_reconfigure:
- description: Boolean flag to toggle reconfiguration of the backend, ignoring any saved configuration.
- required: false
- default: ""
- arg_recursive:
- description: Boolean flag to toggle recursive processing of directories.
- required: false
- default: "true"
- arg_refresh:
- description: Boolean flag to skip checking of external changes to remote objects.
- required: false
- default: ""
- arg_refresh_only:
- description: Boolean flag to toggle checking whether remote objects still match the current configuration without proposing any actions to undo external changes.
- required: false
- default: ""
- arg_replace:
- description: Comma-separated string list of resource addresses to replace.
- required: false
- default: ""
- arg_state:
- description: String path to read and save state.
- required: false
- default: ""
- arg_state_out:
- description: String path to write state.
- required: false
- default: ""
- arg_target:
- description: Comma-separated string list of resource addresses to target.
- required: false
- default: ""
- arg_test_directory:
- description: String path to the test directory.
- required: false
- default: ""
- arg_upgrade:
- description: Boolean flag to toggle upgrading the latest module and provider versions allowed within configured constraints.
- required: false
- default: ""
- arg_var:
- description: Comma-separated string list of variables to set in the format 'key=value'.
- required: false
- default: ""
- arg_var_file:
- description: Comma-separated string list of file path(s) to the variable configuration.
- required: false
- default: ""
- arg_workspace:
- description: String name of the workspace to select or create.
- required: false
- default: ""
- arg_write:
- description: Boolean flag to toggle writing of formatted files.
- required: false
- default: "false"
-
-outputs:
- check_id:
- description: String output of the workflow check run ID.
- value: ${{ steps.tf.outputs.check_id }}
- comment_id:
- description: String output of the PR comment ID.
- value: ${{ steps.tf.outputs.comment_id }}
- exitcode:
- description: String output of the last TF command's exit code.
- value: ${{ steps.tf.outputs.exitcode }}
- fmt_result:
- description: String output of the TF fmt command (truncated).
- value: ${{ steps.tf.outputs.fmt_result }}
- header:
- description: String output of the TF command input.
- value: ${{ steps.tf.outputs.header }}
- identifier:
- description: String output of the TF run's unique identifier.
- value: ${{ steps.tf.outputs.identifier }}
- last_result:
- description: String output of the last TF command (truncated).
- value: ${{ steps.tf.outputs.last_result }}
- outline:
- description: String outline of the TF plan.
- value: ${{ steps.tf.outputs.outline }}
- stderr:
- description: String output of the last TF command's standard error.
- value: ${{ steps.tf.outputs.stderr }}
- stdout:
- description: String output of the last TF command's standard output.
- value: ${{ steps.tf.outputs.stdout }}
- summary:
- description: String summary of the last TF command.
- value: ${{ steps.tf.outputs.summary }}
-
-runs:
- using: composite
- steps:
- - name: Cache TF plugins
- id: cache_plugins
- if: inputs.cache_plugins == 'true' && hashFiles(format('{0}/.terraform.lock.hcl', inputs.arg_chdir))
- uses: actions/cache@3624ceb22c1c5a301c8db4169662070a689d9ea8 # v4.1.1
- with:
- key: ${{ runner.os }}-${{ inputs.tf_tool }}-${{ inputs.tf_version }}-${{ hashFiles(format('{0}/.terraform.lock.hcl', inputs.arg_chdir)) }}
- path: |
- ~/.terraform.d
- ${{ inputs.arg_chdir }}/.terraform
-
- - name: Install TF via tenv
- if: inputs.tf_version != ''
- env:
- TENV_ARCH: ${{ inputs.tenv_arch }}
- TENV_VERSION: ${{ inputs.tenv_version }}
- TF_TOOL: ${{ inputs.tf_tool }}
- TF_VERSION: ${{ inputs.tf_version }}
- shell: bash
- run: |
- # If $TENV_VERSION is not set, then retrieve the latest version.
- if [ -z "$TENV_VERSION" ]; then
- TENV_VERSION=$(curl --no-progress-meter --location https://api.github.com/repos/tofuutils/tenv/releases/latest | jq -r .tag_name)
- fi
- curl --remote-name --no-progress-meter --location "https://github.com/tofuutils/tenv/releases/download/${TENV_VERSION}/tenv_${TENV_VERSION}_${TENV_ARCH}.deb"
- sudo dpkg --install "tenv_${TENV_VERSION}_${TENV_ARCH}.deb"
- tenv "$TF_TOOL" install "$TF_VERSION"
- tenv update-path
-
- - name: Command TF
- id: tf
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
- env:
- # Input parameters.
- cache_hit: ${{ steps.cache_plugins.outputs.cache-hit }}
- comment_pr: ${{ inputs.comment_pr }}
- encrypt_passphrase: ${{ inputs.encrypt_passphrase }}
- fmt_enable: ${{ inputs.fmt_enable }}
- label_pr: ${{ inputs.label_pr }}
- plan_parity: ${{ inputs.plan_parity }}
- tf_tool: ${{ inputs.tf_tool }}
- update_comment: ${{ inputs.update_comment }}
- validate_enable: ${{ inputs.validate_enable }}
-
- # TF environment variables.
- TF_CLI_ARGS: ${{ env.TF_CLI_ARGS != '' && format('{0} -no-color', env.TF_CLI_ARGS) || '-no-color' }}
- TF_IN_AUTOMATION: true
- TF_INPUT: false
- MATRIX: ${{ toJSON(matrix) }}
-
- # TF arguments.
- arg_auto_approve: ${{ inputs.arg_auto_approve != '' && format('-auto-approve={0}', inputs.arg_auto_approve) || '' }}
- arg_auto_approve_raw: ${{ inputs.arg_auto_approve }}
- arg_backend: ${{ inputs.arg_backend != '' && format('-backend={0}', inputs.arg_backend) || '' }}
- arg_backend_config: ${{ inputs.arg_backend_config != '' && format('-backend-config={0}', inputs.arg_backend_config) || '' }}
- arg_backup: ${{ inputs.arg_backup != '' && format('-backup={0}', inputs.arg_backup) || '' }}
- arg_chdir: ${{ inputs.arg_chdir != '' && format('-chdir={0}', inputs.arg_chdir) || '' }}
- arg_check: ${{ inputs.arg_check != '' && format('-check={0}', inputs.arg_check) || '' }}
- arg_cloud: ${{ inputs.arg_cloud != '' && format('-cloud={0}', inputs.arg_cloud) || '' }}
- arg_command: ${{ inputs.arg_command }}
- arg_compact_warnings: ${{ inputs.arg_compact_warnings != '' && format('-compact-warnings={0}', inputs.arg_compact_warnings) || '' }}
- arg_concise: ${{ inputs.arg_concise != '' && format('-concise={0}', inputs.arg_concise) || '' }}
- arg_destroy: ${{ inputs.arg_destroy != '' && format('-destroy={0}', inputs.arg_destroy) || '' }}
- arg_detailed_exitcode: ${{ inputs.arg_detailed_exitcode != '' && format('-detailed-exitcode={0}', inputs.arg_detailed_exitcode) || '' }}
- arg_diff: ${{ inputs.arg_diff != '' && format('-diff={0}', inputs.arg_diff) || '' }}
- arg_force_copy: ${{ inputs.arg_force_copy != '' && format('-force-copy={0}', inputs.arg_force_copy) || '' }}
- arg_from_module: ${{ inputs.arg_from_module != '' && format('-from-module={0}', inputs.arg_from_module) || '' }}
- arg_generate_config_out: ${{ inputs.arg_generate_config_out != '' && format('-generate-config-out={0}', inputs.arg_generate_config_out) || '' }}
- arg_get: ${{ inputs.arg_get != '' && format('-get={0}', inputs.arg_get) || '' }}
- arg_ignore_remote_version: ${{ inputs.arg_ignore_remote_version != '' && format('-ignore-remote-version={0}', inputs.arg_ignore_remote_version) || '' }}
- arg_json: ${{ inputs.arg_json != '' && format('-json={0}', inputs.arg_json) || '' }}
- arg_list: ${{ inputs.arg_list != '' && format('-list={0}', inputs.arg_list) || '' }}
- arg_lock: ${{ inputs.arg_lock != '' && format('-lock={0}', inputs.arg_lock) || '' }}
- arg_lock_timeout: ${{ inputs.arg_lock_timeout != '' && format('-lock-timeout={0}', inputs.arg_lock_timeout) || '' }}
- arg_lockfile: ${{ inputs.arg_lockfile != '' && format('-lockfile={0}', inputs.arg_lockfile) || '' }}
- arg_migrate_state: ${{ inputs.arg_migrate_state != '' && format('-migrate-state={0}', inputs.arg_migrate_state) || '' }}
- arg_no_tests: ${{ inputs.arg_no_tests != '' && format('-no-tests={0}', inputs.arg_no_tests) || '' }}
- arg_or_create: ${{ inputs.arg_or_create != '' && format('-or-create={0}', inputs.arg_or_create) || '' }}
- arg_out: ${{ inputs.arg_out != '' && format('-out={0}', inputs.arg_out) || '' }}
- arg_parallelism: ${{ inputs.arg_parallelism != '' && format('-parallelism={0}', inputs.arg_parallelism) || '' }}
- arg_plugin_dir: ${{ inputs.arg_plugin_dir != '' && format('-plugin-dir={0}', inputs.arg_plugin_dir) || '' }}
- arg_reconfigure: ${{ inputs.arg_reconfigure != '' && format('-reconfigure={0}', inputs.arg_reconfigure) || '' }}
- arg_recursive: ${{ inputs.arg_recursive != '' && format('-recursive={0}', inputs.arg_recursive) || '' }}
- arg_refresh: ${{ inputs.arg_refresh != '' && format('-refresh={0}', inputs.arg_refresh) || '' }}
- arg_refresh_only: ${{ inputs.arg_refresh_only != '' && format('-refresh-only={0}', inputs.arg_refresh_only) || '' }}
- arg_replace: ${{ inputs.arg_replace != '' && format('-replace={0}', inputs.arg_replace) || '' }}
- arg_state: ${{ inputs.arg_state != '' && format('-state={0}', inputs.arg_state) || '' }}
- arg_state_out: ${{ inputs.arg_state_out != '' && format('-state-out={0}', inputs.arg_state_out) || '' }}
- arg_target: ${{ inputs.arg_target != '' && format('-target={0}', inputs.arg_target) || '' }}
- arg_test_directory: ${{ inputs.arg_test_directory != '' && format('-test-directory={0}', inputs.arg_test_directory) || '' }}
- arg_upgrade: ${{ inputs.arg_upgrade != '' && format('-upgrade={0}', inputs.arg_upgrade) || '' }}
- arg_var: ${{ inputs.arg_var != '' && format('-var={0}', inputs.arg_var) || '' }}
- arg_var_file: ${{ inputs.arg_var_file != '' && format('-var-file={0}', inputs.arg_var_file) || '' }}
- arg_workspace: ${{ inputs.arg_workspace }}
- arg_workspace_alt: ${{ inputs.arg_workspace != '' && format('-workspace={0}', inputs.arg_workspace) || '' }}
- arg_write: ${{ inputs.arg_write != '' && format('-write={0}', inputs.arg_write) || '' }}
- with:
- retries: 3
- result-encoding: string
- script: await require(`${process.env.GITHUB_ACTION_PATH}/action.js`)({ context, core, exec, github });
-
- - name: Encrypt TF plan file
- if: inputs.encrypt_passphrase != ''
- env:
- encrypt_passphrase: ${{ inputs.encrypt_passphrase }}
- working_directory: ${{ inputs.arg_chdir }}/${{ inputs.arg_out }}
- shell: bash
- run: |
- TEMP_FILE=$(mktemp)
- printf %s "$encrypt_passphrase" > "$TEMP_FILE"
- openssl enc -aes-256-ctr -pbkdf2 -salt -in "$working_directory" -out "$working_directory.encrypted" -pass file:"$TEMP_FILE"
- mv "$working_directory.encrypted" "$working_directory"
-
- - name: Upload TF plan file
- if: inputs.arg_command == 'plan'
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
- with:
- name: ${{ steps.tf.outputs.identifier }}
- path: ${{ inputs.arg_chdir }}/${{ inputs.arg_out }}
- overwrite: true
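For completeness, a plan file encrypted by the step above can be decrypted with the matching openssl invocation once the artifact has been downloaded. The sketch below is a hypothetical workflow step; the secret name and file paths are placeholders:

    # Hypothetical decryption step mirroring the encryption command above;
    # ENCRYPT_PASSPHRASE, tfplan and tfplan.decrypted are placeholder names.
    - name: Decrypt TF plan file
      env:
        encrypt_passphrase: ${{ secrets.ENCRYPT_PASSPHRASE }}
      run: |
        TEMP_FILE=$(mktemp)
        printf %s "$encrypt_passphrase" > "$TEMP_FILE"
        openssl enc -d -aes-256-ctr -pbkdf2 -in tfplan -out tfplan.decrypted -pass file:"$TEMP_FILE"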