diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
new file mode 100644
index 0000000..adb2255
--- /dev/null
+++ b/.github/workflows/ci.yaml
@@ -0,0 +1,16 @@
+name: "build"
+on:
+ pull_request:
+ push:
+ branches:
+ - master
+ - 'v*'
+
+jobs:
+ build: # make sure build/ci work properly
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v1
+ - run: |
+ yarn install
+ yarn run all
\ No newline at end of file
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 398076c..1211743 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -1,45 +1,86 @@
-name: "build-test"
-on: # rebuild any PRs and main branch changes
+name: "test"
+on:
pull_request:
- push:
branches:
- master
- - 'releases/*'
+ - 'v*'
jobs:
- build: # make sure build/ci work properly
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v1
- - run: |
- yarn install
- yarn run all
- test: # make sure the action works on a clean machine without building only on PRs
- if: ${{ github.event_name == 'pull_request' }}
+ test-action: # make sure the action works on a clean machine without building
+ name: Test normal PR use
runs-on: ubuntu-latest
+ env:
+ tf_version: 0.12.27
steps:
- uses: actions/checkout@v1
+ - uses: hashicorp/setup-terraform@v1
+ with:
+ terraform_version: ${{ env.tf_version }}
+ - run: terraform init
+ working-directory: __tests__
+ - run: terraform plan -out test-plan.tfplan
+ working-directory: __tests__
- uses: ./
with:
- github_token: ${{ secrets.GITHUB_TOKEN }}
- terraform_plan_json: |
- {
- "resource_changes": [
- {
- "address": "module.app.module.database.aws_db_instance.database",
- "type": "aws_db_instance",
- "name": "database",
- "change": {
- "actions": ["delete"]
- }
- },
- {
- "address": "module.app.aws_security_group_rule.db_access",
- "type": "aws_security_group_rule",
- "name": "db_access",
- "change": {
- "actions": ["delete", "create"]
- }
- }
- ]
- }
\ No newline at end of file
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ working-directory: __tests__
+ terraform-plan-file: test-plan.tfplan
+
+ test-action-plan-in-diff-dir: # make sure the action works with plan in different directory from working dir
+ name: Test TF plan in different dir
+ runs-on: ubuntu-latest
+ env:
+ tf_version: 0.12.27
+ steps:
+ - uses: actions/checkout@v1
+ - uses: hashicorp/setup-terraform@v1
+ with:
+ terraform_version: ${{ env.tf_version }}
+ - run: terraform init
+ working-directory: __tests__
+ - run: terraform plan -out ../test-plan.tfplan
+ working-directory: __tests__
+ - uses: ./
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ working-directory: __tests__
+ terraform-plan-file: ../test-plan.tfplan
+
+ test-action-without-tf-wrapper: # make sure the action works without the terraform wrapper
+ name: Test without TF wrapper
+ runs-on: ubuntu-latest
+ env:
+ tf_version: 0.12.27
+ steps:
+ - uses: actions/checkout@v1
+ - uses: hashicorp/setup-terraform@v1
+ with:
+ terraform_version: ${{ env.tf_version }}
+ terraform_wrapper: false
+ - run: terraform init
+ working-directory: __tests__
+ - run: terraform plan -out test-plan.tfplan
+ working-directory: __tests__
+ - uses: ./
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ working-directory: __tests__
+ terraform-plan-file: test-plan.tfplan
+
+ test-action-at-root: # make sure the action works with terraform dir as root dir
+ name: Test from Root
+ runs-on: ubuntu-latest
+ env:
+ tf_version: 0.12.27
+ steps:
+ - uses: actions/checkout@v1
+ - run: mv __tests__/* .
+ - uses: hashicorp/setup-terraform@v1
+ with:
+ terraform_version: ${{ env.tf_version }}
+ - run: terraform init
+ - run: terraform plan -out test-plan.tfplan
+ - uses: ./
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ terraform-plan-file: test-plan.tfplan
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index c46a03a..cc63d61 100644
--- a/.gitignore
+++ b/.gitignore
@@ -99,4 +99,8 @@ __tests__/runner/*
lib/**/*
.idea
-*.iml
\ No newline at end of file
+*.iml
+
+__tests__/*
+!__tests__/main.test.ts
+!__tests__/test.tf
diff --git a/README.md b/README.md
index e70525d..b145091 100644
--- a/README.md
+++ b/README.md
@@ -1,14 +1,13 @@
-
-
-
+![build](https://github.com/byu-oit/github-action-tf-plan-comment/workflows/build/badge.svg)
+![test](https://github.com/byu-oit/github-action-tf-plan-comment/workflows/test/badge.svg)
# ![BYU logo](https://www.hscripts.com/freeimages/logos/university-logos/byu/byu-logo-clipart-128.gif) github-action-tf-plan-comment
GitHub Action to make a comment on a pull request with the proposed updated terraform plan
-This action takes in a JSON representation of your terraform plan and creates a comment on the Pull Request (PR) with basic info about what the plan will create, update, replace, or delete.
+This action takes in a terraform plan file and creates a comment on the Pull Request (PR) with basic info about what the plan will create, update, replace, or delete.
-**Note:** this action does not run terraform plan for you, you must pass in the plan as an input.
+**Note:** this action does not run `terraform plan` for you; you must pass in the plan file as an input, along with the directory of the terraform configuration (where the plan file and `.terraform` dir are located after `terraform init`).
## Usage
```yaml
@@ -19,34 +18,33 @@ jobs:
runs-on: ubuntu-latest
steps:
# ...
- - name: Terraform Plan JSON
- id: json_plan
- run: terraform show -json plan
+ # terraform init
+ # terraform plan
- name: Comment Terraform Plan
uses: byu-oit/github-action-tf-plan-comment@v1
with:
- github_token: ${{ secrets.GITHUB_TOKEN }}
- terraform_plan_json: ${{ steps.json_plan.outputs.stdout }}
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ working-directory: terraform-iac/dev/app # where your terraform files are
+ terraform-plan-file: plan.tfplan # relative to working directory
```
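+
+A complete job that produces the plan with [hashicorp/setup-terraform](https://github.com/hashicorp/setup-terraform) could look like the following (a sketch mirroring this repository's test workflow; the terraform version and directory are illustrative):
+
+```yaml
+jobs:
+  plan-and-comment:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v1
+      - uses: hashicorp/setup-terraform@v1
+        with:
+          terraform_version: 0.12.27
+      - run: terraform init
+        working-directory: terraform-iac/dev/app
+      - run: terraform plan -out plan.tfplan
+        working-directory: terraform-iac/dev/app
+      - name: Comment Terraform Plan
+        uses: byu-oit/github-action-tf-plan-comment@v1
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          working-directory: terraform-iac/dev/app
+          terraform-plan-file: plan.tfplan
+```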
-**Note:** make sure you run your `terraform show-json plan` in the same working directory as the `terraform plan` step, and make sure you
+## Inputs
+* `github-token` - (**required**) pass in the GitHub token to make comments on the PR
+* `working-directory` - (_optional_) the directory of the terraform configuration files (defaults to `.`)
+* `terraform-plan-file` - (**required**) filename of the terraform plan (relative to `working-directory`)
+## Output
This action will create a comment on your PR like:
> ## Terraform Plan:
-> will replace (delete then create) 1 resources:
+> will **replace (delete then create)** 1 resource:
> - aws_security_group_rule - db_access
>
-> will delete 1 resources:
+> will **delete** 1 resource:
> - aws_db_instance - database
>
>[see details](link to the github action workflow)
-
-## Inputs
-* `github_token` - (**required**) pass in the GitHub token to make comments on the PR
-* `terraform_plan_json` - (**required**) JSON representation of the terraform plan to be executed
-
## Contributing
Hopefully this is useful to others at BYU.
Feel free to ask me some questions about it, but I make no promises about being able to commit time to support it.
diff --git a/__tests__/test.tf b/__tests__/test.tf
new file mode 100644
index 0000000..9c6eb92
--- /dev/null
+++ b/__tests__/test.tf
@@ -0,0 +1,10 @@
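+# Minimal test configuration: uuid() in the keepers map forces random_pet (and the
+# local_file that references it) to change on every plan, so each test run gives
+# the action resource changes to comment on.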
+resource "local_file" "fake_file" {
+ filename = "test.txt"
+ content = "Hello ${random_pet.name.id}"
+}
+
+resource "random_pet" "name"{
+ keepers = {
+ uuid = uuid()
+ }
+}
diff --git a/action.yml b/action.yml
index a9879b8..4cde159 100644
--- a/action.yml
+++ b/action.yml
@@ -2,12 +2,16 @@ name: 'Terraform Plan Comment'
description: 'Creates a comment on a pull request with the terraform plan'
author: 'Brigham Young University'
inputs:
- github_token:
+ github-token:
required: true
description: 'github token'
- terraform_plan_json:
+ terraform-plan-file:
required: true
- description: JSON of the terraform plan
+ description: File name of the terraform plan
+ working-directory:
+ required: false
+ description: Directory of the terraform configuration
+ default: .
runs:
using: 'node12'
main: 'dist/index.js'
diff --git a/dist/index.js b/dist/index.js
index 59fd97f..9e18cfb 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -43,6 +43,303 @@ module.exports =
/************************************************************************/
/******/ ({
+/***/ 1:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const childProcess = __webpack_require__(129);
+const path = __webpack_require__(622);
+const util_1 = __webpack_require__(669);
+const ioUtil = __webpack_require__(672);
+const exec = util_1.promisify(childProcess.exec);
+/**
+ * Copies a file or folder.
+ * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js
+ *
+ * @param source source path
+ * @param dest destination path
+ * @param options optional. See CopyOptions.
+ */
+function cp(source, dest, options = {}) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const { force, recursive } = readCopyOptions(options);
+ const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;
+ // Dest is an existing file, but not forcing
+ if (destStat && destStat.isFile() && !force) {
+ return;
+ }
+ // If dest is an existing directory, should copy inside.
+ const newDest = destStat && destStat.isDirectory()
+ ? path.join(dest, path.basename(source))
+ : dest;
+ if (!(yield ioUtil.exists(source))) {
+ throw new Error(`no such file or directory: ${source}`);
+ }
+ const sourceStat = yield ioUtil.stat(source);
+ if (sourceStat.isDirectory()) {
+ if (!recursive) {
+ throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);
+ }
+ else {
+ yield cpDirRecursive(source, newDest, 0, force);
+ }
+ }
+ else {
+ if (path.relative(source, newDest) === '') {
+ // a file cannot be copied to itself
+ throw new Error(`'${newDest}' and '${source}' are the same file`);
+ }
+ yield copyFile(source, newDest, force);
+ }
+ });
+}
+exports.cp = cp;
+/**
+ * Moves a path.
+ *
+ * @param source source path
+ * @param dest destination path
+ * @param options optional. See MoveOptions.
+ */
+function mv(source, dest, options = {}) {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (yield ioUtil.exists(dest)) {
+ let destExists = true;
+ if (yield ioUtil.isDirectory(dest)) {
+ // If dest is directory copy src into dest
+ dest = path.join(dest, path.basename(source));
+ destExists = yield ioUtil.exists(dest);
+ }
+ if (destExists) {
+ if (options.force == null || options.force) {
+ yield rmRF(dest);
+ }
+ else {
+ throw new Error('Destination already exists');
+ }
+ }
+ }
+ yield mkdirP(path.dirname(dest));
+ yield ioUtil.rename(source, dest);
+ });
+}
+exports.mv = mv;
+/**
+ * Remove a path recursively with force
+ *
+ * @param inputPath path to remove
+ */
+function rmRF(inputPath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (ioUtil.IS_WINDOWS) {
+ // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another
+ // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.
+ try {
+ if (yield ioUtil.isDirectory(inputPath, true)) {
+ yield exec(`rd /s /q "${inputPath}"`);
+ }
+ else {
+ yield exec(`del /f /a "${inputPath}"`);
+ }
+ }
+ catch (err) {
+ // if you try to delete a file that doesn't exist, desired result is achieved
+ // other errors are valid
+ if (err.code !== 'ENOENT')
+ throw err;
+ }
+ // Shelling out fails to remove a symlink folder with missing source, this unlink catches that
+ try {
+ yield ioUtil.unlink(inputPath);
+ }
+ catch (err) {
+ // if you try to delete a file that doesn't exist, desired result is achieved
+ // other errors are valid
+ if (err.code !== 'ENOENT')
+ throw err;
+ }
+ }
+ else {
+ let isDir = false;
+ try {
+ isDir = yield ioUtil.isDirectory(inputPath);
+ }
+ catch (err) {
+ // if you try to delete a file that doesn't exist, desired result is achieved
+ // other errors are valid
+ if (err.code !== 'ENOENT')
+ throw err;
+ return;
+ }
+ if (isDir) {
+ yield exec(`rm -rf "${inputPath}"`);
+ }
+ else {
+ yield ioUtil.unlink(inputPath);
+ }
+ }
+ });
+}
+exports.rmRF = rmRF;
+/**
+ * Make a directory. Creates the full path with folders in between
+ * Will throw if it fails
+ *
+ * @param fsPath path to create
+ * @returns Promise
+ */
+function mkdirP(fsPath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ yield ioUtil.mkdirP(fsPath);
+ });
+}
+exports.mkdirP = mkdirP;
+/**
+ * Returns path of a tool had the tool actually been invoked. Resolves via paths.
+ * If you check and the tool does not exist, it will throw.
+ *
+ * @param tool name of the tool
+ * @param check whether to check if tool exists
+ * @returns Promise path to tool
+ */
+function which(tool, check) {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (!tool) {
+ throw new Error("parameter 'tool' is required");
+ }
+ // recursive when check=true
+ if (check) {
+ const result = yield which(tool, false);
+ if (!result) {
+ if (ioUtil.IS_WINDOWS) {
+ throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);
+ }
+ else {
+ throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`);
+ }
+ }
+ }
+ try {
+ // build the list of extensions to try
+ const extensions = [];
+ if (ioUtil.IS_WINDOWS && process.env.PATHEXT) {
+ for (const extension of process.env.PATHEXT.split(path.delimiter)) {
+ if (extension) {
+ extensions.push(extension);
+ }
+ }
+ }
+ // if it's rooted, return it if exists. otherwise return empty.
+ if (ioUtil.isRooted(tool)) {
+ const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);
+ if (filePath) {
+ return filePath;
+ }
+ return '';
+ }
+ // if any path separators, return empty
+ if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) {
+ return '';
+ }
+ // build the list of directories
+ //
+ // Note, technically "where" checks the current directory on Windows. From a toolkit perspective,
+ // it feels like we should not do this. Checking the current directory seems like more of a use
+ // case of a shell, and the which() function exposed by the toolkit should strive for consistency
+ // across platforms.
+ const directories = [];
+ if (process.env.PATH) {
+ for (const p of process.env.PATH.split(path.delimiter)) {
+ if (p) {
+ directories.push(p);
+ }
+ }
+ }
+ // return the first match
+ for (const directory of directories) {
+ const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions);
+ if (filePath) {
+ return filePath;
+ }
+ }
+ return '';
+ }
+ catch (err) {
+ throw new Error(`which failed with message ${err.message}`);
+ }
+ });
+}
+exports.which = which;
+function readCopyOptions(options) {
+ const force = options.force == null ? true : options.force;
+ const recursive = Boolean(options.recursive);
+ return { force, recursive };
+}
+function cpDirRecursive(sourceDir, destDir, currentDepth, force) {
+ return __awaiter(this, void 0, void 0, function* () {
+ // Ensure there is not a run away recursive copy
+ if (currentDepth >= 255)
+ return;
+ currentDepth++;
+ yield mkdirP(destDir);
+ const files = yield ioUtil.readdir(sourceDir);
+ for (const fileName of files) {
+ const srcFile = `${sourceDir}/${fileName}`;
+ const destFile = `${destDir}/${fileName}`;
+ const srcFileStat = yield ioUtil.lstat(srcFile);
+ if (srcFileStat.isDirectory()) {
+ // Recurse
+ yield cpDirRecursive(srcFile, destFile, currentDepth, force);
+ }
+ else {
+ yield copyFile(srcFile, destFile, force);
+ }
+ }
+ // Change the mode for the newly created directory
+ yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);
+ });
+}
+// Buffered file copy
+function copyFile(srcFile, destFile, force) {
+ return __awaiter(this, void 0, void 0, function* () {
+ if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {
+ // unlink/re-link it
+ try {
+ yield ioUtil.lstat(destFile);
+ yield ioUtil.unlink(destFile);
+ }
+ catch (e) {
+ // Try to override file permission
+ if (e.code === 'EPERM') {
+ yield ioUtil.chmod(destFile, '0666');
+ yield ioUtil.unlink(destFile);
+ }
+ // other errors = it doesn't exist, no work to do
+ }
+ // Copy over symlink
+ const symlinkFull = yield ioUtil.readlink(srcFile);
+ yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null);
+ }
+ else if (!(yield ioUtil.exists(destFile)) || force) {
+ yield ioUtil.copyFile(srcFile, destFile);
+ }
+ });
+}
+//# sourceMappingURL=io.js.map
+
+/***/ }),
+
/***/ 2:
/***/ (function(module, __unusedexports, __webpack_require__) {
@@ -98,103 +395,609 @@ module.exports = osName;
/***/ }),
/***/ 9:
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-var once = __webpack_require__(969);
-
-var noop = function() {};
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
-var isRequest = function(stream) {
- return stream.setHeader && typeof stream.abort === 'function';
-};
+"use strict";
-var isChildProcess = function(stream) {
- return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
};
-
-var eos = function(stream, opts, callback) {
- if (typeof opts === 'function') return eos(stream, null, opts);
- if (!opts) opts = {};
-
- callback = once(callback || noop);
-
- var ws = stream._writableState;
- var rs = stream._readableState;
- var readable = opts.readable || (opts.readable !== false && stream.readable);
- var writable = opts.writable || (opts.writable !== false && stream.writable);
- var cancelled = false;
-
- var onlegacyfinish = function() {
- if (!stream.writable) onfinish();
- };
-
- var onfinish = function() {
- writable = false;
- if (!readable) callback.call(stream);
- };
-
- var onend = function() {
- readable = false;
- if (!writable) callback.call(stream);
- };
-
- var onexit = function(exitCode) {
- callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);
- };
-
- var onerror = function(err) {
- callback.call(stream, err);
- };
-
- var onclose = function() {
- process.nextTick(onclosenexttick);
- };
-
- var onclosenexttick = function() {
- if (cancelled) return;
- if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close'));
- if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close'));
- };
-
- var onrequest = function() {
- stream.req.on('finish', onfinish);
- };
-
- if (isRequest(stream)) {
- stream.on('complete', onfinish);
- stream.on('abort', onclose);
- if (stream.req) onrequest();
- else stream.on('request', onrequest);
- } else if (writable && !ws) { // legacy streams
- stream.on('end', onlegacyfinish);
- stream.on('close', onlegacyfinish);
- }
-
- if (isChildProcess(stream)) stream.on('exit', onexit);
-
- stream.on('end', onend);
- stream.on('finish', onfinish);
- if (opts.error !== false) stream.on('error', onerror);
- stream.on('close', onclose);
-
- return function() {
- cancelled = true;
- stream.removeListener('complete', onfinish);
- stream.removeListener('abort', onclose);
- stream.removeListener('request', onrequest);
- if (stream.req) stream.req.removeListener('finish', onfinish);
- stream.removeListener('end', onlegacyfinish);
- stream.removeListener('close', onlegacyfinish);
- stream.removeListener('finish', onfinish);
- stream.removeListener('exit', onexit);
- stream.removeListener('end', onend);
- stream.removeListener('error', onerror);
- stream.removeListener('close', onclose);
- };
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
};
-
-module.exports = eos;
-
+Object.defineProperty(exports, "__esModule", { value: true });
+const os = __importStar(__webpack_require__(87));
+const events = __importStar(__webpack_require__(614));
+const child = __importStar(__webpack_require__(129));
+const path = __importStar(__webpack_require__(622));
+const io = __importStar(__webpack_require__(1));
+const ioUtil = __importStar(__webpack_require__(672));
+/* eslint-disable @typescript-eslint/unbound-method */
+const IS_WINDOWS = process.platform === 'win32';
+/*
+ * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.
+ */
+class ToolRunner extends events.EventEmitter {
+ constructor(toolPath, args, options) {
+ super();
+ if (!toolPath) {
+ throw new Error("Parameter 'toolPath' cannot be null or empty.");
+ }
+ this.toolPath = toolPath;
+ this.args = args || [];
+ this.options = options || {};
+ }
+ _debug(message) {
+ if (this.options.listeners && this.options.listeners.debug) {
+ this.options.listeners.debug(message);
+ }
+ }
+ _getCommandString(options, noPrefix) {
+ const toolPath = this._getSpawnFileName();
+ const args = this._getSpawnArgs(options);
+ let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool
+ if (IS_WINDOWS) {
+ // Windows + cmd file
+ if (this._isCmdFile()) {
+ cmd += toolPath;
+ for (const a of args) {
+ cmd += ` ${a}`;
+ }
+ }
+ // Windows + verbatim
+ else if (options.windowsVerbatimArguments) {
+ cmd += `"${toolPath}"`;
+ for (const a of args) {
+ cmd += ` ${a}`;
+ }
+ }
+ // Windows (regular)
+ else {
+ cmd += this._windowsQuoteCmdArg(toolPath);
+ for (const a of args) {
+ cmd += ` ${this._windowsQuoteCmdArg(a)}`;
+ }
+ }
+ }
+ else {
+ // OSX/Linux - this can likely be improved with some form of quoting.
+ // creating processes on Unix is fundamentally different than Windows.
+ // on Unix, execvp() takes an arg array.
+ cmd += toolPath;
+ for (const a of args) {
+ cmd += ` ${a}`;
+ }
+ }
+ return cmd;
+ }
+ _processLineBuffer(data, strBuffer, onLine) {
+ try {
+ let s = strBuffer + data.toString();
+ let n = s.indexOf(os.EOL);
+ while (n > -1) {
+ const line = s.substring(0, n);
+ onLine(line);
+ // the rest of the string ...
+ s = s.substring(n + os.EOL.length);
+ n = s.indexOf(os.EOL);
+ }
+ strBuffer = s;
+ }
+ catch (err) {
+ // streaming lines to console is best effort. Don't fail a build.
+ this._debug(`error processing line. Failed with error ${err}`);
+ }
+ }
+ _getSpawnFileName() {
+ if (IS_WINDOWS) {
+ if (this._isCmdFile()) {
+ return process.env['COMSPEC'] || 'cmd.exe';
+ }
+ }
+ return this.toolPath;
+ }
+ _getSpawnArgs(options) {
+ if (IS_WINDOWS) {
+ if (this._isCmdFile()) {
+ let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`;
+ for (const a of this.args) {
+ argline += ' ';
+ argline += options.windowsVerbatimArguments
+ ? a
+ : this._windowsQuoteCmdArg(a);
+ }
+ argline += '"';
+ return [argline];
+ }
+ }
+ return this.args;
+ }
+ _endsWith(str, end) {
+ return str.endsWith(end);
+ }
+ _isCmdFile() {
+ const upperToolPath = this.toolPath.toUpperCase();
+ return (this._endsWith(upperToolPath, '.CMD') ||
+ this._endsWith(upperToolPath, '.BAT'));
+ }
+ _windowsQuoteCmdArg(arg) {
+ // for .exe, apply the normal quoting rules that libuv applies
+ if (!this._isCmdFile()) {
+ return this._uvQuoteCmdArg(arg);
+ }
+ // otherwise apply quoting rules specific to the cmd.exe command line parser.
+ // the libuv rules are generic and are not designed specifically for cmd.exe
+ // command line parser.
+ //
+ // for a detailed description of the cmd.exe command line parser, refer to
+ // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
+ // need quotes for empty arg
+ if (!arg) {
+ return '""';
+ }
+ // determine whether the arg needs to be quoted
+ const cmdSpecialChars = [
+ ' ',
+ '\t',
+ '&',
+ '(',
+ ')',
+ '[',
+ ']',
+ '{',
+ '}',
+ '^',
+ '=',
+ ';',
+ '!',
+ "'",
+ '+',
+ ',',
+ '`',
+ '~',
+ '|',
+ '<',
+ '>',
+ '"'
+ ];
+ let needsQuotes = false;
+ for (const char of arg) {
+ if (cmdSpecialChars.some(x => x === char)) {
+ needsQuotes = true;
+ break;
+ }
+ }
+ // short-circuit if quotes not needed
+ if (!needsQuotes) {
+ return arg;
+ }
+ // the following quoting rules are very similar to the rules that by libuv applies.
+ //
+ // 1) wrap the string in quotes
+ //
+ // 2) double-up quotes - i.e. " => ""
+ //
+ // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately
+ // doesn't work well with a cmd.exe command line.
+ //
+ // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app.
+ // for example, the command line:
+ // foo.exe "myarg:""my val"""
+ // is parsed by a .NET console app into an arg array:
+ // [ "myarg:\"my val\"" ]
+ // which is the same end result when applying libuv quoting rules. although the actual
+ // command line from libuv quoting rules would look like:
+ // foo.exe "myarg:\"my val\""
+ //
+ // 3) double-up slashes that precede a quote,
+ // e.g. hello \world => "hello \world"
+ // hello\"world => "hello\\""world"
+ // hello\\"world => "hello\\\\""world"
+ // hello world\ => "hello world\\"
+ //
+ // technically this is not required for a cmd.exe command line, or the batch argument parser.
+ // the reasons for including this as a .cmd quoting rule are:
+ //
+ // a) this is optimized for the scenario where the argument is passed from the .cmd file to an
+ // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.
+ //
+ // b) it's what we've been doing previously (by deferring to node default behavior) and we
+ // haven't heard any complaints about that aspect.
+ //
+ // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be
+ // escaped when used on the command line directly - even though within a .cmd file % can be escaped
+ // by using %%.
+ //
+ // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts
+ // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.
+ //
+ // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would
+ // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the
+ // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args
+ // to an external program.
+ //
+ // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.
+ // % can be escaped within a .cmd file.
+ let reverse = '"';
+ let quoteHit = true;
+ for (let i = arg.length; i > 0; i--) {
+ // walk the string in reverse
+ reverse += arg[i - 1];
+ if (quoteHit && arg[i - 1] === '\\') {
+ reverse += '\\'; // double the slash
+ }
+ else if (arg[i - 1] === '"') {
+ quoteHit = true;
+ reverse += '"'; // double the quote
+ }
+ else {
+ quoteHit = false;
+ }
+ }
+ reverse += '"';
+ return reverse
+ .split('')
+ .reverse()
+ .join('');
+ }
+ _uvQuoteCmdArg(arg) {
+ // Tool runner wraps child_process.spawn() and needs to apply the same quoting as
+ // Node in certain cases where the undocumented spawn option windowsVerbatimArguments
+ // is used.
+ //
+ // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
+ // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
+ // pasting copyright notice from Node within this function:
+ //
+ // Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+ //
+ // Permission is hereby granted, free of charge, to any person obtaining a copy
+ // of this software and associated documentation files (the "Software"), to
+ // deal in the Software without restriction, including without limitation the
+ // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ // sell copies of the Software, and to permit persons to whom the Software is
+ // furnished to do so, subject to the following conditions:
+ //
+ // The above copyright notice and this permission notice shall be included in
+ // all copies or substantial portions of the Software.
+ //
+ // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+ // IN THE SOFTWARE.
+ if (!arg) {
+ // Need double quotation for empty argument
+ return '""';
+ }
+ if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) {
+ // No quotation needed
+ return arg;
+ }
+ if (!arg.includes('"') && !arg.includes('\\')) {
+ // No embedded double quotes or backslashes, so I can just wrap
+ // quote marks around the whole thing.
+ return `"${arg}"`;
+ }
+ // Expected input/output:
+ // input : hello"world
+ // output: "hello\"world"
+ // input : hello""world
+ // output: "hello\"\"world"
+ // input : hello\world
+ // output: hello\world
+ // input : hello\\world
+ // output: hello\\world
+ // input : hello\"world
+ // output: "hello\\\"world"
+ // input : hello\\"world
+ // output: "hello\\\\\"world"
+ // input : hello world\
+ // output: "hello world\\" - note the comment in libuv actually reads "hello world\"
+ // but it appears the comment is wrong, it should be "hello world\\"
+ let reverse = '"';
+ let quoteHit = true;
+ for (let i = arg.length; i > 0; i--) {
+ // walk the string in reverse
+ reverse += arg[i - 1];
+ if (quoteHit && arg[i - 1] === '\\') {
+ reverse += '\\';
+ }
+ else if (arg[i - 1] === '"') {
+ quoteHit = true;
+ reverse += '\\';
+ }
+ else {
+ quoteHit = false;
+ }
+ }
+ reverse += '"';
+ return reverse
+ .split('')
+ .reverse()
+ .join('');
+ }
+ _cloneExecOptions(options) {
+ options = options || {};
+ const result = {
+ cwd: options.cwd || process.cwd(),
+ env: options.env || process.env,
+ silent: options.silent || false,
+ windowsVerbatimArguments: options.windowsVerbatimArguments || false,
+ failOnStdErr: options.failOnStdErr || false,
+ ignoreReturnCode: options.ignoreReturnCode || false,
+ delay: options.delay || 10000
+ };
+ result.outStream = options.outStream || process.stdout;
+ result.errStream = options.errStream || process.stderr;
+ return result;
+ }
+ _getSpawnOptions(options, toolPath) {
+ options = options || {};
+ const result = {};
+ result.cwd = options.cwd;
+ result.env = options.env;
+ result['windowsVerbatimArguments'] =
+ options.windowsVerbatimArguments || this._isCmdFile();
+ if (options.windowsVerbatimArguments) {
+ result.argv0 = `"${toolPath}"`;
+ }
+ return result;
+ }
+ /**
+ * Exec a tool.
+ * Output will be streamed to the live console.
+ * Returns promise with return code
+ *
+ * @param tool path to tool to exec
+ * @param options optional exec options. See ExecOptions
+ * @returns number
+ */
+ exec() {
+ return __awaiter(this, void 0, void 0, function* () {
+ // root the tool path if it is unrooted and contains relative pathing
+ if (!ioUtil.isRooted(this.toolPath) &&
+ (this.toolPath.includes('/') ||
+ (IS_WINDOWS && this.toolPath.includes('\\')))) {
+ // prefer options.cwd if it is specified, however options.cwd may also need to be rooted
+ this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);
+ }
+ // if the tool is only a file name, then resolve it from the PATH
+ // otherwise verify it exists (add extension on Windows if necessary)
+ this.toolPath = yield io.which(this.toolPath, true);
+ return new Promise((resolve, reject) => {
+ this._debug(`exec tool: ${this.toolPath}`);
+ this._debug('arguments:');
+ for (const arg of this.args) {
+ this._debug(` ${arg}`);
+ }
+ const optionsNonNull = this._cloneExecOptions(this.options);
+ if (!optionsNonNull.silent && optionsNonNull.outStream) {
+ optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);
+ }
+ const state = new ExecState(optionsNonNull, this.toolPath);
+ state.on('debug', (message) => {
+ this._debug(message);
+ });
+ const fileName = this._getSpawnFileName();
+ const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));
+ const stdbuffer = '';
+ if (cp.stdout) {
+ cp.stdout.on('data', (data) => {
+ if (this.options.listeners && this.options.listeners.stdout) {
+ this.options.listeners.stdout(data);
+ }
+ if (!optionsNonNull.silent && optionsNonNull.outStream) {
+ optionsNonNull.outStream.write(data);
+ }
+ this._processLineBuffer(data, stdbuffer, (line) => {
+ if (this.options.listeners && this.options.listeners.stdline) {
+ this.options.listeners.stdline(line);
+ }
+ });
+ });
+ }
+ const errbuffer = '';
+ if (cp.stderr) {
+ cp.stderr.on('data', (data) => {
+ state.processStderr = true;
+ if (this.options.listeners && this.options.listeners.stderr) {
+ this.options.listeners.stderr(data);
+ }
+ if (!optionsNonNull.silent &&
+ optionsNonNull.errStream &&
+ optionsNonNull.outStream) {
+ const s = optionsNonNull.failOnStdErr
+ ? optionsNonNull.errStream
+ : optionsNonNull.outStream;
+ s.write(data);
+ }
+ this._processLineBuffer(data, errbuffer, (line) => {
+ if (this.options.listeners && this.options.listeners.errline) {
+ this.options.listeners.errline(line);
+ }
+ });
+ });
+ }
+ cp.on('error', (err) => {
+ state.processError = err.message;
+ state.processExited = true;
+ state.processClosed = true;
+ state.CheckComplete();
+ });
+ cp.on('exit', (code) => {
+ state.processExitCode = code;
+ state.processExited = true;
+ this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);
+ state.CheckComplete();
+ });
+ cp.on('close', (code) => {
+ state.processExitCode = code;
+ state.processExited = true;
+ state.processClosed = true;
+ this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);
+ state.CheckComplete();
+ });
+ state.on('done', (error, exitCode) => {
+ if (stdbuffer.length > 0) {
+ this.emit('stdline', stdbuffer);
+ }
+ if (errbuffer.length > 0) {
+ this.emit('errline', errbuffer);
+ }
+ cp.removeAllListeners();
+ if (error) {
+ reject(error);
+ }
+ else {
+ resolve(exitCode);
+ }
+ });
+ if (this.options.input) {
+ if (!cp.stdin) {
+ throw new Error('child process missing stdin');
+ }
+ cp.stdin.end(this.options.input);
+ }
+ });
+ });
+ }
+}
+exports.ToolRunner = ToolRunner;
+/**
+ * Convert an arg string to an array of args. Handles escaping
+ *
+ * @param argString string of arguments
+ * @returns string[] array of arguments
+ */
+function argStringToArray(argString) {
+ const args = [];
+ let inQuotes = false;
+ let escaped = false;
+ let arg = '';
+ function append(c) {
+ // we only escape double quotes.
+ if (escaped && c !== '"') {
+ arg += '\\';
+ }
+ arg += c;
+ escaped = false;
+ }
+ for (let i = 0; i < argString.length; i++) {
+ const c = argString.charAt(i);
+ if (c === '"') {
+ if (!escaped) {
+ inQuotes = !inQuotes;
+ }
+ else {
+ append(c);
+ }
+ continue;
+ }
+ if (c === '\\' && escaped) {
+ append(c);
+ continue;
+ }
+ if (c === '\\' && inQuotes) {
+ escaped = true;
+ continue;
+ }
+ if (c === ' ' && !inQuotes) {
+ if (arg.length > 0) {
+ args.push(arg);
+ arg = '';
+ }
+ continue;
+ }
+ append(c);
+ }
+ if (arg.length > 0) {
+ args.push(arg.trim());
+ }
+ return args;
+}
+exports.argStringToArray = argStringToArray;
+class ExecState extends events.EventEmitter {
+ constructor(options, toolPath) {
+ super();
+ this.processClosed = false; // tracks whether the process has exited and stdio is closed
+ this.processError = '';
+ this.processExitCode = 0;
+ this.processExited = false; // tracks whether the process has exited
+ this.processStderr = false; // tracks whether stderr was written to
+ this.delay = 10000; // 10 seconds
+ this.done = false;
+ this.timeout = null;
+ if (!toolPath) {
+ throw new Error('toolPath must not be empty');
+ }
+ this.options = options;
+ this.toolPath = toolPath;
+ if (options.delay) {
+ this.delay = options.delay;
+ }
+ }
+ CheckComplete() {
+ if (this.done) {
+ return;
+ }
+ if (this.processClosed) {
+ this._setResult();
+ }
+ else if (this.processExited) {
+ this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);
+ }
+ }
+ _debug(message) {
+ this.emit('debug', message);
+ }
+ _setResult() {
+ // determine whether there is an error
+ let error;
+ if (this.processExited) {
+ if (this.processError) {
+ error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`);
+ }
+ else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {
+ error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);
+ }
+ else if (this.processStderr && this.options.failOnStdErr) {
+ error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);
+ }
+ }
+ // clear the timeout
+ if (this.timeout) {
+ clearTimeout(this.timeout);
+ this.timeout = null;
+ }
+ this.done = true;
+ this.emit('done', error, this.processExitCode);
+ }
+ static HandleTimeout(state) {
+ if (state.done) {
+ return;
+ }
+ if (!state.processClosed && state.processExited) {
+ const message = `The STDIO streams did not close within ${state.delay /
+ 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;
+ state._debug(message);
+ }
+ state._setResult();
+ }
+}
+//# sourceMappingURL=toolrunner.js.map
/***/ }),
@@ -938,6 +1741,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(__webpack_require__(470));
+const exec = __importStar(__webpack_require__(986));
const github = __importStar(__webpack_require__(469));
const types_1 = __webpack_require__(251);
const commentPrefix = '## Terraform Plan:';
@@ -950,24 +1754,61 @@ async function run() {
return;
}
core.debug('got pull request');
- const terraformPlan = JSON.parse(core.getInput('terraform_plan_json'));
- const token = core.getInput('github_token');
+ const planFileName = core.getInput('terraform-plan-file');
+ const workingDir = core.getInput('working-directory');
+ const json = await jsonFromPlan(workingDir, planFileName);
+ const terraformPlan = JSON.parse(json);
+ core.debug('successfully parsed json');
+ const token = core.getInput('github-token');
const runId = parseInt(process.env['GITHUB_RUN_ID'] || '-1');
+ if (runId === -1) {
+ core.setFailed('No GITHUB_RUN_ID found');
+ return;
+ }
const commenter = new PlanCommenter(token, runId, pr);
- await commenter.makePlanComment(terraformPlan);
+ await commenter.commentWithPlanSummary(terraformPlan);
}
catch (error) {
core.setFailed(error.message);
}
}
+// convert the terraform plan file into a JSON string using `terraform show -json`
+async function jsonFromPlan(workingDir, planFileName) {
+ // run terraform show -json to parse the plan into a json string
+ let output = '';
+ const options = {
+ listeners: {
+ stdout: (data) => {
+ // captures the standard output of the terraform show command and appends it to the variable 'output'
+ output += data.toString('utf8');
+ }
+ },
+        cwd: workingDir // execute the command from the supplied working directory
+ };
+ core.debug(`execOptions: ${JSON.stringify(options)}`);
+ await exec.exec('terraform', ['show', '-json', planFileName], options);
+    // strip any extra output added by the terraform wrapper from the hashicorp/setup-terraform action
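+    // terraform show -json prints the plan as a single line of JSON, so a greedy /{.*}/ match on that line recovers it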
+ const json = output.match(/{.*}/);
+ if (json === null) {
+ core.error('null match...');
+ core.debug('** start of output **');
+ core.debug(output);
+ core.debug('** end of output **');
+ throw Error("output didn't match with /{.*}/ correctly");
+ }
+ core.debug('** matched json **');
+ core.debug(json[0]);
+ core.debug('** end matched json **');
+ return json[0];
+}
class PlanCommenter {
constructor(token, runId, pr) {
this.octokit = github.getOctokit(token);
this.runId = runId;
this.pr = pr;
}
- async makePlanComment(terraformPlan) {
- const body = await this.planComment(terraformPlan);
+ async commentWithPlanSummary(terraformPlan) {
+ const body = await this.planSummaryBody(terraformPlan);
// find previous comment if it exists
const comments = await this.octokit.issues.listComments({
...github.context.repo,
@@ -1001,15 +1842,15 @@ class PlanCommenter {
return createdComment.data.id;
}
}
- async planComment(terraformPlan) {
+ async planSummaryBody(terraformPlan) {
const toCreate = [];
const toDelete = [];
const toReplace = [];
const toUpdate = [];
for (const resourceChange of terraformPlan.resource_changes) {
- core.debug(`resource: ${JSON.stringify(resourceChange)}`);
const actions = resourceChange.change.actions;
const resourceName = `${resourceChange.type} - ${resourceChange.name}`;
+ core.debug(` resource: ${resourceName}, actions: ${actions}`);
if (actions.length === 1 && actions.includes(types_1.Action.create)) {
toCreate.push(resourceName);
}
@@ -1024,7 +1865,7 @@ class PlanCommenter {
else if (actions.length === 1 && actions.includes(types_1.Action.update)) {
toUpdate.push(resourceName);
}
- else {
+ else if (!actions.includes(types_1.Action['no-op'])) {
core.debug(`Not found? ${actions}`);
}
}
@@ -1051,9 +1892,9 @@ class PlanCommenter {
static resourcesToChangeSection(changeType, list) {
let str = '';
if (list.length > 0) {
- str += `will ${changeType} ${list.length} resource${list.length > 1 ? 's' : ''}: \n`;
+ str += `will ${changeType} ${list.length} resource${list.length > 1 ? 's' : ''}:`;
for (const resource of list) {
- str += `- ${resource}`;
+ str += ` \n * ${resource}`;
}
str += '\n\n';
}
@@ -3790,7 +4631,7 @@ exports.Octokit = Octokit;
/***/ (function(module, __unusedexports, __webpack_require__) {
var once = __webpack_require__(969)
-var eos = __webpack_require__(9)
+var eos = __webpack_require__(562)
var fs = __webpack_require__(747) // we only need fs to get the ReadStream and WriteStream prototypes
var noop = function () {}
@@ -6739,6 +7580,107 @@ function isPlainObject(o) {
module.exports = isPlainObject;
+/***/ }),
+
+/***/ 562:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var once = __webpack_require__(969);
+
+var noop = function() {};
+
+var isRequest = function(stream) {
+ return stream.setHeader && typeof stream.abort === 'function';
+};
+
+var isChildProcess = function(stream) {
+ return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3
+};
+
+var eos = function(stream, opts, callback) {
+ if (typeof opts === 'function') return eos(stream, null, opts);
+ if (!opts) opts = {};
+
+ callback = once(callback || noop);
+
+ var ws = stream._writableState;
+ var rs = stream._readableState;
+ var readable = opts.readable || (opts.readable !== false && stream.readable);
+ var writable = opts.writable || (opts.writable !== false && stream.writable);
+ var cancelled = false;
+
+ var onlegacyfinish = function() {
+ if (!stream.writable) onfinish();
+ };
+
+ var onfinish = function() {
+ writable = false;
+ if (!readable) callback.call(stream);
+ };
+
+ var onend = function() {
+ readable = false;
+ if (!writable) callback.call(stream);
+ };
+
+ var onexit = function(exitCode) {
+ callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);
+ };
+
+ var onerror = function(err) {
+ callback.call(stream, err);
+ };
+
+ var onclose = function() {
+ process.nextTick(onclosenexttick);
+ };
+
+ var onclosenexttick = function() {
+ if (cancelled) return;
+ if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close'));
+ if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close'));
+ };
+
+ var onrequest = function() {
+ stream.req.on('finish', onfinish);
+ };
+
+ if (isRequest(stream)) {
+ stream.on('complete', onfinish);
+ stream.on('abort', onclose);
+ if (stream.req) onrequest();
+ else stream.on('request', onrequest);
+ } else if (writable && !ws) { // legacy streams
+ stream.on('end', onlegacyfinish);
+ stream.on('close', onlegacyfinish);
+ }
+
+ if (isChildProcess(stream)) stream.on('exit', onexit);
+
+ stream.on('end', onend);
+ stream.on('finish', onfinish);
+ if (opts.error !== false) stream.on('error', onerror);
+ stream.on('close', onclose);
+
+ return function() {
+ cancelled = true;
+ stream.removeListener('complete', onfinish);
+ stream.removeListener('abort', onclose);
+ stream.removeListener('request', onrequest);
+ if (stream.req) stream.req.removeListener('finish', onfinish);
+ stream.removeListener('end', onlegacyfinish);
+ stream.removeListener('close', onlegacyfinish);
+ stream.removeListener('finish', onfinish);
+ stream.removeListener('exit', onexit);
+ stream.removeListener('end', onend);
+ stream.removeListener('error', onerror);
+ stream.removeListener('close', onclose);
+ };
+};
+
+module.exports = eos;
+
+
/***/ }),
/***/ 568:
@@ -7072,6 +8014,208 @@ module.exports = require("util");
/***/ }),
+/***/ 672:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var _a;
+Object.defineProperty(exports, "__esModule", { value: true });
+const assert_1 = __webpack_require__(357);
+const fs = __webpack_require__(747);
+const path = __webpack_require__(622);
+_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;
+exports.IS_WINDOWS = process.platform === 'win32';
+function exists(fsPath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ try {
+ yield exports.stat(fsPath);
+ }
+ catch (err) {
+ if (err.code === 'ENOENT') {
+ return false;
+ }
+ throw err;
+ }
+ return true;
+ });
+}
+exports.exists = exists;
+function isDirectory(fsPath, useStat = false) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);
+ return stats.isDirectory();
+ });
+}
+exports.isDirectory = isDirectory;
+/**
+ * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:
+ * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases).
+ */
+function isRooted(p) {
+ p = normalizeSeparators(p);
+ if (!p) {
+ throw new Error('isRooted() parameter "p" cannot be empty');
+ }
+ if (exports.IS_WINDOWS) {
+ return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello
+ ); // e.g. C: or C:\hello
+ }
+ return p.startsWith('/');
+}
+exports.isRooted = isRooted;
+/**
+ * Recursively create a directory at `fsPath`.
+ *
+ * This implementation is optimistic, meaning it attempts to create the full
+ * path first, and backs up the path stack from there.
+ *
+ * @param fsPath The path to create
+ * @param maxDepth The maximum recursion depth
+ * @param depth The current recursion depth
+ */
+function mkdirP(fsPath, maxDepth = 1000, depth = 1) {
+ return __awaiter(this, void 0, void 0, function* () {
+ assert_1.ok(fsPath, 'a path argument must be provided');
+ fsPath = path.resolve(fsPath);
+ if (depth >= maxDepth)
+ return exports.mkdir(fsPath);
+ try {
+ yield exports.mkdir(fsPath);
+ return;
+ }
+ catch (err) {
+ switch (err.code) {
+ case 'ENOENT': {
+ yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1);
+ yield exports.mkdir(fsPath);
+ return;
+ }
+ default: {
+ let stats;
+ try {
+ stats = yield exports.stat(fsPath);
+ }
+ catch (err2) {
+ throw err;
+ }
+ if (!stats.isDirectory())
+ throw err;
+ }
+ }
+ }
+ });
+}
+exports.mkdirP = mkdirP;
+/**
+ * Best effort attempt to determine whether a file exists and is executable.
+ * @param filePath file path to check
+ * @param extensions additional file extensions to try
+ * @return if file exists and is executable, returns the file path. otherwise empty string.
+ */
+function tryGetExecutablePath(filePath, extensions) {
+ return __awaiter(this, void 0, void 0, function* () {
+ let stats = undefined;
+ try {
+ // test file exists
+ stats = yield exports.stat(filePath);
+ }
+ catch (err) {
+ if (err.code !== 'ENOENT') {
+ // eslint-disable-next-line no-console
+ console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
+ }
+ }
+ if (stats && stats.isFile()) {
+ if (exports.IS_WINDOWS) {
+ // on Windows, test for valid extension
+ const upperExt = path.extname(filePath).toUpperCase();
+ if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {
+ return filePath;
+ }
+ }
+ else {
+ if (isUnixExecutable(stats)) {
+ return filePath;
+ }
+ }
+ }
+ // try each extension
+ const originalFilePath = filePath;
+ for (const extension of extensions) {
+ filePath = originalFilePath + extension;
+ stats = undefined;
+ try {
+ stats = yield exports.stat(filePath);
+ }
+ catch (err) {
+ if (err.code !== 'ENOENT') {
+ // eslint-disable-next-line no-console
+ console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
+ }
+ }
+ if (stats && stats.isFile()) {
+ if (exports.IS_WINDOWS) {
+ // preserve the case of the actual file (since an extension was appended)
+ try {
+ const directory = path.dirname(filePath);
+ const upperName = path.basename(filePath).toUpperCase();
+ for (const actualName of yield exports.readdir(directory)) {
+ if (upperName === actualName.toUpperCase()) {
+ filePath = path.join(directory, actualName);
+ break;
+ }
+ }
+ }
+ catch (err) {
+ // eslint-disable-next-line no-console
+ console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);
+ }
+ return filePath;
+ }
+ else {
+ if (isUnixExecutable(stats)) {
+ return filePath;
+ }
+ }
+ }
+ }
+ return '';
+ });
+}
+exports.tryGetExecutablePath = tryGetExecutablePath;
+function normalizeSeparators(p) {
+ p = p || '';
+ if (exports.IS_WINDOWS) {
+ // convert slashes on Windows
+ p = p.replace(/\//g, '\\');
+ // remove redundant slashes
+ return p.replace(/\\\\+/g, '\\');
+ }
+ // remove redundant slashes
+ return p.replace(/\/\/+/g, '/');
+}
+// on Mac/Linux, test the execute bit
+// R W X R W X R W X
+// 256 128 64 32 16 8 4 2 1
+function isUnixExecutable(stats) {
+ return ((stats.mode & 1) > 0 ||
+ ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||
+ ((stats.mode & 64) > 0 && stats.uid === process.getuid()));
+}
+//# sourceMappingURL=io-util.js.map
+
+/***/ }),
+
/***/ 692:
/***/ (function(__unusedmodule, exports) {
@@ -10212,6 +11356,57 @@ function onceStrict (fn) {
}
+/***/ }),
+
+/***/ 986:
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const tr = __importStar(__webpack_require__(9));
+/**
+ * Exec a command.
+ * Output will be streamed to the live console.
+ * Returns promise with return code
+ *
+ * @param commandLine command to execute (can include additional args). Must be correctly escaped.
+ * @param args optional arguments for tool. Escaping is handled by the lib.
+ * @param options optional exec options. See ExecOptions
+ * @returns Promise exit code
+ */
+function exec(commandLine, args, options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const commandArgs = tr.argStringToArray(commandLine);
+ if (commandArgs.length === 0) {
+ throw new Error(`Parameter 'commandLine' cannot be null or empty.`);
+ }
+ // Path to tool to execute should be first arg
+ const toolPath = commandArgs[0];
+ args = commandArgs.slice(1).concat(args || []);
+ const runner = new tr.ToolRunner(toolPath, args, options);
+ return runner.exec();
+ });
+}
+exports.exec = exec;
+//# sourceMappingURL=exec.js.map
+
/***/ })
/******/ });
\ No newline at end of file
diff --git a/package.json b/package.json
index 7f09212..59ccad9 100644
--- a/package.json
+++ b/package.json
@@ -26,6 +26,7 @@
"license": "Apache-2.0",
"dependencies": {
"@actions/core": "^1.2.0",
+ "@actions/exec": "^1.0.4",
"@actions/github": "^3.0.0"
},
"devDependencies": {
diff --git a/src/main.ts b/src/main.ts
index 89d14cc..bf835eb 100644
--- a/src/main.ts
+++ b/src/main.ts
@@ -1,7 +1,9 @@
import * as core from '@actions/core'
+import * as exec from '@actions/exec'
import * as github from '@actions/github'
import {GitHub} from '@actions/github/lib/utils'
import {Action, PullRequest, TerraformPlan} from './types'
+import {ExecOptions} from '@actions/exec'
const commentPrefix = '## Terraform Plan:'
@@ -16,17 +18,60 @@ async function run(): Promise<void> {
}
core.debug('got pull request')
- const terraformPlan: TerraformPlan = JSON.parse(core.getInput('terraform_plan_json'))
- const token = core.getInput('github_token')
+ const planFileName = core.getInput('terraform-plan-file')
+ const workingDir = core.getInput('working-directory')
+
+ const json = await jsonFromPlan(workingDir, planFileName)
+ const terraformPlan: TerraformPlan = JSON.parse(json)
+ core.debug('successfully parsed json')
+
+ const token = core.getInput('github-token')
const runId = parseInt(process.env['GITHUB_RUN_ID'] || '-1')
+ if (runId === -1) {
+ core.setFailed('No GITHUB_RUN_ID found')
+ return
+ }
const commenter = new PlanCommenter(token, runId, pr)
- await commenter.makePlanComment(terraformPlan)
+ await commenter.commentWithPlanSummary(terraformPlan)
} catch (error) {
core.setFailed(error.message)
}
}
+// convert the terraform plan file into a JSON string using `terraform show -json`
+async function jsonFromPlan(workingDir: string, planFileName: string): Promise<string> {
+ // run terraform show -json to parse the plan into a json string
+ let output = ''
+ const options: ExecOptions = {
+ listeners: {
+ stdout: (data: Buffer) => {
+ // captures the standard output of the terraform show command and appends it to the variable 'output'
+ output += data.toString('utf8')
+ }
+ },
+    cwd: workingDir // execute the command from the supplied working directory
+ }
+
+ core.debug(`execOptions: ${JSON.stringify(options)}`)
+ await exec.exec('terraform', ['show', '-json', planFileName], options)
+
+  // strip any extra output added by the terraform wrapper from the hashicorp/setup-terraform action
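+  // terraform show -json prints the plan as a single line of JSON, so a greedy /{.*}/ match on that line recovers it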
+ const json = output.match(/{.*}/)
+ if (json === null) {
+ core.error('null match...')
+ core.debug('** start of output **')
+ core.debug(output)
+ core.debug('** end of output **')
+ throw Error("output didn't match with /{.*}/ correctly")
+ }
+ core.debug('** matched json **')
+ core.debug(json[0])
+ core.debug('** end matched json **')
+
+ return json[0]
+}
+
class PlanCommenter {
octokit: InstanceType<typeof GitHub>
runId: number
@@ -38,8 +83,8 @@ class PlanCommenter {
this.pr = pr
}
- async makePlanComment(terraformPlan: TerraformPlan): Promise<number> {
- const body = await this.planComment(terraformPlan)
+ async commentWithPlanSummary(terraformPlan: TerraformPlan): Promise<number> {
+ const body = await this.planSummaryBody(terraformPlan)
// find previous comment if it exists
const comments = await this.octokit.issues.listComments({
...github.context.repo,
@@ -72,15 +117,16 @@ class PlanCommenter {
return createdComment.data.id
}
}
- async planComment(terraformPlan: TerraformPlan): Promise<string> {
+
+ async planSummaryBody(terraformPlan: TerraformPlan): Promise<string> {
const toCreate = []
const toDelete = []
const toReplace = []
const toUpdate = []
for (const resourceChange of terraformPlan.resource_changes) {
- core.debug(`resource: ${JSON.stringify(resourceChange)}`)
const actions = resourceChange.change.actions
const resourceName = `${resourceChange.type} - ${resourceChange.name}`
+ core.debug(` resource: ${resourceName}, actions: ${actions}`)
if (actions.length === 1 && actions.includes(Action.create)) {
toCreate.push(resourceName)
} else if (actions.length === 1 && actions.includes(Action.delete)) {
@@ -93,7 +139,7 @@ class PlanCommenter {
toReplace.push(resourceName)
} else if (actions.length === 1 && actions.includes(Action.update)) {
toUpdate.push(resourceName)
- } else {
+ } else if (!actions.includes(Action['no-op'])) {
core.debug(`Not found? ${actions}`)
}
}
@@ -124,9 +170,9 @@ class PlanCommenter {
private static resourcesToChangeSection(changeType: string, list: string[]): string {
let str = ''
if (list.length > 0) {
- str += `will ${changeType} ${list.length} resource${list.length > 1 ? 's' : ''}: \n`
+ str += `will ${changeType} ${list.length} resource${list.length > 1 ? 's' : ''}:`
for (const resource of list) {
- str += `- ${resource}`
+ str += ` \n * ${resource}`
}
str += '\n\n'
}
diff --git a/yarn.lock b/yarn.lock
index b897c6c..fefa6c1 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -7,6 +7,13 @@
resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.2.4.tgz#96179dbf9f8d951dd74b40a0dbd5c22555d186ab"
integrity sha512-YJCEq8BE3CdN8+7HPZ/4DxJjk/OkZV2FFIf+DlZTC/4iBlzYCD5yjRR6eiOS5llO11zbRltIRuKAjMKaWTE6cg==
+"@actions/exec@^1.0.4":
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/@actions/exec/-/exec-1.0.4.tgz#99d75310e62e59fc37d2ee6dcff6d4bffadd3a5d"
+ integrity sha512-4DPChWow9yc9W3WqEbUj8Nr86xkpyE29ZzWjXucHItclLbEW6jr80Zx4nqv18QL6KK65+cifiQZXvnqgTV6oHw==
+ dependencies:
+ "@actions/io" "^1.0.1"
+
"@actions/github@^3.0.0":
version "3.0.0"
resolved "https://registry.yarnpkg.com/@actions/github/-/github-3.0.0.tgz#ce1b721a266ad5ac522da0c9c013c999009604bf"
@@ -24,6 +31,11 @@
dependencies:
tunnel "0.0.6"
+"@actions/io@^1.0.1":
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/@actions/io/-/io-1.0.2.tgz#2f614b6e69ce14d191180451eb38e6576a6e6b27"
+ integrity sha512-J8KuFqVPr3p6U8W93DOXlXW6zFvrQAJANdS+vw0YhusLIq+bszW8zmK2Fh1C2kDPX8FMvwIl1OUcFgvJoXLbAg==
+
"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.3":
version "7.10.3"
resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.10.3.tgz#324bcfd8d35cd3d47dae18cde63d752086435e9a"