Compare commits


1 commit
v1 ... v0.1.14

Author SHA1 Message Date
1e07f43987 release prep for 0.1.14 2021-11-15 01:29:26 -05:00
15 changed files with 4258 additions and 7136 deletions

View File

@ -1,14 +0,0 @@
version: 2
updates:
- package-ecosystem: npm
directory: "/"
schedule:
interval: weekly
ignore:
- dependency-name: node-fetch
versions:
- ">=3.0.0"
- package-ecosystem: github-actions
directory: "/"
schedule:
interval: weekly

View File

@ -8,7 +8,7 @@ jobs:
steps: steps:
# https://github.com/actions/checkout # https://github.com/actions/checkout
- name: Checkout - name: Checkout
uses: actions/checkout@v3 uses: actions/checkout@v2
- name: Install - name: Install
run: npm ci run: npm ci
- name: Build - name: Build

.nvmrc (1 line changed)
View File

@ -1 +0,0 @@
16.13.1

View File

@ -1,9 +1,3 @@
## 0.1.15
- Upgrade to action.yml declaration to node16 to address deprecations
- Upgrade dependencies
- Add `asset` output as a JSON array containing information about the uploaded assets
## 0.1.14 ## 0.1.14
- provides an new workflow input option `generate_release_notes` which when set to true will automatically generate release notes for you based on GitHub activity [#179](https://github.com/softprops/action-gh-release/pull/179). Please see the [GitHub docs for this feature](https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes) for more information - provides an new workflow input option `generate_release_notes` which when set to true will automatically generate release notes for you based on GitHub activity [#179](https://github.com/softprops/action-gh-release/pull/179). Please see the [GitHub docs for this feature](https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes) for more information
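As a usage sketch (not part of this comparison), a workflow enabling the `generate_release_notes` input described above might look like the following; the workflow name, trigger, and checkout version are illustrative assumptions:

```yaml
# Illustrative sketch only; not part of this diff.
# Workflow name, trigger, and checkout version are assumptions.
name: release
on:
  push:
    tags:
      - "v*"
permissions:
  contents: write
jobs:
  release:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Create release with generated notes
        uses: softprops/action-gh-release@v0.1.14
        with:
          generate_release_notes: true
```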

View File

@ -176,11 +176,10 @@ The following are optional as `step.with` keys
| `tag_name` | String | Name of a tag. defaults to `github.ref` | | `tag_name` | String | Name of a tag. defaults to `github.ref` |
| `fail_on_unmatched_files` | Boolean | Indicator of whether to fail if any of the `files` globs match nothing | | `fail_on_unmatched_files` | Boolean | Indicator of whether to fail if any of the `files` globs match nothing |
| `repository` | String | Name of a target repository in `<owner>/<repo>` format. Defaults to GITHUB_REPOSITORY env variable | | `repository` | String | Name of a target repository in `<owner>/<repo>` format. Defaults to GITHUB_REPOSITORY env variable |
| `target_commitish` | String | Commitish value that determines where the Git tag is created from. Can be any branch or commit SHA. Defaults to repository default branch. | | `target_commitish` | String | Commitish value that determines where the Git tag is created from. Can be any branch or commit SHA. |
| `token` | String | Secret GitHub Personal Access Token. Defaults to `${{ github.token }}` | | `token` | String | Secret GitHub Personal Access Token. Defaults to `${{ github.token }}` |
| `discussion_category_name` | String | If specified, a discussion of the specified category is created and linked to the release. The value must be a category that already exists in the repository. For more information, see ["Managing categories for discussions in your repository."](https://docs.github.com/en/discussions/managing-discussions-for-your-community/managing-categories-for-discussions-in-your-repository) | | `discussion_category_name` | String | If specified, a discussion of the specified category is created and linked to the release. The value must be a category that already exists in the repository. For more information, see ["Managing categories for discussions in your repository."](https://docs.github.com/en/discussions/managing-discussions-for-your-community/managing-categories-for-discussions-in-your-repository) |
| `generate_release_notes` | Boolean | Whether to automatically generate the name and body for this release. If name is specified, the specified name will be used; otherwise, a name will be automatically generated. If body is specified, the body will be pre-pended to the automatically generated notes. See the [GitHub docs for this feature](https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes) for more information | | `generate_release_notes` | Boolean | Whether to automatically generate the name and body for this release. If name is specified, the specified name will be used; otherwise, a name will be automatically generated. If body is specified, the body will be pre-pended to the automatically generated notes. See the [GitHub docs for this feature](https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes) for more information |
| `append_body` | Boolean | Append to existing body instead of overwriting it |
💡 When providing a `body` and `body_path` at the same time, `body_path` will be 💡 When providing a `body` and `body_path` at the same time, `body_path` will be
attempted first, then falling back on `body` if the path can not be read from. attempted first, then falling back on `body` if the path can not be read from.
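A minimal sketch of that fallback behavior, with placeholder values for `body` and `body_path`:

```yaml
# Illustrative sketch only; path and body text are placeholders.
- name: Release
  uses: softprops/action-gh-release@v0.1.14
  with:
    body: "Fallback notes used only if the file below cannot be read"
    body_path: RELEASE_NOTES.md
```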
@ -194,13 +193,10 @@ release will retain its original info.
The following outputs can be accessed via `${{ steps.<step-id>.outputs }}` from this action The following outputs can be accessed via `${{ steps.<step-id>.outputs }}` from this action
| Name | Type | Description | | Name | Type | Description |
| ------------ | ------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | ------------ | ------ | --------------------------------------- |
| `url` | String | Github.com URL for the release | | `url` | String | Github.com URL for the release |
| `id` | String | Release ID | | `id` | String | Release ID |
| `upload_url` | String | URL for uploading assets to the release | | `upload_url` | String | URL for uploading assets to the release |
| `assets` | String | JSON array containing information about each uploaded asset, in the format given [here](https://docs.github.com/en/rest/releases/assets#get-a-release-asset) (minus the `uploader` field) |
As an example, you can use `${{ fromJSON(steps.<step-id>.outputs.assets)[0].browser_download_url }}` to get the download URL of the first asset.
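A hedged sketch of consuming that output in a follow-up step (the `assets` output exists only on the v1 side of this comparison; the step id and file glob are placeholders):

```yaml
# Illustrative sketch only; step id and file glob are placeholders.
# The assets output is present on the v1 side of this comparison.
- name: Release
  id: release
  uses: softprops/action-gh-release@v1
  with:
    files: dist/*.tgz
- name: Show first asset download URL
  run: echo "${{ fromJSON(steps.release.outputs.assets)[0].browser_download_url }}"
```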
#### environment variables #### environment variables
@ -222,14 +218,6 @@ permissions:
contents: write contents: write
``` ```
When used with `discussion_category_name`, additional permission is needed:
```yaml
permissions:
contents: write
discussions: write
```
[GitHub token permissions](https://docs.github.com/en/actions/security-guides/automatic-token-authentication#permissions-for-the-github_token) can be set for an individual job, workflow, or for Actions as a whole. [GitHub token permissions](https://docs.github.com/en/actions/security-guides/automatic-token-authentication#permissions-for-the-github_token) can be set for an individual job, workflow, or for Actions as a whole.
Doug Tangren (softprops) 2019 Doug Tangren (softprops) 2019

View File

@ -5,7 +5,7 @@ import {
parseConfig, parseConfig,
parseInputFiles, parseInputFiles,
unmatchedPatterns, unmatchedPatterns,
uploadUrl, uploadUrl
} from "../src/util"; } from "../src/util";
import * as assert from "assert"; import * as assert from "assert";
@ -51,7 +51,7 @@ describe("util", () => {
input_tag_name: undefined, input_tag_name: undefined,
input_target_commitish: undefined, input_target_commitish: undefined,
input_discussion_category_name: undefined, input_discussion_category_name: undefined,
input_generate_release_notes: false, input_generate_release_notes: false
}) })
); );
}); });
@ -71,7 +71,7 @@ describe("util", () => {
input_tag_name: undefined, input_tag_name: undefined,
input_target_commitish: undefined, input_target_commitish: undefined,
input_discussion_category_name: undefined, input_discussion_category_name: undefined,
input_generate_release_notes: false, input_generate_release_notes: false
}) })
); );
}); });
@ -91,7 +91,7 @@ describe("util", () => {
input_tag_name: undefined, input_tag_name: undefined,
input_target_commitish: undefined, input_target_commitish: undefined,
input_discussion_category_name: undefined, input_discussion_category_name: undefined,
input_generate_release_notes: false, input_generate_release_notes: false
}) })
); );
}); });
@ -107,13 +107,12 @@ describe("util", () => {
// so we cover that in a test case here to ensure undefined values are actually // so we cover that in a test case here to ensure undefined values are actually
// resolved as undefined and not empty strings // resolved as undefined and not empty strings
INPUT_TARGET_COMMITISH: "", INPUT_TARGET_COMMITISH: "",
INPUT_DISCUSSION_CATEGORY_NAME: "", INPUT_DISCUSSION_CATEGORY_NAME: ""
}), }),
{ {
github_ref: "", github_ref: "",
github_repository: "", github_repository: "",
github_token: "", github_token: "",
input_append_body: false,
input_body: undefined, input_body: undefined,
input_body_path: undefined, input_body_path: undefined,
input_draft: undefined, input_draft: undefined,
@ -124,7 +123,7 @@ describe("util", () => {
input_fail_on_unmatched_files: false, input_fail_on_unmatched_files: false,
input_target_commitish: undefined, input_target_commitish: undefined,
input_discussion_category_name: undefined, input_discussion_category_name: undefined,
input_generate_release_notes: false, input_generate_release_notes: false
} }
); );
}); });
@ -132,13 +131,12 @@ describe("util", () => {
it("parses basic config with commitish", () => { it("parses basic config with commitish", () => {
assert.deepStrictEqual( assert.deepStrictEqual(
parseConfig({ parseConfig({
INPUT_TARGET_COMMITISH: "affa18ef97bc9db20076945705aba8c516139abd", INPUT_TARGET_COMMITISH: "affa18ef97bc9db20076945705aba8c516139abd"
}), }),
{ {
github_ref: "", github_ref: "",
github_repository: "", github_repository: "",
github_token: "", github_token: "",
input_append_body: false,
input_body: undefined, input_body: undefined,
input_body_path: undefined, input_body_path: undefined,
input_draft: undefined, input_draft: undefined,
@ -149,20 +147,19 @@ describe("util", () => {
input_fail_on_unmatched_files: false, input_fail_on_unmatched_files: false,
input_target_commitish: "affa18ef97bc9db20076945705aba8c516139abd", input_target_commitish: "affa18ef97bc9db20076945705aba8c516139abd",
input_discussion_category_name: undefined, input_discussion_category_name: undefined,
input_generate_release_notes: false, input_generate_release_notes: false
} }
); );
}); });
it("supports discussion category names", () => { it("supports discussion category names", () => {
assert.deepStrictEqual( assert.deepStrictEqual(
parseConfig({ parseConfig({
INPUT_DISCUSSION_CATEGORY_NAME: "releases", INPUT_DISCUSSION_CATEGORY_NAME: "releases"
}), }),
{ {
github_ref: "", github_ref: "",
github_repository: "", github_repository: "",
github_token: "", github_token: "",
input_append_body: false,
input_body: undefined, input_body: undefined,
input_body_path: undefined, input_body_path: undefined,
input_draft: undefined, input_draft: undefined,
@ -173,7 +170,7 @@ describe("util", () => {
input_fail_on_unmatched_files: false, input_fail_on_unmatched_files: false,
input_target_commitish: undefined, input_target_commitish: undefined,
input_discussion_category_name: "releases", input_discussion_category_name: "releases",
input_generate_release_notes: false, input_generate_release_notes: false
} }
); );
}); });
@ -181,13 +178,12 @@ describe("util", () => {
it("supports generating release notes", () => { it("supports generating release notes", () => {
assert.deepStrictEqual( assert.deepStrictEqual(
parseConfig({ parseConfig({
INPUT_GENERATE_RELEASE_NOTES: "true", INPUT_GENERATE_RELEASE_NOTES: "true"
}), }),
{ {
github_ref: "", github_ref: "",
github_repository: "", github_repository: "",
github_token: "", github_token: "",
input_append_body: false,
input_body: undefined, input_body: undefined,
input_body_path: undefined, input_body_path: undefined,
input_draft: undefined, input_draft: undefined,
@ -198,7 +194,7 @@ describe("util", () => {
input_fail_on_unmatched_files: false, input_fail_on_unmatched_files: false,
input_target_commitish: undefined, input_target_commitish: undefined,
input_discussion_category_name: undefined, input_discussion_category_name: undefined,
input_generate_release_notes: true, input_generate_release_notes: true
} }
); );
}); });
@ -209,13 +205,12 @@ describe("util", () => {
INPUT_DRAFT: "false", INPUT_DRAFT: "false",
INPUT_PRERELEASE: "true", INPUT_PRERELEASE: "true",
GITHUB_TOKEN: "env-token", GITHUB_TOKEN: "env-token",
INPUT_TOKEN: "input-token", INPUT_TOKEN: "input-token"
}), }),
{ {
github_ref: "", github_ref: "",
github_repository: "", github_repository: "",
github_token: "env-token", github_token: "env-token",
input_append_body: false,
input_body: undefined, input_body: undefined,
input_body_path: undefined, input_body_path: undefined,
input_draft: false, input_draft: false,
@ -226,7 +221,7 @@ describe("util", () => {
input_fail_on_unmatched_files: false, input_fail_on_unmatched_files: false,
input_target_commitish: undefined, input_target_commitish: undefined,
input_discussion_category_name: undefined, input_discussion_category_name: undefined,
input_generate_release_notes: false, input_generate_release_notes: false
} }
); );
}); });
@ -235,13 +230,12 @@ describe("util", () => {
parseConfig({ parseConfig({
INPUT_DRAFT: "false", INPUT_DRAFT: "false",
INPUT_PRERELEASE: "true", INPUT_PRERELEASE: "true",
INPUT_TOKEN: "input-token", INPUT_TOKEN: "input-token"
}), }),
{ {
github_ref: "", github_ref: "",
github_repository: "", github_repository: "",
github_token: "input-token", github_token: "input-token",
input_append_body: false,
input_body: undefined, input_body: undefined,
input_body_path: undefined, input_body_path: undefined,
input_draft: false, input_draft: false,
@ -252,7 +246,7 @@ describe("util", () => {
input_fail_on_unmatched_files: false, input_fail_on_unmatched_files: false,
input_target_commitish: undefined, input_target_commitish: undefined,
input_discussion_category_name: undefined, input_discussion_category_name: undefined,
input_generate_release_notes: false, input_generate_release_notes: false
} }
); );
}); });
@ -260,13 +254,12 @@ describe("util", () => {
assert.deepStrictEqual( assert.deepStrictEqual(
parseConfig({ parseConfig({
INPUT_DRAFT: "false", INPUT_DRAFT: "false",
INPUT_PRERELEASE: "true", INPUT_PRERELEASE: "true"
}), }),
{ {
github_ref: "", github_ref: "",
github_repository: "", github_repository: "",
github_token: "", github_token: "",
input_append_body: false,
input_body: undefined, input_body: undefined,
input_body_path: undefined, input_body_path: undefined,
input_draft: false, input_draft: false,
@ -277,31 +270,7 @@ describe("util", () => {
input_fail_on_unmatched_files: false, input_fail_on_unmatched_files: false,
input_target_commitish: undefined, input_target_commitish: undefined,
input_discussion_category_name: undefined, input_discussion_category_name: undefined,
input_generate_release_notes: false, input_generate_release_notes: false
}
);
});
it("parses basic config with append_body", () => {
assert.deepStrictEqual(
parseConfig({
INPUT_APPEND_BODY: "true",
}),
{
github_ref: "",
github_repository: "",
github_token: "",
input_append_body: true,
input_body: undefined,
input_body_path: undefined,
input_draft: undefined,
input_prerelease: undefined,
input_files: [],
input_name: undefined,
input_tag_name: undefined,
input_fail_on_unmatched_files: false,
input_target_commitish: undefined,
input_discussion_category_name: undefined,
input_generate_release_notes: false,
} }
); );
}); });

View File

@ -43,9 +43,6 @@ inputs:
generate_release_notes: generate_release_notes:
description: "Whether to automatically generate the name and body for this release. If name is specified, the specified name will be used; otherwise, a name will be automatically generated. If body is specified, the body will be pre-pended to the automatically generated notes." description: "Whether to automatically generate the name and body for this release. If name is specified, the specified name will be used; otherwise, a name will be automatically generated. If body is specified, the body will be pre-pended to the automatically generated notes."
required: false required: false
append_body:
description: "Append to existing body instead of overwriting it. Default is false."
required: false
env: env:
"GITHUB_TOKEN": "As provided by Github Actions" "GITHUB_TOKEN": "As provided by Github Actions"
outputs: outputs:
@ -55,10 +52,8 @@ outputs:
description: "Release ID" description: "Release ID"
upload_url: upload_url:
description: "URL for uploading assets to the release" description: "URL for uploading assets to the release"
assets:
description: "JSON array containing information about each uploaded asset, in the format given [here](https://docs.github.com/en/rest/reference/repos#upload-a-release-asset--code-samples) (minus the `uploader` field)"
runs: runs:
using: "node16" using: "node12"
main: "dist/index.js" main: "dist/index.js"
branding: branding:
color: "green" color: "green"

dist/37.index.js (vendored, 452 lines changed)
View File

@ -1,452 +0,0 @@
"use strict";
exports.id = 37;
exports.ids = [37];
exports.modules = {
/***/ 4037:
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "toFormData": () => (/* binding */ toFormData)
/* harmony export */ });
/* harmony import */ var fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(2777);
/* harmony import */ var formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(8010);
let s = 0;
const S = {
START_BOUNDARY: s++,
HEADER_FIELD_START: s++,
HEADER_FIELD: s++,
HEADER_VALUE_START: s++,
HEADER_VALUE: s++,
HEADER_VALUE_ALMOST_DONE: s++,
HEADERS_ALMOST_DONE: s++,
PART_DATA_START: s++,
PART_DATA: s++,
END: s++
};
let f = 1;
const F = {
PART_BOUNDARY: f,
LAST_BOUNDARY: f *= 2
};
const LF = 10;
const CR = 13;
const SPACE = 32;
const HYPHEN = 45;
const COLON = 58;
const A = 97;
const Z = 122;
const lower = c => c | 0x20;
const noop = () => {};
class MultipartParser {
/**
* @param {string} boundary
*/
constructor(boundary) {
this.index = 0;
this.flags = 0;
this.onHeaderEnd = noop;
this.onHeaderField = noop;
this.onHeadersEnd = noop;
this.onHeaderValue = noop;
this.onPartBegin = noop;
this.onPartData = noop;
this.onPartEnd = noop;
this.boundaryChars = {};
boundary = '\r\n--' + boundary;
const ui8a = new Uint8Array(boundary.length);
for (let i = 0; i < boundary.length; i++) {
ui8a[i] = boundary.charCodeAt(i);
this.boundaryChars[ui8a[i]] = true;
}
this.boundary = ui8a;
this.lookbehind = new Uint8Array(this.boundary.length + 8);
this.state = S.START_BOUNDARY;
}
/**
* @param {Uint8Array} data
*/
write(data) {
let i = 0;
const length_ = data.length;
let previousIndex = this.index;
let {lookbehind, boundary, boundaryChars, index, state, flags} = this;
const boundaryLength = this.boundary.length;
const boundaryEnd = boundaryLength - 1;
const bufferLength = data.length;
let c;
let cl;
const mark = name => {
this[name + 'Mark'] = i;
};
const clear = name => {
delete this[name + 'Mark'];
};
const callback = (callbackSymbol, start, end, ui8a) => {
if (start === undefined || start !== end) {
this[callbackSymbol](ui8a && ui8a.subarray(start, end));
}
};
const dataCallback = (name, clear) => {
const markSymbol = name + 'Mark';
if (!(markSymbol in this)) {
return;
}
if (clear) {
callback(name, this[markSymbol], i, data);
delete this[markSymbol];
} else {
callback(name, this[markSymbol], data.length, data);
this[markSymbol] = 0;
}
};
for (i = 0; i < length_; i++) {
c = data[i];
switch (state) {
case S.START_BOUNDARY:
if (index === boundary.length - 2) {
if (c === HYPHEN) {
flags |= F.LAST_BOUNDARY;
} else if (c !== CR) {
return;
}
index++;
break;
} else if (index - 1 === boundary.length - 2) {
if (flags & F.LAST_BOUNDARY && c === HYPHEN) {
state = S.END;
flags = 0;
} else if (!(flags & F.LAST_BOUNDARY) && c === LF) {
index = 0;
callback('onPartBegin');
state = S.HEADER_FIELD_START;
} else {
return;
}
break;
}
if (c !== boundary[index + 2]) {
index = -2;
}
if (c === boundary[index + 2]) {
index++;
}
break;
case S.HEADER_FIELD_START:
state = S.HEADER_FIELD;
mark('onHeaderField');
index = 0;
// falls through
case S.HEADER_FIELD:
if (c === CR) {
clear('onHeaderField');
state = S.HEADERS_ALMOST_DONE;
break;
}
index++;
if (c === HYPHEN) {
break;
}
if (c === COLON) {
if (index === 1) {
// empty header field
return;
}
dataCallback('onHeaderField', true);
state = S.HEADER_VALUE_START;
break;
}
cl = lower(c);
if (cl < A || cl > Z) {
return;
}
break;
case S.HEADER_VALUE_START:
if (c === SPACE) {
break;
}
mark('onHeaderValue');
state = S.HEADER_VALUE;
// falls through
case S.HEADER_VALUE:
if (c === CR) {
dataCallback('onHeaderValue', true);
callback('onHeaderEnd');
state = S.HEADER_VALUE_ALMOST_DONE;
}
break;
case S.HEADER_VALUE_ALMOST_DONE:
if (c !== LF) {
return;
}
state = S.HEADER_FIELD_START;
break;
case S.HEADERS_ALMOST_DONE:
if (c !== LF) {
return;
}
callback('onHeadersEnd');
state = S.PART_DATA_START;
break;
case S.PART_DATA_START:
state = S.PART_DATA;
mark('onPartData');
// falls through
case S.PART_DATA:
previousIndex = index;
if (index === 0) {
// boyer-moore derrived algorithm to safely skip non-boundary data
i += boundaryEnd;
while (i < bufferLength && !(data[i] in boundaryChars)) {
i += boundaryLength;
}
i -= boundaryEnd;
c = data[i];
}
if (index < boundary.length) {
if (boundary[index] === c) {
if (index === 0) {
dataCallback('onPartData', true);
}
index++;
} else {
index = 0;
}
} else if (index === boundary.length) {
index++;
if (c === CR) {
// CR = part boundary
flags |= F.PART_BOUNDARY;
} else if (c === HYPHEN) {
// HYPHEN = end boundary
flags |= F.LAST_BOUNDARY;
} else {
index = 0;
}
} else if (index - 1 === boundary.length) {
if (flags & F.PART_BOUNDARY) {
index = 0;
if (c === LF) {
// unset the PART_BOUNDARY flag
flags &= ~F.PART_BOUNDARY;
callback('onPartEnd');
callback('onPartBegin');
state = S.HEADER_FIELD_START;
break;
}
} else if (flags & F.LAST_BOUNDARY) {
if (c === HYPHEN) {
callback('onPartEnd');
state = S.END;
flags = 0;
} else {
index = 0;
}
} else {
index = 0;
}
}
if (index > 0) {
// when matching a possible boundary, keep a lookbehind reference
// in case it turns out to be a false lead
lookbehind[index - 1] = c;
} else if (previousIndex > 0) {
// if our boundary turned out to be rubbish, the captured lookbehind
// belongs to partData
const _lookbehind = new Uint8Array(lookbehind.buffer, lookbehind.byteOffset, lookbehind.byteLength);
callback('onPartData', 0, previousIndex, _lookbehind);
previousIndex = 0;
mark('onPartData');
// reconsider the current character even so it interrupted the sequence
// it could be the beginning of a new sequence
i--;
}
break;
case S.END:
break;
default:
throw new Error(`Unexpected state entered: ${state}`);
}
}
dataCallback('onHeaderField');
dataCallback('onHeaderValue');
dataCallback('onPartData');
// Update properties for the next call
this.index = index;
this.state = state;
this.flags = flags;
}
end() {
if ((this.state === S.HEADER_FIELD_START && this.index === 0) ||
(this.state === S.PART_DATA && this.index === this.boundary.length)) {
this.onPartEnd();
} else if (this.state !== S.END) {
throw new Error('MultipartParser.end(): stream ended unexpectedly');
}
}
}
function _fileName(headerValue) {
// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
const m = headerValue.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i);
if (!m) {
return;
}
const match = m[2] || m[3] || '';
let filename = match.slice(match.lastIndexOf('\\') + 1);
filename = filename.replace(/%22/g, '"');
filename = filename.replace(/&#(\d{4});/g, (m, code) => {
return String.fromCharCode(code);
});
return filename;
}
async function toFormData(Body, ct) {
if (!/multipart/i.test(ct)) {
throw new TypeError('Failed to fetch');
}
const m = ct.match(/boundary=(?:"([^"]+)"|([^;]+))/i);
if (!m) {
throw new TypeError('no or bad content-type header, no multipart boundary');
}
const parser = new MultipartParser(m[1] || m[2]);
let headerField;
let headerValue;
let entryValue;
let entryName;
let contentType;
let filename;
const entryChunks = [];
const formData = new formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__/* .FormData */ .Ct();
const onPartData = ui8a => {
entryValue += decoder.decode(ui8a, {stream: true});
};
const appendToFile = ui8a => {
entryChunks.push(ui8a);
};
const appendFileToFormData = () => {
const file = new fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__/* .File */ .$B(entryChunks, filename, {type: contentType});
formData.append(entryName, file);
};
const appendEntryToFormData = () => {
formData.append(entryName, entryValue);
};
const decoder = new TextDecoder('utf-8');
decoder.decode();
parser.onPartBegin = function () {
parser.onPartData = onPartData;
parser.onPartEnd = appendEntryToFormData;
headerField = '';
headerValue = '';
entryValue = '';
entryName = '';
contentType = '';
filename = null;
entryChunks.length = 0;
};
parser.onHeaderField = function (ui8a) {
headerField += decoder.decode(ui8a, {stream: true});
};
parser.onHeaderValue = function (ui8a) {
headerValue += decoder.decode(ui8a, {stream: true});
};
parser.onHeaderEnd = function () {
headerValue += decoder.decode();
headerField = headerField.toLowerCase();
if (headerField === 'content-disposition') {
// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
const m = headerValue.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i);
if (m) {
entryName = m[2] || m[3] || '';
}
filename = _fileName(headerValue);
if (filename) {
parser.onPartData = appendToFile;
parser.onPartEnd = appendFileToFormData;
}
} else if (headerField === 'content-type') {
contentType = headerValue;
}
headerValue = '';
headerField = '';
};
for await (const chunk of Body) {
parser.write(chunk);
}
parser.end();
return formData;
}
/***/ })
};
;

dist/index.js (vendored, 8 lines changed)

File diff suppressed because one or more lines are too long

package-lock.json (generated, 10479 lines changed)

File diff suppressed because it is too large

View File

@ -1,6 +1,6 @@
{ {
"name": "action-gh-release", "name": "action-gh-release",
"version": "0.1.15", "version": "0.1.14",
"private": true, "private": true,
"description": "GitHub Action for creating GitHub Releases", "description": "GitHub Action for creating GitHub Releases",
"main": "lib/main.js", "main": "lib/main.js",
@ -12,33 +12,34 @@
}, },
"repository": { "repository": {
"type": "git", "type": "git",
"url": "git+https://github.com/softprops/action-gh-release.git" "url": "git+https://github.com/softprops/action-gh-template.git"
}, },
"keywords": [ "keywords": [
"actions" "actions"
], ],
"author": "softprops", "author": "softprops",
"license": "MIT",
"dependencies": { "dependencies": {
"@actions/core": "^1.10.0", "@actions/core": "^1.4.0",
"@actions/github": "^5.1.1", "@actions/github": "^5.0.0",
"@octokit/plugin-retry": "^4.0.3", "@octokit/plugin-retry": "^3.0.9",
"@octokit/plugin-throttling": "^4.3.2", "@octokit/plugin-throttling": "^3.5.1",
"glob": "^8.0.3", "glob": "^7.1.6",
"mime": "^3.0.0", "mime": "^2.4.4",
"node-fetch": "^2.6.7" "node-fetch": "^2.6.1"
}, },
"devDependencies": { "devDependencies": {
"@types/glob": "^8.0.0", "@types/glob": "^7.1.1",
"@types/jest": "^29.2.2", "@types/jest": "^24.0.25",
"@types/mime": "^3.0.1", "@types/mime": "^2.0.1",
"@types/node": "^18.11.9", "@types/node": "^12.12.24",
"@types/node-fetch": "^2.5.12", "@types/node-fetch": "^2.5.12",
"@vercel/ncc": "^0.34.0", "@zeit/ncc": "^0.21.0",
"jest": "^29.3.1", "jest": "^24.9.0",
"jest-circus": "^29.3.1", "jest-circus": "^24.9.0",
"prettier": "2.7.1", "prettier": "1.19.1",
"ts-jest": "^29.0.3", "ts-jest": "^24.2.0",
"typescript": "^4.8.4", "typescript": "^3.7.4",
"typescript-formatter": "^7.2.2" "typescript-formatter": "^7.2.2"
} }
} }

View File

@ -1,7 +1,7 @@
import fetch from "node-fetch"; import fetch from "node-fetch";
import { GitHub } from "@actions/github/lib/utils"; import { GitHub } from "@actions/github/lib/utils";
import { Config, isTag, releaseBody } from "./util"; import { Config, isTag, releaseBody } from "./util";
import { statSync, readFileSync } from "fs"; import { lstatSync, readFileSync } from "fs";
import { getType } from "mime"; import { getType } from "mime";
import { basename } from "path"; import { basename } from "path";
@ -127,8 +127,8 @@ export const asset = (path: string): ReleaseAsset => {
return { return {
name: basename(path), name: basename(path),
mime: mimeOrDefault(path), mime: mimeOrDefault(path),
size: statSync(path).size, size: lstatSync(path).size,
data: readFileSync(path), data: readFileSync(path)
}; };
}; };
@ -153,7 +153,7 @@ export const upload = async (
await github.rest.repos.deleteReleaseAsset({ await github.rest.repos.deleteReleaseAsset({
asset_id: currentAsset.id || 1, asset_id: currentAsset.id || 1,
owner, owner,
repo, repo
}); });
} }
console.log(`⬆️ Uploading ${name}...`); console.log(`⬆️ Uploading ${name}...`);
@ -163,10 +163,10 @@ export const upload = async (
headers: { headers: {
"content-length": `${size}`, "content-length": `${size}`,
"content-type": mime, "content-type": mime,
authorization: `token ${config.github_token}`, authorization: `token ${config.github_token}`
}, },
method: "POST", method: "POST",
body, body
}); });
const json = await resp.json(); const json = await resp.json();
if (resp.status !== 201) { if (resp.status !== 201) {
@ -204,9 +204,9 @@ export const release = async (
if (config.input_draft) { if (config.input_draft) {
for await (const response of releaser.allReleases({ for await (const response of releaser.allReleases({
owner, owner,
repo, repo
})) { })) {
let release = response.data.find((release) => release.tag_name === tag); let release = response.data.find(release => release.tag_name === tag);
if (release) { if (release) {
return release; return release;
} }
@ -215,7 +215,7 @@ export const release = async (
let existingRelease = await releaser.getReleaseByTag({ let existingRelease = await releaser.getReleaseByTag({
owner, owner,
repo, repo,
tag, tag
}); });
const release_id = existingRelease.data.id; const release_id = existingRelease.data.id;
@ -238,14 +238,7 @@ export const release = async (
// body parts as a release gets updated. some users will likely want this while // body parts as a release gets updated. some users will likely want this while
// others won't previously this was duplicating content for most which // others won't previously this was duplicating content for most which
// no one wants // no one wants
const workflowBody = releaseBody(config) || ""; let body = releaseBody(config) || existingRelease.data.body || "";
const existingReleaseBody = existingRelease.data.body || "";
let body: string;
if (config.input_append_body && workflowBody && existingReleaseBody) {
body = existingReleaseBody + "\n" + workflowBody;
} else {
body = workflowBody || existingReleaseBody;
}
const draft = const draft =
config.input_draft !== undefined config.input_draft !== undefined
@ -267,7 +260,7 @@ export const release = async (
draft, draft,
prerelease, prerelease,
discussion_category_name, discussion_category_name,
generate_release_notes, generate_release_notes
}); });
return release.data; return release.data;
} catch (error) { } catch (error) {
@ -296,7 +289,7 @@ export const release = async (
prerelease, prerelease,
target_commitish, target_commitish,
discussion_category_name, discussion_category_name,
generate_release_notes, generate_release_notes
}); });
return release.data; return release.data;
} catch (error) { } catch (error) {
@ -304,9 +297,9 @@ export const release = async (
console.log( console.log(
`⚠️ GitHub release failed with status: ${ `⚠️ GitHub release failed with status: ${
error.status error.status
}\n${JSON.stringify(error.response.data.errors)}\nretrying... (${ }\n${JSON.stringify(
maxRetries - 1 error.response.data.errors
} retries remaining)` )}\nretrying... (${maxRetries - 1} retries remaining)`
); );
return release(config, releaser, maxRetries - 1); return release(config, releaser, maxRetries - 1);
} }

View File

@ -3,7 +3,7 @@ import {
parseConfig, parseConfig,
isTag, isTag,
unmatchedPatterns, unmatchedPatterns,
uploadUrl, uploadUrl
} from "./util"; } from "./util";
import { release, upload, GitHubReleaser } from "./github"; import { release, upload, GitHubReleaser } from "./github";
import { getOctokit } from "@actions/github"; import { getOctokit } from "@actions/github";
@ -24,7 +24,7 @@ async function run() {
} }
if (config.input_files) { if (config.input_files) {
const patterns = unmatchedPatterns(config.input_files); const patterns = unmatchedPatterns(config.input_files);
patterns.forEach((pattern) => patterns.forEach(pattern =>
console.warn(`🤔 Pattern '${pattern}' does not match any files.`) console.warn(`🤔 Pattern '${pattern}' does not match any files.`)
); );
if (patterns.length > 0 && config.input_fail_on_unmatched_files) { if (patterns.length > 0 && config.input_fail_on_unmatched_files) {
@ -55,8 +55,8 @@ async function run() {
console.warn( console.warn(
`Abuse detected for request ${options.method} ${options.url}` `Abuse detected for request ${options.method} ${options.url}`
); );
}, }
}, }
}); });
//); //);
const rel = await release(config, new GitHubReleaser(gh)); const rel = await release(config, new GitHubReleaser(gh));
@ -65,23 +65,20 @@ async function run() {
if (files.length == 0) { if (files.length == 0) {
console.warn(`🤔 ${config.input_files} not include valid file.`); console.warn(`🤔 ${config.input_files} not include valid file.`);
} }
const currentAssets = rel.assets; const currentAsserts = rel.assets;
const assets = await Promise.all( await Promise.all(
files.map(async (path) => { files.map(async path => {
const json = await upload( await upload(
config, config,
gh, gh,
uploadUrl(rel.upload_url), uploadUrl(rel.upload_url),
path, path,
currentAssets currentAsserts
); );
delete json.uploader;
return json;
}) })
).catch((error) => { ).catch(error => {
throw error; throw error;
}); });
setOutput("assets", assets);
} }
console.log(`🎉 Release ready at ${rel.html_url}`); console.log(`🎉 Release ready at ${rel.html_url}`);
setOutput("url", rel.html_url); setOutput("url", rel.html_url);

View File

@ -1,5 +1,5 @@
import * as glob from "glob"; import * as glob from "glob";
import { statSync, readFileSync } from "fs"; import { lstatSync, readFileSync } from "fs";
export interface Config { export interface Config {
github_token: string; github_token: string;
@ -18,7 +18,6 @@ export interface Config {
input_target_commitish?: string; input_target_commitish?: string;
input_discussion_category_name?: string; input_discussion_category_name?: string;
input_generate_release_notes?: boolean; input_generate_release_notes?: boolean;
input_append_body?: boolean;
} }
export const uploadUrl = (url: string): string => { export const uploadUrl = (url: string): string => {
@ -44,8 +43,8 @@ export const parseInputFiles = (files: string): string[] => {
(acc, line) => (acc, line) =>
acc acc
.concat(line.split(",")) .concat(line.split(","))
.filter((pat) => pat) .filter(pat => pat)
.map((pat) => pat.trim()), .map(pat => pat.trim()),
[] []
); );
}; };
@ -68,15 +67,14 @@ export const parseConfig = (env: Env): Config => {
input_target_commitish: env.INPUT_TARGET_COMMITISH || undefined, input_target_commitish: env.INPUT_TARGET_COMMITISH || undefined,
input_discussion_category_name: input_discussion_category_name:
env.INPUT_DISCUSSION_CATEGORY_NAME || undefined, env.INPUT_DISCUSSION_CATEGORY_NAME || undefined,
input_generate_release_notes: env.INPUT_GENERATE_RELEASE_NOTES == "true", input_generate_release_notes: env.INPUT_GENERATE_RELEASE_NOTES == "true"
input_append_body: env.INPUT_APPEND_BODY == "true",
}; };
}; };
export const paths = (patterns: string[]): string[] => { export const paths = (patterns: string[]): string[] => {
return patterns.reduce((acc: string[], pattern: string): string[] => { return patterns.reduce((acc: string[], pattern: string): string[] => {
return acc.concat( return acc.concat(
glob.sync(pattern).filter((path) => statSync(path).isFile()) glob.sync(pattern).filter(path => lstatSync(path).isFile())
); );
}, []); }, []);
}; };
@ -84,7 +82,7 @@ export const paths = (patterns: string[]): string[] => {
export const unmatchedPatterns = (patterns: string[]): string[] => { export const unmatchedPatterns = (patterns: string[]): string[] => {
return patterns.reduce((acc: string[], pattern: string): string[] => { return patterns.reduce((acc: string[], pattern: string): string[] => {
return acc.concat( return acc.concat(
glob.sync(pattern).filter((path) => statSync(path).isFile()).length == 0 glob.sync(pattern).filter(path => lstatSync(path).isFile()).length == 0
? [pattern] ? [pattern]
: [] : []
); );

View File

@ -1,6 +1,5 @@
{ {
"compilerOptions": { "compilerOptions": {
"useUnknownInCatchVariables": false,
/* Basic Options */ /* Basic Options */
// "incremental": true, /* Enable incremental compilation */ // "incremental": true, /* Enable incremental compilation */
"target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */ "target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */