Compare commits


1 Commit

Author SHA1 Message Date
Tom Hu  07b62770b8  Create README.md  2021-07-23 09:51:05 -07:00
21 changed files with 4111 additions and 22483 deletions


@@ -8,7 +8,7 @@ jobs:
os: [macos-latest, windows-latest, ubuntu-latest]
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Upload coverage to Codecov (script)
uses: ./
with:
@@ -22,14 +22,6 @@ jobs:
file: ./coverage/coverage-final.json
flags: demo,${{ matrix.os }}
name: codecov-demo
- name: Upload coverage to Codecov (version)
uses: ./
with:
files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json
file: ./coverage/coverage-final.json
flags: version,${{ matrix.os }}
name: codecov-version
version: v0.1.0_8880
run:
runs-on: ${{ matrix.os }}
strategy:
@@ -37,7 +29,7 @@ jobs:
os: [macos-latest, windows-latest, ubuntu-latest]
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Install dependencies
run: npm install
- name: Lint
@@ -57,11 +49,3 @@ jobs:
file: ./coverage/coverage-final.json
flags: demo,${{ matrix.os }}
name: codecov-demo
- name: Upload coverage to Codecov (version)
uses: ./
with:
files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json
file: ./coverage/coverage-final.json
flags: version,${{ matrix.os }}
name: codecov-version
version: v0.1.0_8880


@@ -1,66 +1,3 @@
## 3.1.0
### Features
- #699 Incorporate `xcode` arguments for the Codecov uploader
### Dependencies
- #694 build(deps-dev): bump @vercel/ncc from 0.33.3 to 0.33.4
- #696 build(deps-dev): bump @types/node from 17.0.23 to 17.0.25
- #698 build(deps-dev): bump jest-junit from 13.0.0 to 13.2.0
## 3.0.0
### Breaking Changes
- #689 Bump to node16 and small fixes
### Features
- #688 Incorporate `gcov` arguments for the Codecov uploader
### Dependencies
- #548 build(deps-dev): bump jest-junit from 12.2.0 to 13.0.0
- #603 [Snyk] Upgrade @actions/core from 1.5.0 to 1.6.0
- #628 build(deps): bump node-fetch from 2.6.1 to 3.1.1
- #634 build(deps): bump node-fetch from 3.1.1 to 3.2.0
- #636 build(deps): bump openpgp from 5.0.1 to 5.1.0
- #652 build(deps-dev): bump @vercel/ncc from 0.30.0 to 0.33.3
- #653 build(deps-dev): bump @types/node from 16.11.21 to 17.0.18
- #659 build(deps-dev): bump @types/jest from 27.4.0 to 27.4.1
- #667 build(deps): bump actions/checkout from 2 to 3
- #673 build(deps): bump node-fetch from 3.2.0 to 3.2.3
- #683 build(deps): bump minimist from 1.2.5 to 1.2.6
- #685 build(deps): bump @actions/github from 5.0.0 to 5.0.1
- #681 build(deps-dev): bump @types/node from 17.0.18 to 17.0.23
- #682 build(deps-dev): bump typescript from 4.5.5 to 4.6.3
- #676 build(deps): bump @actions/exec from 1.1.0 to 1.1.1
- #675 build(deps): bump openpgp from 5.1.0 to 5.2.1
## 2.1.0
### Features
- #515 Allow specifying version of Codecov uploader
### Dependencies
- #499 build(deps-dev): bump @vercel/ncc from 0.29.0 to 0.30.0
- #508 build(deps): bump openpgp from 5.0.0-5 to 5.0.0
- #514 build(deps-dev): bump @types/node from 16.6.0 to 16.9.0
## 2.0.3
### Fixes
- #464 Fix wrong link in the readme
- #485 fix: Add override OS and linux default to platform
### Dependencies
- #447 build(deps): bump openpgp from 5.0.0-4 to 5.0.0-5
- #458 build(deps-dev): bump eslint from 7.31.0 to 7.32.0
- #465 build(deps-dev): bump @typescript-eslint/eslint-plugin from 4.28.4 to 4.29.1
- #466 build(deps-dev): bump @typescript-eslint/parser from 4.28.4 to 4.29.1
- #468 build(deps-dev): bump @types/jest from 26.0.24 to 27.0.0
- #470 build(deps-dev): bump @types/node from 16.4.0 to 16.6.0
- #472 build(deps): bump path-parse from 1.0.6 to 1.0.7
- #473 build(deps-dev): bump @types/jest from 27.0.0 to 27.0.1
- #478 build(deps-dev): bump @typescript-eslint/parser from 4.29.1 to 4.29.2
- #479 build(deps-dev): bump @typescript-eslint/eslint-plugin from 4.29.1 to 4.29.2
- #481 build(deps-dev): bump @types/node from 16.6.0 to 16.6.2
- #483 build(deps-dev): bump @vercel/ncc from 0.29.0 to 0.29.2
- #484 build(deps): bump @actions/core from 1.4.0 to 1.5.0
## 2.0.2
### Fixes
- Underlying uploader fixes issues with tokens not being sent properly for users seeing


@@ -1,7 +1,7 @@
deploy:
$(eval VERSION := $(shell cat package.json | grep '"version": ' | cut -d\" -f4))
git tag -d v3
git push origin :v3
git tag v3
git tag v$(VERSION) -s -m ""
git tag -d v2
git push origin :v2
git tag v2
git tag v$(VERSION) -m ""
git push origin --tags


@@ -10,7 +10,7 @@
## ⚠️ Deprecation of v1
**On February 1, 2022, this version will be fully sunset and no longer function**
Due to the [deprecation](https://about.codecov.io/blog/introducing-codecovs-new-uploader/) of the underlying bash uploader,
Due to the [deprecation](https://about.codecov.io/blog/introducting-codecovs-new-uploader/) of the underlying bash uploader,
the Codecov GitHub Action has released `v2` which will use the new [uploader](https://github.com/codecov/uploader). You can learn
more about our deprecation plan and the new uploader on our [blog](https://about.codecov.io/blog/introducing-codecovs-new-uploader/).
@@ -59,12 +59,8 @@ Codecov's Action currently supports five inputs from the user: `token`, `file`,
| `commit_parent` | The commit SHA of the parent for which you are uploading coverage. If not present, the parent will be determined using the API of your repository provider. When using the repository provider's API, the parent is determined via finding the closest ancestor to the commit. | Optional
| `env_vars` | Environment variables to tag the upload with. Multiple env variables can be separated with commas (e.g. `OS,PYTHON`) | Optional
| `fail_ci_if_error` | Specify if CI pipeline should fail when Codecov runs into errors during upload. *Defaults to **false*** | Optional
| `functionalities` | Toggle functionalities | Optional
| | `network` Disable uploading the file network |
| `gcov` | Run with gcov support | Optional
| `gcov_args` | Extra arguments to pass to gcov | Optional
| `gcov_ignore` | Paths to ignore during gcov gathering | Optional
| `gcov_include` | Paths to include during gcov gathering | Optional
-| `functionalities` | Toggle functionalities | Optional
-| | `network` Disable uploading the file network |
| `move_coverage_to_trash` | Move discovered coverage reports to the trash | Optional
| `name` | Custom defined name for the upload | Optional
| `override_branch` | Specify the branch name | Optional
@@ -77,11 +73,7 @@ Codecov's Action currently supports five inputs from the user: `token`, `file`,
| `slug` | Specify the slug manually (Enterprise use) | Optional
| `url` | Change the upload host (Enterprise use) | Optional
| `verbose` | Specify whether the Codecov output should be verbose | Optional
| `version` | Specify which version of the Codecov Uploader should be used. Defaults to `latest` | Optional
| `working-directory` | Directory in which to execute `codecov.sh` | Optional
| `xcode` | Run with xcode support | Optional
| `xcode_archive_path` | Specify the xcode archive path. Likely specified as the -resultBundlePath and should end in .xcresult | Optional
### Example `workflow.yml` with Codecov Action
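The example itself falls outside the hunk shown above. A minimal sketch of such a workflow — the `v2` action tag, the Node test command, and the `unittests`/`codecov-umbrella` labels are assumptions for illustration, not taken from this diff — could look like:

```yaml
name: Example workflow for Codecov
on: [push, pull_request]
jobs:
  run:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Install dependencies and run tests with coverage
        run: |
          npm install
          npm test -- --coverage
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v2
        with:
          file: ./coverage/coverage-final.json
          flags: unittests
          name: codecov-umbrella
          fail_ci_if_error: true
          verbose: true
```

Here `fail_ci_if_error: true` makes the step fail the job when the uploader errors, matching that input's description in the table above.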


@@ -32,18 +32,6 @@ inputs:
functionalities:
description: 'Comma-separated list, see the README for options and their usage'
required: false
gcov:
description: 'Run with gcov support'
required: false
gcov_args:
description: 'Extra arguments to pass to gcov'
required: false
gcov_ignore:
description: 'Paths to ignore during gcov gathering'
required: false
gcov_include:
description: 'Paths to include during gcov gathering'
required: false
move_coverage_to_trash:
description: 'Move discovered coverage reports to the trash'
required: false
@@ -65,9 +53,6 @@ inputs:
override_tag:
description: 'Specify the git tag'
required: false
os:
description: 'Override the assumed OS. Options are alpine | linux | macos | windows.'
required: false
root_dir:
description: 'Used when not in git/hg project to identify project root directory'
required: false
@@ -80,21 +65,12 @@ inputs:
verbose:
description: 'Specify whether the Codecov output should be verbose'
required: false
version:
description: 'Specify which version of the Codecov Uploader should be used. Defaults to `latest`'
required: false
working-directory:
description: 'Directory in which to execute codecov.sh'
required: false
xcode:
description: 'Run with xcode support'
required: false
xcode_archive_path:
description: 'Specify the xcode archive path. Likely specified as the -resultBundlePath and should end in .xcresult'
required: false
branding:
color: 'red'
icon: 'umbrella'
runs:
using: 'node16'
using: 'node12'
main: 'dist/index.js'
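The manifest above only declares the inputs; a workflow consumes them through a step's `with:` block. As a hedged illustration — the action tag is an assumption, and these inputs are only valid on a release that still declares them — pinning the uploader version and overriding the detected OS (both shown in the hunks above) would look roughly like:

```yaml
- name: Upload coverage to Codecov (pinned uploader)
  uses: codecov/codecov-action@v3   # assumed tag; use a release that declares these inputs
  with:
    os: linux                       # one of alpine | linux | macos | windows
    version: v0.1.0_8880            # uploader version also used in the workflow diff above
    files: ./coverage/coverage-final.json
    verbose: true
```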

453  dist/37.index.js (vendored)

@@ -1,453 +0,0 @@
"use strict";
exports.id = 37;
exports.ids = [37];
exports.modules = {
/***/ 4037:
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "toFormData": () => (/* binding */ toFormData)
/* harmony export */ });
/* harmony import */ var fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(2777);
/* harmony import */ var formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(8010);
let s = 0;
const S = {
START_BOUNDARY: s++,
HEADER_FIELD_START: s++,
HEADER_FIELD: s++,
HEADER_VALUE_START: s++,
HEADER_VALUE: s++,
HEADER_VALUE_ALMOST_DONE: s++,
HEADERS_ALMOST_DONE: s++,
PART_DATA_START: s++,
PART_DATA: s++,
END: s++
};
let f = 1;
const F = {
PART_BOUNDARY: f,
LAST_BOUNDARY: f *= 2
};
const LF = 10;
const CR = 13;
const SPACE = 32;
const HYPHEN = 45;
const COLON = 58;
const A = 97;
const Z = 122;
const lower = c => c | 0x20;
const noop = () => {};
class MultipartParser {
/**
* @param {string} boundary
*/
constructor(boundary) {
this.index = 0;
this.flags = 0;
this.onHeaderEnd = noop;
this.onHeaderField = noop;
this.onHeadersEnd = noop;
this.onHeaderValue = noop;
this.onPartBegin = noop;
this.onPartData = noop;
this.onPartEnd = noop;
this.boundaryChars = {};
boundary = '\r\n--' + boundary;
const ui8a = new Uint8Array(boundary.length);
for (let i = 0; i < boundary.length; i++) {
ui8a[i] = boundary.charCodeAt(i);
this.boundaryChars[ui8a[i]] = true;
}
this.boundary = ui8a;
this.lookbehind = new Uint8Array(this.boundary.length + 8);
this.state = S.START_BOUNDARY;
}
/**
* @param {Uint8Array} data
*/
write(data) {
let i = 0;
const length_ = data.length;
let previousIndex = this.index;
let {lookbehind, boundary, boundaryChars, index, state, flags} = this;
const boundaryLength = this.boundary.length;
const boundaryEnd = boundaryLength - 1;
const bufferLength = data.length;
let c;
let cl;
const mark = name => {
this[name + 'Mark'] = i;
};
const clear = name => {
delete this[name + 'Mark'];
};
const callback = (callbackSymbol, start, end, ui8a) => {
if (start === undefined || start !== end) {
this[callbackSymbol](ui8a && ui8a.subarray(start, end));
}
};
const dataCallback = (name, clear) => {
const markSymbol = name + 'Mark';
if (!(markSymbol in this)) {
return;
}
if (clear) {
callback(name, this[markSymbol], i, data);
delete this[markSymbol];
} else {
callback(name, this[markSymbol], data.length, data);
this[markSymbol] = 0;
}
};
for (i = 0; i < length_; i++) {
c = data[i];
switch (state) {
case S.START_BOUNDARY:
if (index === boundary.length - 2) {
if (c === HYPHEN) {
flags |= F.LAST_BOUNDARY;
} else if (c !== CR) {
return;
}
index++;
break;
} else if (index - 1 === boundary.length - 2) {
if (flags & F.LAST_BOUNDARY && c === HYPHEN) {
state = S.END;
flags = 0;
} else if (!(flags & F.LAST_BOUNDARY) && c === LF) {
index = 0;
callback('onPartBegin');
state = S.HEADER_FIELD_START;
} else {
return;
}
break;
}
if (c !== boundary[index + 2]) {
index = -2;
}
if (c === boundary[index + 2]) {
index++;
}
break;
case S.HEADER_FIELD_START:
state = S.HEADER_FIELD;
mark('onHeaderField');
index = 0;
// falls through
case S.HEADER_FIELD:
if (c === CR) {
clear('onHeaderField');
state = S.HEADERS_ALMOST_DONE;
break;
}
index++;
if (c === HYPHEN) {
break;
}
if (c === COLON) {
if (index === 1) {
// empty header field
return;
}
dataCallback('onHeaderField', true);
state = S.HEADER_VALUE_START;
break;
}
cl = lower(c);
if (cl < A || cl > Z) {
return;
}
break;
case S.HEADER_VALUE_START:
if (c === SPACE) {
break;
}
mark('onHeaderValue');
state = S.HEADER_VALUE;
// falls through
case S.HEADER_VALUE:
if (c === CR) {
dataCallback('onHeaderValue', true);
callback('onHeaderEnd');
state = S.HEADER_VALUE_ALMOST_DONE;
}
break;
case S.HEADER_VALUE_ALMOST_DONE:
if (c !== LF) {
return;
}
state = S.HEADER_FIELD_START;
break;
case S.HEADERS_ALMOST_DONE:
if (c !== LF) {
return;
}
callback('onHeadersEnd');
state = S.PART_DATA_START;
break;
case S.PART_DATA_START:
state = S.PART_DATA;
mark('onPartData');
// falls through
case S.PART_DATA:
previousIndex = index;
if (index === 0) {
// boyer-moore derrived algorithm to safely skip non-boundary data
i += boundaryEnd;
while (i < bufferLength && !(data[i] in boundaryChars)) {
i += boundaryLength;
}
i -= boundaryEnd;
c = data[i];
}
if (index < boundary.length) {
if (boundary[index] === c) {
if (index === 0) {
dataCallback('onPartData', true);
}
index++;
} else {
index = 0;
}
} else if (index === boundary.length) {
index++;
if (c === CR) {
// CR = part boundary
flags |= F.PART_BOUNDARY;
} else if (c === HYPHEN) {
// HYPHEN = end boundary
flags |= F.LAST_BOUNDARY;
} else {
index = 0;
}
} else if (index - 1 === boundary.length) {
if (flags & F.PART_BOUNDARY) {
index = 0;
if (c === LF) {
// unset the PART_BOUNDARY flag
flags &= ~F.PART_BOUNDARY;
callback('onPartEnd');
callback('onPartBegin');
state = S.HEADER_FIELD_START;
break;
}
} else if (flags & F.LAST_BOUNDARY) {
if (c === HYPHEN) {
callback('onPartEnd');
state = S.END;
flags = 0;
} else {
index = 0;
}
} else {
index = 0;
}
}
if (index > 0) {
// when matching a possible boundary, keep a lookbehind reference
// in case it turns out to be a false lead
lookbehind[index - 1] = c;
} else if (previousIndex > 0) {
// if our boundary turned out to be rubbish, the captured lookbehind
// belongs to partData
const _lookbehind = new Uint8Array(lookbehind.buffer, lookbehind.byteOffset, lookbehind.byteLength);
callback('onPartData', 0, previousIndex, _lookbehind);
previousIndex = 0;
mark('onPartData');
// reconsider the current character even so it interrupted the sequence
// it could be the beginning of a new sequence
i--;
}
break;
case S.END:
break;
default:
throw new Error(`Unexpected state entered: ${state}`);
}
}
dataCallback('onHeaderField');
dataCallback('onHeaderValue');
dataCallback('onPartData');
// Update properties for the next call
this.index = index;
this.state = state;
this.flags = flags;
}
end() {
if ((this.state === S.HEADER_FIELD_START && this.index === 0) ||
(this.state === S.PART_DATA && this.index === this.boundary.length)) {
this.onPartEnd();
} else if (this.state !== S.END) {
throw new Error('MultipartParser.end(): stream ended unexpectedly');
}
}
}
function _fileName(headerValue) {
// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
const m = headerValue.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i);
if (!m) {
return;
}
const match = m[2] || m[3] || '';
let filename = match.slice(match.lastIndexOf('\\') + 1);
filename = filename.replace(/%22/g, '"');
filename = filename.replace(/&#(\d{4});/g, (m, code) => {
return String.fromCharCode(code);
});
return filename;
}
async function toFormData(Body, ct) {
if (!/multipart/i.test(ct)) {
throw new TypeError('Failed to fetch');
}
const m = ct.match(/boundary=(?:"([^"]+)"|([^;]+))/i);
if (!m) {
throw new TypeError('no or bad content-type header, no multipart boundary');
}
const parser = new MultipartParser(m[1] || m[2]);
let headerField;
let headerValue;
let entryValue;
let entryName;
let contentType;
let filename;
const entryChunks = [];
const formData = new formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__/* .FormData */ .Ct();
const onPartData = ui8a => {
entryValue += decoder.decode(ui8a, {stream: true});
};
const appendToFile = ui8a => {
entryChunks.push(ui8a);
};
const appendFileToFormData = () => {
const file = new fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__/* .File */ .$B(entryChunks, filename, {type: contentType});
formData.append(entryName, file);
};
const appendEntryToFormData = () => {
formData.append(entryName, entryValue);
};
const decoder = new TextDecoder('utf-8');
decoder.decode();
parser.onPartBegin = function () {
parser.onPartData = onPartData;
parser.onPartEnd = appendEntryToFormData;
headerField = '';
headerValue = '';
entryValue = '';
entryName = '';
contentType = '';
filename = null;
entryChunks.length = 0;
};
parser.onHeaderField = function (ui8a) {
headerField += decoder.decode(ui8a, {stream: true});
};
parser.onHeaderValue = function (ui8a) {
headerValue += decoder.decode(ui8a, {stream: true});
};
parser.onHeaderEnd = function () {
headerValue += decoder.decode();
headerField = headerField.toLowerCase();
if (headerField === 'content-disposition') {
// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
const m = headerValue.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i);
if (m) {
entryName = m[2] || m[3] || '';
}
filename = _fileName(headerValue);
if (filename) {
parser.onPartData = appendToFile;
parser.onPartEnd = appendFileToFormData;
}
} else if (headerField === 'content-type') {
contentType = headerValue;
}
headerValue = '';
headerField = '';
};
for await (const chunk of Body) {
parser.write(chunk);
}
parser.end();
return formData;
}
/***/ })
};
;
//# sourceMappingURL=37.index.js.map

File diff suppressed because one or more lines are too long

453  dist/629.index.js (vendored)

@@ -1,453 +0,0 @@
exports.id = 629;
exports.ids = [629];
exports.modules = {
/***/ 6629:
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "toFormData": () => (/* binding */ toFormData)
/* harmony export */ });
/* harmony import */ var fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(4818);
/* harmony import */ var formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(1402);
let s = 0;
const S = {
START_BOUNDARY: s++,
HEADER_FIELD_START: s++,
HEADER_FIELD: s++,
HEADER_VALUE_START: s++,
HEADER_VALUE: s++,
HEADER_VALUE_ALMOST_DONE: s++,
HEADERS_ALMOST_DONE: s++,
PART_DATA_START: s++,
PART_DATA: s++,
END: s++
};
let f = 1;
const F = {
PART_BOUNDARY: f,
LAST_BOUNDARY: f *= 2
};
const LF = 10;
const CR = 13;
const SPACE = 32;
const HYPHEN = 45;
const COLON = 58;
const A = 97;
const Z = 122;
const lower = c => c | 0x20;
const noop = () => {};
class MultipartParser {
/**
* @param {string} boundary
*/
constructor(boundary) {
this.index = 0;
this.flags = 0;
this.onHeaderEnd = noop;
this.onHeaderField = noop;
this.onHeadersEnd = noop;
this.onHeaderValue = noop;
this.onPartBegin = noop;
this.onPartData = noop;
this.onPartEnd = noop;
this.boundaryChars = {};
boundary = '\r\n--' + boundary;
const ui8a = new Uint8Array(boundary.length);
for (let i = 0; i < boundary.length; i++) {
ui8a[i] = boundary.charCodeAt(i);
this.boundaryChars[ui8a[i]] = true;
}
this.boundary = ui8a;
this.lookbehind = new Uint8Array(this.boundary.length + 8);
this.state = S.START_BOUNDARY;
}
/**
* @param {Uint8Array} data
*/
write(data) {
let i = 0;
const length_ = data.length;
let previousIndex = this.index;
let {lookbehind, boundary, boundaryChars, index, state, flags} = this;
const boundaryLength = this.boundary.length;
const boundaryEnd = boundaryLength - 1;
const bufferLength = data.length;
let c;
let cl;
const mark = name => {
this[name + 'Mark'] = i;
};
const clear = name => {
delete this[name + 'Mark'];
};
const callback = (callbackSymbol, start, end, ui8a) => {
if (start === undefined || start !== end) {
this[callbackSymbol](ui8a && ui8a.subarray(start, end));
}
};
const dataCallback = (name, clear) => {
const markSymbol = name + 'Mark';
if (!(markSymbol in this)) {
return;
}
if (clear) {
callback(name, this[markSymbol], i, data);
delete this[markSymbol];
} else {
callback(name, this[markSymbol], data.length, data);
this[markSymbol] = 0;
}
};
for (i = 0; i < length_; i++) {
c = data[i];
switch (state) {
case S.START_BOUNDARY:
if (index === boundary.length - 2) {
if (c === HYPHEN) {
flags |= F.LAST_BOUNDARY;
} else if (c !== CR) {
return;
}
index++;
break;
} else if (index - 1 === boundary.length - 2) {
if (flags & F.LAST_BOUNDARY && c === HYPHEN) {
state = S.END;
flags = 0;
} else if (!(flags & F.LAST_BOUNDARY) && c === LF) {
index = 0;
callback('onPartBegin');
state = S.HEADER_FIELD_START;
} else {
return;
}
break;
}
if (c !== boundary[index + 2]) {
index = -2;
}
if (c === boundary[index + 2]) {
index++;
}
break;
case S.HEADER_FIELD_START:
state = S.HEADER_FIELD;
mark('onHeaderField');
index = 0;
// falls through
case S.HEADER_FIELD:
if (c === CR) {
clear('onHeaderField');
state = S.HEADERS_ALMOST_DONE;
break;
}
index++;
if (c === HYPHEN) {
break;
}
if (c === COLON) {
if (index === 1) {
// empty header field
return;
}
dataCallback('onHeaderField', true);
state = S.HEADER_VALUE_START;
break;
}
cl = lower(c);
if (cl < A || cl > Z) {
return;
}
break;
case S.HEADER_VALUE_START:
if (c === SPACE) {
break;
}
mark('onHeaderValue');
state = S.HEADER_VALUE;
// falls through
case S.HEADER_VALUE:
if (c === CR) {
dataCallback('onHeaderValue', true);
callback('onHeaderEnd');
state = S.HEADER_VALUE_ALMOST_DONE;
}
break;
case S.HEADER_VALUE_ALMOST_DONE:
if (c !== LF) {
return;
}
state = S.HEADER_FIELD_START;
break;
case S.HEADERS_ALMOST_DONE:
if (c !== LF) {
return;
}
callback('onHeadersEnd');
state = S.PART_DATA_START;
break;
case S.PART_DATA_START:
state = S.PART_DATA;
mark('onPartData');
// falls through
case S.PART_DATA:
previousIndex = index;
if (index === 0) {
// boyer-moore derrived algorithm to safely skip non-boundary data
i += boundaryEnd;
while (i < bufferLength && !(data[i] in boundaryChars)) {
i += boundaryLength;
}
i -= boundaryEnd;
c = data[i];
}
if (index < boundary.length) {
if (boundary[index] === c) {
if (index === 0) {
dataCallback('onPartData', true);
}
index++;
} else {
index = 0;
}
} else if (index === boundary.length) {
index++;
if (c === CR) {
// CR = part boundary
flags |= F.PART_BOUNDARY;
} else if (c === HYPHEN) {
// HYPHEN = end boundary
flags |= F.LAST_BOUNDARY;
} else {
index = 0;
}
} else if (index - 1 === boundary.length) {
if (flags & F.PART_BOUNDARY) {
index = 0;
if (c === LF) {
// unset the PART_BOUNDARY flag
flags &= ~F.PART_BOUNDARY;
callback('onPartEnd');
callback('onPartBegin');
state = S.HEADER_FIELD_START;
break;
}
} else if (flags & F.LAST_BOUNDARY) {
if (c === HYPHEN) {
callback('onPartEnd');
state = S.END;
flags = 0;
} else {
index = 0;
}
} else {
index = 0;
}
}
if (index > 0) {
// when matching a possible boundary, keep a lookbehind reference
// in case it turns out to be a false lead
lookbehind[index - 1] = c;
} else if (previousIndex > 0) {
// if our boundary turned out to be rubbish, the captured lookbehind
// belongs to partData
const _lookbehind = new Uint8Array(lookbehind.buffer, lookbehind.byteOffset, lookbehind.byteLength);
callback('onPartData', 0, previousIndex, _lookbehind);
previousIndex = 0;
mark('onPartData');
// reconsider the current character even so it interrupted the sequence
// it could be the beginning of a new sequence
i--;
}
break;
case S.END:
break;
default:
throw new Error(`Unexpected state entered: ${state}`);
}
}
dataCallback('onHeaderField');
dataCallback('onHeaderValue');
dataCallback('onPartData');
// Update properties for the next call
this.index = index;
this.state = state;
this.flags = flags;
}
end() {
if ((this.state === S.HEADER_FIELD_START && this.index === 0) ||
(this.state === S.PART_DATA && this.index === this.boundary.length)) {
this.onPartEnd();
} else if (this.state !== S.END) {
throw new Error('MultipartParser.end(): stream ended unexpectedly');
}
}
}
function _fileName(headerValue) {
// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
const m = headerValue.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i);
if (!m) {
return;
}
const match = m[2] || m[3] || '';
let filename = match.slice(match.lastIndexOf('\\') + 1);
filename = filename.replace(/%22/g, '"');
filename = filename.replace(/&#(\d{4});/g, (m, code) => {
return String.fromCharCode(code);
});
return filename;
}
async function toFormData(Body, ct) {
if (!/multipart/i.test(ct)) {
throw new TypeError('Failed to fetch');
}
const m = ct.match(/boundary=(?:"([^"]+)"|([^;]+))/i);
if (!m) {
throw new TypeError('no or bad content-type header, no multipart boundary');
}
const parser = new MultipartParser(m[1] || m[2]);
let headerField;
let headerValue;
let entryValue;
let entryName;
let contentType;
let filename;
const entryChunks = [];
const formData = new formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__/* .FormData */ .Ct();
const onPartData = ui8a => {
entryValue += decoder.decode(ui8a, {stream: true});
};
const appendToFile = ui8a => {
entryChunks.push(ui8a);
};
const appendFileToFormData = () => {
const file = new fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__/* .File */ .$B(entryChunks, filename, {type: contentType});
formData.append(entryName, file);
};
const appendEntryToFormData = () => {
formData.append(entryName, entryValue);
};
const decoder = new TextDecoder('utf-8');
decoder.decode();
parser.onPartBegin = function () {
parser.onPartData = onPartData;
parser.onPartEnd = appendEntryToFormData;
headerField = '';
headerValue = '';
entryValue = '';
entryName = '';
contentType = '';
filename = null;
entryChunks.length = 0;
};
parser.onHeaderField = function (ui8a) {
headerField += decoder.decode(ui8a, {stream: true});
};
parser.onHeaderValue = function (ui8a) {
headerValue += decoder.decode(ui8a, {stream: true});
};
parser.onHeaderEnd = function () {
headerValue += decoder.decode();
headerField = headerField.toLowerCase();
if (headerField === 'content-disposition') {
// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
const m = headerValue.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i);
if (m) {
entryName = m[2] || m[3] || '';
}
filename = _fileName(headerValue);
if (filename) {
parser.onPartData = appendToFile;
parser.onPartEnd = appendFileToFormData;
}
} else if (headerField === 'content-type') {
contentType = headerValue;
}
headerValue = '';
headerField = '';
};
for await (const chunk of Body) {
parser.write(chunk);
}
parser.end();
return formData;
}
/***/ })
};
;
//# sourceMappingURL=629.index.js.map

File diff suppressed because one or more lines are too long

14044  dist/index.js (vendored)

File diff suppressed because one or more lines are too long

2  dist/index.js.map (vendored)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

11257  package-lock.json (generated)

File diff suppressed because it is too large


@@ -1,6 +1,6 @@
{
"name": "codecov-action",
"version": "3.1.0",
"version": "2.0.2",
"description": "Upload coverage reports to Codecov from GitHub Actions",
"main": "index.js",
"scripts": {
@@ -16,30 +16,30 @@
"url": "git+https://github.com/codecov/codecov-action.git"
},
"keywords": [],
"author": "Codecov",
"author": "Ibrahim Ali",
"license": "MIT",
"bugs": {
"url": "https://github.com/codecov/codecov-action/issues"
},
"homepage": "https://github.com/codecov/codecov-action#readme",
"dependencies": {
"@actions/core": "^1.6.0",
"@actions/exec": "^1.1.1",
"@actions/github": "^5.0.1",
"node-fetch": "^3.2.3",
"openpgp": "^5.2.1"
"@actions/core": "^1.4.0",
"@actions/exec": "^1.1.0",
"@actions/github": "^5.0.0",
"node-fetch": "^2.6.1",
"openpgp": "^5.0.0-4"
},
"devDependencies": {
"@types/jest": "^27.4.1",
"@types/node": "^17.0.25",
"@typescript-eslint/eslint-plugin": "^4.29.2",
"@typescript-eslint/parser": "^4.29.2",
"@vercel/ncc": "^0.33.4",
"eslint": "^7.32.0",
"@types/jest": "^26.0.24",
"@types/node": "^16.4.0",
"@typescript-eslint/eslint-plugin": "^4.28.4",
"@typescript-eslint/parser": "^4.28.4",
"@vercel/ncc": "^0.29.0",
"eslint": "^7.31.0",
"eslint-config-google": "^0.14.0",
"jest": "^26.6.3",
"jest-junit": "^13.2.0",
"jest-junit": "^12.2.0",
"ts-jest": "^26.5.6",
"typescript": "^4.6.3"
"typescript": "^4.3.5"
}
}


@@ -35,10 +35,6 @@ test('all arguments', () => {
'flags': 'test,test2',
'functionalities':
'network',
'gcov': 'true',
'gcov_args': '-v',
'gcov_ignore': '*.fake',
'gcov_include': 'real_file',
'move_coverage_to_trash': 'true',
'name': 'codecov',
'override_branch': 'thomasrockhu/test',
@@ -53,8 +49,6 @@ test('all arguments', () => {
'url': 'https://codecov.enterprise.com',
'verbose': 't',
'working-directory': 'src',
'xcode': 'true',
'xcode_archive_path': '/test.xcresult',
};
for (const env of Object.keys(envs)) {
@@ -86,13 +80,6 @@ test('all arguments', () => {
'test',
'-F',
'test2',
'-g',
'--gcovArgs',
'-v',
'--gcovIgnore',
'*.fake',
'--gcovInclude',
'real_file',
'-B',
'thomasrockhu/test',
'-b',
@@ -112,9 +99,6 @@ test('all arguments', () => {
'-u',
'https://codecov.enterprise.com',
'-v',
'--xc',
'--xp',
'/test.xcresult',
]);
expect(failCi).toBeTruthy();


@@ -25,13 +25,8 @@ const buildExec = () => {
const file = core.getInput('file');
const files = core.getInput('files');
const flags = core.getInput('flags');
const gcov = core.getInput('gcov');
const gcovArgs = core.getInput('gcov_args');
const gcovIgnore = core.getInput('gcov_ignore');
const gcovInclude = core.getInput('gcov_include');
const functionalities = core.getInput('functionalities');
const name = core.getInput('name');
const os = core.getInput('os');
const overrideBranch = core.getInput('override_branch');
const overrideBuild = core.getInput('override_build');
const overrideCommit = core.getInput('override_commit');
@@ -41,12 +36,9 @@ const buildExec = () => {
const searchDir = core.getInput('directory');
const slug = core.getInput('slug');
const token = core.getInput('token');
let uploaderVersion = core.getInput('version');
const url = core.getInput('url');
const verbose = isTrue(core.getInput('verbose'));
const url = core.getInput('url');
const workingDir = core.getInput('working-directory');
const xcode = core.getInput('xcode');
const xcodeArchivePath = core.getInput('xcode_archive_path');
const execArgs = [];
execArgs.push(
@@ -111,20 +103,6 @@ const buildExec = () => {
execArgs.push('-F', `${f}`);
});
}
if (gcov) {
execArgs.push('-g');
}
if (gcovArgs) {
execArgs.push('--gcovArgs', `${gcovArgs}`);
}
if (gcovIgnore) {
execArgs.push('--gcovIgnore', `${gcovIgnore}`);
}
if (gcovInclude) {
execArgs.push('--gcovInclude', `${gcovInclude}`);
}
if (overrideBranch) {
execArgs.push('-B', `${overrideBranch}`);
}
@@ -167,16 +145,8 @@ const buildExec = () => {
if (workingDir) {
options.cwd = workingDir;
}
if (xcode && xcodeArchivePath) {
execArgs.push('--xc');
execArgs.push('--xp', `${xcodeArchivePath}`);
}
if (uploaderVersion == '') {
uploaderVersion = 'latest';
}
return {execArgs, options, failCi, os, uploaderVersion};
return {execArgs, options, failCi};
};
export default buildExec;


@@ -1,70 +0,0 @@
import {
getBaseUrl,
getPlatform,
isValidPlatform,
isWindows,
PLATFORMS,
} from './helpers';
let OLDOS = process.env.RUNNER_OS;
beforeEach(() => {
jest.resetModules();
OLDOS = process.env.RUNNER_OS;
});
afterAll(() => {
process.env.RUNNER_OS = OLDOS;
});
test('getPlatform', () => {
expect(getPlatform('linux')).toBe('linux');
expect(getPlatform('windows')).toBe('windows');
const defaultPlatform =
process.env.RUNNER_OS ? process.env.RUNNER_OS.toLowerCase() : 'linux';
expect(getPlatform('fakeos')).toBe(defaultPlatform);
expect(getPlatform()).toBe(defaultPlatform);
process.env.RUNNER_OS = 'macos';
expect(getPlatform('fakeos')).toBe('macos');
expect(getPlatform()).toBe('macos');
process.env.RUNNER_OS = 'alsofakeos';
expect(getPlatform()).toBe('linux');
expect(getPlatform('fakeos')).toBe('linux');
});
test('getBaseUrl', () => {
expect(PLATFORMS.map((platform) => {
return getBaseUrl(platform, 'latest');
})).toEqual([
'https://uploader.codecov.io/latest/alpine/codecov',
'https://uploader.codecov.io/latest/linux/codecov',
'https://uploader.codecov.io/latest/macos/codecov',
'https://uploader.codecov.io/latest/windows/codecov.exe',
]);
expect(PLATFORMS.map((platform) => {
return getBaseUrl(platform, 'v0.1.0_8880');
})).toEqual([
'https://uploader.codecov.io/v0.1.0_8880/alpine/codecov',
'https://uploader.codecov.io/v0.1.0_8880/linux/codecov',
'https://uploader.codecov.io/v0.1.0_8880/macos/codecov',
'https://uploader.codecov.io/v0.1.0_8880/windows/codecov.exe',
]);
});
test('isWindows', () => {
expect(PLATFORMS.map((platform) => {
return isWindows(platform);
})).toEqual([false, false, false, true]);
});
test('isValidPlatform', () => {
expect(PLATFORMS.map((platform) => {
return isValidPlatform(platform);
})).toEqual([true, true, true, true]);
expect(isValidPlatform('fakeos')).toBeFalsy();
});


@@ -9,47 +9,30 @@ const setFailure = (message: string, failCi: boolean): void => {
}
};
const getUploaderName = (platform: string): string => {
if (isWindows(platform)) {
const getUploaderName = (): string => {
if (isWindows()) {
return 'codecov.exe';
} else {
return 'codecov';
}
};
const isValidPlatform = (platform: string): boolean => {
return PLATFORMS.includes(platform);
const isValidPlatform = (): boolean => {
return PLATFORMS.includes(getPlatform());
};
const isWindows = (platform: string): boolean => {
return platform === 'windows';
const isWindows = (): boolean => {
return getPlatform() === 'windows';
};
const getPlatform = (os?: string): string => {
if (isValidPlatform(os)) {
core.info(`==> ${os} OS provided`);
return os;
}
const platform = process.env.RUNNER_OS?.toLowerCase();
if (isValidPlatform(platform)) {
core.info(`==> ${platform} OS detected`);
return platform;
}
core.info(
'==> Could not detect OS or provided OS is invalid. Defaulting to linux',
);
return 'linux';
const getPlatform = (): string => {
return process.env.RUNNER_OS.toLowerCase();
};
const getBaseUrl = (platform: string, version: string): string => {
return `https://uploader.codecov.io/${version}/${platform}/${getUploaderName(platform)}`;
};
const BASEURL = `https://uploader.codecov.io/latest/${getPlatform()}/${getUploaderName()}`;
export {
PLATFORMS,
getBaseUrl,
BASEURL,
getPlatform,
getUploaderName,
isValidPlatform,


@@ -6,23 +6,28 @@ import * as exec from '@actions/exec';
import buildExec from './buildExec';
import {
getBaseUrl,
BASEURL,
getPlatform,
getUploaderName,
isValidPlatform,
setFailure,
} from './helpers';
import verify from './validate';
import versionInfo from './version';
let failCi;
try {
const {execArgs, options, failCi, os, uploaderVersion} = buildExec();
const platform = getPlatform(os);
const filename = path.join( __dirname, getUploaderName(platform));
https.get(getBaseUrl(platform, uploaderVersion), (res) => {
const {execArgs, options, failCi} = buildExec();
const platform = getPlatform();
if (!isValidPlatform()) {
setFailure(
`Codecov: Encountered an unexpected platform: ${platform}`,
failCi,
);
}
const filename = path.join( __dirname, getUploaderName());
https.get(BASEURL, (res) => {
// Image will be stored at this path
const filePath = fs.createWriteStream(filename);
res.pipe(filePath);
@@ -35,8 +40,7 @@ try {
}).on('finish', async () => {
filePath.close();
await verify(filename, platform, uploaderVersion);
await versionInfo(platform, uploaderVersion);
await verify(filename);
await fs.chmodSync(filename, '777');
const unlink = () => {


@@ -7,18 +7,14 @@ import * as openpgp from 'openpgp';
import * as fetch from 'node-fetch';
import {
getBaseUrl,
BASEURL,
getUploaderName,
setFailure,
} from './helpers';
const verify = async (
filename: string,
platform: string,
version: string,
): Promise<void> => {
const verify = async (filename: string) => {
try {
const uploaderName = getUploaderName(platform);
const uploaderName = getUploaderName();
// Read in public key
const publicKeyArmored = await fs.readFileSync(
@@ -27,15 +23,10 @@ const verify = async (
);
// Get SHASUM and SHASUM signature files
console.log(`${getBaseUrl(platform, version)}.SHA256SUM`);
const shasumRes = await fetch.default(
`${getBaseUrl(platform, version)}.SHA256SUM`,
);
const shasumRes = await fetch( `${BASEURL}.SHA256SUM`);
const shasum = await shasumRes.text();
const shaSigRes = await fetch.default(
`${getBaseUrl(platform, version)}.SHA256SUM.sig`,
);
const shaSigRes = await fetch( `${BASEURL}.SHA256SUM.sig`);
const shaSig = await shaSigRes.text();
// Verify shasum


@@ -1,22 +0,0 @@
import * as core from '@actions/core';
import * as fetch from 'node-fetch';
const versionInfo = async (
platform: string,
version?: string,
): Promise<void> => {
if (version) {
core.info(`==> Running version ${version}`);
}
try {
const metadataRes = await fetch.default( `https://uploader.codecov.io/${platform}/latest`, {
headers: {'Accept': 'application/json'},
});
const metadata = await metadataRes.json();
core.info(`==> Running version ${metadata['version']}`);
} catch (err) {
core.info(`Could not pull latest version information: ${err}`);
}
};
export default versionInfo;