Compare commits

..

4 Commits

Author SHA1 Message Date
Tom Hu
ab904c41d6 chore(release): bump to 3.1.6 2024-01-30 11:17:54 -08:00
Tom Hu
a8c374ae46 Revert "action.yml: Update to Node.js 20 (#1228)"
This reverts commit 9140fdcf54.
2024-01-30 11:17:26 -08:00
Tom Hu
4fe8c5f003 chore(release): bump to 3.1.5 2024-01-25 10:44:09 -08:00
Robin Hallabro-Kokko
9140fdcf54 action.yml: Update to Node.js 20 (#1228)
Node.js 16 actions are deprecated.

This should suppress the deprecation notice: "Please update the following actions to use Node.js 20: codecov/codecov-action@v3"
2024-01-25 10:41:05 -08:00
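
The change introduced by #1228 (and reverted above in a8c374ae46) is a single field in action.yml's `runs` block, shown in full later in this compare. A minimal sketch of that change:

```yaml
# action.yml: the runtime the action executes under is set by `runs.using`.
# #1228 moved it from Node.js 16 to Node.js 20; the revert restores node16
# on this 3.x line.
runs:
  using: 'node20'   # reverted back to 'node16' by a8c374ae46
  main: 'dist/index.js'
```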
19 changed files with 27298 additions and 30576 deletions

View File

@@ -37,11 +37,11 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3.23.2
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -52,7 +52,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v3.23.2
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
@@ -66,4 +66,4 @@ jobs:
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.23.2
uses: github/codeql-action/analyze@v2

View File

@@ -1,14 +0,0 @@
name: Enforce License Compliance
on:
pull_request:
branches: [main, master]
jobs:
enforce-license-compliance:
runs-on: ubuntu-latest
steps:
- name: 'Enforce License Compliance'
uses: getsentry/action-enforce-license-compliance@57ba820387a1a9315a46115ee276b2968da51f3d # main
with:
fossa_api_key: ${{ secrets.FOSSA_API_KEY }}

View File

@@ -8,7 +8,7 @@ jobs:
os: [macos-latest, windows-latest, ubuntu-latest]
steps:
- name: Checkout
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3
- name: Upload coverage to Codecov (script)
uses: ./
with:
@@ -16,7 +16,6 @@ jobs:
flags: script,${{ matrix.os }}
name: codecov-script
verbose: true
token: ${{ secrets.CODECOV_TOKEN }}
- name: Upload coverage to Codecov (demo)
uses: ./
with:
@@ -25,7 +24,6 @@ jobs:
flags: demo,${{ matrix.os }}
name: codecov-demo
verbose: true
token: ${{ secrets.CODECOV_TOKEN }}
- name: Upload coverage to Codecov (version)
uses: ./
with:
@@ -33,17 +31,16 @@ jobs:
file: ./coverage/coverage-final.json
flags: version,${{ matrix.os }}
name: codecov-version
version: v0.2.0
version: v0.1.0_8880
verbose: true
token: ${{ secrets.CODECOV_TOKEN }}
run:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [macos-latest, windows-latest, ubuntu-latest, macos-latest-xlarge]
os: [macos-latest, windows-latest, ubuntu-latest]
steps:
- name: Checkout
uses: actions/checkout@v4.1.1
uses: actions/checkout@v3
- name: Install dependencies
run: npm install
- name: Lint
@@ -57,7 +54,6 @@ jobs:
flags: script,${{ matrix.os }}
name: codecov-script
verbose: true
token: ${{ secrets.CODECOV_TOKEN }}
- name: Upload coverage to Codecov (demo)
uses: ./
with:
@@ -66,7 +62,6 @@ jobs:
flags: demo,${{ matrix.os }}
name: codecov-demo
verbose: true
token: ${{ secrets.CODECOV_TOKEN }}
- name: Upload coverage to Codecov (version)
uses: ./
with:
@@ -74,6 +69,5 @@ jobs:
file: ./coverage/coverage-final.json
flags: version,${{ matrix.os }}
name: codecov-version
version: v0.2.0
version: v0.1.0_8880
verbose: true
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -24,12 +24,12 @@ jobs:
steps:
- name: "Checkout code"
uses: actions/checkout@v4.1.1 # v3.0.0
uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # v3.0.0
with:
persist-credentials: false
- name: "Run analysis"
uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1
uses: ossf/scorecard-action@80e868c13c90f172d68d1f4501dee99e2479f7af # v2.1.3
with:
results_file: results.sarif
results_format: sarif
@@ -48,7 +48,7 @@ jobs:
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
# format to the repository Actions tab.
- name: "Upload artifact"
uses: actions/upload-artifact@26f96dfa697d77e81fd5907df203aa23a56210a8 # v4.3.0
uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2
with:
name: SARIF file
path: results.sarif
@@ -56,6 +56,6 @@ jobs:
# Upload the results to GitHub's code scanning dashboard.
- name: "Upload to code-scanning"
uses: github/codeql-action/upload-sarif@v3.23.2 # v1.0.26
uses: github/codeql-action/upload-sarif@5f532563584d71fdef14ee64d17bafb34f751ce5 # v1.0.26
with:
sarif_file: results.sarif

View File

@@ -1,16 +1,3 @@
## 4.0.0-beta.2
### Fixes
- #1085 not adding -n if empty to do-upload command
## 4.0.0-beta.1
`v4` represents a move from the [universal uploader](https://github.com/codecov/uploader) to the [Codecov CLI](https://github.com/codecov/codecov-cli). Although this will unlock new features for our users, the CLI is not yet at feature parity with the universal uploader.
### Breaking Changes
- No current support for `aarch64` and `alpine` architectures.
- Tokenless uploading is unsupported
- Various arguments to the Action have been removed
## 3.1.4
### Fixes
- #967 Fix typo in README.md

View File

@@ -1,7 +1,7 @@
deploy:
$(eval VERSION := $(shell cat package.json | grep '"version": ' | cut -d\" -f4))
git tag -d v4
git push origin :v4
git tag v4
git tag -d v3
git push origin :v3
git tag v3
git tag v$(VERSION) -s -m ""
git push origin --tags

View File

@@ -5,43 +5,42 @@
[![Workflow for Codecov Action](https://github.com/codecov/codecov-action/actions/workflows/main.yml/badge.svg)](https://github.com/codecov/codecov-action/actions/workflows/main.yml)
### Easily upload coverage reports to Codecov from GitHub Actions
## v4 Release
`v4` of the Codecov GitHub Action will use the [Codecov CLI](https://github.com/codecov/codecov-cli) to upload coverage reports to Codecov.
>The latest release of this Action adds support for tokenless uploads from GitHub Actions!
Breaking Changes
- Tokenless uploading is unsupported. However, PRs made from forks to the upstream public repos will support tokenless (e.g. contributors to OS projects do not need the upstream repo's Codecov token)
- Various arguments to the Action have been removed
## ⚠️ Deprecation of v1
**As of February 1, 2022, v1 has been fully sunset and no longer functions**
`v3` versions and below will not have access to CLI features (e.g. global upload token, ATS).
Due to the [deprecation](https://about.codecov.io/blog/introducing-codecovs-new-uploader/) of the underlying bash uploader,
the Codecov GitHub Action has released `v2`/`v3` which will use the new [uploader](https://github.com/codecov/uploader). You can learn
more about our deprecation plan and the new uploader on our [blog](https://about.codecov.io/blog/introducing-codecovs-new-uploader/).
We will be restricting any updates to the `v1` Action to security updates and hotfixes.
### Migration from `v1` to `v3`
The `v3` uploader has a few breaking changes for users
- Multiple fields have not been transferred from the bash uploader or have been deprecated. Notably
many of the `functionalities` and `gcov_` arguments have been removed. Please check the documentation
below for the full list.
## Usage
To integrate Codecov with your Actions pipeline, specify the name of this repository with a tag number (`@v4` is recommended) as a `step` within your `workflow.yml` file.
To integrate Codecov with your Actions pipeline, specify the name of this repository with a tag number (`@v3` is recommended) as a `step` within your `workflow.yml` file.
This Action also requires you to [provide an upload token](https://docs.codecov.io/docs/frequently-asked-questions#section-where-is-the-repository-upload-token-found-) from [codecov.io](https://www.codecov.io) (tip: in order to avoid exposing your token, [store it](https://docs.codecov.com/docs/adding-the-codecov-token#github-actions) as a `secret`).
Currently, the Action will identify linux, macos, and windows runners. However, the Action may misidentify other architectures. The OS can be specified as
- alpine
- alpine-arm64
- linux
- linux-arm64
- macos
- windows
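For illustration only, a hypothetical step that pins the OS to one of the values above (the `@v4` tag matches the list shown here; everything else is a placeholder):
```yaml
- uses: codecov/codecov-action@v4
  with:
    token: ${{ secrets.CODECOV_TOKEN }}
    os: linux-arm64   # override when the runner architecture is misidentified
```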
If you have a *private repository*, this Action also requires you to [provide an upload token](https://docs.codecov.io/docs/frequently-asked-questions#section-where-is-the-repository-upload-token-found-) from [codecov.io](https://www.codecov.io) (tip: in order to avoid exposing your token, store it as a `secret`). Optionally, you can choose to include up to four additional inputs to customize the upload context. **For public repositories, no token is needed**
Inside your `.github/workflows/workflow.yml` file:
```yaml
steps:
- uses: actions/checkout@master
- uses: codecov/codecov-action@v4
- uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./coverage1.xml,./coverage2.xml # optional
flags: unittests # optional
name: codecov-umbrella # optional
fail_ci_if_error: true # optional (default = false)
verbose: true # optional (default = false)
env:
token: ${{ secrets.CODECOV_TOKEN }}
```
>**Note**: This assumes that you've set your Codecov token inside *Settings > Secrets* as `CODECOV_TOKEN`. If not, you can [get an upload token](https://docs.codecov.io/docs/frequently-asked-questions#section-where-is-the-repository-upload-token-found-) for your specific repo on [codecov.io](https://www.codecov.io). Keep in mind that secrets are *not* available to forks of repositories.
@@ -117,8 +116,9 @@ jobs:
pip install pytest-cov
pytest --cov=./ --cov-report=xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v4
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: ./coverage/reports/
env_vars: OS,PYTHON
fail_ci_if_error: true
@@ -126,8 +126,6 @@ jobs:
flags: unittests
name: codecov-umbrella
verbose: true
env:
token: ${{ secrets.CODECOV_TOKEN }}
```
## Contributing

View File

@@ -11,32 +11,59 @@ inputs:
files:
description: 'Comma-separated list of files to upload'
required: false
commit_parent:
description: 'Override to specify the parent commit SHA'
required: false
directory:
description: 'Directory to search for coverage reports.'
required: false
flags:
description: 'Flag upload to group coverage metrics (e.g. unittests | integration | ui,chrome)'
required: false
full_report:
description: Specify the path of a full Codecov report to re-upload
required: false
commit_parent:
description: 'The commit SHA of the parent for which you are uploading coverage. If not present, the parent will be determined using the API of your repository provider. When using the repository providers API, the parent is determined via finding the closest ancestor to the commit.'
required: false
dry_run:
description: "Don't upload files to Codecov"
required: false
env_vars:
description: 'Environment variables to tag the upload with (e.g. PYTHON | OS,PYTHON)'
required: false
exclude:
description: 'Folders to exclude from search'
required: false
fail_ci_if_error:
description: 'Specify whether or not CI build should fail if Codecov runs into an error during upload'
required: false
flags:
description: 'Flag upload to group coverage metrics (e.g. unittests | integration | ui,chrome)'
functionalities:
description: 'Comma-separated list, see the README for options and their usage. Options include `network`, `fixes`, `search`.'
required: false
gcov:
description: 'Run with gcov support'
required: false
gcov_args:
description: 'Extra arguments to pass to gcov'
required: false
gcov_executable:
description: 'gcov executable to run. Defaults to gcov'
required: false
gcov_ignore:
description: 'Paths to ignore during gcov gathering'
required: false
gcov_include:
description: 'Paths to include during gcov gathering'
required: false
move_coverage_to_trash:
description: 'Move discovered coverage reports to the trash'
required: false
name:
description: 'User defined upload name. Visible in Codecov UI'
required: false
network_filter:
description: 'Specify a filter on the files listed in the network section of the Codecov report. Useful for upload-specific path fixing'
required: false
network_prefix:
description: 'Specify a prefix on files listed in the network section of the Codecov report. Useful to help resolve path fixing'
required: false
os:
description: 'Override the assumed OS. Options are linux | macos | windows.'
description: 'Override the assumed OS. Options are aarch64 | alpine | linux | macos | windows.'
required: false
override_branch:
description: 'Specify the branch name'
@@ -50,11 +77,8 @@ inputs:
override_pr:
description: 'Specify the pull request number'
required: false
plugin:
description: 'plugins to run. Options: xcode, gcov, pycoverage. The default behavior runs them all.'
required: false
plugins:
description: 'Comma-separated list of plugins for use during upload.'
override_tag:
description: 'Specify the git tag'
required: false
root_dir:
description: 'Used when not in git/hg project to identify project root directory'
@@ -62,21 +86,39 @@ inputs:
slug:
description: 'Specify the slug manually (Enterprise use)'
required: false
swift:
description: 'Run with swift coverage support'
required: false
swift_project:
description: 'Specify the swift project to speed up coverage conversion'
required: false
upstream_proxy:
description: 'The upstream http proxy server to connect through'
required: false
url:
description: 'Specify the base url to upload (Enterprise use)'
description: 'Change the upload host (Enterprise use)'
required: false
verbose:
description: 'Specify whether the Codecov output should be verbose'
required: false
version:
description: 'Specify which version of the Codecov CLI should be used. Defaults to `latest`'
description: 'Specify which version of the Codecov Uploader should be used. Defaults to `latest`'
required: false
working-directory:
description: 'Directory in which to execute codecov.sh'
required: false
xcode:
description: 'Run with xcode support'
required: false
xcode_archive_path:
description: 'Specify the xcode archive path. Likely specified as the -resultBundlePath and should end in .xcresult'
required: false
xtra_args:
description: 'Add additional uploader args that may be missing in the Action'
required: false
branding:
color: 'red'
icon: 'umbrella'
runs:
using: 'node20'
using: 'node16'
main: 'dist/index.js'
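
As a quick orientation to the inputs above, here is a hypothetical workflow step combining a few of them (report paths, flag names, and the upload name are placeholders):

```yaml
- name: Upload coverage to Codecov
  uses: codecov/codecov-action@v3
  with:
    token: ${{ secrets.CODECOV_TOKEN }}    # required for private repositories
    files: ./coverage/coverage-final.json  # comma-separated list of reports
    flags: unittests,${{ runner.os }}      # group coverage metrics per flag
    name: codecov-umbrella                 # upload name shown in the Codecov UI
    fail_ci_if_error: true
    verbose: true
```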

dist/index.js (vendored, 45211 changed lines)
File diff suppressed because one or more lines are too long

dist/index.js.map (vendored, 2 changed lines)
File diff suppressed because one or more lines are too long

package-lock.json (generated, 11453 changed lines)
File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{
"name": "codecov-action",
"version": "4.0.0",
"version": "3.1.6",
"description": "Upload coverage reports to Codecov from GitHub Actions",
"main": "index.js",
"scripts": {
@@ -23,22 +23,23 @@
},
"homepage": "https://github.com/codecov/codecov-action#readme",
"dependencies": {
"@actions/core": "^1.10.1",
"@actions/core": "^1.10.0",
"@actions/exec": "^1.1.1",
"@actions/github": "^6.0.0",
"gpg": "^0.6.0",
"undici": "5.28.2"
"@actions/github": "^5.1.1",
"node-fetch": "^3.3.1",
"openpgp": "5.8"
},
"devDependencies": {
"@types/jest": "^29.5.11",
"@typescript-eslint/eslint-plugin": "^6.20.0",
"@typescript-eslint/parser": "^6.20.0",
"@vercel/ncc": "^0.38.1",
"eslint": "^8.56.0",
"@types/jest": "^27.5.0",
"@types/node": "^20.1.4",
"@typescript-eslint/eslint-plugin": "^4.29.2",
"@typescript-eslint/parser": "^4.29.2",
"@vercel/ncc": "^0.36.1",
"eslint": "^7.32.0",
"eslint-config-google": "^0.14.0",
"jest": "^29.7.0",
"jest": "^26.6.3",
"jest-junit": "^16.0.0",
"ts-jest": "^29.1.2",
"typescript": "^5.3.3"
"ts-jest": "^26.5.6",
"typescript": "^4.9.5"
}
}

View File

@@ -1,55 +1,31 @@
import * as github from '@actions/github';
import {
buildCommitExec,
buildGeneralExec,
buildReportExec,
buildUploadExec,
} from './buildExec';
import buildExec from './buildExec';
/* eslint-disable @typescript-eslint/no-var-requires */
const {version} = require('../package.json');
const context = github.context;
test('general args', () => {
const envs = {
url: 'https://codecov.enterprise.com',
verbose: 't',
};
for (const env of Object.keys(envs)) {
process.env['INPUT_' + env.toUpperCase()] = envs[env];
}
test('no arguments', () => {
const {execArgs, failCi} = buildExec();
const {args, verbose} = buildGeneralExec();
expect(args).toEqual(
expect.arrayContaining([
'--enterprise-url',
'https://codecov.enterprise.com',
'-v',
]));
expect(verbose).toBeTruthy();
for (const env of Object.keys(envs)) {
delete process.env['INPUT_' + env.toUpperCase()];
}
});
test('upload args using context', () => {
const expectedArgs = [];
const {uploadExecArgs, uploadCommand} = buildUploadExec();
const args = [
'-n',
'',
'-Q',
`github-action-${version}`,
];
if (context.eventName == 'pull_request') {
expectedArgs.push('-C', `${context.payload.pull_request.head.sha}`);
args.push('-C', `${context.payload.pull_request.head.sha}`);
}
if (context.eventName == 'pull_request_target') {
expectedArgs.push('-P', `${context.payload.number}`);
}
expect(uploadExecArgs).toEqual(expectedArgs);
expect(uploadCommand).toEqual('do-upload');
expect(execArgs).toEqual(args);
expect(failCi).toBeFalsy();
});
test('upload args', () => {
test('all arguments', () => {
const envs = {
'commit_parent': '83231650328f11695dfb754ca0f540516f188d27',
'directory': 'coverage/',
'dry_run': 'true',
'env_vars': 'OS,PYTHON',
@@ -57,30 +33,53 @@ test('upload args', () => {
'file': 'coverage.xml',
'files': 'dir1/coverage.xml,dir2/coverage.xml',
'flags': 'test,test2',
'functionalities': 'network',
'full_report': 'oldDir/oldReport.json',
'gcov': 'true',
'gcov_args': '-v',
'gcov_ignore': '*.fake',
'gcov_include': 'real_file',
'gcov_executable': 'gcov2',
'move_coverage_to_trash': 'true',
'name': 'codecov',
'network_filter': 'src/',
'network_prefix': 'build/',
'override_branch': 'thomasrockhu/test',
'override_build': '1',
'override_commit': '9caabca5474b49de74ef5667deabaf74cdacc244',
'override_pr': '2',
'plugins': 'pycoverage,compress-pycoverage',
'override_tag': 'v1.2',
'root_dir': 'root/',
'swift': 'true',
'swift_project': 'MyApp',
'slug': 'fakeOwner/fakeRepo',
'token': 'd3859757-ab80-4664-924d-aef22fa7557b',
'working-directory': 'src',
'plugin': 'xcode',
'exclude': 'src',
'upstream_proxy': 'https://codecov.example.com',
'url': 'https://codecov.enterprise.com',
'verbose': 't',
'xcode': 'true',
'xcode_archive_path': '/test.xcresult',
'xtra_args': '--some --other --args',
};
for (const env of Object.keys(envs)) {
process.env['INPUT_' + env.toUpperCase()] = envs[env];
}
const {uploadExecArgs, uploadCommand} = buildUploadExec();
const expectedArgs = [
const {execArgs, failCi} = buildExec();
expect(execArgs).toEqual([
'-n',
'codecov',
'-Q',
`github-action-${version}`,
'-c',
'-N',
'83231650328f11695dfb754ca0f540516f188d27',
'-d',
'-e',
'OS,PYTHON',
'-X',
'network',
'-Z',
'-f',
'coverage.xml',
@@ -88,10 +87,25 @@ test('upload args', () => {
'dir1/coverage.xml',
'-f',
'dir2/coverage.xml',
'--full',
'oldDir/oldReport.json',
'-F',
'test',
'-F',
'test2',
'-g',
'--ga',
'-v',
'--gi',
'*.fake',
'--gI',
'real_file',
'--gx',
'gcov2',
'-i',
'src/',
'-k',
'build/',
'-B',
'thomasrockhu/test',
'-b',
@@ -100,123 +114,111 @@ test('upload args', () => {
'9caabca5474b49de74ef5667deabaf74cdacc244',
'-P',
'2',
'--plugin',
'pycoverage',
'--plugin',
'compress-pycoverage',
'--network-root-folder',
'-T',
'v1.2',
'-R',
'root/',
'-s',
'coverage/',
'-r',
'fakeOwner/fakeRepo',
'--plugin',
'xcode',
'--exclude',
'src',
'--xs',
'--xsp',
'MyApp',
'-U',
'https://codecov.example.com',
'-u',
'https://codecov.enterprise.com',
'-v',
'--xc',
'--xp',
'/test.xcresult',
'--some --other --args',
]);
expect(failCi).toBeTruthy();
for (const env of Object.keys(envs)) {
delete process.env['INPUT_' + env.toUpperCase()];
}
});
describe('trim arguments after splitting them', () => {
const baseExpectation = [
'-n',
expect.stringContaining(''),
'-Q',
expect.stringContaining('github-action'),
];
expect(uploadExecArgs).toEqual(expectedArgs);
expect(uploadCommand).toEqual('do-upload');
for (const env of Object.keys(envs)) {
delete process.env['INPUT_' + env.toUpperCase()];
}
});
test('files', () => {
const envs = {'files': './client-coverage.txt, ./lcov.info'};
test('report args', () => {
const envs = {
override_commit: '9caabca5474b49de74ef5667deabaf74cdacc244',
slug: 'fakeOwner/fakeRepo',
token: 'd3859757-ab80-4664-924d-aef22fa7557b',
};
for (const env of Object.keys(envs)) {
process.env['INPUT_' + env.toUpperCase()] = envs[env];
for (const [name, value] of Object.entries(envs)) {
process.env['INPUT_' + name.toUpperCase()] = value;
}
const {reportExecArgs, reportCommand} = buildReportExec();
const {execArgs} = buildExec();
expect(reportExecArgs).toEqual(
expect(execArgs).toEqual(
expect.arrayContaining([
'-C',
'9caabca5474b49de74ef5667deabaf74cdacc244',
'--slug',
'fakeOwner/fakeRepo',
]));
expect(reportCommand).toEqual('create-report');
...baseExpectation,
'-f',
'./client-coverage.txt',
'-f',
'./lcov.info',
]),
);
for (const env of Object.keys(envs)) {
delete process.env['INPUT_' + env.toUpperCase()];
}
});
test('flags', () => {
const envs = {'flags': 'ios, mobile'};
test('report args using context', () => {
const envs = {
token: 'd3859757-ab80-4664-924d-aef22fa7557b',
};
for (const env of Object.keys(envs)) {
process.env['INPUT_' + env.toUpperCase()] = envs[env];
}
const expectedArgs : string[] = [];
if (context.eventName == 'pull_request') {
expectedArgs.push('-C', `${context.payload.pull_request.head.sha}`);
for (const [name, value] of Object.entries(envs)) {
process.env['INPUT_' + name.toUpperCase()] = value;
}
const {reportExecArgs, reportCommand} = buildReportExec();
const {execArgs} = buildExec();
expect(reportExecArgs).toEqual(expectedArgs);
expect(reportCommand).toEqual('create-report');
for (const env of Object.keys(envs)) {
delete process.env['INPUT_' + env.toUpperCase()];
}
});
test('commit args', () => {
const envs = {
override_commit: '9caabca5474b49de74ef5667deabaf74cdacc244',
slug: 'fakeOwner/fakeRepo',
token: 'd3859757-ab80-4664-924d-aef22fa7557b',
override_branch: 'thomasrockhu/test',
override_pr: '2',
commit_parent: '83231650328f11695dfb754ca0f540516f188d27',
};
for (const env of Object.keys(envs)) {
process.env['INPUT_' + env.toUpperCase()] = envs[env];
}
const {commitExecArgs, commitCommand} = buildCommitExec();
expect(commitExecArgs).toEqual(
expect(execArgs).toEqual(
expect.arrayContaining([
'-C',
'9caabca5474b49de74ef5667deabaf74cdacc244',
'--slug',
'fakeOwner/fakeRepo',
'-B',
'thomasrockhu/test',
'--pr',
'2',
'--parent-sha',
'83231650328f11695dfb754ca0f540516f188d27',
]));
expect(commitCommand).toEqual('create-commit');
...baseExpectation,
'-F',
'ios',
'-F',
'mobile',
]),
);
for (const env of Object.keys(envs)) {
delete process.env['INPUT_' + env.toUpperCase()];
}
});
test('commit args using context', () => {
const expectedArgs :string[] = [];
test('functionalities', () => {
const envs = {'functionalities': 'network, gcov'};
const {commitExecArgs, commitCommand} = buildCommitExec();
if (context.eventName == 'pull_request') {
expectedArgs.push('-C', `${context.payload.pull_request.head.sha}`);
}
if (context.eventName == 'pull_request_target') {
expectedArgs.push('-P', `${context.payload.number}`);
for (const [name, value] of Object.entries(envs)) {
process.env['INPUT_' + name.toUpperCase()] = value;
}
expect(commitExecArgs).toEqual(expectedArgs);
expect(commitCommand).toEqual('create-commit');
const {execArgs} = buildExec();
expect(execArgs).toEqual(
expect.arrayContaining([
...baseExpectation,
'-X',
'network',
'-X',
'gcov',
]),
);
for (const env of Object.keys(envs)) {
delete process.env['INPUT_' + env.toUpperCase()];
}
});
});
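
These tests never run a workflow; they simulate action inputs by setting `INPUT_<NAME>` environment variables, which is the form `@actions/core`'s `getInput` reads for values a workflow passes under `with:`. A sketch of the correspondence the assertions rely on (the step shown is illustrative):

```yaml
# A workflow entry such as:
- uses: codecov/codecov-action@v3
  with:
    flags: ios, mobile
# reaches the action process as the environment variable
#   INPUT_FLAGS="ios, mobile"
# which is why the tests set process.env['INPUT_' + name.toUpperCase()]
# directly and then expect the trimmed values '-F ios -F mobile' in execArgs.
```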

View File

@@ -1,8 +1,7 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import * as core from '@actions/core';
import * as github from '@actions/github';
import {version} from '../package.json';
const context = github.context;
@@ -17,142 +16,56 @@ const isTrue = (variable) => {
);
};
const buildCommitExec = () => {
const buildExec = () => {
const clean = core.getInput('move_coverage_to_trash');
const commitParent = core.getInput('commit_parent');
const overrideBranch = core.getInput('override_branch');
const overrideCommit = core.getInput('override_commit');
const overridePr = core.getInput('override_pr');
const slug = core.getInput('slug');
const token = core.getInput('token');
const commitCommand = 'create-commit';
const commitExecArgs = [];
const commitOptions:any = {};
commitOptions.env = Object.assign(process.env, {
GITHUB_ACTION: process.env.GITHUB_ACTION,
GITHUB_RUN_ID: process.env.GITHUB_RUN_ID,
GITHUB_REF: process.env.GITHUB_REF,
GITHUB_REPOSITORY: process.env.GITHUB_REPOSITORY,
GITHUB_SHA: process.env.GITHUB_SHA,
GITHUB_HEAD_REF: process.env.GITHUB_HEAD_REF || '',
});
if (token) {
commitOptions.env.CODECOV_TOKEN = token;
}
if (commitParent) {
commitExecArgs.push('--parent-sha', `${commitParent}`);
}
if (overrideBranch) {
commitExecArgs.push('-B', `${overrideBranch}`);
}
if (overrideCommit) {
commitExecArgs.push('-C', `${overrideCommit}`);
} else if (
`${context.eventName}` == 'pull_request' ||
`${context.eventName}` == 'pull_request_target'
) {
commitExecArgs.push('-C', `${context.payload.pull_request.head.sha}`);
}
if (overridePr) {
commitExecArgs.push('--pr', `${overridePr}`);
} else if (
`${context.eventName}` == 'pull_request_target'
) {
commitExecArgs.push('--pr', `${context.payload.number}`);
}
if (slug) {
commitExecArgs.push('--slug', `${slug}`);
}
return {commitExecArgs, commitOptions, commitCommand};
};
const buildGeneralExec = () => {
const url = core.getInput('url');
const verbose = isTrue(core.getInput('verbose'));
const args = [];
if (url) {
args.push('--enterprise-url', `${url}`);
}
if (verbose) {
args.push('-v');
}
return {args, verbose};
};
const buildReportExec = () => {
const overrideCommit = core.getInput('override_commit');
const slug = core.getInput('slug');
const token = core.getInput('token');
const reportCommand = 'create-report';
const reportExecArgs = [];
const reportOptions:any = {};
reportOptions.env = Object.assign(process.env, {
GITHUB_ACTION: process.env.GITHUB_ACTION,
GITHUB_RUN_ID: process.env.GITHUB_RUN_ID,
GITHUB_REF: process.env.GITHUB_REF,
GITHUB_REPOSITORY: process.env.GITHUB_REPOSITORY,
GITHUB_SHA: process.env.GITHUB_SHA,
GITHUB_HEAD_REF: process.env.GITHUB_HEAD_REF || '',
});
if (token) {
reportOptions.env.CODECOV_TOKEN = token;
}
if (overrideCommit) {
reportExecArgs.push('-C', `${overrideCommit}`);
} else if (
`${context.eventName}` == 'pull_request' ||
`${context.eventName}` == 'pull_request_target'
) {
reportExecArgs.push('-C', `${context.payload.pull_request.head.sha}`);
}
if (slug) {
reportExecArgs.push('--slug', `${slug}`);
}
return {reportExecArgs, reportOptions, reportCommand};
};
const buildUploadExec = () => {
const envVars = core.getInput('env_vars');
const dryRun = isTrue(core.getInput('dry_run'));
const envVars = core.getInput('env_vars');
const failCi = isTrue(core.getInput('fail_ci_if_error'));
const file = core.getInput('file');
const files = core.getInput('files');
const flags = core.getInput('flags');
const fullReport = core.getInput('full_report');
const functionalities = core.getInput('functionalities');
const gcov = core.getInput('gcov');
const gcovArgs = core.getInput('gcov_args');
const gcovExecutable = core.getInput('gcov_executable');
const gcovIgnore = core.getInput('gcov_ignore');
const gcovInclude = core.getInput('gcov_include');
const name = core.getInput('name');
const networkFilter = core.getInput('network_filter');
const networkPrefix = core.getInput('network_prefix');
const os = core.getInput('os');
const overrideBranch = core.getInput('override_branch');
const overrideBuild = core.getInput('override_build');
const overrideCommit = core.getInput('override_commit');
const overridePr = core.getInput('override_pr');
const plugins = core.getInput('plugins');
const overrideTag = core.getInput('override_tag');
const rootDir = core.getInput('root_dir');
const searchDir = core.getInput('directory');
const slug = core.getInput('slug');
const swift = core.getInput('swift');
const swiftProject = core.getInput('swift_project');
const token = core.getInput('token');
let uploaderVersion = core.getInput('version');
const upstream = core.getInput('upstream_proxy');
const url = core.getInput('url');
const verbose = isTrue(core.getInput('verbose'));
const workingDir = core.getInput('working-directory');
const plugin = core.getInput('plugin');
const exclude = core.getInput('exclude');
const xcode = core.getInput('xcode');
const xcodeArchivePath = core.getInput('xcode_archive_path');
const xtraArgs = core.getInput('xtra_args');
let uploaderVersion = core.getInput('version');
const uploadExecArgs = [];
const uploadCommand = 'do-upload';
const uploadOptions:any = {};
uploadOptions.env = Object.assign(process.env, {
const execArgs = [];
execArgs.push(
'-n',
`${name}`,
'-Q',
`github-action-${version}`,
);
const options:any = {};
options.env = Object.assign(process.env, {
GITHUB_ACTION: process.env.GITHUB_ACTION,
GITHUB_RUN_ID: process.env.GITHUB_RUN_ID,
GITHUB_REF: process.env.GITHUB_REF,
@@ -165,104 +78,144 @@ const buildUploadExec = () => {
for (const envVar of envVars.split(',')) {
const envVarClean = envVar.trim();
if (envVarClean) {
uploadOptions.env[envVarClean] = process.env[envVarClean];
options.env[envVarClean] = process.env[envVarClean];
envVarsArg.push(envVarClean);
}
}
if (name) {
uploadExecArgs.push(
'-n',
`${name}`,
);
}
if (token) {
uploadOptions.env.CODECOV_TOKEN = token;
options.env.CODECOV_TOKEN = token;
}
if (clean) {
execArgs.push('-c');
}
if (commitParent) {
execArgs.push('-N', `${commitParent}`);
}
if (dryRun) {
uploadExecArgs.push('-d');
execArgs.push('-d');
}
if (envVarsArg.length) {
uploadExecArgs.push('-e', envVarsArg.join(','));
execArgs.push('-e', envVarsArg.join(','));
}
if (functionalities) {
functionalities.split(',').map((f) => f.trim()).forEach((f) => {
execArgs.push('-X', `${f}`);
});
}
if (failCi) {
uploadExecArgs.push('-Z');
execArgs.push('-Z');
}
if (file) {
uploadExecArgs.push('-f', `${file}`);
execArgs.push('-f', `${file}`);
}
if (files) {
files.split(',').map((f) => f.trim()).forEach((f) => {
uploadExecArgs.push('-f', `${f}`);
execArgs.push('-f', `${f}`);
});
}
if (fullReport) {
execArgs.push('--full', `${fullReport}`);
}
if (flags) {
flags.split(',').map((f) => f.trim()).forEach((f) => {
uploadExecArgs.push('-F', `${f}`);
execArgs.push('-F', `${f}`);
});
}
if (gcov) {
execArgs.push('-g');
}
if (gcovArgs) {
execArgs.push('--ga', `${gcovArgs}`);
}
if (gcovIgnore) {
execArgs.push('--gi', `${gcovIgnore}`);
}
if (gcovInclude) {
execArgs.push('--gI', `${gcovInclude}`);
}
if (gcovExecutable) {
execArgs.push('--gx', `${gcovExecutable}`);
}
if (networkFilter) {
execArgs.push('-i', `${networkFilter}`);
}
if (networkPrefix) {
execArgs.push('-k', `${networkPrefix}`);
}
if (overrideBranch) {
uploadExecArgs.push('-B', `${overrideBranch}`);
execArgs.push('-B', `${overrideBranch}`);
}
if (overrideBuild) {
uploadExecArgs.push('-b', `${overrideBuild}`);
execArgs.push('-b', `${overrideBuild}`);
}
if (overrideCommit) {
uploadExecArgs.push('-C', `${overrideCommit}`);
execArgs.push('-C', `${overrideCommit}`);
} else if (
`${context.eventName}` == 'pull_request' ||
`${context.eventName}` == 'pull_request_target'
) {
uploadExecArgs.push('-C', `${context.payload.pull_request.head.sha}`);
execArgs.push('-C', `${context.payload.pull_request.head.sha}`);
}
if (overridePr) {
uploadExecArgs.push('-P', `${overridePr}`);
execArgs.push('-P', `${overridePr}`);
} else if (
`${context.eventName}` == 'pull_request_target'
) {
uploadExecArgs.push('-P', `${context.payload.number}`);
execArgs.push('-P', `${context.payload.number}`);
}
if (plugins) {
plugins.split(',').map((p) => p.trim()).forEach((p) => {
uploadExecArgs.push('--plugin', `${p}`);
});
if (overrideTag) {
execArgs.push('-T', `${overrideTag}`);
}
if (rootDir) {
uploadExecArgs.push('--network-root-folder', `${rootDir}`);
execArgs.push('-R', `${rootDir}`);
}
if (searchDir) {
uploadExecArgs.push('-s', `${searchDir}`);
execArgs.push('-s', `${searchDir}`);
}
if (slug) {
uploadExecArgs.push('-r', `${slug}`);
execArgs.push('-r', `${slug}`);
}
if (workingDir) {
uploadOptions.cwd = workingDir;
if (swift) {
execArgs.push('--xs');
}
if (plugin) {
uploadExecArgs.push('--plugin', `${plugin}`);
if (swift && swiftProject) {
execArgs.push('--xsp', `${swiftProject}`);
}
if (exclude) {
uploadExecArgs.push('--exclude', `${exclude}`);
if (upstream) {
execArgs.push('-U', `${upstream}`);
}
if (url) {
execArgs.push('-u', `${url}`);
}
if (verbose) {
execArgs.push('-v');
}
if (xcode && xcodeArchivePath) {
execArgs.push('--xc');
execArgs.push('--xp', `${xcodeArchivePath}`);
}
if (uploaderVersion == '') {
uploaderVersion = 'latest';
}
return {
uploadExecArgs,
uploadOptions,
failCi,
os,
uploaderVersion,
uploadCommand,
};
if (verbose) {
console.debug({execArgs});
}
if (workingDir) {
options.cwd = workingDir;
}
if (xtraArgs) {
execArgs.push(`${xtraArgs}`);
}
return {execArgs, options, failCi, os, uploaderVersion, verbose};
};
export {
buildCommitExec,
buildGeneralExec,
buildReportExec,
buildUploadExec,
};
export default buildExec;

View File

@@ -4,7 +4,6 @@ import {
isValidPlatform,
isWindows,
PLATFORMS,
getCommand,
} from './helpers';
let OLDOS = process.env.RUNNER_OS;
@@ -40,41 +39,34 @@ test('getBaseUrl', () => {
expect(PLATFORMS.map((platform) => {
return getBaseUrl(platform, 'latest');
})).toEqual([
'https://cli.codecov.io/latest/linux/codecov',
'https://cli.codecov.io/latest/macos/codecov',
'https://cli.codecov.io/latest/windows/codecov.exe',
'https://cli.codecov.io/latest/alpine/codecov',
'https://cli.codecov.io/latest/linux-arm64/codecov',
'https://cli.codecov.io/latest/alpine-arm64/codecov',
'https://uploader.codecov.io/latest/aarch64/codecov',
'https://uploader.codecov.io/latest/alpine/codecov',
'https://uploader.codecov.io/latest/linux/codecov',
'https://uploader.codecov.io/latest/macos/codecov',
'https://uploader.codecov.io/latest/windows/codecov.exe',
]);
expect(PLATFORMS.map((platform) => {
return getBaseUrl(platform, 'v0.1.0_8880');
})).toEqual([
'https://cli.codecov.io/v0.1.0_8880/linux/codecov',
'https://cli.codecov.io/v0.1.0_8880/macos/codecov',
'https://cli.codecov.io/v0.1.0_8880/windows/codecov.exe',
'https://cli.codecov.io/v0.1.0_8880/alpine/codecov',
'https://cli.codecov.io/v0.1.0_8880/linux-arm64/codecov',
'https://cli.codecov.io/v0.1.0_8880/alpine-arm64/codecov',
'https://uploader.codecov.io/v0.1.0_8880/aarch64/codecov',
'https://uploader.codecov.io/v0.1.0_8880/alpine/codecov',
'https://uploader.codecov.io/v0.1.0_8880/linux/codecov',
'https://uploader.codecov.io/v0.1.0_8880/macos/codecov',
'https://uploader.codecov.io/v0.1.0_8880/windows/codecov.exe',
]);
});
test('isWindows', () => {
expect(PLATFORMS.map((platform) => {
return isWindows(platform);
})).toEqual([false, false, true, false, false, false]);
})).toEqual([false, false, false, false, true]);
});
test('isValidPlatform', () => {
expect(PLATFORMS.map((platform) => {
return isValidPlatform(platform);
})).toEqual([true, true, true, true, true, true]);
})).toEqual([true, true, true, true, true]);
expect(isValidPlatform('fakeos')).toBeFalsy();
});
test('getCommand', () => {
expect(getCommand('path', ['-v', '-x'], 'do-upload'))
.toEqual(['path', '-v', '-x', 'do-upload']);
});

View File

@@ -1,12 +1,11 @@
import * as core from '@actions/core';
const PLATFORMS = [
'aarch64',
'alpine',
'linux',
'macos',
'windows',
'alpine',
'linux-arm64',
'alpine-arm64',
];
const setFailure = (message: string, failCi: boolean): void => {
@@ -51,17 +50,7 @@ const getPlatform = (os?: string): string => {
};
const getBaseUrl = (platform: string, version: string): string => {
return `https://cli.codecov.io/${version}/${platform}/${getUploaderName(platform)}`;
};
const getCommand = (
filename: string,
generalArgs:string[],
command: string,
): string[] => {
const fullCommand = [filename, ...generalArgs, command];
core.info(`==> Running command '${fullCommand.join(' ')}'`);
return fullCommand;
return `https://uploader.codecov.io/${version}/${platform}/${getUploaderName(platform)}`;
};
export {
@@ -72,5 +61,4 @@ export {
isValidPlatform,
isWindows,
setFailure,
getCommand,
};

View File

@@ -4,18 +4,12 @@ import * as path from 'path';
import * as exec from '@actions/exec';
import {
buildCommitExec,
buildGeneralExec,
buildReportExec,
buildUploadExec,
} from './buildExec';
import buildExec from './buildExec';
import {
getBaseUrl,
getPlatform,
getUploaderName,
setFailure,
getCommand,
} from './helpers';
import verify from './validate';
@@ -24,18 +18,7 @@ import versionInfo from './version';
let failCi;
try {
const {commitExecArgs, commitOptions, commitCommand} = buildCommitExec();
const {reportExecArgs, reportOptions, reportCommand} = buildReportExec();
const {
uploadExecArgs,
uploadOptions,
failCi,
os,
uploaderVersion,
uploadCommand,
} = buildUploadExec();
const {args, verbose} = buildGeneralExec();
const {execArgs, options, failCi, os, uploaderVersion, verbose} = buildExec();
const platform = getPlatform(os);
const filename = path.join( __dirname, getUploaderName(platform));
@@ -66,52 +49,14 @@ try {
}
});
};
const doUpload = async () => {
await exec.exec(getCommand(filename, args, uploadCommand).join(' '),
uploadExecArgs,
uploadOptions)
await exec.exec(filename, execArgs, options)
.catch((err) => {
setFailure(
`Codecov:
Failed to properly upload report: ${err.message}`,
`Codecov: Failed to properly upload: ${err.message}`,
failCi,
);
});
};
const createReport = async () => {
await exec.exec(
getCommand(filename, args, reportCommand).join(' '),
reportExecArgs,
reportOptions)
.then(async (exitCode) => {
if (exitCode == 0) {
await doUpload();
}
}).catch((err) => {
setFailure(
`Codecov:
Failed to properly create report: ${err.message}`,
failCi,
);
});
};
await exec.exec(
getCommand(
filename,
args,
commitCommand,
).join(' '),
commitExecArgs, commitOptions)
.then(async (exitCode) => {
if (exitCode == 0) {
await createReport();
}
}).then(() => {
unlink();
}).catch((err) => {
setFailure(
`Codecov: Failed to properly create commit: ${err.message}`,
failCi,
);
});
});
});

View File

@@ -1,10 +1,10 @@
import * as crypto from 'crypto';
import * as fs from 'fs';
import * as gpg from 'gpg';
import * as path from 'path';
import * as core from '@actions/core';
import {request} from 'undici';
import * as openpgp from 'openpgp';
import * as fetch from 'node-fetch';
import {
getBaseUrl,
@@ -22,33 +22,45 @@ const verify = async (
try {
const uploaderName = getUploaderName(platform);
// Read in public key
const publicKeyArmored = await fs.readFileSync(
path.join(__dirname, 'pgp_keys.asc'),
'utf-8',
);
// Get SHASUM and SHASUM signature files
console.log(`${getBaseUrl(platform, version)}.SHA256SUM`);
const shasumRes = await request(
const shasumRes = await fetch.default(
`${getBaseUrl(platform, version)}.SHA256SUM`,
);
const shasum = await shasumRes.body.text();
const shasum = await shasumRes.text();
if (verbose) {
console.log(`Received SHA256SUM ${shasum}`);
}
await fs.writeFileSync(
path.join(__dirname, `${uploaderName}.SHA256SUM`),
shasum,
);
const shaSigRes = await request(
const shaSigRes = await fetch.default(
`${getBaseUrl(platform, version)}.SHA256SUM.sig`,
);
const shaSig = await shaSigRes.body.text();
const shaSig = await shaSigRes.text();
if (verbose) {
console.log(`Received SHA256SUM signature ${shaSig}`);
}
await fs.writeFileSync(
path.join(__dirname, `${uploaderName}.SHA256SUM.sig`),
shaSig,
);
const validateSha = async () => {
// Verify shasum
const verified = await openpgp.verify({
message: await openpgp.createMessage({text: shasum}),
signature: await openpgp.readSignature({armoredSignature: shaSig}),
verificationKeys: await openpgp.readKeys({armoredKeys: publicKeyArmored}),
});
const valid = await verified.signatures[0].verified;
if (valid) {
core.info('==> SHASUM file signed by key id ' +
verified.signatures[0].keyID.toHex(),
);
} else {
setFailure('Codecov: Error validating SHASUM signature', failCi);
}
const calculateHash = async (filename: string) => {
const stream = fs.createReadStream(filename);
const uploaderSha = crypto.createHash(`sha256`);
@@ -62,9 +74,7 @@ const verify = async (
});
};
const hash = await calculateHash(
path.join(__dirname, `${uploaderName}`),
);
const hash = await calculateHash(filename);
if (hash === shasum) {
core.info(`==> Uploader SHASUM verified (${hash})`);
} else {
@@ -74,38 +84,6 @@ const verify = async (
failCi,
);
}
};
const verifySignature = () => {
gpg.call('', [
'--logger-fd',
'1',
'--verify',
path.join(__dirname, `${uploaderName}.SHA256SUM.sig`),
path.join(__dirname, `${uploaderName}.SHA256SUM`),
], async (err, verifyResult) => {
if (err) {
setFailure('Codecov: Error importing pgp key', failCi);
}
core.info(verifyResult);
await validateSha();
});
};
// Import gpg key
gpg.call('', [
'--logger-fd',
'1',
'--no-default-keyring',
'--import',
path.join(__dirname, 'pgp_keys.asc'),
], async (err, importResult) => {
if (err) {
setFailure('Codecov: Error importing pgp key', failCi);
}
core.info(importResult);
verifySignature();
});
} catch (err) {
setFailure(`Codecov: Error validating uploader: ${err.message}`, failCi);
}

View File

@@ -1,5 +1,5 @@
import * as core from '@actions/core';
import {request} from 'undici';
import * as fetch from 'node-fetch';
const versionInfo = async (
platform: string,
@@ -10,10 +10,10 @@ const versionInfo = async (
}
try {
const metadataRes = await request(`https://cli.codecov.io/${platform}/latest`, {
const metadataRes = await fetch.default( `https://uploader.codecov.io/${platform}/latest`, {
headers: {'Accept': 'application/json'},
});
const metadata = await metadataRes.body.json();
const metadata = await metadataRes.json();
core.info(`==> Running version ${metadata['version']}`);
} catch (err) {
core.info(`Could not pull latest version information: ${err}`);