mirror of
https://github.com/codecov/codecov-action.git
synced 2025-12-08 16:16:24 +00:00
Compare commits
150 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0565863a31 | ||
|
|
c545d7bd45 | ||
|
|
2488e996fa | ||
|
|
a46c158416 | ||
|
|
062ee7e02e | ||
|
|
1fecca8ce3 | ||
|
|
2e6e9c5a74 | ||
|
|
a5dc5a573e | ||
|
|
4898080f15 | ||
|
|
5efa07bc17 | ||
|
|
04351de6bb | ||
|
|
61d31d2d54 | ||
|
|
2d2cd3c094 | ||
|
|
13ce06bfc6 | ||
|
|
3e26040980 | ||
|
|
0da7aa657d | ||
|
|
1ca7ce65df | ||
|
|
65baa5fcb6 | ||
|
|
5a605bd927 | ||
|
|
5825942583 | ||
|
|
b1a63834ce | ||
|
|
6c5b693a58 | ||
|
|
ad45165bd4 | ||
|
|
79ee03789c | ||
|
|
25fe46e17d | ||
|
|
54a0566d1c | ||
|
|
adfacf2d27 | ||
|
|
1e68e06f1d | ||
|
|
277db52c20 | ||
|
|
c77245a013 | ||
|
|
9b01a34191 | ||
|
|
47e0552fc4 | ||
|
|
34ef57048d | ||
|
|
d93fc22ce0 | ||
|
|
5c93f7ab87 | ||
|
|
2c97106b3a | ||
|
|
7f8b4b4bde | ||
|
|
c2fcb216de | ||
|
|
0192401724 | ||
|
|
cda5e552ba | ||
|
|
2ed5abcb9f | ||
|
|
015f24e681 | ||
|
|
a76271d959 | ||
|
|
d168679d44 | ||
|
|
373fd71a64 | ||
|
|
288befbd10 | ||
|
|
7e69d37f7e | ||
|
|
985343d705 | ||
|
|
31d1900980 | ||
|
|
095cfe09c6 | ||
|
|
b542d5a35c | ||
|
|
05f5a9cfad | ||
|
|
28b3165f92 | ||
|
|
e8603fa307 | ||
|
|
9c78078f59 | ||
|
|
8754828e79 | ||
|
|
fb6775daf7 | ||
|
|
21f471b090 | ||
|
|
5031bf1562 | ||
|
|
5c47607acb | ||
|
|
3b1354a6c4 | ||
|
|
2e2a9c6d58 | ||
|
|
cfc521b7a1 | ||
|
|
06425412c8 | ||
|
|
968872560f | ||
|
|
2112eaec1b | ||
|
|
193421c5b3 | ||
|
|
6018df70b0 | ||
|
|
eff1a643d6 | ||
|
|
4582d54fd3 | ||
|
|
bb7467c2bc | ||
|
|
1d6059880c | ||
|
|
e587ce276e | ||
|
|
e43f28e103 | ||
|
|
5aab132b25 | ||
|
|
07e2828b4e | ||
|
|
f595cd1a4c | ||
|
|
74b1977972 | ||
|
|
49f7c0ab9c | ||
|
|
f212e6ccd4 | ||
|
|
c742c05cae | ||
|
|
2b8b0e7c33 | ||
|
|
9797fdaf17 | ||
|
|
6a3fba15c5 | ||
|
|
882f2c9a95 | ||
|
|
080969defa | ||
|
|
f9a806cd7a | ||
|
|
c38244677f | ||
|
|
7a8346be26 | ||
|
|
5807af2572 | ||
|
|
1f30f8b757 | ||
|
|
eb1badec34 | ||
|
|
11457c64c6 | ||
|
|
5e16f61f4f | ||
|
|
b8064a4dba | ||
|
|
549df63606 | ||
|
|
68708a9f7a | ||
|
|
ef609d6cb5 | ||
|
|
b9fd7d16f6 | ||
|
|
6f7612c64d | ||
|
|
26c7e28d7e | ||
|
|
6f744f78de | ||
|
|
543c3d42fc | ||
|
|
e379426d37 | ||
|
|
42656e4cb1 | ||
|
|
2296b6ba9e | ||
|
|
bd77bc323c | ||
|
|
180b964407 | ||
|
|
4beef3e59f | ||
|
|
8bacbcdafc | ||
|
|
0036103d21 | ||
|
|
f9253eb652 | ||
|
|
135f82e412 | ||
|
|
4b21c320b5 | ||
|
|
2439dfc05c | ||
|
|
bb813333f9 | ||
|
|
d2bac1a14c | ||
|
|
9f15ff6db1 | ||
|
|
60a33a541b | ||
|
|
943cea186c | ||
|
|
955f1f08ad | ||
|
|
922d8d7b31 | ||
|
|
764e2e4bac | ||
|
|
dd5d48487d | ||
|
|
6f75c27045 | ||
|
|
4168d1398b | ||
|
|
abe5d5a96b | ||
|
|
cdaae0e243 | ||
|
|
a06c39c510 | ||
|
|
a87fadc201 | ||
|
|
4ef269f796 | ||
|
|
992fc4eb1f | ||
|
|
9e145151be | ||
|
|
e1b169f283 | ||
|
|
4809d8a8f6 | ||
|
|
543b309c01 | ||
|
|
621cd670ee | ||
|
|
e4dbd7e64b | ||
|
|
893cfea3da | ||
|
|
e12e94d346 | ||
|
|
8cf010f6e4 | ||
|
|
0cbef546c8 | ||
|
|
af2ee03a4e | ||
|
|
3d7297c4a2 | ||
|
|
ca3e2d9612 | ||
|
|
f5469482a3 | ||
|
|
76ee96d868 | ||
|
|
9a146cf9d5 | ||
|
|
d3911e8ec2 | ||
|
|
d81eade746 |
@@ -1,23 +0,0 @@
|
|||||||
{
|
|
||||||
"env": {
|
|
||||||
"browser": true,
|
|
||||||
"commonjs": true,
|
|
||||||
"es2021": true
|
|
||||||
},
|
|
||||||
"extends": [
|
|
||||||
"google",
|
|
||||||
"eslint:recommended",
|
|
||||||
"plugin:@typescript-eslint/recommended"
|
|
||||||
],
|
|
||||||
"parser": "@typescript-eslint/parser",
|
|
||||||
"parserOptions": {
|
|
||||||
"ecmaVersion": 12
|
|
||||||
},
|
|
||||||
"plugins": [
|
|
||||||
"@typescript-eslint"
|
|
||||||
],
|
|
||||||
"rules": {
|
|
||||||
"max-len": ["error", { "code": 120 }],
|
|
||||||
"linebreak-style": 0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
8
.github/workflows/codeql-analysis.yml
vendored
8
.github/workflows/codeql-analysis.yml
vendored
@@ -37,11 +37,11 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4.1.6
|
uses: actions/checkout@v4.2.2
|
||||||
|
|
||||||
# Initializes the CodeQL tools for scanning.
|
# Initializes the CodeQL tools for scanning.
|
||||||
- name: Initialize CodeQL
|
- name: Initialize CodeQL
|
||||||
uses: github/codeql-action/init@v3.25.8
|
uses: github/codeql-action/init@v3.28.10
|
||||||
with:
|
with:
|
||||||
languages: ${{ matrix.language }}
|
languages: ${{ matrix.language }}
|
||||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||||
@@ -52,7 +52,7 @@ jobs:
|
|||||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||||
# If this step fails, then you should remove it and run the build manually (see below)
|
# If this step fails, then you should remove it and run the build manually (see below)
|
||||||
- name: Autobuild
|
- name: Autobuild
|
||||||
uses: github/codeql-action/autobuild@v3.25.8
|
uses: github/codeql-action/autobuild@v3.28.10
|
||||||
|
|
||||||
# ℹ️ Command-line programs to run using the OS shell.
|
# ℹ️ Command-line programs to run using the OS shell.
|
||||||
# 📚 https://git.io/JvXDl
|
# 📚 https://git.io/JvXDl
|
||||||
@@ -66,4 +66,4 @@ jobs:
|
|||||||
# make release
|
# make release
|
||||||
|
|
||||||
- name: Perform CodeQL Analysis
|
- name: Perform CodeQL Analysis
|
||||||
uses: github/codeql-action/analyze@v3.25.8
|
uses: github/codeql-action/analyze@v3.28.10
|
||||||
|
|||||||
89
.github/workflows/main.yml
vendored
89
.github/workflows/main.yml
vendored
@@ -1,5 +1,9 @@
|
|||||||
|
---
|
||||||
name: Workflow for Codecov Action
|
name: Workflow for Codecov Action
|
||||||
on: [push, pull_request]
|
on: [push, pull_request]
|
||||||
|
permissions:
|
||||||
|
id-token: write
|
||||||
|
contents: read
|
||||||
jobs:
|
jobs:
|
||||||
run:
|
run:
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
@@ -8,19 +12,19 @@ jobs:
|
|||||||
os: [macos-latest, windows-latest, ubuntu-latest]
|
os: [macos-latest, windows-latest, ubuntu-latest]
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4.1.6
|
uses: actions/checkout@v4.2.2
|
||||||
|
with:
|
||||||
|
submodules: 'true'
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
run: npm install
|
run: pip install -r src/scripts/app/requirements.txt
|
||||||
- name: Lint
|
|
||||||
run: npm run lint
|
|
||||||
- name: Run tests and collect coverage
|
- name: Run tests and collect coverage
|
||||||
run: npm run test
|
run: pytest src/scripts/app/ --cov
|
||||||
- name: Upload coverage to Codecov (script)
|
- name: Upload coverage to Codecov (script)
|
||||||
uses: ./
|
uses: ./
|
||||||
with:
|
with:
|
||||||
fail_ci_if_error: true
|
fail_ci_if_error: true
|
||||||
files: ./coverage/script/coverage-final.json
|
files: ./coverage/script/coverage-final.json
|
||||||
flags: script,${{ matrix.os }}
|
flags: script-${{ matrix.os }}
|
||||||
name: codecov-script
|
name: codecov-script
|
||||||
verbose: true
|
verbose: true
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
@@ -28,9 +32,8 @@ jobs:
|
|||||||
uses: ./
|
uses: ./
|
||||||
with:
|
with:
|
||||||
fail_ci_if_error: true
|
fail_ci_if_error: true
|
||||||
files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json
|
files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json,./coverage/coverage-final.json
|
||||||
file: ./coverage/coverage-final.json
|
flags: demo-${{ matrix.os }}
|
||||||
flags: demo,${{ matrix.os }}
|
|
||||||
name: codecov-demo
|
name: codecov-demo
|
||||||
verbose: true
|
verbose: true
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
@@ -38,11 +41,10 @@ jobs:
|
|||||||
uses: ./
|
uses: ./
|
||||||
with:
|
with:
|
||||||
fail_ci_if_error: true
|
fail_ci_if_error: true
|
||||||
files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json
|
files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json,./coverage/coverage-final.json
|
||||||
file: ./coverage/coverage-final.json
|
flags: version-${{ matrix.os }}
|
||||||
flags: version,${{ matrix.os }}
|
|
||||||
name: codecov-version
|
name: codecov-version
|
||||||
version: v0.6.0
|
version: v9.1.0
|
||||||
verbose: true
|
verbose: true
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
|
|
||||||
@@ -51,19 +53,19 @@ jobs:
|
|||||||
runs-on: macos-latest-xlarge
|
runs-on: macos-latest-xlarge
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4.1.6
|
uses: actions/checkout@v4.2.2
|
||||||
|
with:
|
||||||
|
submodules: 'true'
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
run: npm install
|
run: pip install -r src/scripts/app/requirements.txt
|
||||||
- name: Lint
|
|
||||||
run: npm run lint
|
|
||||||
- name: Run tests and collect coverage
|
- name: Run tests and collect coverage
|
||||||
run: npm run test
|
run: pytest src/scripts/app/ --cov
|
||||||
- name: Upload coverage to Codecov (script)
|
- name: Upload coverage to Codecov (script)
|
||||||
uses: ./
|
uses: ./
|
||||||
with:
|
with:
|
||||||
fail_ci_if_error: true
|
fail_ci_if_error: true
|
||||||
files: ./coverage/script/coverage-final.json
|
files: ./coverage/script/coverage-final.json
|
||||||
flags: script,macos-latest-xlarge
|
flags: script-macos-latest-xlarge
|
||||||
name: codecov-script
|
name: codecov-script
|
||||||
verbose: true
|
verbose: true
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
@@ -71,60 +73,63 @@ jobs:
|
|||||||
uses: ./
|
uses: ./
|
||||||
with:
|
with:
|
||||||
fail_ci_if_error: true
|
fail_ci_if_error: true
|
||||||
files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json
|
files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json,./coverage/coverage-final.json
|
||||||
file: ./coverage/coverage-final.json
|
flags: demo-macos-latest-xlarge
|
||||||
flags: demo,macos-latest-xlarge
|
|
||||||
name: codecov-demo
|
name: codecov-demo
|
||||||
verbose: true
|
verbose: true
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
|
- name: Upload coverage to Codecov (oidc)
|
||||||
|
uses: ./
|
||||||
|
with:
|
||||||
|
files: ./coverage/script/coverage-final.json
|
||||||
|
flags: script-${{ matrix.os }}
|
||||||
|
name: codecov-script
|
||||||
|
use_oidc: true
|
||||||
|
verbose: true
|
||||||
- name: Upload coverage to Codecov (version)
|
- name: Upload coverage to Codecov (version)
|
||||||
uses: ./
|
uses: ./
|
||||||
with:
|
with:
|
||||||
fail_ci_if_error: true
|
fail_ci_if_error: true
|
||||||
files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json
|
files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json,./coverage/coverage-final.json
|
||||||
file: ./coverage/coverage-final.json
|
flags: version-maxos-latest-xlarge
|
||||||
flags: version,maxos-latest-xlarge
|
|
||||||
name: codecov-version
|
name: codecov-version
|
||||||
version: v0.6.0
|
version: v9.1.0
|
||||||
verbose: true
|
verbose: true
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
|
|
||||||
run-container:
|
run-container:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
container: node:18
|
container: python:latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4.1.6
|
uses: actions/checkout@v4.2.2
|
||||||
- name: Install dependencies
|
with:
|
||||||
run: npm install
|
submodules: 'true'
|
||||||
- name: Lint
|
- name: Install deps
|
||||||
run: npm run lint
|
run: |
|
||||||
- name: Run tests and collect coverage
|
apt-get install git
|
||||||
run: npm run test
|
|
||||||
- name: Upload coverage to Codecov (script)
|
- name: Upload coverage to Codecov (script)
|
||||||
uses: ./
|
uses: ./
|
||||||
with:
|
with:
|
||||||
files: ./coverage/script/coverage-final.json
|
files: ./coverage/script/coverage-final.json
|
||||||
flags: script,${{ matrix.os }}
|
flags: script-${{ matrix.os }}
|
||||||
name: codecov-script
|
name: codecov-script
|
||||||
verbose: true
|
verbose: true
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
- name: Upload coverage to Codecov (demo)
|
- name: Upload coverage to Codecov (demo)
|
||||||
uses: ./
|
uses: ./
|
||||||
with:
|
with:
|
||||||
files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json
|
files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json,./coverage/coverage-final.json
|
||||||
file: ./coverage/coverage-final.json
|
flags: demo-${{ matrix.os }}
|
||||||
flags: demo,${{ matrix.os }}
|
|
||||||
name: codecov-demo
|
name: codecov-demo
|
||||||
verbose: true
|
verbose: true
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
- name: Upload coverage to Codecov (version)
|
- name: Upload coverage to Codecov (version)
|
||||||
uses: ./
|
uses: ./
|
||||||
with:
|
with:
|
||||||
files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json
|
files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json,./coverage/coverage-final.json
|
||||||
file: ./coverage/coverage-final.json
|
flags: version-${{ matrix.os }}
|
||||||
flags: version,${{ matrix.os }}
|
|
||||||
name: codecov-version
|
name: codecov-version
|
||||||
version: v0.6.0
|
version: v9.1.0
|
||||||
verbose: true
|
verbose: true
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
|
|||||||
17
.github/workflows/scorecards-analysis.yml
vendored
17
.github/workflows/scorecards-analysis.yml
vendored
@@ -12,6 +12,7 @@ permissions: read-all
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
analysis:
|
analysis:
|
||||||
|
if: github.repository == 'codecov/codecov-action'
|
||||||
name: Scorecards analysis
|
name: Scorecards analysis
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
@@ -21,15 +22,15 @@ jobs:
|
|||||||
id-token: write
|
id-token: write
|
||||||
actions: read
|
actions: read
|
||||||
contents: read
|
contents: read
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: "Checkout code"
|
- name: "Checkout code"
|
||||||
uses: actions/checkout@v4.1.6 # v3.0.0
|
uses: actions/checkout@v4.2.2 # v3.0.0
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
|
|
||||||
- name: "Run analysis"
|
- name: "Run analysis"
|
||||||
uses: ossf/scorecard-action@dc50aa9510b46c811795eb24b2f1ba02a914e534 # v2.3.3
|
uses: ossf/scorecard-action@f49aabe0b5af0936a0987cfb85d86b75731b0186 # v2.4.1
|
||||||
with:
|
with:
|
||||||
results_file: results.sarif
|
results_file: results.sarif
|
||||||
results_format: sarif
|
results_format: sarif
|
||||||
@@ -40,22 +41,22 @@ jobs:
|
|||||||
# repo_token: ${{ secrets.SCORECARD_READ_TOKEN }}
|
# repo_token: ${{ secrets.SCORECARD_READ_TOKEN }}
|
||||||
|
|
||||||
# Publish the results for public repositories to enable scorecard badges. For more details, see
|
# Publish the results for public repositories to enable scorecard badges. For more details, see
|
||||||
# https://github.com/ossf/scorecard-action#publishing-results.
|
# https://github.com/ossf/scorecard-action#publishing-results.
|
||||||
# For private repositories, `publish_results` will automatically be set to `false`, regardless
|
# For private repositories, `publish_results` will automatically be set to `false`, regardless
|
||||||
# of the value entered here.
|
# of the value entered here.
|
||||||
publish_results: true
|
publish_results: true
|
||||||
|
|
||||||
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
|
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
|
||||||
# format to the repository Actions tab.
|
# format to the repository Actions tab.
|
||||||
- name: "Upload artifact"
|
- name: "Upload artifact"
|
||||||
uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
|
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
|
||||||
with:
|
with:
|
||||||
name: SARIF file
|
name: SARIF file
|
||||||
path: results.sarif
|
path: results.sarif
|
||||||
retention-days: 5
|
retention-days: 5
|
||||||
|
|
||||||
# Upload the results to GitHub's code scanning dashboard.
|
# Upload the results to GitHub's code scanning dashboard.
|
||||||
- name: "Upload to code-scanning"
|
- name: "Upload to code-scanning"
|
||||||
uses: github/codeql-action/upload-sarif@v3.25.8 # v1.0.26
|
uses: github/codeql-action/upload-sarif@v3.28.10 # v1.0.26
|
||||||
with:
|
with:
|
||||||
sarif_file: results.sarif
|
sarif_file: results.sarif
|
||||||
|
|||||||
4
.gitmodules
vendored
Normal file
4
.gitmodules
vendored
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
[submodule "src/scripts"]
|
||||||
|
path = src/scripts
|
||||||
|
url = https://github.com/codecov/wrapper
|
||||||
|
branch = main
|
||||||
965
CHANGELOG.md
965
CHANGELOG.md
File diff suppressed because it is too large
Load Diff
8
Makefile
8
Makefile
@@ -1,7 +1,7 @@
|
|||||||
deploy:
|
deploy:
|
||||||
$(eval VERSION := $(shell cat package.json | grep '"version": ' | cut -d\" -f4))
|
$(eval VERSION := $(shell cat src/version))
|
||||||
git tag -d v4
|
git tag -d v5
|
||||||
git push origin :v4
|
git push origin :v5
|
||||||
git tag v4
|
git tag v5
|
||||||
git tag v$(VERSION) -s -m ""
|
git tag v$(VERSION) -s -m ""
|
||||||
git push origin --tags
|
git push origin --tags
|
||||||
|
|||||||
119
README.md
119
README.md
@@ -1,15 +1,39 @@
|
|||||||
# Codecov GitHub Action
|
# Codecov GitHub Action
|
||||||
|
|
||||||
[](https://github.com/marketplace/actions/codecov)
|
[](https://github.com/marketplace/actions/codecov)
|
||||||
[](https://app.fossa.com/projects/git%2Bgithub.com%2Fcodecov%2Fcodecov-action?ref=badge_shield)
|
[](https://app.fossa.com/projects/git%2Bgithub.com%2Fcodecov%2Fcodecov-action?ref=badge_shield)
|
||||||
[](https://github.com/codecov/codecov-action/actions/workflows/main.yml)
|
[](https://github.com/codecov/codecov-action/actions/workflows/main.yml)
|
||||||
### Easily upload coverage reports to Codecov from GitHub Actions
|
### Easily upload coverage reports to Codecov from GitHub Actions
|
||||||
|
|
||||||
|
## v5 Release
|
||||||
|
`v5` of the Codecov GitHub Action will use the [Codecov Wrapper](https://github.com/codecov/wrapper) to encapsulate the [CLI](https://github.com/codecov/codecov-cli). This will help ensure that the Action gets updates quicker.
|
||||||
|
|
||||||
|
### Migration Guide
|
||||||
|
The `v5` release also coincides with the opt-out feature for tokens for public repositories. In the `Global Upload Token` section of the settings page of an organization in codecov.io, you can set the ability for Codecov to receive a coverage reports from any source. This will allow contributors or other members of a repository to upload without needing access to the Codecov token. For more details see [how to upload without a token](https://docs.codecov.com/docs/codecov-tokens#uploading-without-a-token).
|
||||||
|
|
||||||
|
> [!WARNING]
|
||||||
|
> **The following arguments have been changed**
|
||||||
|
> - `file` (this has been deprecated in favor of `files`)
|
||||||
|
> - `plugin` (this has been deprecated in favor of `plugins`)
|
||||||
|
|
||||||
|
The following arguments have been added:
|
||||||
|
|
||||||
|
- `binary`
|
||||||
|
- `gcov_args`
|
||||||
|
- `gcov_executable`
|
||||||
|
- `gcov_ignore`
|
||||||
|
- `gcov_include`
|
||||||
|
- `report_type`
|
||||||
|
- `skip_validation`
|
||||||
|
- `swift_project`
|
||||||
|
|
||||||
|
You can see their usage in the `action.yml` [file](https://github.com/codecov/codecov-action/blob/main/action.yml).
|
||||||
|
|
||||||
## v4 Release
|
## v4 Release
|
||||||
`v4` of the Codecov GitHub Action will use the [Codecov CLI](https://github.com/codecov/codecov-cli) to upload coverage reports to Codecov.
|
`v4` of the Codecov GitHub Action will use the [Codecov CLI](https://github.com/codecov/codecov-cli) to upload coverage reports to Codecov.
|
||||||
|
|
||||||
### Breaking Changes
|
### Breaking Changes
|
||||||
- Tokenless uploading is unsupported. However, PRs made from forks to the upstream public repos will support tokenless (e.g. contributors to OS projects do not need the upstream repo's Codecov token). For details, [see our docs](https://docs.codecov.com/docs/codecov-uploader#supporting-token-less-uploads-for-forks-of-open-source-repos-using-codecov)
|
- Tokenless uploading is unsupported. However, PRs made from forks to the upstream public repos will support tokenless (e.g. contributors to OSS projects do not need the upstream repo's Codecov token). For details, [see our docs](https://docs.codecov.com/docs/codecov-uploader#supporting-token-less-uploads-for-forks-of-open-source-repos-using-codecov)
|
||||||
- Various arguments to the Action have been removed
|
- Various arguments to the Action have been removed
|
||||||
|
|
||||||
### Dependabot
|
### Dependabot
|
||||||
@@ -19,7 +43,10 @@
|
|||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
To integrate Codecov with your Actions pipeline, specify the name of this repository with a tag number (`@v4` is recommended) as a `step` within your `workflow.yml` file.
|
To integrate Codecov with your Actions pipeline, specify the name of this repository with a tag number (`@v5` is recommended) as a `step` within your `workflow.yml` file.
|
||||||
|
|
||||||
|
> [!WARNING]
|
||||||
|
> In order for the Action to work seamlessly, you will need to have `curl`, `git`, and `gpg` installed on your runner. You will also need to run the [actions/checkout](https://github.com/actions/checkout) before calling the Codecov action.
|
||||||
|
|
||||||
This Action also requires you to [provide an upload token](https://docs.codecov.io/docs/frequently-asked-questions#section-where-is-the-repository-upload-token-found-) from [codecov.io](https://www.codecov.io) (tip: in order to avoid exposing your token, [store it](https://docs.codecov.com/docs/adding-the-codecov-token#github-actions) as a `secret`).
|
This Action also requires you to [provide an upload token](https://docs.codecov.io/docs/frequently-asked-questions#section-where-is-the-repository-upload-token-found-) from [codecov.io](https://www.codecov.io) (tip: in order to avoid exposing your token, [store it](https://docs.codecov.com/docs/adding-the-codecov-token#github-actions) as a `secret`).
|
||||||
|
|
||||||
@@ -36,13 +63,13 @@ Inside your `.github/workflows/workflow.yml` file:
|
|||||||
```yaml
|
```yaml
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@main
|
- uses: actions/checkout@main
|
||||||
- uses: codecov/codecov-action@v4
|
- uses: codecov/codecov-action@v5
|
||||||
with:
|
with:
|
||||||
fail_ci_if_error: true # optional (default = false)
|
fail_ci_if_error: true # optional (default = false)
|
||||||
files: ./coverage1.xml,./coverage2.xml # optional
|
files: ./coverage1.xml,./coverage2.xml # optional
|
||||||
flags: unittests # optional
|
flags: unittests # optional
|
||||||
name: codecov-umbrella # optional
|
name: codecov-umbrella # optional
|
||||||
token: ${{ secrets.CODECOV_TOKEN }} # required
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
verbose: true # optional (default = false)
|
verbose: true # optional (default = false)
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -51,7 +78,7 @@ The Codecov token can also be passed in via environment variables:
|
|||||||
```yaml
|
```yaml
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@main
|
- uses: actions/checkout@main
|
||||||
- uses: codecov/codecov-action@v4
|
- uses: codecov/codecov-action@v5
|
||||||
with:
|
with:
|
||||||
fail_ci_if_error: true # optional (default = false)
|
fail_ci_if_error: true # optional (default = false)
|
||||||
files: ./coverage1.xml,./coverage2.xml # optional
|
files: ./coverage1.xml,./coverage2.xml # optional
|
||||||
@@ -68,7 +95,7 @@ steps:
|
|||||||
For users with [OpenID Connect(OIDC) enabled](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect), the Codecov token is not necessary. You can use OIDC with the `use_oidc` argument as following.
|
For users with [OpenID Connect(OIDC) enabled](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect), the Codecov token is not necessary. You can use OIDC with the `use_oidc` argument as following.
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
- uses: codecov/codecov-action@v4
|
- uses: codecov/codecov-action@v5
|
||||||
with:
|
with:
|
||||||
use_oidc: true
|
use_oidc: true
|
||||||
```
|
```
|
||||||
@@ -81,38 +108,54 @@ Codecov's Action supports inputs from the user. These inputs, along with their d
|
|||||||
|
|
||||||
| Input | Description | Required |
|
| Input | Description | Required |
|
||||||
| :--- | :--- | :---: |
|
| :--- | :--- | :---: |
|
||||||
| `token` | Repository Codecov token. Used to authorize report uploads | *Required
|
| `base_sha` | 'The base SHA to select. This is only used in the "pr-base-picking" run command' | Optional
|
||||||
| `codecov_yml_path` | Specify the path to the Codecov YML | Optional
|
| `binary` | The file location of a pre-downloaded version of the CLI. If specified, integrity checking will be bypassed. | Optional
|
||||||
| `commit_parent` | Override to specify the parent commit SHA | Optional
|
| `codecov_yml_path` | The location of the codecov.yml file. This is currently ONLY used for automated test selection (https://docs.codecov.com/docs/getting-started-with-ats). Note that for all other cases, the Codecov yaml will need to be located as described here: https://docs.codecov.com/docs/codecov-yaml#can-i-name-the-file-codecovyml | Optional
|
||||||
| `directory` | Directory to search for coverage reports. | Optional
|
| `commit_parent` | SHA (with 40 chars) of what should be the parent of this commit. | Optional
|
||||||
| `disable_search` | Disable search for coverage files. This is helpful when specifying what files you want to upload with the --file option. | Optional
|
| `directory` | Folder to search for coverage files. Default to the current working directory | Optional
|
||||||
| `disable_file_fixes` | Disable file fixes to ignore common lines from coverage (e.g. blank lines or empty brackets) | Optional
|
| `disable_file_fixes` | Disable file fixes to ignore common lines from coverage (e.g. blank lines or empty brackets). Read more here https://docs.codecov.com/docs/fixing-reports | Optional
|
||||||
|
| `disable_search` | Disable search for coverage files. This is helpful when specifying what files you want to upload with the files option. | Optional
|
||||||
|
| `disable_safe_directory` | Disable setting safe directory. Set to true to disable. | Optional
|
||||||
|
| `disable_telem` | Disable sending telemetry data to Codecov. Set to true to disable. | Optional
|
||||||
| `dry_run` | Don't upload files to Codecov | Optional
|
| `dry_run` | Don't upload files to Codecov | Optional
|
||||||
| `env_vars` | Environment variables to tag the upload with (e.g. PYTHON \| OS,PYTHON) | Optional
|
| `env_vars` | Environment variables to tag the upload with (e.g. PYTHON \| OS,PYTHON) | Optional
|
||||||
| `exclude` | Folders to exclude from search | Optional
|
| `exclude` | Comma-separated list of folders to exclude from search. | Optional
|
||||||
| `fail_ci_if_error` | Specify whether or not CI build should fail if Codecov runs into an error during upload | Optional
|
| `fail_ci_if_error` | On error, exit with non-zero code | Optional
|
||||||
| `file` | Path to coverage file to upload | Optional
|
| `files` | Comma-separated explicit list of files to upload. These will be added to the coverage files found for upload. If you wish to only upload the specified files, please consider using "disable-search" to disable uploading other files. | Optional
|
||||||
| `files` | Comma-separated list of files to upload | Optional
|
| `flags` | Comma-separated list of flags to upload to group coverage metrics. | Optional
|
||||||
| `flags` | Flag upload to group coverage metrics (e.g. unittests \| integration \| ui,chrome) | Optional
|
| `force` | Only used for empty-upload run command | Optional
|
||||||
| `handle_no_reports_found` | Raise no exceptions when no coverage reports found | Optional
|
| `git_service` | Override the git_service (e.g. github_enterprise) | Optional
|
||||||
| `job_code` | The job code | Optional
|
| `gcov_args` | Extra arguments to pass to gcov | Optional
|
||||||
| `name` | User defined upload name. Visible in Codecov UI | Optional
|
| `gcov_executable` | gcov executable to run. Defaults to 'gcov' | Optional
|
||||||
| `os` | Override the assumed OS. Options are linux \| macos \| windows \| . | Optional
|
| `gcov_ignore` | Paths to ignore during gcov gathering | Optional
|
||||||
| `override_branch` | Specify the branch name | Optional
|
| `gcov_include` | Paths to include during gcov gathering | Optional
|
||||||
| `override_build` | Specify the build number | Optional
|
| `handle_no_reports_found` | If no coverage reports are found, do not raise an exception. | Optional
|
||||||
|
| `job_code` | | Optional
|
||||||
|
| `name` | Custom defined name of the upload. Visible in the Codecov UI | Optional
|
||||||
|
| `network_filter` | Specify a filter on the files listed in the network section of the Codecov report. This will only add files whose path begin with the specified filter. Useful for upload-specific path fixing. | Optional
|
||||||
|
| `network_prefix` | Specify a prefix on files listed in the network section of the Codecov report. Useful to help resolve path fixing. | Optional
|
||||||
|
| `os` | Override the assumed OS. Options available at cli.codecov.io | Optional
|
||||||
|
| `override_branch` | Specify the branch to be displayed with this commit on Codecov | Optional
|
||||||
|
| `override_build` | Specify the build number manually | Optional
|
||||||
| `override_build_url` | The URL of the build where this is running | Optional
|
| `override_build_url` | The URL of the build where this is running | Optional
|
||||||
| `override_commit` | Specify the commit SHA | Optional
|
| `override_commit` | Commit SHA (with 40 chars) | Optional
|
||||||
| `override_pr` | Specify the pull request number | Optional
|
| `override_pr` | Specify the pull request number manually. Used to override pre-existing CI environment variables. | Optional
|
||||||
| `plugin` | plugins to run. Options: xcode, gcov, pycoverage. The default behavior runs them all. | Optional
|
| `plugins` | Comma-separated list of plugins to run. Specify `noop` to turn off all plugins | Optional
|
||||||
| `plugins` | Comma-separated list of plugins for use during upload. | Optional
|
| `recurse_submodules` | Whether to enumerate files inside of submodules for path-fixing purposes. Off by default. | Optional
|
||||||
| `report_code` | The code of the report. If unsure, do not include | Optional
|
| `report_code` | The code of the report if using local upload. If unsure, leave unset. Read more here https://docs.codecov.com/docs/the-codecov-cli#how-to-use-local-upload | Optional
|
||||||
| `root_dir` | Used to specify the location of your .git root to identify project root directory | Optional
|
| `report_type` | The type of file to upload, coverage by default. Possible values are "test_results", "coverage". | Optional
|
||||||
| `slug` | Specify the slug manually (Enterprise use) | Optional
|
| `root_dir` | Root folder from which to consider paths on the network section. Defaults to current working directory. | Optional
|
||||||
| `url` | Specify the base url to upload (Enterprise use) | Optional
|
| `run_command` | Choose which CLI command to run. Options are "upload-coverage", "empty-upload", "pr-base-picking", "send-notifications". "upload-coverage" is run by default.' | Optional
|
||||||
| `use_legacy_upload_endpoint` | Use the legacy upload endpoint | Optional
|
| `skip_validation` | Skip integrity checking of the CLI. This is NOT recommended. | Optional
|
||||||
| `use_oidc` | Use OpenID Connect for verification instead of token. This will ignore any token supplied. Please see [GitHub documentation](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect) for details.
|
| `slug` | [Required when using the org token] Set to the owner/repo slug used instead of the private repo token. Only applicable to some Enterprise users. | Optional
|
||||||
| `verbose` | Specify whether the Codecov output should be verbose | Optional
|
| `swift_project` | Specify the swift project name. Useful for optimization. | Optional
|
||||||
| `version` | Specify which version of the Codecov CLI should be used. Defaults to `latest` | Optional
|
| `token` | Repository Codecov token. Used to authorize report uploads | Optional
|
||||||
|
| `url` | Set to the Codecov instance URl. Used by Dedicated Enterprise Cloud customers. | Optional
|
||||||
|
| `use_legacy_upload_endpoint` | Use the legacy upload endpoint. | Optional
|
||||||
|
| `use_oidc` | Use OIDC instead of token. This will ignore any token supplied | Optional
|
||||||
|
| `use_pypi` | Use the pypi version of the CLI instead of from cli.codecov.io. If specified, integrity checking will be bypassed. | Optional
|
||||||
|
| `verbose` | Enable verbose logging | Optional
|
||||||
|
| `version` | Which version of the Codecov CLI to use (defaults to 'latest') | Optional
|
||||||
| `working-directory` | Directory in which to execute codecov.sh | Optional
|
| `working-directory` | Directory in which to execute codecov.sh | Optional
|
||||||
|
|
||||||
### Example `workflow.yml` with Codecov Action
|
### Example `workflow.yml` with Codecov Action
|
||||||
@@ -134,14 +177,14 @@ jobs:
|
|||||||
- name: Setup Python
|
- name: Setup Python
|
||||||
uses: actions/setup-python@main
|
uses: actions/setup-python@main
|
||||||
with:
|
with:
|
||||||
python-version: 3.10
|
python-version: '3.10'
|
||||||
- name: Generate coverage report
|
- name: Generate coverage report
|
||||||
run: |
|
run: |
|
||||||
pip install pytest
|
pip install pytest
|
||||||
pip install pytest-cov
|
pip install pytest-cov
|
||||||
pytest --cov=./ --cov-report=xml
|
pytest --cov=./ --cov-report=xml
|
||||||
- name: Upload coverage to Codecov
|
- name: Upload coverage to Codecov
|
||||||
uses: codecov/codecov-action@v4
|
uses: codecov/codecov-action@v5
|
||||||
with:
|
with:
|
||||||
directory: ./coverage/reports/
|
directory: ./coverage/reports/
|
||||||
env_vars: OS,PYTHON
|
env_vars: OS,PYTHON
|
||||||
|
|||||||
273
action.yml
273
action.yml
@@ -1,121 +1,316 @@
|
|||||||
|
---
|
||||||
|
# yamllint disable rule:line-length
|
||||||
name: 'Codecov'
|
name: 'Codecov'
|
||||||
description: 'GitHub Action that uploads coverage reports for your repository to codecov.io'
|
description: 'GitHub Action that uploads coverage reports for your repository to codecov.io'
|
||||||
author: 'Ibrahim Ali <@ibrahim0814> & Thomas Hu <@thomasrockhu> | Codecov'
|
author: 'Thomas Hu <@thomasrockhu-codecov> | Codecov'
|
||||||
inputs:
|
inputs:
|
||||||
token:
|
base_sha:
|
||||||
description: 'Repository Codecov token. Used to authorize report uploads'
|
description: 'The base SHA to select. This is only used in the "pr-base-picking" run command'
|
||||||
|
required: false
|
||||||
|
binary:
|
||||||
|
description: 'The file location of a pre-downloaded version of the CLI. If specified, integrity checking will be bypassed.'
|
||||||
required: false
|
required: false
|
||||||
codecov_yml_path:
|
codecov_yml_path:
|
||||||
description: 'Specify the path to the Codecov YML'
|
description: 'The location of the codecov.yml file. This is crrently ONLY used for automated test selection (https://docs.codecov.com/docs/getting-started-with-ats). Note that for all other cases, the Codecov yaml will need to be located as described here: https://docs.codecov.com/docs/codecov-yaml#can-i-name-the-file-codecovyml'
|
||||||
required: false
|
required: false
|
||||||
commit_parent:
|
commit_parent:
|
||||||
description: 'Override to specify the parent commit SHA'
|
description: 'SHA (with 40 chars) of what should be the parent of this commit.'
|
||||||
required: false
|
required: false
|
||||||
directory:
|
directory:
|
||||||
description: 'Directory to search for coverage reports.'
|
description: 'Folder to search for coverage files. Default to the current working directory'
|
||||||
required: false
|
required: false
|
||||||
disable_file_fixes:
|
disable_file_fixes:
|
||||||
description: 'Disable file fixes to ignore common lines from coverage (e.g. blank lines or empty brackets)'
|
description: 'Disable file fixes to ignore common lines from coverage (e.g. blank lines or empty brackets). Read more here https://docs.codecov.com/docs/fixing-reports'
|
||||||
required: false
|
required: false
|
||||||
|
default: 'false'
|
||||||
disable_search:
|
disable_search:
|
||||||
description: 'Disable search for coverage files. This is helpful when specifying what files you want to upload with the --file option.'
|
description: 'Disable search for coverage files. This is helpful when specifying what files you want to upload with the files option.'
|
||||||
required: false
|
required: false
|
||||||
|
default: 'false'
|
||||||
disable_safe_directory:
|
disable_safe_directory:
|
||||||
description: 'Disable setting safe directory. Set to true to disable.'
|
description: 'Disable setting safe directory. Set to true to disable.'
|
||||||
required: false
|
required: false
|
||||||
|
default: 'false'
|
||||||
|
disable_telem:
|
||||||
|
description: 'Disable sending telemetry data to Codecov. Set to true to disable.'
|
||||||
|
required: false
|
||||||
|
default: 'false'
|
||||||
dry_run:
|
dry_run:
|
||||||
description: "Don't upload files to Codecov"
|
description: "Don't upload files to Codecov"
|
||||||
required: false
|
required: false
|
||||||
|
default: 'false'
|
||||||
env_vars:
|
env_vars:
|
||||||
description: 'Environment variables to tag the upload with (e.g. PYTHON | OS,PYTHON)'
|
description: 'Environment variables to tag the upload with (e.g. PYTHON | OS,PYTHON)'
|
||||||
required: false
|
required: false
|
||||||
exclude:
|
exclude:
|
||||||
description: 'Folders to exclude from search'
|
description: 'Comma-separated list of folders to exclude from search.'
|
||||||
required: false
|
required: false
|
||||||
fail_ci_if_error:
|
fail_ci_if_error:
|
||||||
description: 'Specify whether or not CI build should fail if Codecov runs into an error during upload'
|
description: 'On error, exit with non-zero code'
|
||||||
required: false
|
|
||||||
file:
|
|
||||||
description: 'Path to coverage file to upload'
|
|
||||||
required: false
|
required: false
|
||||||
|
default: 'false'
|
||||||
files:
|
files:
|
||||||
description: 'Comma-separated list of files to upload'
|
description: 'Comma-separated list of explicit files to upload. These will be added to the coverage files found for upload. If you wish to only upload the specified files, please consider using disable-search to disable uploading other files.'
|
||||||
required: false
|
required: false
|
||||||
flags:
|
flags:
|
||||||
description: 'Flag upload to group coverage metrics (e.g. unittests | integration | ui,chrome)'
|
description: 'Comma-separated list of flags to upload to group coverage metrics.'
|
||||||
|
required: false
|
||||||
|
force:
|
||||||
|
description: 'Only used for empty-upload run command'
|
||||||
required: false
|
required: false
|
||||||
git_service:
|
git_service:
|
||||||
description: 'Override the git_service (e.g. github_enterprise)'
|
description: 'Override the git_service (e.g. github_enterprise)'
|
||||||
required: false
|
required: false
|
||||||
handle_no_reports_found:
|
default: 'github'
|
||||||
description: 'Raise no exceptions when no coverage reports found'
|
gcov_args:
|
||||||
|
description: 'Extra arguments to pass to gcov'
|
||||||
required: false
|
required: false
|
||||||
|
gcov_executable:
|
||||||
|
description: "gcov executable to run. Defaults to 'gcov'"
|
||||||
|
required: false
|
||||||
|
default: 'gcov'
|
||||||
|
gcov_ignore:
|
||||||
|
description: 'Paths to ignore during gcov gathering'
|
||||||
|
required: false
|
||||||
|
gcov_include:
|
||||||
|
description: "Paths to include during gcov gathering"
|
||||||
|
required: false
|
||||||
|
handle_no_reports_found:
|
||||||
|
description: 'If no coverage reports are found, do not raise an exception.'
|
||||||
|
required: false
|
||||||
|
default: 'false'
|
||||||
job_code:
|
job_code:
|
||||||
description: 'The job code'
|
description: ''
|
||||||
required: false
|
required: false
|
||||||
name:
|
name:
|
||||||
description: 'User defined upload name. Visible in Codecov UI'
|
description: 'Custom defined name of the upload. Visible in the Codecov UI'
|
||||||
required: false
|
required: false
|
||||||
network_filter:
|
network_filter:
|
||||||
description: 'Specify a filter on the files listed in the network section of the Codecov report. This will only add files whose path begin with the specified filter. Useful for upload-specific path fixing'
|
description: 'Specify a filter on the files listed in the network section of the Codecov report. This will only add files whose path begin with the specified filter. Useful for upload-specific path fixing.'
|
||||||
required: false
|
required: false
|
||||||
network_prefix:
|
network_prefix:
|
||||||
description: 'Specify a prefix on files listed in the network section of the Codecov report. Useful to help resolve path fixing'
|
description: 'Specify a prefix on files listed in the network section of the Codecov report. Useful to help resolve path fixing.'
|
||||||
required: false
|
required: false
|
||||||
os:
|
os:
|
||||||
description: 'Override the assumed OS. Options are linux | macos | windows.'
|
description: 'Override the assumed OS. Options available at cli.codecov.io'
|
||||||
required: false
|
required: false
|
||||||
override_branch:
|
override_branch:
|
||||||
description: 'Specify the branch name'
|
description: 'Specify the branch to be displayed with this commit on Codecov'
|
||||||
required: false
|
required: false
|
||||||
override_build:
|
override_build:
|
||||||
description: 'Specify the build number'
|
description: 'Specify the build number manually'
|
||||||
required: false
|
required: false
|
||||||
override_build_url:
|
override_build_url:
|
||||||
description: 'The URL of the build where this is running'
|
description: 'The URL of the build where this is running'
|
||||||
required: false
|
required: false
|
||||||
override_commit:
|
override_commit:
|
||||||
description: 'Specify the commit SHA'
|
description: 'Commit SHA (with 40 chars)'
|
||||||
required: false
|
required: false
|
||||||
override_pr:
|
override_pr:
|
||||||
description: 'Specify the pull request number'
|
description: 'Specify the pull request number manually. Used to override pre-existing CI environment variables.'
|
||||||
required: false
|
|
||||||
plugin:
|
|
||||||
description: 'plugins to run. Options: xcode, gcov, pycoverage. The default behavior runs them all.'
|
|
||||||
required: false
|
required: false
|
||||||
plugins:
|
plugins:
|
||||||
description: 'Comma-separated list of plugins for use during upload.'
|
description: 'Comma-separated list of plugins to run. Specify `noop` to turn off all plugins'
|
||||||
required: false
|
required: false
|
||||||
|
recurse_submodules:
|
||||||
|
description: 'Whether to enumerate files inside of submodules for path-fixing purposes. Off by default.'
|
||||||
|
default: 'false'
|
||||||
report_code:
|
report_code:
|
||||||
description: 'The code of the report. If unsure, do not include'
|
description: 'The code of the report if using local upload. If unsure, leave default. Read more here https://docs.codecov.com/docs/the-codecov-cli#how-to-use-local-upload'
|
||||||
|
required: false
|
||||||
|
report_type:
|
||||||
|
description: 'The type of file to upload, coverage by default. Possible values are "test_results", "coverage".'
|
||||||
required: false
|
required: false
|
||||||
root_dir:
|
root_dir:
|
||||||
description: 'Used when not in git/hg project to identify project root directory'
|
description: 'Root folder from which to consider paths on the network section. Defaults to current working directory.'
|
||||||
required: false
|
required: false
|
||||||
|
run_command:
|
||||||
|
description: 'Choose which CLI command to run. Options are "upload-coverage", "empty-upload", "pr-base-picking", "send-notifications". "upload-coverage" is run by default.'
|
||||||
|
required: false
|
||||||
|
default: 'upload-coverage'
|
||||||
|
skip_validation:
|
||||||
|
description: 'Skip integrity checking of the CLI. This is NOT recommended.'
|
||||||
|
required: false
|
||||||
|
default: 'false'
|
||||||
slug:
|
slug:
|
||||||
description: 'Specify the slug manually (Enterprise use)'
|
description: '[Required when using the org token] Set to the owner/repo slug used instead of the private repo token. Only applicable to some Enterprise users.'
|
||||||
|
required: false
|
||||||
|
swift_project:
|
||||||
|
description: 'Specify the swift project name. Useful for optimization.'
|
||||||
|
required: false
|
||||||
|
token:
|
||||||
|
description: 'Repository Codecov token. Used to authorize report uploads'
|
||||||
required: false
|
required: false
|
||||||
url:
|
url:
|
||||||
description: 'Specify the base url to upload (Enterprise use)'
|
description: 'Set to the Codecov instance URl. Used by Dedicated Enterprise Cloud customers.'
|
||||||
required: false
|
required: false
|
||||||
use_legacy_upload_endpoint:
|
use_legacy_upload_endpoint:
|
||||||
description: 'Use the legacy upload endpoint'
|
description: 'Use the legacy upload endpoint.'
|
||||||
required: false
|
required: false
|
||||||
|
default: 'false'
|
||||||
use_oidc:
|
use_oidc:
|
||||||
description: 'Use OIDC instead of token. This will ignore any token supplied'
|
description: 'Use OIDC instead of token. This will ignore any token supplied'
|
||||||
required: false
|
required: false
|
||||||
|
default: 'false'
|
||||||
|
use_pypi:
|
||||||
|
description: 'Use the pypi version of the CLI instead of from cli.codecov.io'
|
||||||
|
required: false
|
||||||
|
default: 'false'
|
||||||
verbose:
|
verbose:
|
||||||
description: 'Specify whether the Codecov output should be verbose'
|
description: 'Enable verbose logging'
|
||||||
required: false
|
required: false
|
||||||
|
default: 'false'
|
||||||
version:
|
version:
|
||||||
description: 'Specify which version of the Codecov CLI should be used. Defaults to `latest`'
|
description: "Which version of the Codecov CLI to use (defaults to 'latest')"
|
||||||
required: false
|
required: false
|
||||||
|
default: 'latest'
|
||||||
working-directory:
|
working-directory:
|
||||||
description: 'Directory in which to execute codecov.sh'
|
description: 'Directory in which to execute codecov.sh'
|
||||||
required: false
|
required: false
|
||||||
|
|
||||||
branding:
|
branding:
|
||||||
color: 'red'
|
color: 'red'
|
||||||
icon: 'umbrella'
|
icon: 'umbrella'
|
||||||
|
|
||||||
runs:
|
runs:
|
||||||
using: 'node20'
|
using: "composite"
|
||||||
main: 'dist/index.js'
|
steps:
|
||||||
|
- name: Action version
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
CC_ACTION_VERSION=$(cat ${GITHUB_ACTION_PATH}/src/version)
|
||||||
|
echo -e "\033[0;32m==>\033[0m Running Action version $CC_ACTION_VERSION"
|
||||||
|
- name: Set safe directory
|
||||||
|
if: ${{ inputs.disable_safe_directory != 'true' }}
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
git config --global --add safe.directory "${{ github.workspace }}"
|
||||||
|
git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||||
|
|
||||||
|
- name: Set fork
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
CC_FORK="false"
|
||||||
|
if [ -n "$GITHUB_EVENT_PULL_REQUEST_HEAD_REPO_FULL_NAME" ] && [ "$GITHUB_EVENT_PULL_REQUEST_HEAD_REPO_FULL_NAME" != "$GITHUB_REPOSITORY" ];
|
||||||
|
then
|
||||||
|
echo -e "\033[0;32m==>\033[0m Fork detected"
|
||||||
|
CC_FORK="true"
|
||||||
|
fi
|
||||||
|
echo "CC_FORK=$CC_FORK" >> "$GITHUB_ENV"
|
||||||
|
env:
|
||||||
|
GITHUB_EVENT_PULL_REQUEST_HEAD_LABEL: ${{ github.event.pull_request.head.label }}
|
||||||
|
GITHUB_EVENT_PULL_REQUEST_HEAD_REPO_FULL_NAME: ${{ github.event.pull_request.head.repo.full_name }}
|
||||||
|
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||||
|
|
||||||
|
|
||||||
|
- name: Get and set token
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
if [ "${{ inputs.use_oidc }}" == 'true' ] && [ "$CC_FORK" != 'true' ];
|
||||||
|
then
|
||||||
|
# {"count":1984,"value":"***"}
|
||||||
|
echo -e "\033[0;32m==>\033[0m Requesting OIDC token from '$ACTIONS_ID_TOKEN_REQUEST_URL'"
|
||||||
|
CC_TOKEN=$(curl -H "Authorization: bearer $ACTIONS_ID_TOKEN_REQUEST_TOKEN" "$ACTIONS_ID_TOKEN_REQUEST_URL&audience=$CC_OIDC_AUDIENCE" | cut -d\" -f6)
|
||||||
|
echo "CC_TOKEN=$CC_TOKEN" >> "$GITHUB_ENV"
|
||||||
|
elif [ -n "${{ env.CODECOV_TOKEN }}" ];
|
||||||
|
then
|
||||||
|
echo -e "\033[0;32m==>\033[0m Token set from env"
|
||||||
|
echo "CC_TOKEN=${{ env.CODECOV_TOKEN }}" >> "$GITHUB_ENV"
|
||||||
|
else
|
||||||
|
if [ -n "${{ inputs.token }}" ];
|
||||||
|
then
|
||||||
|
echo -e "\033[0;32m==>\033[0m Token set from input"
|
||||||
|
CC_TOKEN=$(echo "${{ inputs.token }}" | tr -d '\n')
|
||||||
|
echo "CC_TOKEN=$CC_TOKEN" >> "$GITHUB_ENV"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
env:
|
||||||
|
CC_OIDC_AUDIENCE: ${{ inputs.url || 'https://codecov.io' }}
|
||||||
|
|
||||||
|
- name: Override branch for forks
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
if [ -z "$CC_BRANCH" ] && [ -z "$CC_TOKEN" ] && [ "$CC_FORK" == 'true' ]
|
||||||
|
then
|
||||||
|
echo -e "\033[0;32m==>\033[0m Fork detected, setting branch to $GITHUB_EVENT_PULL_REQUEST_HEAD_LABEL"
|
||||||
|
TOKENLESS="$GITHUB_EVENT_PULL_REQUEST_HEAD_LABEL"
|
||||||
|
CC_BRANCH="$GITHUB_EVENT_PULL_REQUEST_HEAD_LABEL"
|
||||||
|
echo "TOKENLESS=$TOKENLESS" >> "$GITHUB_ENV"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "CC_BRANCH=$CC_BRANCH" >> "$GITHUB_ENV"
|
||||||
|
env:
|
||||||
|
CC_BRANCH: ${{ inputs.override_branch }}
|
||||||
|
GITHUB_EVENT_PULL_REQUEST_HEAD_LABEL: ${{ github.event.pull_request.head.label }}
|
||||||
|
GITHUB_EVENT_PULL_REQUEST_HEAD_REPO_FULL_NAME: ${{ github.event.pull_request.head.repo.full_name }}
|
||||||
|
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||||
|
|
||||||
|
- name: Override commits and pr for pull requests
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
if [ -z "$CC_SHA" ];
|
||||||
|
then
|
||||||
|
CC_SHA="$GITHUB_EVENT_PULL_REQUEST_HEAD_SHA"
|
||||||
|
fi
|
||||||
|
if [ -z "$CC_PR" ] && [ "${GITHUB_EVENT_NAME}" == "pull_request_target" ];
|
||||||
|
then
|
||||||
|
CC_PR="$GITHUB_EVENT_NUMBER"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "CC_SHA=$CC_SHA" >> "$GITHUB_ENV"
|
||||||
|
echo "CC_PR=$CC_PR" >> "$GITHUB_ENV"
|
||||||
|
env:
|
||||||
|
CC_PR: ${{ inputs.override_pr }}
|
||||||
|
CC_SHA: ${{ inputs.override_commit }}
|
||||||
|
GITHUB_EVENT_NAME: ${{ github.event_name }}
|
||||||
|
GITHUB_EVENT_NUMBER: ${{ github.event.number }}
|
||||||
|
GITHUB_EVENT_PULL_REQUEST_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
|
||||||
|
|
||||||
|
- name: Upload coverage to Codecov
|
||||||
|
run: ${GITHUB_ACTION_PATH}/dist/codecov.sh
|
||||||
|
shell: bash
|
||||||
|
working-directory: ${{ inputs.working-directory }}
|
||||||
|
env:
|
||||||
|
CC_BASE_SHA: ${{ inputs.base_sha }}
|
||||||
|
CC_BINARY: ${{ inputs.binary }}
|
||||||
|
CC_BUILD: ${{ inputs.override_build }}
|
||||||
|
CC_BUILD_URL: ${{ inputs.override_build_url }}
|
||||||
|
CC_CODE: ${{ inputs.report_code }}
|
||||||
|
CC_DIR: ${{ inputs.directory }}
|
||||||
|
CC_DISABLE_FILE_FIXES: ${{ inputs.disable_file_fixes }}
|
||||||
|
CC_DISABLE_SEARCH: ${{ inputs.disable_search }}
|
||||||
|
CC_DISABLE_TELEM: ${{ inputs.disable_telem }}
|
||||||
|
CC_DRY_RUN: ${{ inputs.dry_run }}
|
||||||
|
CC_ENTERPRISE_URL: ${{ inputs.url }}
|
||||||
|
CC_ENV: ${{ inputs.env_vars }}
|
||||||
|
CC_EXCLUDES: ${{ inputs.exclude }}
|
||||||
|
CC_FAIL_ON_ERROR: ${{ inputs.fail_ci_if_error }}
|
||||||
|
CC_FILES: ${{ inputs.files }}
|
||||||
|
CC_FLAGS: ${{ inputs.flags }}
|
||||||
|
CC_FORCE: ${{ inputs.force }}
|
||||||
|
CC_GCOV_ARGS: ${{ inputs.gcov_args }}
|
||||||
|
CC_GCOV_EXECUTABLE: ${{ inputs.gcov_executable }}
|
||||||
|
CC_GCOV_IGNORE: ${{ inputs.gcov_ignore }}
|
||||||
|
CC_GCOV_INCLUDE: ${{ inputs.gcov_include }}
|
||||||
|
CC_GIT_SERVICE: ${{ inputs.git_service }}
|
||||||
|
CC_HANDLE_NO_REPORTS_FOUND: ${{ inputs.handle_no_reports_found }}
|
||||||
|
CC_JOB_CODE: ${{ inputs.job_code }}
|
||||||
|
CC_LEGACY: ${{ inputs.use_legacy_upload_endpoint }}
|
||||||
|
CC_NAME: ${{ inputs.name }}
|
||||||
|
CC_NETWORK_FILTER: ${{ inputs.network_filter }}
|
||||||
|
CC_NETWORK_PREFIX: ${{ inputs.network_prefix }}
|
||||||
|
CC_NETWORK_ROOT_FOLDER: ${{ inputs.root_dir }}
|
||||||
|
CC_OS: ${{ inputs.os }}
|
||||||
|
CC_PARENT_SHA: ${{ inputs.commit_parent }}
|
||||||
|
CC_PLUGINS: ${{ inputs.plugins }}
|
||||||
|
CC_RECURSE_SUBMODULES: ${{ inputs.recurse_submodules }}
|
||||||
|
CC_REPORT_TYPE: ${{ inputs.report_type }}
|
||||||
|
CC_RUN_CMD: ${{ inputs.run_command }}
|
||||||
|
CC_SERVICE: ${{ inputs.git_service }}
|
||||||
|
CC_SKIP_VALIDATION: ${{ inputs.skip_validation }}
|
||||||
|
CC_SLUG: ${{ inputs.slug }}
|
||||||
|
CC_SWIFT_PROJECT: ${{ inputs.swift_project }}
|
||||||
|
CC_USE_PYPI: ${{ inputs.use_pypi }}
|
||||||
|
CC_VERBOSE: ${{ inputs.verbose }}
|
||||||
|
CC_VERSION: ${{ inputs.version }}
|
||||||
|
CC_YML_PATH: ${{ inputs.codecov_yml_path }}
|
||||||
|
|||||||
76
changelog.py
Normal file
76
changelog.py
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
import json
|
||||||
|
import re
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
def update_changelog():
|
||||||
|
with open('src/version', 'r') as f:
|
||||||
|
version = f.read()
|
||||||
|
changelog = [f"## v{version}"]
|
||||||
|
changelog.append("### What\'s Changed")
|
||||||
|
|
||||||
|
with open('CHANGELOG.md', 'r') as f:
|
||||||
|
previous = f.readline().replace("##", '').strip()
|
||||||
|
|
||||||
|
if previous == version:
|
||||||
|
print(f"No changes to version {version}")
|
||||||
|
return
|
||||||
|
print(f"Adding logs from {previous}..v{version}")
|
||||||
|
|
||||||
|
raw_current_branch = subprocess.run([
|
||||||
|
"git",
|
||||||
|
"branch",
|
||||||
|
"--show-current",
|
||||||
|
], capture_output=True)
|
||||||
|
current_branch = raw_current_branch.stdout.decode('utf-8').strip()
|
||||||
|
|
||||||
|
raw_commits = subprocess.run([
|
||||||
|
"git",
|
||||||
|
"log",
|
||||||
|
f"{previous}..{current_branch}",
|
||||||
|
"--oneline",
|
||||||
|
], capture_output=True)
|
||||||
|
commits = [line[:7] for line in raw_commits.stdout.decode('utf-8').split('\n')]
|
||||||
|
print(commits)
|
||||||
|
|
||||||
|
prs = set()
|
||||||
|
for commit in commits:
|
||||||
|
if not commit:
|
||||||
|
continue
|
||||||
|
commit_output = subprocess.run([
|
||||||
|
'gh',
|
||||||
|
'pr',
|
||||||
|
'list',
|
||||||
|
'--json',
|
||||||
|
'author,number,title,url',
|
||||||
|
'--search',
|
||||||
|
f'"{commit}"',
|
||||||
|
'--state',
|
||||||
|
'merged',
|
||||||
|
], capture_output=True)
|
||||||
|
|
||||||
|
commit_details = commit_output.stdout.decode('utf-8')
|
||||||
|
if not commit_details or not json.loads(commit_details):
|
||||||
|
continue
|
||||||
|
commit_details = json.loads(commit_details)[0]
|
||||||
|
|
||||||
|
|
||||||
|
if not commit_details['number']:
|
||||||
|
continue
|
||||||
|
if commit_details['number'] in prs:
|
||||||
|
continue
|
||||||
|
prs.add(commit_details['number'])
|
||||||
|
changelog.append(f"* {commit_details['title']} by @{commit_details['author']['login']} in {commit_details['url']}")
|
||||||
|
|
||||||
|
changelog.append('\n')
|
||||||
|
changelog.append(f"**Full Changelog**: https://github.com/codecov/codecov-action/compare/{previous}..v{version}\n")
|
||||||
|
|
||||||
|
with open('CHANGELOG.md', 'r') as f:
|
||||||
|
for line in f:
|
||||||
|
changelog.append(line.strip())
|
||||||
|
|
||||||
|
with open('CHANGELOG.md', 'w') as f:
|
||||||
|
f.write('\n'.join(changelog))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__=="__main__":
|
||||||
|
update_changelog()
|
||||||
453
dist/37.index.js
vendored
453
dist/37.index.js
vendored
@@ -1,453 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
exports.id = 37;
|
|
||||||
exports.ids = [37];
|
|
||||||
exports.modules = {
|
|
||||||
|
|
||||||
/***/ 4037:
|
|
||||||
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
|
|
||||||
|
|
||||||
__webpack_require__.r(__webpack_exports__);
|
|
||||||
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
|
|
||||||
/* harmony export */ "toFormData": () => (/* binding */ toFormData)
|
|
||||||
/* harmony export */ });
|
|
||||||
/* harmony import */ var fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(2777);
|
|
||||||
/* harmony import */ var formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(8010);
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
let s = 0;
|
|
||||||
const S = {
|
|
||||||
START_BOUNDARY: s++,
|
|
||||||
HEADER_FIELD_START: s++,
|
|
||||||
HEADER_FIELD: s++,
|
|
||||||
HEADER_VALUE_START: s++,
|
|
||||||
HEADER_VALUE: s++,
|
|
||||||
HEADER_VALUE_ALMOST_DONE: s++,
|
|
||||||
HEADERS_ALMOST_DONE: s++,
|
|
||||||
PART_DATA_START: s++,
|
|
||||||
PART_DATA: s++,
|
|
||||||
END: s++
|
|
||||||
};
|
|
||||||
|
|
||||||
let f = 1;
|
|
||||||
const F = {
|
|
||||||
PART_BOUNDARY: f,
|
|
||||||
LAST_BOUNDARY: f *= 2
|
|
||||||
};
|
|
||||||
|
|
||||||
const LF = 10;
|
|
||||||
const CR = 13;
|
|
||||||
const SPACE = 32;
|
|
||||||
const HYPHEN = 45;
|
|
||||||
const COLON = 58;
|
|
||||||
const A = 97;
|
|
||||||
const Z = 122;
|
|
||||||
|
|
||||||
const lower = c => c | 0x20;
|
|
||||||
|
|
||||||
const noop = () => {};
|
|
||||||
|
|
||||||
class MultipartParser {
|
|
||||||
/**
|
|
||||||
* @param {string} boundary
|
|
||||||
*/
|
|
||||||
constructor(boundary) {
|
|
||||||
this.index = 0;
|
|
||||||
this.flags = 0;
|
|
||||||
|
|
||||||
this.onHeaderEnd = noop;
|
|
||||||
this.onHeaderField = noop;
|
|
||||||
this.onHeadersEnd = noop;
|
|
||||||
this.onHeaderValue = noop;
|
|
||||||
this.onPartBegin = noop;
|
|
||||||
this.onPartData = noop;
|
|
||||||
this.onPartEnd = noop;
|
|
||||||
|
|
||||||
this.boundaryChars = {};
|
|
||||||
|
|
||||||
boundary = '\r\n--' + boundary;
|
|
||||||
const ui8a = new Uint8Array(boundary.length);
|
|
||||||
for (let i = 0; i < boundary.length; i++) {
|
|
||||||
ui8a[i] = boundary.charCodeAt(i);
|
|
||||||
this.boundaryChars[ui8a[i]] = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
this.boundary = ui8a;
|
|
||||||
this.lookbehind = new Uint8Array(this.boundary.length + 8);
|
|
||||||
this.state = S.START_BOUNDARY;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Uint8Array} data
|
|
||||||
*/
|
|
||||||
write(data) {
|
|
||||||
let i = 0;
|
|
||||||
const length_ = data.length;
|
|
||||||
let previousIndex = this.index;
|
|
||||||
let {lookbehind, boundary, boundaryChars, index, state, flags} = this;
|
|
||||||
const boundaryLength = this.boundary.length;
|
|
||||||
const boundaryEnd = boundaryLength - 1;
|
|
||||||
const bufferLength = data.length;
|
|
||||||
let c;
|
|
||||||
let cl;
|
|
||||||
|
|
||||||
const mark = name => {
|
|
||||||
this[name + 'Mark'] = i;
|
|
||||||
};
|
|
||||||
|
|
||||||
const clear = name => {
|
|
||||||
delete this[name + 'Mark'];
|
|
||||||
};
|
|
||||||
|
|
||||||
const callback = (callbackSymbol, start, end, ui8a) => {
|
|
||||||
if (start === undefined || start !== end) {
|
|
||||||
this[callbackSymbol](ui8a && ui8a.subarray(start, end));
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const dataCallback = (name, clear) => {
|
|
||||||
const markSymbol = name + 'Mark';
|
|
||||||
if (!(markSymbol in this)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (clear) {
|
|
||||||
callback(name, this[markSymbol], i, data);
|
|
||||||
delete this[markSymbol];
|
|
||||||
} else {
|
|
||||||
callback(name, this[markSymbol], data.length, data);
|
|
||||||
this[markSymbol] = 0;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
for (i = 0; i < length_; i++) {
|
|
||||||
c = data[i];
|
|
||||||
|
|
||||||
switch (state) {
|
|
||||||
case S.START_BOUNDARY:
|
|
||||||
if (index === boundary.length - 2) {
|
|
||||||
if (c === HYPHEN) {
|
|
||||||
flags |= F.LAST_BOUNDARY;
|
|
||||||
} else if (c !== CR) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
index++;
|
|
||||||
break;
|
|
||||||
} else if (index - 1 === boundary.length - 2) {
|
|
||||||
if (flags & F.LAST_BOUNDARY && c === HYPHEN) {
|
|
||||||
state = S.END;
|
|
||||||
flags = 0;
|
|
||||||
} else if (!(flags & F.LAST_BOUNDARY) && c === LF) {
|
|
||||||
index = 0;
|
|
||||||
callback('onPartBegin');
|
|
||||||
state = S.HEADER_FIELD_START;
|
|
||||||
} else {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (c !== boundary[index + 2]) {
|
|
||||||
index = -2;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (c === boundary[index + 2]) {
|
|
||||||
index++;
|
|
||||||
}
|
|
||||||
|
|
||||||
break;
|
|
||||||
case S.HEADER_FIELD_START:
|
|
||||||
state = S.HEADER_FIELD;
|
|
||||||
mark('onHeaderField');
|
|
||||||
index = 0;
|
|
||||||
// falls through
|
|
||||||
case S.HEADER_FIELD:
|
|
||||||
if (c === CR) {
|
|
||||||
clear('onHeaderField');
|
|
||||||
state = S.HEADERS_ALMOST_DONE;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
index++;
|
|
||||||
if (c === HYPHEN) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (c === COLON) {
|
|
||||||
if (index === 1) {
|
|
||||||
// empty header field
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
dataCallback('onHeaderField', true);
|
|
||||||
state = S.HEADER_VALUE_START;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
cl = lower(c);
|
|
||||||
if (cl < A || cl > Z) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
break;
|
|
||||||
case S.HEADER_VALUE_START:
|
|
||||||
if (c === SPACE) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
mark('onHeaderValue');
|
|
||||||
state = S.HEADER_VALUE;
|
|
||||||
// falls through
|
|
||||||
case S.HEADER_VALUE:
|
|
||||||
if (c === CR) {
|
|
||||||
dataCallback('onHeaderValue', true);
|
|
||||||
callback('onHeaderEnd');
|
|
||||||
state = S.HEADER_VALUE_ALMOST_DONE;
|
|
||||||
}
|
|
||||||
|
|
||||||
break;
|
|
||||||
case S.HEADER_VALUE_ALMOST_DONE:
|
|
||||||
if (c !== LF) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
state = S.HEADER_FIELD_START;
|
|
||||||
break;
|
|
||||||
case S.HEADERS_ALMOST_DONE:
|
|
||||||
if (c !== LF) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
callback('onHeadersEnd');
|
|
||||||
state = S.PART_DATA_START;
|
|
||||||
break;
|
|
||||||
case S.PART_DATA_START:
|
|
||||||
state = S.PART_DATA;
|
|
||||||
mark('onPartData');
|
|
||||||
// falls through
|
|
||||||
case S.PART_DATA:
|
|
||||||
previousIndex = index;
|
|
||||||
|
|
||||||
if (index === 0) {
|
|
||||||
// boyer-moore derrived algorithm to safely skip non-boundary data
|
|
||||||
i += boundaryEnd;
|
|
||||||
while (i < bufferLength && !(data[i] in boundaryChars)) {
|
|
||||||
i += boundaryLength;
|
|
||||||
}
|
|
||||||
|
|
||||||
i -= boundaryEnd;
|
|
||||||
c = data[i];
|
|
||||||
}
|
|
||||||
|
|
||||||
if (index < boundary.length) {
|
|
||||||
if (boundary[index] === c) {
|
|
||||||
if (index === 0) {
|
|
||||||
dataCallback('onPartData', true);
|
|
||||||
}
|
|
||||||
|
|
||||||
index++;
|
|
||||||
} else {
|
|
||||||
index = 0;
|
|
||||||
}
|
|
||||||
} else if (index === boundary.length) {
|
|
||||||
index++;
|
|
||||||
if (c === CR) {
|
|
||||||
// CR = part boundary
|
|
||||||
flags |= F.PART_BOUNDARY;
|
|
||||||
} else if (c === HYPHEN) {
|
|
||||||
// HYPHEN = end boundary
|
|
||||||
flags |= F.LAST_BOUNDARY;
|
|
||||||
} else {
|
|
||||||
index = 0;
|
|
||||||
}
|
|
||||||
} else if (index - 1 === boundary.length) {
|
|
||||||
if (flags & F.PART_BOUNDARY) {
|
|
||||||
index = 0;
|
|
||||||
if (c === LF) {
|
|
||||||
// unset the PART_BOUNDARY flag
|
|
||||||
flags &= ~F.PART_BOUNDARY;
|
|
||||||
callback('onPartEnd');
|
|
||||||
callback('onPartBegin');
|
|
||||||
state = S.HEADER_FIELD_START;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
} else if (flags & F.LAST_BOUNDARY) {
|
|
||||||
if (c === HYPHEN) {
|
|
||||||
callback('onPartEnd');
|
|
||||||
state = S.END;
|
|
||||||
flags = 0;
|
|
||||||
} else {
|
|
||||||
index = 0;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
index = 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (index > 0) {
|
|
||||||
// when matching a possible boundary, keep a lookbehind reference
|
|
||||||
// in case it turns out to be a false lead
|
|
||||||
lookbehind[index - 1] = c;
|
|
||||||
} else if (previousIndex > 0) {
|
|
||||||
// if our boundary turned out to be rubbish, the captured lookbehind
|
|
||||||
// belongs to partData
|
|
||||||
const _lookbehind = new Uint8Array(lookbehind.buffer, lookbehind.byteOffset, lookbehind.byteLength);
|
|
||||||
callback('onPartData', 0, previousIndex, _lookbehind);
|
|
||||||
previousIndex = 0;
|
|
||||||
mark('onPartData');
|
|
||||||
|
|
||||||
// reconsider the current character even so it interrupted the sequence
|
|
||||||
// it could be the beginning of a new sequence
|
|
||||||
i--;
|
|
||||||
}
|
|
||||||
|
|
||||||
break;
|
|
||||||
case S.END:
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
throw new Error(`Unexpected state entered: ${state}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
dataCallback('onHeaderField');
|
|
||||||
dataCallback('onHeaderValue');
|
|
||||||
dataCallback('onPartData');
|
|
||||||
|
|
||||||
// Update properties for the next call
|
|
||||||
this.index = index;
|
|
||||||
this.state = state;
|
|
||||||
this.flags = flags;
|
|
||||||
}
|
|
||||||
|
|
||||||
end() {
|
|
||||||
if ((this.state === S.HEADER_FIELD_START && this.index === 0) ||
|
|
||||||
(this.state === S.PART_DATA && this.index === this.boundary.length)) {
|
|
||||||
this.onPartEnd();
|
|
||||||
} else if (this.state !== S.END) {
|
|
||||||
throw new Error('MultipartParser.end(): stream ended unexpectedly');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Extract the file name from a Content-Disposition header value.
 * Returns undefined when no filename parameter is present.
 */
function _fileName(headerValue) {
	// filename may be a quoted-string or a token (RFC 2616 section 19.5.1)
	const found = headerValue.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i);
	if (!found) {
		return;
	}

	const raw = found[2] || found[3] || '';
	// Drop any Windows-style path prefix, then undo the legacy escapes
	// some browsers apply to quotes and 4-digit character references.
	return raw
		.slice(raw.lastIndexOf('\\') + 1)
		.replace(/%22/g, '"')
		.replace(/&#(\d{4});/g, (_, code) => String.fromCharCode(code));
}
|
|
||||||
|
|
||||||
/**
 * Parse a multipart/form-data body stream into a FormData instance.
 *
 * @param {AsyncIterable<Uint8Array>} Body - body chunks; each chunk is fed to MultipartParser.write
 * @param {string} ct - the Content-Type header value (must carry the boundary)
 * @returns {Promise<FormData>}
 * @throws {TypeError} when ct is not multipart or has no boundary parameter
 */
async function toFormData(Body, ct) {
	if (!/multipart/i.test(ct)) {
		throw new TypeError('Failed to fetch');
	}

	// boundary may be a quoted string or a bare token
	const m = ct.match(/boundary=(?:"([^"]+)"|([^;]+))/i);

	if (!m) {
		throw new TypeError('no or bad content-type header, no multipart boundary');
	}

	const parser = new MultipartParser(m[1] || m[2]);

	// Per-part accumulation state; reset in onPartBegin for every part.
	let headerField;
	let headerValue;
	let entryValue;
	let entryName;
	let contentType;
	let filename;
	const entryChunks = [];
	const formData = new formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__/* .FormData */ .Ct();

	// Text field handler: decode bytes incrementally into a string.
	const onPartData = ui8a => {
		entryValue += decoder.decode(ui8a, {stream: true});
	};

	// File field handler: keep the raw byte chunks for the File constructor.
	const appendToFile = ui8a => {
		entryChunks.push(ui8a);
	};

	const appendFileToFormData = () => {
		const file = new fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__/* .File */ .$B(entryChunks, filename, {type: contentType});
		formData.append(entryName, file);
	};

	const appendEntryToFormData = () => {
		formData.append(entryName, entryValue);
	};

	const decoder = new TextDecoder('utf-8');
	// Prime the decoder so any stale state is flushed before first use.
	decoder.decode();

	parser.onPartBegin = function () {
		// Default to text handling; onHeaderEnd swaps in the file handlers
		// once a filename is seen in Content-Disposition.
		parser.onPartData = onPartData;
		parser.onPartEnd = appendEntryToFormData;

		headerField = '';
		headerValue = '';
		entryValue = '';
		entryName = '';
		contentType = '';
		filename = null;
		entryChunks.length = 0;
	};

	parser.onHeaderField = function (ui8a) {
		headerField += decoder.decode(ui8a, {stream: true});
	};

	parser.onHeaderValue = function (ui8a) {
		headerValue += decoder.decode(ui8a, {stream: true});
	};

	parser.onHeaderEnd = function () {
		// Flush the streaming decoder, then interpret the completed header.
		headerValue += decoder.decode();
		headerField = headerField.toLowerCase();

		if (headerField === 'content-disposition') {
			// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
			const m = headerValue.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i);

			if (m) {
				entryName = m[2] || m[3] || '';
			}

			filename = _fileName(headerValue);

			if (filename) {
				parser.onPartData = appendToFile;
				parser.onPartEnd = appendFileToFormData;
			}
		} else if (headerField === 'content-type') {
			contentType = headerValue;
		}

		// Reset for the next header line of this part.
		headerValue = '';
		headerField = '';
	};

	for await (const chunk of Body) {
		parser.write(chunk);
	}

	parser.end();

	return formData;
}
|
|
||||||
|
|
||||||
|
|
||||||
/***/ })
|
|
||||||
|
|
||||||
};
|
|
||||||
;
|
|
||||||
//# sourceMappingURL=37.index.js.map
|
|
||||||
1
dist/37.index.js.map
vendored
1
dist/37.index.js.map
vendored
File diff suppressed because one or more lines are too long
453
dist/629.index.js
vendored
453
dist/629.index.js
vendored
@@ -1,453 +0,0 @@
|
|||||||
exports.id = 629;
|
|
||||||
exports.ids = [629];
|
|
||||||
exports.modules = {
|
|
||||||
|
|
||||||
/***/ 6629:
|
|
||||||
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
|
|
||||||
|
|
||||||
"use strict";
|
|
||||||
__webpack_require__.r(__webpack_exports__);
|
|
||||||
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
|
|
||||||
/* harmony export */ "toFormData": () => (/* binding */ toFormData)
|
|
||||||
/* harmony export */ });
|
|
||||||
/* harmony import */ var fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(4818);
|
|
||||||
/* harmony import */ var formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(1402);
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
// Parser states, in the order a multipart stream moves through them.
// Values are small stable integers used for switch dispatch in
// MultipartParser.write().
const S = {
	START_BOUNDARY: 0,
	HEADER_FIELD_START: 1,
	HEADER_FIELD: 2,
	HEADER_VALUE_START: 3,
	HEADER_VALUE: 4,
	HEADER_VALUE_ALMOST_DONE: 5,
	HEADERS_ALMOST_DONE: 6,
	PART_DATA_START: 7,
	PART_DATA: 8,
	END: 9
};

// Flag bits OR-ed into `flags` while matching a boundary.
const F = {
	PART_BOUNDARY: 1,
	LAST_BOUNDARY: 2
};

// Byte values the parser compares against.
const LF = 10;     // '\n'
const CR = 13;     // '\r'
const SPACE = 32;  // ' '
const HYPHEN = 45; // '-'
const COLON = 58;  // ':'
const A = 97;      // 'a'
const Z = 122;     // 'z'

// Fold an ASCII letter to lower case by setting the 0x20 bit.
const lower = c => c | 0x20;

// Default no-op implementation for every parser hook.
const noop = () => {};
|
|
||||||
|
|
||||||
/**
 * Streaming parser for multipart bodies. The caller assigns the on* hooks
 * (onPartBegin, onHeaderField, onHeaderValue, onHeaderEnd, onHeadersEnd,
 * onPartData, onPartEnd), feeds Uint8Array chunks via write(), and finally
 * calls end(). Data hooks receive subarray views into the current chunk.
 */
class MultipartParser {
	/**
	 * @param {string} boundary
	 */
	constructor(boundary) {
		// index: match position within the boundary / header field;
		// flags: F.* bits tracking what kind of boundary is being matched.
		this.index = 0;
		this.flags = 0;

		// All hooks default to no-ops so write() can call them unconditionally.
		this.onHeaderEnd = noop;
		this.onHeaderField = noop;
		this.onHeadersEnd = noop;
		this.onHeaderValue = noop;
		this.onPartBegin = noop;
		this.onPartData = noop;
		this.onPartEnd = noop;

		// Set of byte values that occur in the boundary, used by the
		// skip-ahead loop in write() to jump over non-boundary data.
		this.boundaryChars = {};

		// The wire form of a part boundary is CRLF "--" + boundary.
		boundary = '\r\n--' + boundary;
		const ui8a = new Uint8Array(boundary.length);
		for (let i = 0; i < boundary.length; i++) {
			ui8a[i] = boundary.charCodeAt(i);
			this.boundaryChars[ui8a[i]] = true;
		}

		this.boundary = ui8a;
		// Bytes tentatively matched against the boundary, replayed as part
		// data if the match turns out to be false (+8 covers the trailer).
		this.lookbehind = new Uint8Array(this.boundary.length + 8);
		this.state = S.START_BOUNDARY;
	}

	/**
	 * Feed one chunk of the multipart stream through the state machine.
	 * Returns early (silently aborting the parse) on malformed input.
	 *
	 * @param {Uint8Array} data
	 */
	write(data) {
		let i = 0;
		const length_ = data.length;
		let previousIndex = this.index;
		// Work on locals for speed; persisted back to `this` at the end.
		let {lookbehind, boundary, boundaryChars, index, state, flags} = this;
		const boundaryLength = this.boundary.length;
		const boundaryEnd = boundaryLength - 1;
		const bufferLength = data.length;
		let c;
		let cl;

		// Record where a data run (header field/value, part data) starts.
		const mark = name => {
			this[name + 'Mark'] = i;
		};

		// Discard a previously marked data run.
		const clear = name => {
			delete this[name + 'Mark'];
		};

		// Invoke a hook; for data hooks, skip empty ranges.
		const callback = (callbackSymbol, start, end, ui8a) => {
			if (start === undefined || start !== end) {
				this[callbackSymbol](ui8a && ui8a.subarray(start, end));
			}
		};

		// Emit the marked run for `name`. With clear=true the run ends at i
		// and the mark is dropped; otherwise the rest of the chunk is
		// emitted and the mark wraps to 0 for the next write() call.
		const dataCallback = (name, clear) => {
			const markSymbol = name + 'Mark';
			if (!(markSymbol in this)) {
				return;
			}

			if (clear) {
				callback(name, this[markSymbol], i, data);
				delete this[markSymbol];
			} else {
				callback(name, this[markSymbol], data.length, data);
				this[markSymbol] = 0;
			}
		};

		for (i = 0; i < length_; i++) {
			c = data[i];

			switch (state) {
				case S.START_BOUNDARY:
					// The first boundary has no leading CRLF, hence the -2 offsets.
					if (index === boundary.length - 2) {
						if (c === HYPHEN) {
							flags |= F.LAST_BOUNDARY;
						} else if (c !== CR) {
							return;
						}

						index++;
						break;
					} else if (index - 1 === boundary.length - 2) {
						if (flags & F.LAST_BOUNDARY && c === HYPHEN) {
							state = S.END;
							flags = 0;
						} else if (!(flags & F.LAST_BOUNDARY) && c === LF) {
							index = 0;
							callback('onPartBegin');
							state = S.HEADER_FIELD_START;
						} else {
							return;
						}

						break;
					}

					// On mismatch restart the match (-2 so index+2 lands on 0).
					if (c !== boundary[index + 2]) {
						index = -2;
					}

					if (c === boundary[index + 2]) {
						index++;
					}

					break;
				case S.HEADER_FIELD_START:
					state = S.HEADER_FIELD;
					mark('onHeaderField');
					index = 0;
					// falls through
				case S.HEADER_FIELD:
					if (c === CR) {
						// Bare CR before any field name: end of the header block.
						clear('onHeaderField');
						state = S.HEADERS_ALMOST_DONE;
						break;
					}

					index++;
					if (c === HYPHEN) {
						break;
					}

					if (c === COLON) {
						if (index === 1) {
							// empty header field
							return;
						}

						dataCallback('onHeaderField', true);
						state = S.HEADER_VALUE_START;
						break;
					}

					// Header names must be ASCII letters (or '-' handled above).
					cl = lower(c);
					if (cl < A || cl > Z) {
						return;
					}

					break;
				case S.HEADER_VALUE_START:
					if (c === SPACE) {
						// Skip leading whitespace after the colon.
						break;
					}

					mark('onHeaderValue');
					state = S.HEADER_VALUE;
					// falls through
				case S.HEADER_VALUE:
					if (c === CR) {
						dataCallback('onHeaderValue', true);
						callback('onHeaderEnd');
						state = S.HEADER_VALUE_ALMOST_DONE;
					}

					break;
				case S.HEADER_VALUE_ALMOST_DONE:
					if (c !== LF) {
						return;
					}

					state = S.HEADER_FIELD_START;
					break;
				case S.HEADERS_ALMOST_DONE:
					if (c !== LF) {
						return;
					}

					callback('onHeadersEnd');
					state = S.PART_DATA_START;
					break;
				case S.PART_DATA_START:
					state = S.PART_DATA;
					mark('onPartData');
					// falls through
				case S.PART_DATA:
					previousIndex = index;

					if (index === 0) {
						// boyer-moore derived algorithm to safely skip non-boundary data
						i += boundaryEnd;
						while (i < bufferLength && !(data[i] in boundaryChars)) {
							i += boundaryLength;
						}

						i -= boundaryEnd;
						c = data[i];
					}

					if (index < boundary.length) {
						if (boundary[index] === c) {
							if (index === 0) {
								// Boundary may be starting: flush the data so far.
								dataCallback('onPartData', true);
							}

							index++;
						} else {
							index = 0;
						}
					} else if (index === boundary.length) {
						index++;
						if (c === CR) {
							// CR = part boundary
							flags |= F.PART_BOUNDARY;
						} else if (c === HYPHEN) {
							// HYPHEN = end boundary
							flags |= F.LAST_BOUNDARY;
						} else {
							index = 0;
						}
					} else if (index - 1 === boundary.length) {
						if (flags & F.PART_BOUNDARY) {
							index = 0;
							if (c === LF) {
								// unset the PART_BOUNDARY flag
								flags &= ~F.PART_BOUNDARY;
								callback('onPartEnd');
								callback('onPartBegin');
								state = S.HEADER_FIELD_START;
								break;
							}
						} else if (flags & F.LAST_BOUNDARY) {
							if (c === HYPHEN) {
								callback('onPartEnd');
								state = S.END;
								flags = 0;
							} else {
								index = 0;
							}
						} else {
							index = 0;
						}
					}

					if (index > 0) {
						// when matching a possible boundary, keep a lookbehind reference
						// in case it turns out to be a false lead
						lookbehind[index - 1] = c;
					} else if (previousIndex > 0) {
						// if our boundary turned out to be rubbish, the captured lookbehind
						// belongs to partData
						const _lookbehind = new Uint8Array(lookbehind.buffer, lookbehind.byteOffset, lookbehind.byteLength);
						callback('onPartData', 0, previousIndex, _lookbehind);
						previousIndex = 0;
						mark('onPartData');

						// reconsider the current character even though it interrupted the
						// sequence - it could be the beginning of a new sequence
						i--;
					}

					break;
				case S.END:
					break;
				default:
					throw new Error(`Unexpected state entered: ${state}`);
			}
		}

		// Flush any run still open at the end of the chunk.
		dataCallback('onHeaderField');
		dataCallback('onHeaderValue');
		dataCallback('onPartData');

		// Update properties for the next call
		this.index = index;
		this.state = state;
		this.flags = flags;
	}

	/**
	 * Signal end of input. Emits onPartEnd when the stream stopped at a
	 * legal final position; throws if it ended mid-part.
	 */
	end() {
		if ((this.state === S.HEADER_FIELD_START && this.index === 0) ||
			(this.state === S.PART_DATA && this.index === this.boundary.length)) {
			this.onPartEnd();
		} else if (this.state !== S.END) {
			throw new Error('MultipartParser.end(): stream ended unexpectedly');
		}
	}
}
|
|
||||||
|
|
||||||
/**
 * Pull the filename parameter out of a Content-Disposition header value,
 * or return undefined when none is present.
 */
function _fileName(headerValue) {
	// filename may be a quoted-string or a token (RFC 2616 section 19.5.1)
	const hit = headerValue.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i);
	if (!hit) {
		return;
	}

	const value = hit[2] || hit[3] || '';
	// Strip any Windows path prefix, then reverse the legacy quote and
	// 4-digit character-reference escaping.
	let filename = value.slice(value.lastIndexOf('\\') + 1);
	filename = filename
		.replace(/%22/g, '"')
		.replace(/&#(\d{4});/g, (_, code) => String.fromCharCode(code));
	return filename;
}
|
||||||
|
|
||||||
/**
 * Parse a multipart/form-data body stream into a FormData instance.
 *
 * @param {AsyncIterable<Uint8Array>} Body - body chunks; each chunk is fed to MultipartParser.write
 * @param {string} ct - the Content-Type header value (must carry the boundary)
 * @returns {Promise<FormData>}
 * @throws {TypeError} when ct is not multipart or has no boundary parameter
 */
async function toFormData(Body, ct) {
	if (!/multipart/i.test(ct)) {
		throw new TypeError('Failed to fetch');
	}

	// boundary may be a quoted string or a bare token
	const m = ct.match(/boundary=(?:"([^"]+)"|([^;]+))/i);

	if (!m) {
		throw new TypeError('no or bad content-type header, no multipart boundary');
	}

	const parser = new MultipartParser(m[1] || m[2]);

	// Per-part accumulation state; reset in onPartBegin for every part.
	let headerField;
	let headerValue;
	let entryValue;
	let entryName;
	let contentType;
	let filename;
	const entryChunks = [];
	const formData = new formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__/* .FormData */ .Ct();

	// Text field handler: decode bytes incrementally into a string.
	const onPartData = ui8a => {
		entryValue += decoder.decode(ui8a, {stream: true});
	};

	// File field handler: keep the raw byte chunks for the File constructor.
	const appendToFile = ui8a => {
		entryChunks.push(ui8a);
	};

	const appendFileToFormData = () => {
		const file = new fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__/* .File */ .$B(entryChunks, filename, {type: contentType});
		formData.append(entryName, file);
	};

	const appendEntryToFormData = () => {
		formData.append(entryName, entryValue);
	};

	const decoder = new TextDecoder('utf-8');
	// Prime the decoder so any stale state is flushed before first use.
	decoder.decode();

	parser.onPartBegin = function () {
		// Default to text handling; onHeaderEnd swaps in the file handlers
		// once a filename is seen in Content-Disposition.
		parser.onPartData = onPartData;
		parser.onPartEnd = appendEntryToFormData;

		headerField = '';
		headerValue = '';
		entryValue = '';
		entryName = '';
		contentType = '';
		filename = null;
		entryChunks.length = 0;
	};

	parser.onHeaderField = function (ui8a) {
		headerField += decoder.decode(ui8a, {stream: true});
	};

	parser.onHeaderValue = function (ui8a) {
		headerValue += decoder.decode(ui8a, {stream: true});
	};

	parser.onHeaderEnd = function () {
		// Flush the streaming decoder, then interpret the completed header.
		headerValue += decoder.decode();
		headerField = headerField.toLowerCase();

		if (headerField === 'content-disposition') {
			// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
			const m = headerValue.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i);

			if (m) {
				entryName = m[2] || m[3] || '';
			}

			filename = _fileName(headerValue);

			if (filename) {
				parser.onPartData = appendToFile;
				parser.onPartEnd = appendFileToFormData;
			}
		} else if (headerField === 'content-type') {
			contentType = headerValue;
		}

		// Reset for the next header line of this part.
		headerValue = '';
		headerField = '';
	};

	for await (const chunk of Body) {
		parser.write(chunk);
	}

	parser.end();

	return formData;
}
|
|
||||||
|
|
||||||
|
|
||||||
/***/ })
|
|
||||||
|
|
||||||
};
|
|
||||||
;
|
|
||||||
//# sourceMappingURL=629.index.js.map
|
|
||||||
1
dist/629.index.js.map
vendored
1
dist/629.index.js.map
vendored
File diff suppressed because one or more lines are too long
259
dist/codecov.sh
vendored
Executable file
259
dist/codecov.sh
vendored
Executable file
@@ -0,0 +1,259 @@
|
|||||||
|
#!/usr/bin/env bash
# Codecov CLI wrapper: resolves a CLI (user-supplied binary, pypi install,
# or downloaded release), optionally verifies its GPG signature/checksum,
# then builds the argument list from CC_* environment variables and runs
# the selected command.
CC_WRAPPER_VERSION="0.2.0"
# Unset variables are expected (almost every CC_* input is optional).
set +u

# Print a line, honoring escape sequences (used for the color codes below).
say() {
  echo -e "$1"
}

# Report an error; only abort the script when CC_FAIL_ON_ERROR is set.
exit_if_error() {
  say "$r==> $1$x"
  if [ "$CC_FAIL_ON_ERROR" = true ];
  then
    say "$r Exiting...$x"
    exit 1;
  fi
}

# CC_SOME_FLAG -> some-flag (strip the CC prefix, kebab-case, lower-case).
lower() {
  echo $(echo $1 | sed 's/CC//' | sed 's/_/-/g' | tr '[:upper:]' '[:lower:]')
}

# Emit the CLI option name (--some-flag) when CC_<NAME> is non-empty.
k_arg() {
  if [ -n "$(eval echo \$"CC_$1")" ];
  then
    echo "--$(lower "$1")"
  fi
}

# Emit the CLI option value when CC_<NAME> is non-empty.
v_arg() {
  if [ -n "$(eval echo \$"CC_$1")" ];
  then
    echo "$(eval echo \$"CC_$1")"
  fi
}

# Emit a boolean switch (-some-flag) when the variable is "true" or "1".
write_bool_args() {
  if [ "$(eval echo \$$1)" = "true" ] || [ "$(eval echo \$$1)" = "1" ];
  then
    echo "-$(lower $1)"
  fi
}

# ANSI colors used by say().
b="\033[0;36m" # variables/constants
g="\033[0;32m" # info/debug
r="\033[0;31m" # errors
x="\033[0m"

say " _____ _
/ ____| | |
| | ___ __| | ___ ___ _____ __
| | / _ \\ / _\` |/ _ \\/ __/ _ \\ \\ / /
| |___| (_) | (_| | __/ (_| (_) \\ V /
\\_____\\___/ \\__,_|\\___|\\___\\___/ \\_/
$r Wrapper-$CC_WRAPPER_VERSION$x
"

# Defaults.
CC_VERSION="${CC_VERSION:-latest}"
CC_FAIL_ON_ERROR="${CC_FAIL_ON_ERROR:-false}"
CC_RUN_CMD="${CC_RUN_CMD:-upload-coverage}"

# --- Resolve the CLI: explicit binary > pypi install > download release. ---
if [ -n "$CC_BINARY" ];
then
  if [ -f "$CC_BINARY" ];
  then
    cc_filename=$CC_BINARY
    cc_command=$CC_BINARY
  else
    exit_if_error "Could not find binary file $CC_BINARY"
  fi
elif [ "$CC_USE_PYPI" == "true" ];
then
  # Pin the pypi version unless "latest" was requested.
  if ! pip install codecov-cli"$([ "$CC_VERSION" == "latest" ] && echo "" || echo "==$CC_VERSION" )"; then
    exit_if_error "Could not install via pypi."
    exit
  fi
  cc_command="codecovcli"
else
  # Detect the platform unless the caller pinned CC_OS.
  if [ -n "$CC_OS" ];
  then
    say "$g==>$x Overridden OS: $b${CC_OS}$x"
  else
    CC_OS="windows"
    family=$(uname -s | tr '[:upper:]' '[:lower:]')
    [[ $family == "darwin" ]] && CC_OS="macos"
    [[ $family == "linux" ]] && CC_OS="linux"
    [[ $CC_OS == "linux" ]] && \
      osID=$(grep -e "^ID=" /etc/os-release | cut -c4-)
    [[ $osID == "alpine" ]] && CC_OS="alpine"
    [[ $(arch) == "aarch64" && $family == "linux" ]] && CC_OS+="-arm64"
    say "$g==>$x Detected $b${CC_OS}$x"
  fi
  cc_filename="codecov"
  [[ $CC_OS == "windows" ]] && cc_filename+=".exe"
  cc_command="./$cc_filename"
  # gpg is needed below for signature verification; macOS may lack it.
  [[ $CC_OS == "macos" ]] && \
    ! command -v gpg 2>&1 >/dev/null && \
    HOMEBREW_NO_AUTO_UPDATE=1 brew install gpg
  cc_url="https://cli.codecov.io"
  cc_url="$cc_url/${CC_VERSION}"
  cc_url="$cc_url/${CC_OS}/${cc_filename}"
  say "$g ->$x Downloading $b${cc_url}$x"
  curl -O --retry 5 --retry-delay 2 "$cc_url"
  say "$g==>$x Finishing downloading $b${CC_OS}:${CC_VERSION}$x"
  # Ask the release API which concrete version "latest" resolved to.
  version_url="https://cli.codecov.io/api/${CC_OS}/${CC_VERSION}"
  version=$(curl -s "$version_url" -H "Accept:application/json" | tr \{ '\n' | tr , '\n' | tr \} '\n' | grep "\"version\"" | awk -F'"' '{print $4}' | tail -1)
  say " Version: $b$version$x"
  say " "
fi

# --- Verify the downloaded binary (skipped for user binaries / pypi). ---
if [ "$CC_SKIP_VALIDATION" == "true" ] || [ -n "$CC_BINARY" ] || [ "$CC_USE_PYPI" == "true" ];
then
  say "$r==>$x Bypassing validation..."
else
  CC_PUBLIC_PGP_KEY=$(curl -s https://keybase.io/codecovsecurity/pgp_keys.asc)
  echo "${CC_PUBLIC_PGP_KEY}" | \
    gpg --no-default-keyring --import
  # One-time step
  say "$g==>$x Verifying GPG signature integrity"
  sha_url="https://cli.codecov.io"
  sha_url="${sha_url}/${CC_VERSION}/${CC_OS}"
  sha_url="${sha_url}/${cc_filename}.SHA256SUM"
  say "$g ->$x Downloading $b${sha_url}$x"
  say "$g ->$x Downloading $b${sha_url}.sig$x"
  say " "
  curl -Os --retry 5 --retry-delay 2 --connect-timeout 2 "$sha_url"
  curl -Os --retry 5 --retry-delay 2 --connect-timeout 2 "${sha_url}.sig"
  if ! gpg --verify "${cc_filename}.SHA256SUM.sig" "${cc_filename}.SHA256SUM";
  then
    exit_if_error "Could not verify signature. Please contact Codecov if problem continues"
  fi
  # shasum (macOS) or sha256sum (Linux), whichever exists.
  if ! (shasum -a 256 -c "${cc_filename}.SHA256SUM" 2>/dev/null || \
    sha256sum -c "${cc_filename}.SHA256SUM");
  then
    exit_if_error "Could not verify SHASUM. Please contact Codecov if problem continues"
  fi
  say "$g==>$x CLI integrity verified"
  say
  chmod +x "$cc_command"
fi

# Optionally relocate the binary.
if [ -n "$CC_BINARY_LOCATION" ];
then
  mkdir -p "$CC_BINARY_LOCATION" && mv "$cc_filename" $_
  say "$g==>$x Codecov binary moved to ${CC_BINARY_LOCATION}"
fi

if [ "$CC_DOWNLOAD_ONLY" = "true" ];
then
  say "$g==>$x Codecov download only called. Exiting..."
fi

# --- Global CLI options (before the subcommand). ---
cc_cli_args=()
cc_cli_args+=( $(k_arg AUTO_LOAD_PARAMS_FROM) $(v_arg AUTO_LOAD_PARAMS_FROM))
cc_cli_args+=( $(k_arg ENTERPRISE_URL) $(v_arg ENTERPRISE_URL))
if [ -n "$CC_YML_PATH" ]
then
  cc_cli_args+=( "--codecov-yml-path" )
  cc_cli_args+=( "$CC_YML_PATH" )
fi
cc_cli_args+=( $(write_bool_args CC_DISABLE_TELEM) )
cc_cli_args+=( $(write_bool_args CC_VERBOSE) )

# Token may come from a named variable (CC_TOKEN_VAR) or directly (CC_TOKEN).
if [ -n "$CC_TOKEN_VAR" ];
then
  token="$(eval echo \$$CC_TOKEN_VAR)"
else
  token="$(eval echo $CC_TOKEN)"
fi
say "$g ->$x Token of length ${#token} detected"
token_str=""
token_arg=()
if [ -n "$token" ];
then
  # token_str is only for display; the real value goes in token_arg.
  token_str+=" -t <redacted>"
  token_arg+=( " -t " "$token")
fi

# --- Per-subcommand arguments. ---
if [ "$CC_RUN_CMD" == "upload-coverage" ]; then
  cc_args=()
  # Args for create commit
  cc_args+=( $(write_bool_args CC_FAIL_ON_ERROR) )
  cc_args+=( $(k_arg GIT_SERVICE) $(v_arg GIT_SERVICE))
  cc_args+=( $(k_arg PARENT_SHA) $(v_arg PARENT_SHA))
  cc_args+=( $(k_arg PR) $(v_arg PR))
  cc_args+=( $(k_arg SHA) $(v_arg SHA))
  cc_args+=( $(k_arg SLUG) $(v_arg SLUG))
  # Args for create report
  cc_args+=( $(k_arg CODE) $(v_arg CODE))
  # Args for do upload
  cc_args+=( $(k_arg ENV) $(v_arg ENV))
  # Comma-separated values must survive word splitting below.
  OLDIFS=$IFS;IFS=,
  cc_args+=( $(k_arg BRANCH) $(v_arg BRANCH))
  cc_args+=( $(k_arg BUILD) $(v_arg BUILD))
  cc_args+=( $(k_arg BUILD_URL) $(v_arg BUILD_URL))
  cc_args+=( $(k_arg DIR) $(v_arg DIR))
  cc_args+=( $(write_bool_args CC_DISABLE_FILE_FIXES) )
  cc_args+=( $(write_bool_args CC_DISABLE_SEARCH) )
  cc_args+=( $(write_bool_args CC_DRY_RUN) )
  if [ -n "$CC_EXCLUDES" ];
  then
    for directory in $CC_EXCLUDES; do
      cc_args+=( "--exclude" "$directory" )
    done
  fi
  if [ -n "$CC_FILES" ];
  then
    for file in $CC_FILES; do
      cc_args+=( "--file" "$file" )
    done
  fi
  if [ -n "$CC_FLAGS" ];
  then
    for flag in $CC_FLAGS; do
      cc_args+=( "--flag" "$flag" )
    done
  fi
  cc_args+=( $(k_arg GCOV_ARGS) $(v_arg GCOV_ARGS))
  cc_args+=( $(k_arg GCOV_EXECUTABLE) $(v_arg GCOV_EXECUTABLE))
  cc_args+=( $(k_arg GCOV_IGNORE) $(v_arg GCOV_IGNORE))
  cc_args+=( $(k_arg GCOV_INCLUDE) $(v_arg GCOV_INCLUDE))
  cc_args+=( $(write_bool_args CC_HANDLE_NO_REPORTS_FOUND) )
  cc_args+=( $(write_bool_args CC_RECURSE_SUBMODULES) )
  cc_args+=( $(k_arg JOB_CODE) $(v_arg JOB_CODE))
  cc_args+=( $(write_bool_args CC_LEGACY) )
  if [ -n "$CC_NAME" ];
  then
    cc_args+=( "--name" "$CC_NAME" )
  fi
  cc_args+=( $(k_arg NETWORK_FILTER) $(v_arg NETWORK_FILTER))
  cc_args+=( $(k_arg NETWORK_PREFIX) $(v_arg NETWORK_PREFIX))
  cc_args+=( $(k_arg NETWORK_ROOT_FOLDER) $(v_arg NETWORK_ROOT_FOLDER))
  if [ -n "$CC_PLUGINS" ];
  then
    for plugin in $CC_PLUGINS; do
      cc_args+=( "--plugin" "$plugin" )
    done
  fi
  cc_args+=( $(k_arg REPORT_TYPE) $(v_arg REPORT_TYPE))
  cc_args+=( $(k_arg SWIFT_PROJECT) $(v_arg SWIFT_PROJECT))
  IFS=$OLDIFS
elif [ "$CC_RUN_CMD" == "empty-upload" ]; then
  cc_args=()
  cc_args+=( $(write_bool_args CC_FAIL_ON_ERROR) )
  cc_args+=( $(write_bool_args CC_FORCE) )
  cc_args+=( $(k_arg GIT_SERVICE) $(v_arg GIT_SERVICE))
  cc_args+=( $(k_arg SHA) $(v_arg SHA))
  cc_args+=( $(k_arg SLUG) $(v_arg SLUG))
elif [ "$CC_RUN_CMD" == "pr-base-picking" ]; then
  cc_args=()
  cc_args+=( $(k_arg BASE_SHA) $(v_arg BASE_SHA))
  cc_args+=( $(k_arg PR) $(v_arg PR))
  cc_args+=( $(k_arg SLUG) $(v_arg SLUG))
  cc_args+=( $(k_arg SERVICE) $(v_arg SERVICE))
elif [ "$CC_RUN_CMD" == "send-notifications" ]; then
  cc_args=()
  cc_args+=( $(k_arg SHA) $(v_arg SHA))
  cc_args+=( $(write_bool_args CC_FAIL_ON_ERROR) )
  cc_args+=( $(k_arg GIT_SERVICE) $(v_arg GIT_SERVICE))
  cc_args+=( $(k_arg SLUG) $(v_arg SLUG))
else
  exit_if_error "Invalid run command specified: $CC_RUN_CMD"
  exit
fi

unset NODE_OPTIONS
# https://github.com/codecov/uploader/issues/475

say "$g==>$x Running $CC_RUN_CMD"
say " $b$cc_command $(echo "${cc_cli_args[@]}") $CC_RUN_CMD$token_str $(echo "${cc_args[@]}")$x"
if ! $cc_command \
  ${cc_cli_args[*]} \
  ${CC_RUN_CMD} \
  ${token_arg[*]} \
  "${cc_args[@]}";
then
  exit_if_error "Failed to run $CC_RUN_CMD"
fi
|
||||||
32879
dist/index.js
vendored
32879
dist/index.js
vendored
File diff suppressed because one or more lines are too long
1
dist/index.js.map
vendored
1
dist/index.js.map
vendored
File diff suppressed because one or more lines are too long
52
dist/pgp_keys.asc
vendored
52
dist/pgp_keys.asc
vendored
@@ -1,52 +0,0 @@
|
|||||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
|
||||||
|
|
||||||
mQINBGCsMn0BEACiCKZOhkbhUjb+obvhH49p3ShjJzU5b/GqAXSDhRhdXUq7ZoGq
|
|
||||||
KEKCd7sQHrCf16Pi5UVacGIyE9hS93HwY15kMlLwM+lNeAeCglEscOjpCly1qUIr
|
|
||||||
sN1wjkd2cwDXS6zHBJTqJ7wSOiXbZfTAeKhd6DuLEpmA+Rz4Yc+4qZP+fVxVG3Pv
|
|
||||||
2v06m+E5CP/JQVQPO8HYi+S36hJImTh+zaDspu+VujSai5KzJ6YKmgwslVNIp5X5
|
|
||||||
GnEr2uAh5w6UTnt9UQUjFFliAvQ3lPLWzm7DWs6AP9hslYxSWzwbzVF5qbOIjUJL
|
|
||||||
KfoUpvCYDs2ObgRn8WUQO0ndkRCBIxhlF3HGGYWKQaCEsiom7lyi8VbAszmUCDjw
|
|
||||||
HdbQHFmm5yHLpTXJbg+iaxQzKnhWVXzye5/x92IJmJswW81Ky346VxYdC1XFL/+Y
|
|
||||||
zBaj9oMmV7WfRpdch09Gf4TgosMzWf3NjJbtKE5xkaghJckIgxwzcrRmF/RmCJue
|
|
||||||
IMqZ8A5qUUlK7NBzj51xmAQ4BtkUa2bcCBRV/vP+rk9wcBWz2LiaW+7Mwlfr/C/Q
|
|
||||||
Swvv/JW2LsQ4iWc1BY7m7ksn9dcdypEq/1JbIzVLCRDG7pbMj9yLgYmhe5TtjOM3
|
|
||||||
ygk25584EhXSgUA3MZw+DIqhbHQBYgrKndTr2N/wuBQY62zZg1YGQByD4QARAQAB
|
|
||||||
tEpDb2RlY292IFVwbG9hZGVyIChDb2RlY292IFVwbG9hZGVyIFZlcmlmaWNhdGlv
|
|
||||||
biBLZXkpIDxzZWN1cml0eUBjb2RlY292LmlvPokCTgQTAQoAOBYhBCcDTn/bhQ4L
|
|
||||||
vCxi/4Brsortd5hpBQJgrDJ9AhsDBQsJCAcCBhUKCQgLAgQWAgMBAh4BAheAAAoJ
|
|
||||||
EIBrsortd5hpxLMP/3Fbgx5EG7zUUOqPZ+Ya9z8JlZFIkh3FxYMfMFE8jH9Es26F
|
|
||||||
V2ZTJLO259MxM+5N0XzObi3h4XqIzBn42pDRfwtojY5wl2STJ9Bzu+ykPog7OB1u
|
|
||||||
yfWXDRKcqPTUIxI1/WdU+c0/WNE6wjyzK+lRc1YUlp4pdNU7l+j2vKN+jGi2b6nV
|
|
||||||
PTPRsMcwy3B90fKf5h2wNMNqO+KX/rjgpG9Uhej+xyFWkGM1tZDQQYFj+ugQUj61
|
|
||||||
BMsQrUmxOnaVVnix21cHnACDCaxqgQZH3iZyEOKPNMsRFRP+0fLEnUMP+DVnQE6J
|
|
||||||
Brk1Z+XhtjGI9PISQVx5KKDKscreS/D5ae2Cw/FUlQMf57kir6mkbZVhz2khtccz
|
|
||||||
atD0r59WomNywIDyk1QfAKV0+O0WeJg8A69/Jk6yegsrUb5qEfkih/I38vvI0OVL
|
|
||||||
BYve/mQIHuQo5ziBptNytCrN5TXHXzguX9GOW1V1+3DR+w/vXcnz67sjlYDysf1f
|
|
||||||
JUZv9edZ2RGKW7agbrgOw2hB+zuWZ10tjoEcsaSGOLtKRGFDfmu/dBxzl8yopUpa
|
|
||||||
Tn79QKOieleRm5+uCcKCPTeKV0GbhDntCZJ+Yiw6ZPmrpcjDowAoMQ9kiMVa10+Q
|
|
||||||
WwwoaRWuqhf+dL6Q2OLFOxlyCDKVSyW0YF4Vrf3fKGyxKJmszAL+NS1mVcdxuQIN
|
|
||||||
BGCsMn0BEADLrIesbpfdAfWRvUFDN+PoRfa0ROwa/JOMhEgVsowQuk9No8yRva/X
|
|
||||||
VyiA6oCq6na7IvZXMxT7di4FWDjDtw5xHjbtFg336IJTGBcnzm7WIsjvyyw8kKfB
|
|
||||||
8cvG7D2OkzAUF8SVXLarJ1zdBP/Dr1Nz6F/gJsx5+BM8wGHEz4DsdMRV7ZMTVh6b
|
|
||||||
PaGuPZysPjSEw62R8MFJ1fSyDGCKJYwMQ/sKFzseNaY/kZVR5lq0dmhiYjNVQeG9
|
|
||||||
HJ6ZCGSGT5PKNOwx/UEkT6jhvzWgfr2eFVGJTcdwSLEgIrJIDzP7myHGxuOiuCmJ
|
|
||||||
ENgL1f7mzGkJ/hYXq1RWqsn1Fh2I9KZMHggqu4a+s3RiscmNcbIlIhJLXoE1bxZ/
|
|
||||||
TfYZ9Aod6Bd5TsSMTZNwV2am9zelhDiFF60FWww/5nEbhm/X4suC9W86qWBxs3Kh
|
|
||||||
vk1dxhElRjtgwUEHA5OFOO48ERHfR7COH719D/YmqLU3EybBgJbGoC/yjlGJxv0R
|
|
||||||
kOMAiG2FneNKEZZihReh8A5Jt6jYrSoHFRwL6oJIZfLezB7Rdajx1uH7uYcUyIaE
|
|
||||||
SiDWlkDw/IFM315NYFA8c1TCSIfnabUYaAxSLNFRmXnt+GQpm44qAK1x8EGhY633
|
|
||||||
e5B4FWorIXx0tTmsVM4rkQ6IgAodeywKG+c2Ikd+5dQLFmb7dW/6CwARAQABiQI2
|
|
||||||
BBgBCgAgFiEEJwNOf9uFDgu8LGL/gGuyiu13mGkFAmCsMn0CGwwACgkQgGuyiu13
|
|
||||||
mGkYWxAAkzF64SVpYvY9nY/QSYikL8UHlyyqirs6eFZ3Mj9lMRpHM2Spn9a3c701
|
|
||||||
0Ge4wDbRP2oftCyPP+p9pdUA77ifMTlRcoMYX8oXAuyE5RT2emBDiWvSR6hQQ8bZ
|
|
||||||
WFNXal+bUPpaRiruCCUPD2b8Od1ftzLqbYOosxr/m5Du0uahgOuGw6zlGBJCVOo7
|
|
||||||
UB2Y++oZ8P7oDGF722opepWQ+bl2a6TRMLNWWlj4UANknyjlhyZZ7PKhWLjoC6MU
|
|
||||||
dAKcwQUdp+XYLc/3b00bvgju0e99QgHZMX2fN3d3ktdN5Q2fqiAi5R6BmCCO4ISF
|
|
||||||
o5j10gGU/sdqGHvNhv5C21ibun7HEzMtxBhnhGmytfBJzrsj7GOReePsfTLoCoUq
|
|
||||||
dFMOAVUDciVfRtL2m8cv42ZJOXtPfDjsFOf8AKJk40/tc8mMMqZP7RVBr9RWOoq5
|
|
||||||
y9D37NfI6UB8rPZ6qs0a1Vfm8lIh2/k1AFECduXgftMDTsmmXOgXXS37HukGW7AL
|
|
||||||
QKWiWJQF/XopkXwkyAYpyuyRMZ77oF7nuqLFnl5VVEiRo0Fwu45erebc6ccSwYZU
|
|
||||||
8pmeSx7s0aJtxCZPSZEKZ3mn0BXOR32Cgs48CjzFWf6PKucTwOy/YO0/4Gt/upNJ
|
|
||||||
3DyeINcYcKyD08DEIF9f5tLyoiD4xz+N23ltTBoMPyv4f3X/wCQ=
|
|
||||||
=ch7z
|
|
||||||
-----END PGP PUBLIC KEY BLOCK-----
|
|
||||||
1
dist/sourcemap-register.js
vendored
1
dist/sourcemap-register.js
vendored
File diff suppressed because one or more lines are too long
@@ -2,8 +2,12 @@
|
|||||||
|
|
||||||
set -e
|
set -e
|
||||||
|
|
||||||
npm install
|
cp src/scripts/dist/codecov.sh dist/codecov.sh
|
||||||
npm run lint
|
git add dist/codecov.sh
|
||||||
npm run build
|
|
||||||
git add dist/
|
git diff --cached --name-only | if grep --quiet "src/version"
|
||||||
git add package-lock.json
|
then
|
||||||
|
python changelog.py
|
||||||
|
fi
|
||||||
|
|
||||||
|
git add CHANGELOG.md
|
||||||
|
|||||||
@@ -1,4 +0,0 @@
|
|||||||
module.exports = {
|
|
||||||
preset: 'ts-jest',
|
|
||||||
testEnvironment: 'node',
|
|
||||||
};
|
|
||||||
8793
package-lock.json
generated
8793
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
43
package.json
43
package.json
@@ -1,43 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "codecov-action",
|
|
||||||
"version": "4.5.0",
|
|
||||||
"description": "Upload coverage reports to Codecov from GitHub Actions",
|
|
||||||
"main": "index.js",
|
|
||||||
"scripts": {
|
|
||||||
"build": "ncc build src/index.ts --source-map",
|
|
||||||
"lint": "eslint src/**/*.ts",
|
|
||||||
"test": "npm run test-script && npm run test-calculator && npm run test-coverage",
|
|
||||||
"test-calculator": "jest --testPathPattern=demo/calculator/ --coverage --coverageDirectory=coverage/calculator",
|
|
||||||
"test-coverage": "jest --testPathPattern=demo/coverage-test/ --coverage --coverageDirectory=coverage/coverage-test",
|
|
||||||
"test-script": "jest --testPathPattern=src/ --coverage --coverageDirectory=coverage/script"
|
|
||||||
},
|
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "git+https://github.com/codecov/codecov-action.git"
|
|
||||||
},
|
|
||||||
"keywords": [],
|
|
||||||
"author": "Codecov",
|
|
||||||
"license": "MIT",
|
|
||||||
"bugs": {
|
|
||||||
"url": "https://github.com/codecov/codecov-action/issues"
|
|
||||||
},
|
|
||||||
"homepage": "https://github.com/codecov/codecov-action#readme",
|
|
||||||
"dependencies": {
|
|
||||||
"@actions/core": "^1.10.1",
|
|
||||||
"@actions/exec": "^1.1.1",
|
|
||||||
"@actions/github": "^6.0.0",
|
|
||||||
"undici": "5.28.4"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@types/jest": "^29.5.12",
|
|
||||||
"@typescript-eslint/eslint-plugin": "^7.13.0",
|
|
||||||
"@typescript-eslint/parser": "^7.13.0",
|
|
||||||
"@vercel/ncc": "^0.38.1",
|
|
||||||
"eslint": "^8.57.0",
|
|
||||||
"eslint-config-google": "^0.14.0",
|
|
||||||
"jest": "^29.7.0",
|
|
||||||
"jest-junit": "^16.0.0",
|
|
||||||
"ts-jest": "^29.1.4",
|
|
||||||
"typescript": "^5.4.5"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,309 +0,0 @@
|
|||||||
import * as github from '@actions/github';
|
|
||||||
|
|
||||||
import {
|
|
||||||
buildCommitExec,
|
|
||||||
buildGeneralExec,
|
|
||||||
buildReportExec,
|
|
||||||
buildUploadExec,
|
|
||||||
} from './buildExec';
|
|
||||||
|
|
||||||
const context = github.context;
|
|
||||||
|
|
||||||
let OLDOS = process.env.RUNNER_OS;
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
jest.resetModules();
|
|
||||||
OLDOS = process.env.RUNNER_OS;
|
|
||||||
});
|
|
||||||
|
|
||||||
afterAll(() => {
|
|
||||||
process.env.RUNNER_OS = OLDOS;
|
|
||||||
});
|
|
||||||
|
|
||||||
test('general args', async () => {
|
|
||||||
const envs = {
|
|
||||||
codecov_yml_path: 'dev/codecov.yml',
|
|
||||||
url: 'https://codecov.enterprise.com',
|
|
||||||
verbose: 't',
|
|
||||||
};
|
|
||||||
for (const env of Object.keys(envs)) {
|
|
||||||
process.env['INPUT_' + env.toUpperCase()] = envs[env];
|
|
||||||
}
|
|
||||||
|
|
||||||
const {args, verbose} = await buildGeneralExec();
|
|
||||||
|
|
||||||
expect(args).toEqual(
|
|
||||||
expect.arrayContaining([
|
|
||||||
'--codecov-yml-path',
|
|
||||||
'dev/codecov.yml',
|
|
||||||
'--enterprise-url',
|
|
||||||
'https://codecov.enterprise.com',
|
|
||||||
'-v',
|
|
||||||
]));
|
|
||||||
expect(verbose).toBeTruthy();
|
|
||||||
for (const env of Object.keys(envs)) {
|
|
||||||
delete process.env['INPUT_' + env.toUpperCase()];
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
test('upload args using context', async () => {
|
|
||||||
const expectedArgs = [
|
|
||||||
'--git-service',
|
|
||||||
'github',
|
|
||||||
];
|
|
||||||
const {uploadExecArgs, uploadCommand} = await buildUploadExec();
|
|
||||||
if (context.eventName == 'pull_request') {
|
|
||||||
expectedArgs.push('-C', `${context.payload.pull_request?.head.sha}`);
|
|
||||||
}
|
|
||||||
if (context.eventName == 'pull_request_target') {
|
|
||||||
expectedArgs.push('-P', `${context.payload.number}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
expect(uploadExecArgs).toEqual(expectedArgs);
|
|
||||||
expect(uploadCommand).toEqual('do-upload');
|
|
||||||
});
|
|
||||||
|
|
||||||
test('upload args', async () => {
|
|
||||||
const envs = {
|
|
||||||
'codecov_yml_path': 'dev/codecov.yml',
|
|
||||||
'commit_parent': 'fakeparentcommit',
|
|
||||||
'directory': 'coverage/',
|
|
||||||
'disable_file_fixes': 'true',
|
|
||||||
'disable_search': 'true',
|
|
||||||
'dry_run': 'true',
|
|
||||||
'env_vars': 'OS,PYTHON',
|
|
||||||
'exclude': 'node_modules/',
|
|
||||||
'fail_ci_if_error': 'true',
|
|
||||||
'file': 'coverage.xml',
|
|
||||||
'files': 'dir1/coverage.xml,dir2/coverage.xml,',
|
|
||||||
'flags': 'test,test2',
|
|
||||||
'git_service': 'github_enterprise',
|
|
||||||
'handle_no_reports_found': 'true',
|
|
||||||
'job_code': '32',
|
|
||||||
'name': 'codecov',
|
|
||||||
'os': 'macos',
|
|
||||||
'override_branch': 'thomasrockhu/test',
|
|
||||||
'override_build': '1',
|
|
||||||
'override_build_url': 'https://example.com/build/2',
|
|
||||||
'override_commit': '9caabca5474b49de74ef5667deabaf74cdacc244',
|
|
||||||
'override_pr': '2',
|
|
||||||
'network_filter': 'subA/',
|
|
||||||
'network_prefix': 'forA/',
|
|
||||||
'plugin': 'xcode',
|
|
||||||
'plugins': 'pycoverage,compress-pycoverage',
|
|
||||||
'report_code': 'testCode',
|
|
||||||
'root_dir': 'root/',
|
|
||||||
'slug': 'fakeOwner/fakeRepo',
|
|
||||||
'token': 'd3859757-ab80-4664-924d-aef22fa7557b',
|
|
||||||
'url': 'https://enterprise.example.com',
|
|
||||||
'use_legacy_upload_endpoint': 'true',
|
|
||||||
'verbose': 'true',
|
|
||||||
'version': '0.1.2',
|
|
||||||
'working-directory': 'src',
|
|
||||||
};
|
|
||||||
for (const env of Object.keys(envs)) {
|
|
||||||
process.env['INPUT_' + env.toUpperCase()] = envs[env];
|
|
||||||
}
|
|
||||||
|
|
||||||
const {uploadExecArgs, uploadCommand} = await buildUploadExec();
|
|
||||||
const expectedArgs = [
|
|
||||||
'--disable-file-fixes',
|
|
||||||
'--disable-search',
|
|
||||||
'-d',
|
|
||||||
'-e',
|
|
||||||
'OS,PYTHON',
|
|
||||||
'--exclude',
|
|
||||||
'node_modules/',
|
|
||||||
'-Z',
|
|
||||||
'-f',
|
|
||||||
'coverage.xml',
|
|
||||||
'-f',
|
|
||||||
'dir1/coverage.xml',
|
|
||||||
'-f',
|
|
||||||
'dir2/coverage.xml',
|
|
||||||
'-F',
|
|
||||||
'test',
|
|
||||||
'-F',
|
|
||||||
'test2',
|
|
||||||
'--git-service',
|
|
||||||
'github_enterprise',
|
|
||||||
'--handle-no-reports-found',
|
|
||||||
'--job-code',
|
|
||||||
'32',
|
|
||||||
'-n',
|
|
||||||
'codecov',
|
|
||||||
'--network-filter',
|
|
||||||
'subA/',
|
|
||||||
'--network-prefix',
|
|
||||||
'forA/',
|
|
||||||
'-B',
|
|
||||||
'thomasrockhu/test',
|
|
||||||
'-b',
|
|
||||||
'1',
|
|
||||||
'--build-url',
|
|
||||||
'https://example.com/build/2',
|
|
||||||
'-C',
|
|
||||||
'9caabca5474b49de74ef5667deabaf74cdacc244',
|
|
||||||
'-P',
|
|
||||||
'2',
|
|
||||||
'--plugin',
|
|
||||||
'xcode',
|
|
||||||
'--plugin',
|
|
||||||
'pycoverage',
|
|
||||||
'--plugin',
|
|
||||||
'compress-pycoverage',
|
|
||||||
'--report-code',
|
|
||||||
'testCode',
|
|
||||||
'--network-root-folder',
|
|
||||||
'root/',
|
|
||||||
'-s',
|
|
||||||
'coverage/',
|
|
||||||
'-r',
|
|
||||||
'fakeOwner/fakeRepo',
|
|
||||||
'--legacy',
|
|
||||||
];
|
|
||||||
|
|
||||||
expect(uploadExecArgs).toEqual(expectedArgs);
|
|
||||||
expect(uploadCommand).toEqual('do-upload');
|
|
||||||
for (const env of Object.keys(envs)) {
|
|
||||||
delete process.env['INPUT_' + env.toUpperCase()];
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
test('report args', async () => {
|
|
||||||
const envs = {
|
|
||||||
git_service: 'github_enterprise',
|
|
||||||
override_commit: '9caabca5474b49de74ef5667deabaf74cdacc244',
|
|
||||||
override_pr: 'fakePR',
|
|
||||||
slug: 'fakeOwner/fakeRepo',
|
|
||||||
token: 'd3859757-ab80-4664-924d-aef22fa7557b',
|
|
||||||
fail_ci_if_error: 'true',
|
|
||||||
};
|
|
||||||
for (const env of Object.keys(envs)) {
|
|
||||||
process.env['INPUT_' + env.toUpperCase()] = envs[env];
|
|
||||||
}
|
|
||||||
|
|
||||||
const {reportExecArgs, reportCommand} = await buildReportExec();
|
|
||||||
|
|
||||||
const expectedArgs = [
|
|
||||||
'--git-service',
|
|
||||||
'github_enterprise',
|
|
||||||
'-C',
|
|
||||||
'9caabca5474b49de74ef5667deabaf74cdacc244',
|
|
||||||
'-P',
|
|
||||||
'fakePR',
|
|
||||||
'--slug',
|
|
||||||
'fakeOwner/fakeRepo',
|
|
||||||
'-Z',
|
|
||||||
];
|
|
||||||
|
|
||||||
expect(reportExecArgs).toEqual(expectedArgs);
|
|
||||||
expect(reportCommand).toEqual('create-report');
|
|
||||||
for (const env of Object.keys(envs)) {
|
|
||||||
delete process.env['INPUT_' + env.toUpperCase()];
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
test('report args using context', async () => {
|
|
||||||
const envs = {
|
|
||||||
token: 'd3859757-ab80-4664-924d-aef22fa7557b',
|
|
||||||
};
|
|
||||||
for (const env of Object.keys(envs)) {
|
|
||||||
process.env['INPUT_' + env.toUpperCase()] = envs[env];
|
|
||||||
}
|
|
||||||
const expectedArgs : string[] = [
|
|
||||||
'--git-service',
|
|
||||||
'github',
|
|
||||||
];
|
|
||||||
if (context.eventName == 'pull_request') {
|
|
||||||
expectedArgs.push('-C', `${context.payload.pull_request?.head.sha}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const {reportExecArgs, reportCommand} = await buildReportExec();
|
|
||||||
|
|
||||||
expect(reportExecArgs).toEqual(expectedArgs);
|
|
||||||
expect(reportCommand).toEqual('create-report');
|
|
||||||
for (const env of Object.keys(envs)) {
|
|
||||||
delete process.env['INPUT_' + env.toUpperCase()];
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
test('commit args', async () => {
|
|
||||||
const envs = {
|
|
||||||
git_service: 'github_enterprise',
|
|
||||||
commit_parent: '83231650328f11695dfb754ca0f540516f188d27',
|
|
||||||
override_branch: 'thomasrockhu/test',
|
|
||||||
override_commit: '9caabca5474b49de74ef5667deabaf74cdacc244',
|
|
||||||
override_pr: '2',
|
|
||||||
slug: 'fakeOwner/fakeRepo',
|
|
||||||
token: 'd3859757-ab80-4664-924d-aef22fa7557b',
|
|
||||||
fail_ci_if_error: 'true',
|
|
||||||
};
|
|
||||||
for (const env of Object.keys(envs)) {
|
|
||||||
process.env['INPUT_' + env.toUpperCase()] = envs[env];
|
|
||||||
}
|
|
||||||
|
|
||||||
const {commitExecArgs, commitCommand} = await buildCommitExec();
|
|
||||||
const expectedArgs = [
|
|
||||||
'--parent-sha',
|
|
||||||
'83231650328f11695dfb754ca0f540516f188d27',
|
|
||||||
'--git-service',
|
|
||||||
'github_enterprise',
|
|
||||||
'-B',
|
|
||||||
'thomasrockhu/test',
|
|
||||||
'-C',
|
|
||||||
'9caabca5474b49de74ef5667deabaf74cdacc244',
|
|
||||||
'--pr',
|
|
||||||
'2',
|
|
||||||
'--slug',
|
|
||||||
'fakeOwner/fakeRepo',
|
|
||||||
'-Z',
|
|
||||||
];
|
|
||||||
|
|
||||||
expect(commitExecArgs).toEqual(expectedArgs);
|
|
||||||
expect(commitCommand).toEqual('create-commit');
|
|
||||||
for (const env of Object.keys(envs)) {
|
|
||||||
delete process.env['INPUT_' + env.toUpperCase()];
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
test('commit args using context', async () => {
|
|
||||||
const expectedArgs :string[] = [
|
|
||||||
'--git-service',
|
|
||||||
'github',
|
|
||||||
];
|
|
||||||
|
|
||||||
const {commitExecArgs, commitCommand} = await buildCommitExec();
|
|
||||||
if (context.eventName == 'pull_request') {
|
|
||||||
expectedArgs.push('-C', `${context.payload.pull_request?.head.sha}`);
|
|
||||||
}
|
|
||||||
if (context.eventName == 'pull_request_target') {
|
|
||||||
expectedArgs.push('-P', `${context.payload.number}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
expect(commitExecArgs).toEqual(expectedArgs);
|
|
||||||
expect(commitCommand).toEqual('create-commit');
|
|
||||||
});
|
|
||||||
|
|
||||||
test('commit args using github server url', async () => {
|
|
||||||
const expectedArgs :string[] = [
|
|
||||||
'--git-service',
|
|
||||||
'github_enterprise',
|
|
||||||
];
|
|
||||||
|
|
||||||
process.env.GITHUB_SERVER_URL = 'https://example.com';
|
|
||||||
|
|
||||||
const {commitExecArgs, commitCommand} = await buildCommitExec();
|
|
||||||
if (context.eventName == 'pull_request') {
|
|
||||||
expectedArgs.push('-C', `${context.payload.pull_request?.head.sha}`);
|
|
||||||
}
|
|
||||||
if (context.eventName == 'pull_request_target') {
|
|
||||||
expectedArgs.push('-P', `${context.payload.number}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
expect(commitExecArgs).toEqual(expectedArgs);
|
|
||||||
expect(commitCommand).toEqual('create-commit');
|
|
||||||
});
|
|
||||||
412
src/buildExec.ts
412
src/buildExec.ts
@@ -1,412 +0,0 @@
|
|||||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
|
||||||
|
|
||||||
import * as core from '@actions/core';
|
|
||||||
import * as github from '@actions/github';
|
|
||||||
|
|
||||||
import {setFailure} from './helpers';
|
|
||||||
|
|
||||||
const context = github.context;
|
|
||||||
|
|
||||||
const isTrue = (variable: string): boolean => {
|
|
||||||
const lowercase = variable.toLowerCase();
|
|
||||||
return (
|
|
||||||
lowercase === '1' ||
|
|
||||||
lowercase === 't' ||
|
|
||||||
lowercase === 'true' ||
|
|
||||||
lowercase === 'y' ||
|
|
||||||
lowercase === 'yes'
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
const getGitService = (): string => {
|
|
||||||
const overrideGitService = core.getInput('git_service');
|
|
||||||
const serverUrl = process.env.GITHUB_SERVER_URL;
|
|
||||||
if (overrideGitService) {
|
|
||||||
return overrideGitService;
|
|
||||||
} else if (serverUrl !== undefined && serverUrl !== 'https://github.com') {
|
|
||||||
return 'github_enterprise';
|
|
||||||
}
|
|
||||||
return 'github';
|
|
||||||
};
|
|
||||||
|
|
||||||
const isPullRequestFromFork = (): boolean => {
|
|
||||||
core.info(`evenName: ${context.eventName}`);
|
|
||||||
if (
|
|
||||||
`${context.eventName}` !== 'pull_request' &&
|
|
||||||
`${context.eventName}` !== 'pull_request_target'
|
|
||||||
) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
const baseLabel = context.payload.pull_request.base.label;
|
|
||||||
const headLabel = context.payload.pull_request.head.label;
|
|
||||||
|
|
||||||
core.info(`baseRef: ${baseLabel} | headRef: ${headLabel}`);
|
|
||||||
return (baseLabel.split(':')[0] !== headLabel.split(':')[0]);
|
|
||||||
};
|
|
||||||
|
|
||||||
const getToken = async (): Promise<string> => {
|
|
||||||
let token = core.getInput('token');
|
|
||||||
if (!token && isPullRequestFromFork()) {
|
|
||||||
core.info('==> Fork detected, tokenless uploading used');
|
|
||||||
process.env['TOKENLESS'] = context.payload.pull_request.head.label;
|
|
||||||
return Promise.resolve('');
|
|
||||||
}
|
|
||||||
let url = core.getInput('url');
|
|
||||||
const useOIDC = isTrue(core.getInput('use_oidc'));
|
|
||||||
if (useOIDC) {
|
|
||||||
if (!url) {
|
|
||||||
url = 'https://codecov.io';
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
token = await core.getIDToken(url);
|
|
||||||
return token;
|
|
||||||
} catch (err) {
|
|
||||||
setFailure(
|
|
||||||
`Codecov: Failed to get OIDC token with url: ${url}. ${err.message}`,
|
|
||||||
true,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return token;
|
|
||||||
};
|
|
||||||
|
|
||||||
const buildCommitExec = async (): Promise<{
|
|
||||||
commitExecArgs: any[];
|
|
||||||
commitOptions: any;
|
|
||||||
commitCommand: string;
|
|
||||||
}> => {
|
|
||||||
const commitParent = core.getInput('commit_parent');
|
|
||||||
const gitService = getGitService();
|
|
||||||
const overrideBranch = core.getInput('override_branch');
|
|
||||||
const overrideCommit = core.getInput('override_commit');
|
|
||||||
const overridePr = core.getInput('override_pr');
|
|
||||||
const slug = core.getInput('slug');
|
|
||||||
const token = await getToken();
|
|
||||||
const failCi = isTrue(core.getInput('fail_ci_if_error'));
|
|
||||||
const workingDir = core.getInput('working-directory');
|
|
||||||
|
|
||||||
const commitCommand = 'create-commit';
|
|
||||||
const commitExecArgs = [];
|
|
||||||
|
|
||||||
const commitOptions:any = {};
|
|
||||||
commitOptions.env = Object.assign(process.env, {
|
|
||||||
GITHUB_ACTION: process.env.GITHUB_ACTION,
|
|
||||||
GITHUB_RUN_ID: process.env.GITHUB_RUN_ID,
|
|
||||||
GITHUB_REF: process.env.GITHUB_REF,
|
|
||||||
GITHUB_REPOSITORY: process.env.GITHUB_REPOSITORY,
|
|
||||||
GITHUB_SHA: process.env.GITHUB_SHA,
|
|
||||||
GITHUB_HEAD_REF: process.env.GITHUB_HEAD_REF || '',
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
if (token) {
|
|
||||||
commitOptions.env.CODECOV_TOKEN = token;
|
|
||||||
}
|
|
||||||
if (commitParent) {
|
|
||||||
commitExecArgs.push('--parent-sha', `${commitParent}`);
|
|
||||||
}
|
|
||||||
commitExecArgs.push('--git-service', `${gitService}`);
|
|
||||||
|
|
||||||
if (overrideBranch) {
|
|
||||||
commitExecArgs.push('-B', `${overrideBranch}`);
|
|
||||||
}
|
|
||||||
if (overrideCommit) {
|
|
||||||
commitExecArgs.push('-C', `${overrideCommit}`);
|
|
||||||
} else if (
|
|
||||||
`${context.eventName}` == 'pull_request' ||
|
|
||||||
`${context.eventName}` == 'pull_request_target'
|
|
||||||
) {
|
|
||||||
commitExecArgs.push('-C', `${context.payload.pull_request.head.sha}`);
|
|
||||||
}
|
|
||||||
if (overridePr) {
|
|
||||||
commitExecArgs.push('--pr', `${overridePr}`);
|
|
||||||
} else if (
|
|
||||||
`${context.eventName}` == 'pull_request_target'
|
|
||||||
) {
|
|
||||||
commitExecArgs.push('--pr', `${context.payload.number}`);
|
|
||||||
}
|
|
||||||
if (slug) {
|
|
||||||
commitExecArgs.push('--slug', `${slug}`);
|
|
||||||
}
|
|
||||||
if (failCi) {
|
|
||||||
commitExecArgs.push('-Z');
|
|
||||||
}
|
|
||||||
if (workingDir) {
|
|
||||||
commitOptions.cwd = workingDir;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
return {commitExecArgs, commitOptions, commitCommand};
|
|
||||||
};
|
|
||||||
|
|
||||||
const buildGeneralExec = (): {
|
|
||||||
args: any[];
|
|
||||||
verbose: boolean;
|
|
||||||
} => {
|
|
||||||
const codecovYmlPath = core.getInput('codecov_yml_path');
|
|
||||||
const url = core.getInput('url');
|
|
||||||
const verbose = isTrue(core.getInput('verbose'));
|
|
||||||
const args = [];
|
|
||||||
|
|
||||||
if (codecovYmlPath) {
|
|
||||||
args.push('--codecov-yml-path', `${codecovYmlPath}`);
|
|
||||||
}
|
|
||||||
if (url) {
|
|
||||||
args.push('--enterprise-url', `${url}`);
|
|
||||||
}
|
|
||||||
if (verbose) {
|
|
||||||
args.push('-v');
|
|
||||||
}
|
|
||||||
return {args, verbose};
|
|
||||||
};
|
|
||||||
|
|
||||||
const buildReportExec = async (): Promise<{
|
|
||||||
reportExecArgs: any[];
|
|
||||||
reportOptions: any;
|
|
||||||
reportCommand: string;
|
|
||||||
}> => {
|
|
||||||
const gitService = getGitService();
|
|
||||||
const overrideCommit = core.getInput('override_commit');
|
|
||||||
const overridePr = core.getInput('override_pr');
|
|
||||||
const slug = core.getInput('slug');
|
|
||||||
const token = await getToken();
|
|
||||||
const failCi = isTrue(core.getInput('fail_ci_if_error'));
|
|
||||||
const workingDir = core.getInput('working-directory');
|
|
||||||
|
|
||||||
|
|
||||||
const reportCommand = 'create-report';
|
|
||||||
const reportExecArgs = [];
|
|
||||||
|
|
||||||
const reportOptions:any = {};
|
|
||||||
reportOptions.env = Object.assign(process.env, {
|
|
||||||
GITHUB_ACTION: process.env.GITHUB_ACTION,
|
|
||||||
GITHUB_RUN_ID: process.env.GITHUB_RUN_ID,
|
|
||||||
GITHUB_REF: process.env.GITHUB_REF,
|
|
||||||
GITHUB_REPOSITORY: process.env.GITHUB_REPOSITORY,
|
|
||||||
GITHUB_SHA: process.env.GITHUB_SHA,
|
|
||||||
GITHUB_HEAD_REF: process.env.GITHUB_HEAD_REF || '',
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
if (token) {
|
|
||||||
reportOptions.env.CODECOV_TOKEN = token;
|
|
||||||
}
|
|
||||||
reportExecArgs.push('--git-service', `${gitService}`);
|
|
||||||
|
|
||||||
if (overrideCommit) {
|
|
||||||
reportExecArgs.push('-C', `${overrideCommit}`);
|
|
||||||
} else if (
|
|
||||||
`${context.eventName}` == 'pull_request' ||
|
|
||||||
`${context.eventName}` == 'pull_request_target'
|
|
||||||
) {
|
|
||||||
reportExecArgs.push('-C', `${context.payload.pull_request.head.sha}`);
|
|
||||||
}
|
|
||||||
if (overridePr) {
|
|
||||||
reportExecArgs.push('-P', `${overridePr}`);
|
|
||||||
} else if (
|
|
||||||
`${context.eventName}` == 'pull_request_target'
|
|
||||||
) {
|
|
||||||
reportExecArgs.push('-P', `${context.payload.number}`);
|
|
||||||
}
|
|
||||||
if (slug) {
|
|
||||||
reportExecArgs.push('--slug', `${slug}`);
|
|
||||||
}
|
|
||||||
if (failCi) {
|
|
||||||
reportExecArgs.push('-Z');
|
|
||||||
}
|
|
||||||
if (workingDir) {
|
|
||||||
reportOptions.cwd = workingDir;
|
|
||||||
}
|
|
||||||
|
|
||||||
return {reportExecArgs, reportOptions, reportCommand};
|
|
||||||
};
|
|
||||||
|
|
||||||
const buildUploadExec = async (): Promise<{
|
|
||||||
uploadExecArgs: any[];
|
|
||||||
uploadOptions: any;
|
|
||||||
disableSafeDirectory: boolean;
|
|
||||||
failCi: boolean;
|
|
||||||
os: string;
|
|
||||||
uploaderVersion: string;
|
|
||||||
uploadCommand: string;
|
|
||||||
}> => {
|
|
||||||
const disableFileFixes = isTrue(core.getInput('disable_file_fixes'));
|
|
||||||
const disableSafeDirectory = isTrue(core.getInput('disable_safe_directory'));
|
|
||||||
const disableSearch = isTrue(core.getInput('disable_search'));
|
|
||||||
const dryRun = isTrue(core.getInput('dry_run'));
|
|
||||||
const envVars = core.getInput('env_vars');
|
|
||||||
const exclude = core.getInput('exclude');
|
|
||||||
const failCi = isTrue(core.getInput('fail_ci_if_error'));
|
|
||||||
const file = core.getInput('file');
|
|
||||||
const files = core.getInput('files');
|
|
||||||
const flags = core.getInput('flags');
|
|
||||||
const gitService = getGitService();
|
|
||||||
const handleNoReportsFound = isTrue(core.getInput('handle_no_reports_found'));
|
|
||||||
const jobCode = core.getInput('job_code');
|
|
||||||
const name = core.getInput('name');
|
|
||||||
const networkFilter = core.getInput('network_filter');
|
|
||||||
const networkPrefix = core.getInput('network_prefix');
|
|
||||||
const os = core.getInput('os');
|
|
||||||
const overrideBranch = core.getInput('override_branch');
|
|
||||||
const overrideBuild = core.getInput('override_build');
|
|
||||||
const overrideBuildUrl = core.getInput('override_build_url');
|
|
||||||
const overrideCommit = core.getInput('override_commit');
|
|
||||||
const overridePr = core.getInput('override_pr');
|
|
||||||
const plugin = core.getInput('plugin');
|
|
||||||
const plugins = core.getInput('plugins');
|
|
||||||
const reportCode = core.getInput('report_code');
|
|
||||||
const rootDir = core.getInput('root_dir');
|
|
||||||
const searchDir = core.getInput('directory');
|
|
||||||
const slug = core.getInput('slug');
|
|
||||||
const token = await getToken();
|
|
||||||
let uploaderVersion = core.getInput('version');
|
|
||||||
const useLegacyUploadEndpoint = isTrue(
|
|
||||||
core.getInput('use_legacy_upload_endpoint'),
|
|
||||||
);
|
|
||||||
const workingDir = core.getInput('working-directory');
|
|
||||||
|
|
||||||
const uploadExecArgs = [];
|
|
||||||
const uploadCommand = 'do-upload';
|
|
||||||
const uploadOptions:any = {};
|
|
||||||
uploadOptions.env = Object.assign(process.env, {
|
|
||||||
GITHUB_ACTION: process.env.GITHUB_ACTION,
|
|
||||||
GITHUB_RUN_ID: process.env.GITHUB_RUN_ID,
|
|
||||||
GITHUB_REF: process.env.GITHUB_REF,
|
|
||||||
GITHUB_REPOSITORY: process.env.GITHUB_REPOSITORY,
|
|
||||||
GITHUB_SHA: process.env.GITHUB_SHA,
|
|
||||||
GITHUB_HEAD_REF: process.env.GITHUB_HEAD_REF || '',
|
|
||||||
});
|
|
||||||
|
|
||||||
const envVarsArg = [];
|
|
||||||
for (const envVar of envVars.split(',')) {
|
|
||||||
const envVarClean = envVar.trim();
|
|
||||||
if (envVarClean) {
|
|
||||||
uploadOptions.env[envVarClean] = process.env[envVarClean];
|
|
||||||
envVarsArg.push(envVarClean);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (token) {
|
|
||||||
uploadOptions.env.CODECOV_TOKEN = token;
|
|
||||||
}
|
|
||||||
if (disableFileFixes) {
|
|
||||||
uploadExecArgs.push('--disable-file-fixes');
|
|
||||||
}
|
|
||||||
if (disableSearch) {
|
|
||||||
uploadExecArgs.push('--disable-search');
|
|
||||||
}
|
|
||||||
if (dryRun) {
|
|
||||||
uploadExecArgs.push('-d');
|
|
||||||
}
|
|
||||||
if (envVarsArg.length) {
|
|
||||||
uploadExecArgs.push('-e', envVarsArg.join(','));
|
|
||||||
}
|
|
||||||
if (exclude) {
|
|
||||||
uploadExecArgs.push('--exclude', `${exclude}`);
|
|
||||||
}
|
|
||||||
if (failCi) {
|
|
||||||
uploadExecArgs.push('-Z');
|
|
||||||
}
|
|
||||||
if (file) {
|
|
||||||
uploadExecArgs.push('-f', `${file}`);
|
|
||||||
}
|
|
||||||
if (files) {
|
|
||||||
files.split(',').map((f) => f.trim()).forEach((f) => {
|
|
||||||
if (f.length > 0) { // this handles trailing commas
|
|
||||||
uploadExecArgs.push('-f', `${f}`);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
if (flags) {
|
|
||||||
flags.split(',').map((f) => f.trim()).forEach((f) => {
|
|
||||||
uploadExecArgs.push('-F', `${f}`);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
uploadExecArgs.push('--git-service', `${gitService}`);
|
|
||||||
if (handleNoReportsFound) {
|
|
||||||
uploadExecArgs.push('--handle-no-reports-found');
|
|
||||||
}
|
|
||||||
if (jobCode) {
|
|
||||||
uploadExecArgs.push('--job-code', `${jobCode}`);
|
|
||||||
}
|
|
||||||
if (name) {
|
|
||||||
uploadExecArgs.push('-n', `${name}`);
|
|
||||||
}
|
|
||||||
if (networkFilter) {
|
|
||||||
uploadExecArgs.push('--network-filter', `${networkFilter}`);
|
|
||||||
}
|
|
||||||
if (networkPrefix) {
|
|
||||||
uploadExecArgs.push('--network-prefix', `${networkPrefix}`);
|
|
||||||
}
|
|
||||||
if (overrideBranch) {
|
|
||||||
uploadExecArgs.push('-B', `${overrideBranch}`);
|
|
||||||
}
|
|
||||||
if (overrideBuild) {
|
|
||||||
uploadExecArgs.push('-b', `${overrideBuild}`);
|
|
||||||
}
|
|
||||||
if (overrideBuildUrl) {
|
|
||||||
uploadExecArgs.push('--build-url', `${overrideBuildUrl}`);
|
|
||||||
}
|
|
||||||
if (overrideCommit) {
|
|
||||||
uploadExecArgs.push('-C', `${overrideCommit}`);
|
|
||||||
} else if (
|
|
||||||
`${context.eventName}` == 'pull_request' ||
|
|
||||||
`${context.eventName}` == 'pull_request_target'
|
|
||||||
) {
|
|
||||||
uploadExecArgs.push('-C', `${context.payload.pull_request.head.sha}`);
|
|
||||||
}
|
|
||||||
if (overridePr) {
|
|
||||||
uploadExecArgs.push('-P', `${overridePr}`);
|
|
||||||
} else if (
|
|
||||||
`${context.eventName}` == 'pull_request_target'
|
|
||||||
) {
|
|
||||||
uploadExecArgs.push('-P', `${context.payload.number}`);
|
|
||||||
}
|
|
||||||
if (plugin) {
|
|
||||||
uploadExecArgs.push('--plugin', `${plugin}`);
|
|
||||||
}
|
|
||||||
if (plugins) {
|
|
||||||
plugins.split(',').map((p) => p.trim()).forEach((p) => {
|
|
||||||
uploadExecArgs.push('--plugin', `${p}`);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
if (reportCode) {
|
|
||||||
uploadExecArgs.push('--report-code', `${reportCode}`);
|
|
||||||
}
|
|
||||||
if (rootDir) {
|
|
||||||
uploadExecArgs.push('--network-root-folder', `${rootDir}`);
|
|
||||||
}
|
|
||||||
if (searchDir) {
|
|
||||||
uploadExecArgs.push('-s', `${searchDir}`);
|
|
||||||
}
|
|
||||||
if (slug) {
|
|
||||||
uploadExecArgs.push('-r', `${slug}`);
|
|
||||||
}
|
|
||||||
if (workingDir) {
|
|
||||||
uploadOptions.cwd = workingDir;
|
|
||||||
}
|
|
||||||
if (uploaderVersion == '') {
|
|
||||||
uploaderVersion = 'latest';
|
|
||||||
}
|
|
||||||
if (useLegacyUploadEndpoint) {
|
|
||||||
uploadExecArgs.push('--legacy');
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
uploadExecArgs,
|
|
||||||
uploadOptions,
|
|
||||||
disableSafeDirectory,
|
|
||||||
failCi,
|
|
||||||
os,
|
|
||||||
uploaderVersion,
|
|
||||||
uploadCommand,
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
|
|
||||||
export {
|
|
||||||
buildCommitExec,
|
|
||||||
buildGeneralExec,
|
|
||||||
buildReportExec,
|
|
||||||
buildUploadExec,
|
|
||||||
};
|
|
||||||
@@ -1,96 +0,0 @@
|
|||||||
import * as exec from '@actions/exec';
|
|
||||||
|
|
||||||
import {
|
|
||||||
PLATFORMS,
|
|
||||||
getBaseUrl,
|
|
||||||
getCommand,
|
|
||||||
getPlatform,
|
|
||||||
isValidPlatform,
|
|
||||||
isWindows,
|
|
||||||
setSafeDirectory,
|
|
||||||
} from './helpers';
|
|
||||||
|
|
||||||
let OLDOS = process.env.RUNNER_OS;
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
jest.resetModules();
|
|
||||||
OLDOS = process.env.RUNNER_OS;
|
|
||||||
});
|
|
||||||
|
|
||||||
afterAll(() => {
|
|
||||||
process.env.RUNNER_OS = OLDOS;
|
|
||||||
});
|
|
||||||
|
|
||||||
test('getPlatform', () => {
|
|
||||||
expect(getPlatform('linux')).toBe('linux');
|
|
||||||
expect(getPlatform('windows')).toBe('windows');
|
|
||||||
|
|
||||||
const defaultPlatform =
|
|
||||||
process.env.RUNNER_OS ? process.env.RUNNER_OS.toLowerCase() : 'linux';
|
|
||||||
expect(getPlatform('fakeos')).toBe(defaultPlatform);
|
|
||||||
expect(getPlatform()).toBe(defaultPlatform);
|
|
||||||
|
|
||||||
process.env.RUNNER_OS = 'macos';
|
|
||||||
expect(getPlatform('fakeos')).toBe('macos');
|
|
||||||
expect(getPlatform()).toBe('macos');
|
|
||||||
|
|
||||||
process.env.RUNNER_OS = 'alsofakeos';
|
|
||||||
expect(getPlatform()).toBe('linux');
|
|
||||||
expect(getPlatform('fakeos')).toBe('linux');
|
|
||||||
});
|
|
||||||
|
|
||||||
test('getBaseUrl', () => {
|
|
||||||
expect(PLATFORMS.map((platform) => {
|
|
||||||
return getBaseUrl(platform, 'latest');
|
|
||||||
})).toEqual([
|
|
||||||
'https://cli.codecov.io/latest/linux/codecov',
|
|
||||||
'https://cli.codecov.io/latest/macos/codecov',
|
|
||||||
'https://cli.codecov.io/latest/windows/codecov.exe',
|
|
||||||
'https://cli.codecov.io/latest/alpine/codecov',
|
|
||||||
'https://cli.codecov.io/latest/linux-arm64/codecov',
|
|
||||||
'https://cli.codecov.io/latest/alpine-arm64/codecov',
|
|
||||||
]);
|
|
||||||
|
|
||||||
expect(PLATFORMS.map((platform) => {
|
|
||||||
return getBaseUrl(platform, 'v0.1.0_8880');
|
|
||||||
})).toEqual([
|
|
||||||
'https://cli.codecov.io/v0.1.0_8880/linux/codecov',
|
|
||||||
'https://cli.codecov.io/v0.1.0_8880/macos/codecov',
|
|
||||||
'https://cli.codecov.io/v0.1.0_8880/windows/codecov.exe',
|
|
||||||
'https://cli.codecov.io/v0.1.0_8880/alpine/codecov',
|
|
||||||
'https://cli.codecov.io/v0.1.0_8880/linux-arm64/codecov',
|
|
||||||
'https://cli.codecov.io/v0.1.0_8880/alpine-arm64/codecov',
|
|
||||||
]);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('isWindows', () => {
|
|
||||||
expect(PLATFORMS.map((platform) => {
|
|
||||||
return isWindows(platform);
|
|
||||||
})).toEqual([false, false, true, false, false, false]);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('isValidPlatform', () => {
|
|
||||||
expect(PLATFORMS.map((platform) => {
|
|
||||||
return isValidPlatform(platform);
|
|
||||||
})).toEqual([true, true, true, true, true, true]);
|
|
||||||
|
|
||||||
expect(isValidPlatform('fakeos')).toBeFalsy();
|
|
||||||
});
|
|
||||||
|
|
||||||
test('getCommand', () => {
|
|
||||||
expect(getCommand('path', ['-v', '-x'], 'do-upload'))
|
|
||||||
.toEqual(['path', '-v', '-x', 'do-upload']);
|
|
||||||
});
|
|
||||||
|
|
||||||
test('setSafeDirectory', async () => {
|
|
||||||
process.env.GITHUB_WORKSPACE = 'testOrg/testRepo';
|
|
||||||
await setSafeDirectory();
|
|
||||||
const testSafeDirectory = ([
|
|
||||||
'git',
|
|
||||||
'config',
|
|
||||||
'--get',
|
|
||||||
'safe.directory',
|
|
||||||
]).join(' ');
|
|
||||||
const safeDirectory = await exec.getExecOutput(testSafeDirectory);
|
|
||||||
expect(safeDirectory.stdout).toBe('testOrg/testRepo\n');
|
|
||||||
});
|
|
||||||
@@ -1,92 +0,0 @@
|
|||||||
import * as core from '@actions/core';
|
|
||||||
import * as exec from '@actions/exec';
|
|
||||||
|
|
||||||
// Operating systems for which a Codecov CLI binary is published. The order
// matters: the test-suite asserts results in exactly this sequence.
const PLATFORMS = [
  'linux',
  'macos',
  'windows',
  'alpine',
  'linux-arm64',
  'alpine-arm64',
] as const;
// Union of the literal platform names above ('linux' | 'macos' | ...).
type Platform = typeof PLATFORMS[number];
|
|
||||||
|
|
||||||
const setFailure = (message: string, failCi: boolean): void => {
|
|
||||||
failCi ? core.setFailed(message) : core.warning(message);
|
|
||||||
if (failCi) {
|
|
||||||
process.exit();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const getUploaderName = (platform: string): string => {
|
|
||||||
if (isWindows(platform)) {
|
|
||||||
return 'codecov.exe';
|
|
||||||
} else {
|
|
||||||
return 'codecov';
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const isValidPlatform = (platform: string): platform is Platform => {
|
|
||||||
return PLATFORMS.includes(platform as Platform);
|
|
||||||
};
|
|
||||||
|
|
||||||
const isWindows = (platform: string): boolean => {
|
|
||||||
return platform === 'windows';
|
|
||||||
};
|
|
||||||
|
|
||||||
const getPlatform = (os?: string): string => {
|
|
||||||
if (isValidPlatform(os)) {
|
|
||||||
core.info(`==> ${os} OS provided`);
|
|
||||||
return os;
|
|
||||||
}
|
|
||||||
|
|
||||||
const platform = process.env.RUNNER_OS?.toLowerCase();
|
|
||||||
if (isValidPlatform(platform)) {
|
|
||||||
core.info(`==> ${platform} OS detected`);
|
|
||||||
return platform;
|
|
||||||
}
|
|
||||||
|
|
||||||
core.info(
|
|
||||||
'==> Could not detect OS or provided OS is invalid. Defaulting to linux',
|
|
||||||
);
|
|
||||||
return 'linux';
|
|
||||||
};
|
|
||||||
|
|
||||||
const getBaseUrl = (platform: string, version: string): string => {
|
|
||||||
return `https://cli.codecov.io/${version}/${platform}/${getUploaderName(platform)}`;
|
|
||||||
};
|
|
||||||
|
|
||||||
const getCommand = (
|
|
||||||
filename: string,
|
|
||||||
generalArgs:string[],
|
|
||||||
command: string,
|
|
||||||
): string[] => {
|
|
||||||
const fullCommand = [filename, ...generalArgs, command];
|
|
||||||
core.info(`==> Running command '${fullCommand.join(' ')}'`);
|
|
||||||
return fullCommand;
|
|
||||||
};
|
|
||||||
|
|
||||||
const setSafeDirectory = async () => {
|
|
||||||
const command = ([
|
|
||||||
'git',
|
|
||||||
'config',
|
|
||||||
'--global',
|
|
||||||
'--add',
|
|
||||||
'safe.directory',
|
|
||||||
`${process.env['GITHUB_WORKSPACE']}`,
|
|
||||||
].join(' '));
|
|
||||||
core.info(`==> Running ${command}`);
|
|
||||||
await exec.exec(command);
|
|
||||||
};
|
|
||||||
|
|
||||||
// Public API of the helpers module, consumed by index.ts, validate.ts and
// the test-suite.
export {
  PLATFORMS,
  getBaseUrl,
  getPlatform,
  getUploaderName,
  isValidPlatform,
  isWindows,
  setFailure,
  setSafeDirectory,
  getCommand,
};
|
|
||||||
129
src/index.ts
129
src/index.ts
@@ -1,129 +0,0 @@
|
|||||||
import * as fs from 'node:fs';
|
|
||||||
import * as https from 'node:https';
|
|
||||||
import * as path from 'node:path';
|
|
||||||
|
|
||||||
import * as exec from '@actions/exec';
|
|
||||||
|
|
||||||
import {
|
|
||||||
buildCommitExec,
|
|
||||||
buildGeneralExec,
|
|
||||||
buildReportExec,
|
|
||||||
buildUploadExec,
|
|
||||||
} from './buildExec';
|
|
||||||
import {
|
|
||||||
getBaseUrl,
|
|
||||||
getCommand,
|
|
||||||
getPlatform,
|
|
||||||
getUploaderName,
|
|
||||||
setFailure,
|
|
||||||
setSafeDirectory,
|
|
||||||
} from './helpers';
|
|
||||||
|
|
||||||
import verify from './validate';
|
|
||||||
import versionInfo from './version';
|
|
||||||
|
|
||||||
let failCi;
|
|
||||||
|
|
||||||
const run = async (): Promise<void> => {
|
|
||||||
try {
|
|
||||||
const {commitExecArgs, commitOptions, commitCommand} = await buildCommitExec();
|
|
||||||
const {reportExecArgs, reportOptions, reportCommand} = await buildReportExec();
|
|
||||||
const {
|
|
||||||
uploadExecArgs,
|
|
||||||
uploadOptions,
|
|
||||||
disableSafeDirectory,
|
|
||||||
failCi,
|
|
||||||
os,
|
|
||||||
uploaderVersion,
|
|
||||||
uploadCommand,
|
|
||||||
} = await buildUploadExec();
|
|
||||||
const {args, verbose} = buildGeneralExec();
|
|
||||||
|
|
||||||
const platform = getPlatform(os);
|
|
||||||
|
|
||||||
const filename = path.join( __dirname, getUploaderName(platform));
|
|
||||||
https.get(getBaseUrl(platform, uploaderVersion), (res) => {
|
|
||||||
// Image will be stored at this path
|
|
||||||
const filePath = fs.createWriteStream(filename);
|
|
||||||
res.pipe(filePath);
|
|
||||||
filePath
|
|
||||||
.on('error', (err) => {
|
|
||||||
setFailure(
|
|
||||||
`Codecov: Failed to write uploader binary: ${err.message}`,
|
|
||||||
true,
|
|
||||||
);
|
|
||||||
}).on('finish', async () => {
|
|
||||||
filePath.close();
|
|
||||||
|
|
||||||
await verify(filename, platform, uploaderVersion, verbose, failCi);
|
|
||||||
await versionInfo(platform, uploaderVersion);
|
|
||||||
await fs.chmodSync(filename, '777');
|
|
||||||
if (!disableSafeDirectory) {
|
|
||||||
await setSafeDirectory();
|
|
||||||
}
|
|
||||||
|
|
||||||
const unlink = (): void => {
|
|
||||||
fs.unlink(filename, (err) => {
|
|
||||||
if (err) {
|
|
||||||
setFailure(
|
|
||||||
`Codecov: Could not unlink uploader: ${err.message}`,
|
|
||||||
failCi,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
};
|
|
||||||
const doUpload = async (): Promise<void> => {
|
|
||||||
await exec.exec(getCommand(filename, args, uploadCommand).join(' '),
|
|
||||||
uploadExecArgs,
|
|
||||||
uploadOptions)
|
|
||||||
.catch((err) => {
|
|
||||||
setFailure(
|
|
||||||
`Codecov:
|
|
||||||
Failed to properly upload report: ${err.message}`,
|
|
||||||
failCi,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
const createReport = async (): Promise<void> => {
|
|
||||||
await exec.exec(
|
|
||||||
getCommand(filename, args, reportCommand).join(' '),
|
|
||||||
reportExecArgs,
|
|
||||||
reportOptions)
|
|
||||||
.then(async (exitCode) => {
|
|
||||||
if (exitCode == 0) {
|
|
||||||
await doUpload();
|
|
||||||
}
|
|
||||||
}).catch((err) => {
|
|
||||||
setFailure(
|
|
||||||
`Codecov:
|
|
||||||
Failed to properly create report: ${err.message}`,
|
|
||||||
failCi,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
await exec.exec(
|
|
||||||
getCommand(
|
|
||||||
filename,
|
|
||||||
args,
|
|
||||||
commitCommand,
|
|
||||||
).join(' '),
|
|
||||||
commitExecArgs, commitOptions)
|
|
||||||
.then(async (exitCode) => {
|
|
||||||
if (exitCode == 0) {
|
|
||||||
await createReport();
|
|
||||||
}
|
|
||||||
unlink();
|
|
||||||
}).catch((err) => {
|
|
||||||
setFailure(
|
|
||||||
`Codecov: Failed to properly create commit: ${err.message}`,
|
|
||||||
failCi,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
} catch (err) {
|
|
||||||
setFailure(`Codecov: Encountered an unexpected error ${err.message}`, failCi);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
run();
|
|
||||||
@@ -1,52 +0,0 @@
|
|||||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
|
||||||
|
|
||||||
mQINBGCsMn0BEACiCKZOhkbhUjb+obvhH49p3ShjJzU5b/GqAXSDhRhdXUq7ZoGq
|
|
||||||
KEKCd7sQHrCf16Pi5UVacGIyE9hS93HwY15kMlLwM+lNeAeCglEscOjpCly1qUIr
|
|
||||||
sN1wjkd2cwDXS6zHBJTqJ7wSOiXbZfTAeKhd6DuLEpmA+Rz4Yc+4qZP+fVxVG3Pv
|
|
||||||
2v06m+E5CP/JQVQPO8HYi+S36hJImTh+zaDspu+VujSai5KzJ6YKmgwslVNIp5X5
|
|
||||||
GnEr2uAh5w6UTnt9UQUjFFliAvQ3lPLWzm7DWs6AP9hslYxSWzwbzVF5qbOIjUJL
|
|
||||||
KfoUpvCYDs2ObgRn8WUQO0ndkRCBIxhlF3HGGYWKQaCEsiom7lyi8VbAszmUCDjw
|
|
||||||
HdbQHFmm5yHLpTXJbg+iaxQzKnhWVXzye5/x92IJmJswW81Ky346VxYdC1XFL/+Y
|
|
||||||
zBaj9oMmV7WfRpdch09Gf4TgosMzWf3NjJbtKE5xkaghJckIgxwzcrRmF/RmCJue
|
|
||||||
IMqZ8A5qUUlK7NBzj51xmAQ4BtkUa2bcCBRV/vP+rk9wcBWz2LiaW+7Mwlfr/C/Q
|
|
||||||
Swvv/JW2LsQ4iWc1BY7m7ksn9dcdypEq/1JbIzVLCRDG7pbMj9yLgYmhe5TtjOM3
|
|
||||||
ygk25584EhXSgUA3MZw+DIqhbHQBYgrKndTr2N/wuBQY62zZg1YGQByD4QARAQAB
|
|
||||||
tEpDb2RlY292IFVwbG9hZGVyIChDb2RlY292IFVwbG9hZGVyIFZlcmlmaWNhdGlv
|
|
||||||
biBLZXkpIDxzZWN1cml0eUBjb2RlY292LmlvPokCTgQTAQoAOBYhBCcDTn/bhQ4L
|
|
||||||
vCxi/4Brsortd5hpBQJgrDJ9AhsDBQsJCAcCBhUKCQgLAgQWAgMBAh4BAheAAAoJ
|
|
||||||
EIBrsortd5hpxLMP/3Fbgx5EG7zUUOqPZ+Ya9z8JlZFIkh3FxYMfMFE8jH9Es26F
|
|
||||||
V2ZTJLO259MxM+5N0XzObi3h4XqIzBn42pDRfwtojY5wl2STJ9Bzu+ykPog7OB1u
|
|
||||||
yfWXDRKcqPTUIxI1/WdU+c0/WNE6wjyzK+lRc1YUlp4pdNU7l+j2vKN+jGi2b6nV
|
|
||||||
PTPRsMcwy3B90fKf5h2wNMNqO+KX/rjgpG9Uhej+xyFWkGM1tZDQQYFj+ugQUj61
|
|
||||||
BMsQrUmxOnaVVnix21cHnACDCaxqgQZH3iZyEOKPNMsRFRP+0fLEnUMP+DVnQE6J
|
|
||||||
Brk1Z+XhtjGI9PISQVx5KKDKscreS/D5ae2Cw/FUlQMf57kir6mkbZVhz2khtccz
|
|
||||||
atD0r59WomNywIDyk1QfAKV0+O0WeJg8A69/Jk6yegsrUb5qEfkih/I38vvI0OVL
|
|
||||||
BYve/mQIHuQo5ziBptNytCrN5TXHXzguX9GOW1V1+3DR+w/vXcnz67sjlYDysf1f
|
|
||||||
JUZv9edZ2RGKW7agbrgOw2hB+zuWZ10tjoEcsaSGOLtKRGFDfmu/dBxzl8yopUpa
|
|
||||||
Tn79QKOieleRm5+uCcKCPTeKV0GbhDntCZJ+Yiw6ZPmrpcjDowAoMQ9kiMVa10+Q
|
|
||||||
WwwoaRWuqhf+dL6Q2OLFOxlyCDKVSyW0YF4Vrf3fKGyxKJmszAL+NS1mVcdxuQIN
|
|
||||||
BGCsMn0BEADLrIesbpfdAfWRvUFDN+PoRfa0ROwa/JOMhEgVsowQuk9No8yRva/X
|
|
||||||
VyiA6oCq6na7IvZXMxT7di4FWDjDtw5xHjbtFg336IJTGBcnzm7WIsjvyyw8kKfB
|
|
||||||
8cvG7D2OkzAUF8SVXLarJ1zdBP/Dr1Nz6F/gJsx5+BM8wGHEz4DsdMRV7ZMTVh6b
|
|
||||||
PaGuPZysPjSEw62R8MFJ1fSyDGCKJYwMQ/sKFzseNaY/kZVR5lq0dmhiYjNVQeG9
|
|
||||||
HJ6ZCGSGT5PKNOwx/UEkT6jhvzWgfr2eFVGJTcdwSLEgIrJIDzP7myHGxuOiuCmJ
|
|
||||||
ENgL1f7mzGkJ/hYXq1RWqsn1Fh2I9KZMHggqu4a+s3RiscmNcbIlIhJLXoE1bxZ/
|
|
||||||
TfYZ9Aod6Bd5TsSMTZNwV2am9zelhDiFF60FWww/5nEbhm/X4suC9W86qWBxs3Kh
|
|
||||||
vk1dxhElRjtgwUEHA5OFOO48ERHfR7COH719D/YmqLU3EybBgJbGoC/yjlGJxv0R
|
|
||||||
kOMAiG2FneNKEZZihReh8A5Jt6jYrSoHFRwL6oJIZfLezB7Rdajx1uH7uYcUyIaE
|
|
||||||
SiDWlkDw/IFM315NYFA8c1TCSIfnabUYaAxSLNFRmXnt+GQpm44qAK1x8EGhY633
|
|
||||||
e5B4FWorIXx0tTmsVM4rkQ6IgAodeywKG+c2Ikd+5dQLFmb7dW/6CwARAQABiQI2
|
|
||||||
BBgBCgAgFiEEJwNOf9uFDgu8LGL/gGuyiu13mGkFAmCsMn0CGwwACgkQgGuyiu13
|
|
||||||
mGkYWxAAkzF64SVpYvY9nY/QSYikL8UHlyyqirs6eFZ3Mj9lMRpHM2Spn9a3c701
|
|
||||||
0Ge4wDbRP2oftCyPP+p9pdUA77ifMTlRcoMYX8oXAuyE5RT2emBDiWvSR6hQQ8bZ
|
|
||||||
WFNXal+bUPpaRiruCCUPD2b8Od1ftzLqbYOosxr/m5Du0uahgOuGw6zlGBJCVOo7
|
|
||||||
UB2Y++oZ8P7oDGF722opepWQ+bl2a6TRMLNWWlj4UANknyjlhyZZ7PKhWLjoC6MU
|
|
||||||
dAKcwQUdp+XYLc/3b00bvgju0e99QgHZMX2fN3d3ktdN5Q2fqiAi5R6BmCCO4ISF
|
|
||||||
o5j10gGU/sdqGHvNhv5C21ibun7HEzMtxBhnhGmytfBJzrsj7GOReePsfTLoCoUq
|
|
||||||
dFMOAVUDciVfRtL2m8cv42ZJOXtPfDjsFOf8AKJk40/tc8mMMqZP7RVBr9RWOoq5
|
|
||||||
y9D37NfI6UB8rPZ6qs0a1Vfm8lIh2/k1AFECduXgftMDTsmmXOgXXS37HukGW7AL
|
|
||||||
QKWiWJQF/XopkXwkyAYpyuyRMZ77oF7nuqLFnl5VVEiRo0Fwu45erebc6ccSwYZU
|
|
||||||
8pmeSx7s0aJtxCZPSZEKZ3mn0BXOR32Cgs48CjzFWf6PKucTwOy/YO0/4Gt/upNJ
|
|
||||||
3DyeINcYcKyD08DEIF9f5tLyoiD4xz+N23ltTBoMPyv4f3X/wCQ=
|
|
||||||
=ch7z
|
|
||||||
-----END PGP PUBLIC KEY BLOCK-----
|
|
||||||
1
src/scripts
Submodule
1
src/scripts
Submodule
Submodule src/scripts added at 96f8531c88
120
src/validate.ts
120
src/validate.ts
@@ -1,120 +0,0 @@
|
|||||||
import {execSync} from 'node:child_process';
|
|
||||||
import * as crypto from 'node:crypto';
|
|
||||||
import * as fs from 'node:fs';
|
|
||||||
import * as path from 'node:path';
|
|
||||||
|
|
||||||
import * as core from '@actions/core';
|
|
||||||
import {request} from 'undici';
|
|
||||||
|
|
||||||
import {
|
|
||||||
getBaseUrl,
|
|
||||||
getUploaderName,
|
|
||||||
setFailure,
|
|
||||||
} from './helpers';
|
|
||||||
|
|
||||||
const verify = async (
|
|
||||||
filename: string,
|
|
||||||
platform: string,
|
|
||||||
version: string,
|
|
||||||
verbose: boolean,
|
|
||||||
failCi: boolean,
|
|
||||||
): Promise<void> => {
|
|
||||||
try {
|
|
||||||
const uploaderName = getUploaderName(platform);
|
|
||||||
|
|
||||||
// Get SHASUM and SHASUM signature files
|
|
||||||
console.log(`${getBaseUrl(platform, version)}.SHA256SUM`);
|
|
||||||
const shasumRes = await request(
|
|
||||||
`${getBaseUrl(platform, version)}.SHA256SUM`,
|
|
||||||
);
|
|
||||||
const shasum = await shasumRes.body.text();
|
|
||||||
if (verbose) {
|
|
||||||
console.log(`Received SHA256SUM ${shasum}`);
|
|
||||||
}
|
|
||||||
await fs.writeFileSync(
|
|
||||||
path.join(__dirname, `${uploaderName}.SHA256SUM`),
|
|
||||||
shasum,
|
|
||||||
);
|
|
||||||
|
|
||||||
const shaSigRes = await request(
|
|
||||||
`${getBaseUrl(platform, version)}.SHA256SUM.sig`,
|
|
||||||
);
|
|
||||||
const shaSig = await shaSigRes.body.text();
|
|
||||||
if (verbose) {
|
|
||||||
console.log(`Received SHA256SUM signature ${shaSig}`);
|
|
||||||
}
|
|
||||||
await fs.writeFileSync(
|
|
||||||
path.join(__dirname, `${uploaderName}.SHA256SUM.sig`),
|
|
||||||
shaSig,
|
|
||||||
);
|
|
||||||
|
|
||||||
const validateSha = async () => {
|
|
||||||
const calculateHash = async (filename: string) => {
|
|
||||||
const stream = fs.createReadStream(filename);
|
|
||||||
const uploaderSha = crypto.createHash(`sha256`);
|
|
||||||
stream.pipe(uploaderSha);
|
|
||||||
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
stream.on('end', () => resolve(
|
|
||||||
`${uploaderSha.digest('hex')} ${uploaderName}`,
|
|
||||||
));
|
|
||||||
stream.on('error', reject);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
const hash = await calculateHash(
|
|
||||||
path.join(__dirname, `${uploaderName}`),
|
|
||||||
);
|
|
||||||
if (hash === shasum) {
|
|
||||||
core.info(`==> Uploader SHASUM verified (${hash})`);
|
|
||||||
} else {
|
|
||||||
setFailure(
|
|
||||||
'Codecov: Uploader shasum does not match -- ' +
|
|
||||||
`uploader hash: ${hash}, public hash: ${shasum}`,
|
|
||||||
failCi,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const verifySignature = async () => {
|
|
||||||
const command = [
|
|
||||||
'gpg',
|
|
||||||
'--logger-fd',
|
|
||||||
'1',
|
|
||||||
'--verify',
|
|
||||||
path.join(__dirname, `${uploaderName}.SHA256SUM.sig`),
|
|
||||||
path.join(__dirname, `${uploaderName}.SHA256SUM`),
|
|
||||||
].join(' ');
|
|
||||||
|
|
||||||
try {
|
|
||||||
await execSync(command, {stdio: 'inherit'});
|
|
||||||
} catch (err) {
|
|
||||||
setFailure(`Codecov: Error verifying gpg signature: ${err.message}`, failCi);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const importKey = async () => {
|
|
||||||
const command = [
|
|
||||||
'gpg',
|
|
||||||
'--logger-fd',
|
|
||||||
'1',
|
|
||||||
'--no-default-keyring',
|
|
||||||
'--import',
|
|
||||||
path.join(__dirname, 'pgp_keys.asc'),
|
|
||||||
].join(' ');
|
|
||||||
|
|
||||||
try {
|
|
||||||
await execSync(command, {stdio: 'inherit'});
|
|
||||||
} catch (err) {
|
|
||||||
setFailure(`Codecov: Error importing gpg key: ${err.message}`, failCi);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
await importKey();
|
|
||||||
await verifySignature();
|
|
||||||
await validateSha();
|
|
||||||
} catch (err) {
|
|
||||||
setFailure(`Codecov: Error validating uploader: ${err.message}`, failCi);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
export default verify;
|
|
||||||
1
src/version
Normal file
1
src/version
Normal file
@@ -0,0 +1 @@
|
|||||||
|
5.4.0
|
||||||
@@ -1,20 +0,0 @@
|
|||||||
import * as core from '@actions/core';
|
|
||||||
import {request} from 'undici';
|
|
||||||
|
|
||||||
const versionInfo = async (
|
|
||||||
platform: string,
|
|
||||||
version: string,
|
|
||||||
): Promise<void> => {
|
|
||||||
core.info(`==> Running version ${version}`);
|
|
||||||
|
|
||||||
try {
|
|
||||||
const metadataRes = await request(`https://cli.codecov.io/${platform}/${version}`, {
|
|
||||||
headers: {'Accept': 'application/json'},
|
|
||||||
});
|
|
||||||
const metadata = await metadataRes.body.json();
|
|
||||||
core.info(`==> Running version ${metadata['version']}`);
|
|
||||||
} catch (err) {
|
|
||||||
core.info(`Could not pull latest version information: ${err}`);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
export default versionInfo;
|
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
{
|
|
||||||
"compilerOptions": {
|
|
||||||
"esModuleInterop": true,
|
|
||||||
"moduleResolution": "node",
|
|
||||||
"outDir": "dist/",
|
|
||||||
"resolveJsonModule": true,
|
|
||||||
"rootDir": ".",
|
|
||||||
"sourceMap": true,
|
|
||||||
"target": "es2015"
|
|
||||||
},
|
|
||||||
"include": [
|
|
||||||
"src/**/*.ts"
|
|
||||||
],
|
|
||||||
"exclude": [
|
|
||||||
"src/**/*.test.ts"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
Reference in New Issue
Block a user