Mirror of https://github.com/codecov/codecov-action.git (synced 2025-12-08 16:16:24 +00:00)

Compare commits (530 commits)
530 commits, e28ff129e5 through 6c58d35c22 (abbreviated SHAs only; the author, date, and message columns of the commit table were not captured).
**ESLint configuration**

```diff
@@ -5,7 +5,9 @@
     "es2021": true
   },
   "extends": [
-    "google"
+    "google",
+    "eslint:recommended",
+    "plugin:@typescript-eslint/recommended"
   ],
   "parser": "@typescript-eslint/parser",
   "parserOptions": {
@@ -15,5 +17,7 @@
     "@typescript-eslint"
   ],
   "rules": {
+    "max-len": ["error", { "code": 120 }],
+    "linebreak-style": 0
   }
 }
```
**.github/dependabot.yml** (7 changed lines)

```diff
@@ -1,7 +1,12 @@
 version: 2
 updates:
 - package-ecosystem: npm
-  directory: "/"
+  directory: /
   schedule:
     interval: daily
   open-pull-requests-limit: 10
+- package-ecosystem: github-actions
+  directory: /
+  schedule:
+    interval: weekly
+  open-pull-requests-limit: 10
```
**.github/workflows/codeql-analysis.yml** (new file, 69 lines)

```yaml
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ main ]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [ main ]
  schedule:
    - cron: '24 6 * * 5'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'javascript' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
        # Learn more about CodeQL language support at https://git.io/codeql-language-support

    steps:
    - name: Checkout repository
      uses: actions/checkout@v4.1.6

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v3.25.8
      with:
        languages: ${{ matrix.language }}
        # If you wish to specify custom queries, you can do so here or in a config file.
        # By default, queries listed here will override any specified in a config file.
        # Prefix the list here with "+" to use these queries and those in the config file.
        # queries: ./path/to/local/query, your-org/your-repo/queries@main

    # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
    # If this step fails, then you should remove it and run the build manually (see below)
    - name: Autobuild
      uses: github/codeql-action/autobuild@v3.25.8

    # ℹ️ Command-line programs to run using the OS shell.
    # 📚 https://git.io/JvXDl

    # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
    #    and modify them (or add more) to build your code if your project
    #    uses a compiled language

    #- run: |
    #   make bootstrap
    #   make release

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v3.25.8
```
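The `language` matrix above is what drives which CodeQL analyzers run. If the repository ever gained sources in another supported language, the matrix entry would be the only thing that needs to change; a minimal, hypothetical extension (the extra `python` entry is an assumption for illustration, not part of the repository's workflow) could look like this:

```yaml
    strategy:
      fail-fast: false
      matrix:
        # Hypothetical: analyze Python sources alongside JavaScript.
        language: [ 'javascript', 'python' ]
```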
**.github/workflows/enforce-license-compliance.yml** (new file, 14 lines)

```yaml
name: Enforce License Compliance

on:
  pull_request:
    branches: [main]

jobs:
  enforce-license-compliance:
    runs-on: ubuntu-latest
    steps:
      - name: 'Enforce License Compliance'
        uses: getsentry/action-enforce-license-compliance@57ba820387a1a9315a46115ee276b2968da51f3d # main
        with:
          fossa_api_key: ${{ secrets.FOSSA_API_KEY }}
```
**.github/workflows/main.yml** (119 changes; the compare view flattens the old and new versions, so replaced lines appear back to back)

```yaml
@@ -2,26 +2,129 @@ name: Workflow for Codecov Action
on: [push, pull_request]
jobs:
  run:
    runs-on: ubuntu-latest
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [macos-latest, windows-latest, ubuntu-latest]
    steps:
      - name: Checkout
        uses: actions/checkout@v2
        uses: actions/checkout@v4.1.6
      - name: Install dependencies
        run: npm install
      - name: Lint
        run: npm run lint
      - name: Run tests and collect coverage
        run: yarn run test
        run: npm run test
      - name: Upload coverage to Codecov (script)
        uses: ./
        with:
          fail_ci_if_error: true
          files: ./coverage/script/coverage-final.json
          flags: script,${{ matrix.os }}
          name: codecov-script
          verbose: true
          token: ${{ secrets.CODECOV_TOKEN }}
      - name: Upload coverage to Codecov (demo)
        uses: ./
        with:
          fail_ci_if_error: true
          files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json
          file: ./coverage/coverage-final.json
          flags: demo,${{ matrix.os }}
          name: codecov-demo
          verbose: true
          token: ${{ secrets.CODECOV_TOKEN }}
      - name: Upload coverage to Codecov (version)
        uses: ./
        with:
          fail_ci_if_error: true
          files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json
          file: ./coverage/coverage-final.json
          flags: version,${{ matrix.os }}
          name: codecov-version
          version: v0.6.0
          verbose: true
          token: ${{ secrets.CODECOV_TOKEN }}

  run-macos-latest-xlarge:
    if: github.head.repo.full_name == 'codecov/codecov-action'
    runs-on: macos-latest-xlarge
    steps:
      - name: Checkout
        uses: actions/checkout@v4.1.6
      - name: Install dependencies
        run: npm install
      - name: Lint
        run: npm run lint
      - name: Run tests and collect coverage
        run: npm run test
      - name: Upload coverage to Codecov (script)
        uses: ./
        with:
          fail_ci_if_error: true
          files: ./coverage/script/coverage-final.json
          flags: script,macos-latest-xlarge
          name: codecov-script
          verbose: true
          token: ${{ secrets.CODECOV_TOKEN }}
      - name: Upload coverage to Codecov (demo)
        uses: ./
        with:
          fail_ci_if_error: true
          files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json
          file: ./coverage/coverage-final.json
          flags: demo,macos-latest-xlarge
          name: codecov-demo
          verbose: true
          token: ${{ secrets.CODECOV_TOKEN }}
      - name: Upload coverage to Codecov (version)
        uses: ./
        with:
          fail_ci_if_error: true
          files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json
          file: ./coverage/coverage-final.json
          flags: version,maxos-latest-xlarge
          name: codecov-version
          version: v0.6.0
          verbose: true
          token: ${{ secrets.CODECOV_TOKEN }}

  run-container:
    runs-on: ubuntu-latest
    container: node:18
    steps:
      - name: Checkout
        uses: actions/checkout@v4.1.6
      - name: Install dependencies
        run: npm install
      - name: Lint
        run: npm run lint
      - name: Run tests and collect coverage
        run: npm run test
      - name: Upload coverage to Codecov (script)
        uses: ./
        with:
          files: ./coverage/script/coverage-final.json
          flags: script,${{ matrix.os }}
          name: codecov-script
          verbose: true
          token: ${{ secrets.CODECOV_TOKEN }}
      - name: Upload coverage to Codecov (demo)
        uses: ./
        with:
          files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json
          file: ./coverage/coverage-final.json
          flags: demo
          flags: demo,${{ matrix.os }}
          name: codecov-demo
      - name: Upload coverage to Codecov (script)
          verbose: true
          token: ${{ secrets.CODECOV_TOKEN }}
      - name: Upload coverage to Codecov (version)
        uses: ./
        with:
          files: ./coverage/script/coverage-final.json
          flags: script
          name: codecov-script
          files: ./coverage/calculator/coverage-final.json,./coverage/coverage-test/coverage-final.json
          file: ./coverage/coverage-final.json
          flags: version,${{ matrix.os }}
          name: codecov-version
          version: v0.6.0
          verbose: true
          token: ${{ secrets.CODECOV_TOKEN }}
```
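The jobs above exercise the action from the repository root (`uses: ./`). A downstream repository would reference the published action instead; a minimal sketch, assuming a `CODECOV_TOKEN` secret is configured and a coverage report exists at `./coverage/coverage-final.json` (both assumptions for illustration):

```yaml
jobs:
  upload-coverage:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v4
        with:
          files: ./coverage/coverage-final.json
          fail_ci_if_error: true
          token: ${{ secrets.CODECOV_TOKEN }}
```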
**.github/workflows/scorecards-analysis.yml** (new file, 61 lines)

```yaml
name: Scorecards supply-chain security
on:
  # Only the default branch is supported.
  branch_protection_rule:
  schedule:
    - cron: '43 20 * * 1'
  push:
    branches: [ main ]

# Declare default permissions as read only.
permissions: read-all

jobs:
  analysis:
    name: Scorecards analysis
    runs-on: ubuntu-latest
    permissions:
      # Needed to upload the results to code-scanning dashboard.
      security-events: write
      # Used to receive a badge. (Upcoming feature)
      id-token: write
      actions: read
      contents: read

    steps:
      - name: "Checkout code"
        uses: actions/checkout@v4.1.6 # v3.0.0
        with:
          persist-credentials: false

      - name: "Run analysis"
        uses: ossf/scorecard-action@dc50aa9510b46c811795eb24b2f1ba02a914e534 # v2.3.3
        with:
          results_file: results.sarif
          results_format: sarif
          # (Optional) Read-only PAT token. Uncomment the `repo_token` line below if:
          # - you want to enable the Branch-Protection check on a *public* repository, or
          # - you are installing Scorecards on a *private* repository
          # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat.
          # repo_token: ${{ secrets.SCORECARD_READ_TOKEN }}

          # Publish the results for public repositories to enable scorecard badges. For more details, see
          # https://github.com/ossf/scorecard-action#publishing-results.
          # For private repositories, `publish_results` will automatically be set to `false`, regardless
          # of the value entered here.
          publish_results: true

      # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
      # format to the repository Actions tab.
      - name: "Upload artifact"
        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
        with:
          name: SARIF file
          path: results.sarif
          retention-days: 5

      # Upload the results to GitHub's code scanning dashboard.
      - name: "Upload to code-scanning"
        uses: github/codeql-action/upload-sarif@v3.25.8 # v1.0.26
        with:
          sarif_file: results.sarif
```
**CHANGELOG.md** (194 changes)

```markdown
@@ -1,3 +1,197 @@
## 4.0.0-beta.2
### Fixes
- #1085 not adding -n if empty to do-upload command

## 4.0.0-beta.1

`v4` represents a move from the [universal uploader](https://github.com/codecov/uploader) to the [Codecov CLI](https://github.com/codecov/codecov-cli). Although this will unlock new features for our users, the CLI is not yet at feature parity with the universal uploader.

### Breaking Changes
- No current support for `aarch64` and `alpine` architectures.
- Tokenless uploading is unsuported
- Various arguments to the Action have been removed

## 3.1.4
### Fixes
- #967 Fix typo in README.md
- #971 fix: add back in working dir
- #969 fix: CLI option names for uploader

### Dependencies
- #970 build(deps-dev): bump @types/node from 18.15.12 to 18.16.3
- #979 build(deps-dev): bump @types/node from 20.1.0 to 20.1.2
- #981 build(deps-dev): bump @types/node from 20.1.2 to 20.1.4

## 3.1.3
### Fixes
- #960 fix: allow for aarch64 build

### Dependencies
- #957 build(deps-dev): bump jest-junit from 15.0.0 to 16.0.0
- #958 build(deps): bump openpgp from 5.7.0 to 5.8.0
- #959 build(deps-dev): bump @types/node from 18.15.10 to 18.15.12

## 3.1.2
### Fixes
- #718 Update README.md
- #851 Remove unsupported path_to_write_report argument
- #898 codeql-analysis.yml
- #901 Update README to contain correct information - inputs and negate feature
- #955 fix: add in all the extra arguments for uploader

### Dependencies
- #819 build(deps): bump openpgp from 5.4.0 to 5.5.0
- #835 build(deps): bump node-fetch from 3.2.4 to 3.2.10
- #840 build(deps): bump ossf/scorecard-action from 1.1.1 to 2.0.4
- #841 build(deps): bump @actions/core from 1.9.1 to 1.10.0
- #843 build(deps): bump @actions/github from 5.0.3 to 5.1.1
- #869 build(deps): bump node-fetch from 3.2.10 to 3.3.0
- #872 build(deps-dev): bump jest-junit from 13.2.0 to 15.0.0
- #879 build(deps): bump decode-uri-component from 0.2.0 to 0.2.2
- #889 build(deps): bump ossf/scorecard-action from 1.1.1 to 2.1.2
- #895 build(deps): bump json5 from 2.2.1 to 2.2.3
- #896 build(deps): bump actions/upload-artifact from 3.1.0 to 3.1.2
- #900 build(deps-dev): bump @vercel/ncc from 0.34.0 to 0.36.1
- #905 build(deps-dev): bump typescript from 4.7.4 to 4.9.5
- #911 build(deps-dev): bump @types/node from 16.11.40 to 18.13.0
- #922 build(deps-dev): bump @types/node from 18.13.0 to 18.14.0
- #924 build(deps): bump openpgp from 5.5.0 to 5.7.0
- #927 build(deps-dev): bump @types/node from 18.14.0 to 18.14.2
- #933 build(deps-dev): bump @types/node from 18.14.2 to 18.14.6
- #937 build(deps-dev): bump @types/node from 18.14.6 to 18.15.0
- #938 build(deps): bump node-fetch from 3.3.0 to 3.3.1
- #945 build(deps-dev): bump @types/node from 18.15.0 to 18.15.5
- #946 build(deps-dev): bump @types/node from 18.15.5 to 18.15.6
- #947 build(deps-dev): bump @types/node from 18.15.6 to 18.15.10
- #951 build(deps): bump ossf/scorecard-action from 2.1.2 to 2.1.3

## 3.1.1
### Fixes
- #661 Update deprecation warning
- #593 Create codeql-analysis.yml
- #712 README: fix typo
- #725 fix: Remove a blank row
- #726 Update README.md with correct badge version
- #633 Create scorecards-analysis.yml
- #747 fix: add more verbosity to validation
- #750 Regenerate scorecards-analysis.yml
- #774 Switch to v3
- #783 Fix network entry in table
- #791 Trim arguments after splitting them
- #769 Plumb failCi into verification function.

### Dependencies
- #713 build(deps-dev): bump typescript from 4.6.3 to 4.6.4
- #714 build(deps): bump node-fetch from 3.2.3 to 3.2.4
- #724 build(deps): bump github/codeql-action from 1 to 2
- #717 build(deps-dev): bump @types/jest from 27.4.1 to 27.5.0
- #729 build(deps-dev): bump @types/node from 17.0.25 to 17.0.33
- #734 build(deps-dev): downgrade @types/node to 16.11.35
- #723 build(deps): bump actions/checkout from 2 to 3
- #733 build(deps): bump @actions/github from 5.0.1 to 5.0.3
- #732 build(deps): bump @actions/core from 1.6.0 to 1.8.2
- #737 build(deps-dev): bump @types/node from 16.11.35 to 16.11.36
- #749 build(deps): bump ossf/scorecard-action from 1.0.1 to 1.1.0
- #755 build(deps-dev): bump typescript from 4.6.4 to 4.7.3
- #759 build(deps-dev): bump @types/node from 16.11.36 to 16.11.39
- #762 build(deps-dev): bump @types/node from 16.11.39 to 16.11.40
- #746 build(deps-dev): bump @vercel/ncc from 0.33.4 to 0.34.0
- #757 build(deps): bump ossf/scorecard-action from 1.1.0 to 1.1.1
- #760 build(deps): bump openpgp from 5.2.1 to 5.3.0
- #748 build(deps): bump actions/upload-artifact from 2.3.1 to 3.1.0
- #766 build(deps-dev): bump typescript from 4.7.3 to 4.7.4
- #799 build(deps): bump openpgp from 5.3.0 to 5.4.0
- #798 build(deps): bump @actions/core from 1.8.2 to 1.9.1

## 3.1.0
### Features
- #699 Incorporate `xcode` arguments for the Codecov uploader

### Dependencies
- #694 build(deps-dev): bump @vercel/ncc from 0.33.3 to 0.33.4
- #696 build(deps-dev): bump @types/node from 17.0.23 to 17.0.25
- #698 build(deps-dev): bump jest-junit from 13.0.0 to 13.2.0

## 3.0.0
### Breaking Changes
- #689 Bump to node16 and small fixes

### Features
- #688 Incorporate `gcov` arguments for the Codecov uploader

### Dependencies
- #548 build(deps-dev): bump jest-junit from 12.2.0 to 13.0.0
- #603 [Snyk] Upgrade @actions/core from 1.5.0 to 1.6.0
- #628 build(deps): bump node-fetch from 2.6.1 to 3.1.1
- #634 build(deps): bump node-fetch from 3.1.1 to 3.2.0
- #636 build(deps): bump openpgp from 5.0.1 to 5.1.0
- #652 build(deps-dev): bump @vercel/ncc from 0.30.0 to 0.33.3
- #653 build(deps-dev): bump @types/node from 16.11.21 to 17.0.18
- #659 build(deps-dev): bump @types/jest from 27.4.0 to 27.4.1
- #667 build(deps): bump actions/checkout from 2 to 3
- #673 build(deps): bump node-fetch from 3.2.0 to 3.2.3
- #683 build(deps): bump minimist from 1.2.5 to 1.2.6
- #685 build(deps): bump @actions/github from 5.0.0 to 5.0.1
- #681 build(deps-dev): bump @types/node from 17.0.18 to 17.0.23
- #682 build(deps-dev): bump typescript from 4.5.5 to 4.6.3
- #676 build(deps): bump @actions/exec from 1.1.0 to 1.1.1
- #675 build(deps): bump openpgp from 5.1.0 to 5.2.1

## 2.1.0
### Features
- #515 Allow specifying version of Codecov uploader

### Dependencies
- #499 build(deps-dev): bump @vercel/ncc from 0.29.0 to 0.30.0
- #508 build(deps): bump openpgp from 5.0.0-5 to 5.0.0
- #514 build(deps-dev): bump @types/node from 16.6.0 to 16.9.0

## 2.0.3
### Fixes
- #464 Fix wrong link in the readme
- #485 fix: Add override OS and linux default to platform

### Dependencies
- #447 build(deps): bump openpgp from 5.0.0-4 to 5.0.0-5
- #458 build(deps-dev): bump eslint from 7.31.0 to 7.32.0
- #465 build(deps-dev): bump @typescript-eslint/eslint-plugin from 4.28.4 to 4.29.1
- #466 build(deps-dev): bump @typescript-eslint/parser from 4.28.4 to 4.29.1
- #468 build(deps-dev): bump @types/jest from 26.0.24 to 27.0.0
- #470 build(deps-dev): bump @types/node from 16.4.0 to 16.6.0
- #472 build(deps): bump path-parse from 1.0.6 to 1.0.7
- #473 build(deps-dev): bump @types/jest from 27.0.0 to 27.0.1
- #478 build(deps-dev): bump @typescript-eslint/parser from 4.29.1 to 4.29.2
- #479 build(deps-dev): bump @typescript-eslint/eslint-plugin from 4.29.1 to 4.29.2
- #481 build(deps-dev): bump @types/node from 16.6.0 to 16.6.2
- #483 build(deps-dev): bump @vercel/ncc from 0.29.0 to 0.29.2
- #484 build(deps): bump @actions/core from 1.4.0 to 1.5.0

## 2.0.2
### Fixes
- Underlying uploader fixes issues with tokens not being sent properly for users seeing
`Error!: Error: Error uploading to https://codecov.io: Error: Error uploading to Codecov: Error: Not Found`
- #440 fix: Validation ordering

## 2.0.1
### Fixes
- #424 fix: Issue in building all deep dependencies

## 2.0.0
On February 1, 2022, the `v1` uploader will be full sunset and no longer function. This is due
to the deprecation of the underlying bash uploader. This version uses the new [uploader](https://github.com/codecov/uploader).

The `v2` Action downloads, verifies, and runs the Codecov binary.

### Breaking Changes
- Multiple fields have not been transferred from the bash uploader or have been deprecated. Notably
many of the `functionalities` and `gcov_` arguments have been removed. Please check the documentation
for the full list.

### Features
- `dry-run` argument allows Codecov flow without uploading reports to Codecov
- (Enterprise only) `slug` allows specifying the repository slug manually
- (Enterprise only) `url` allows changing the upload host

## 1.5.2
### Fixes
- # fix: Import version properly as string not object
```
**Contributing guidelines**

```diff
@@ -2,12 +2,12 @@
 
 :tada: Thanks for taking the time to contribute! :tada:
 
-The following is a set of guidelines for contributing to this repository, which is hosted in the [Codecov Organization](https://github.com/codecov) on GitHub.
+The following is a set of guidelines for contributing to this repository, which is hosted in the [Codecov Organization](https://github.com/codecov) on GitHub.
 
 ## What does this repo do?
 
-This repo is a GitHub Action, meaning it integrates with the GitHub Actions CI/CD pipeline. It's meant to take formatted reports with code coverage stats and upload them to codecov.io. Our Node action uses the Actions toolkit to make system calls that allow us to run Codecov's bash uploader inside of Node. Essentially what we're doing in this action is downloading Codecov's bash uploader script from codecov.io/bash, saving it as a file in the current directory, executing the file via `exec` calls, then removing the script from the current directory.
+This repo is a GitHub Action, meaning it integrates with the GitHub Actions CI/CD pipeline. It's meant to take formatted reports with code coverage stats and upload them to codecov.io. Our Node action uses the Actions toolkit to make system calls that allow us to run Codecov's bash uploader inside of Node. Essentially what we're doing in this action is downloading Codecov's bash uploader script from codecov.io/bash, saving it as a file in the current directory, executing the file via `exec` calls, then removing the script from the current directory.
 
 ## PRs, Issues, and Support
 
-Feel free to clone, modify code and request a PR to this repository. All PRs and issues will be reviewed by the Codecov team. If your PR/issue has been sitting for a while or if you have any questions, ping us at support@codecov.io
+Feel free to clone, modify code and request a PR to this repository. All PRs and issues will be reviewed by the Codecov team. If your PR/issue has been sitting for a while or if you have any questions, ping us at support@codecov.io
```
**Makefile** (8 changed lines)

```diff
@@ -1,7 +1,7 @@
 deploy:
 	$(eval VERSION := $(shell cat package.json | grep '"version": ' | cut -d\" -f4))
-	git tag -d v1
-	git push origin :v1
-	git tag v1
-	git tag v$(VERSION) -m ""
+	git tag -d v4
+	git push origin :v4
+	git tag v4
+	git tag v$(VERSION) -s -m ""
 	git push origin --tags
```
**README.md** (150 changes; the compare view flattens the old and new versions, so replaced passages appear back to back)

````markdown
@@ -1,79 +1,119 @@
# Codecov GitHub Action

[](https://github.com/marketplace/actions/codecov)
[](https://github.com/marketplace/actions/codecov)
[](https://app.fossa.com/projects/git%2Bgithub.com%2Fcodecov%2Fcodecov-action?ref=badge_shield)
[](https://github.com/codecov/codecov-action/actions/workflows/main.yml)
### Easily upload coverage reports to Codecov from GitHub Actions

>The latest release of this Action adds support for tokenless uploads from GitHub Actions!
## v4 Release
`v4` of the Codecov GitHub Action will use the [Codecov CLI](https://github.com/codecov/codecov-cli) to upload coverage reports to Codecov.

### Breaking Changes
- Tokenless uploading is unsupported. However, PRs made from forks to the upstream public repos will support tokenless (e.g. contributors to OS projects do not need the upstream repo's Codecov token). For details, [see our docs](https://docs.codecov.com/docs/codecov-uploader#supporting-token-less-uploads-for-forks-of-open-source-repos-using-codecov)
- Various arguments to the Action have been removed

### Dependabot
- For repositories using `Dependabot`, users will need to ensure that it has access to the Codecov token for PRs from Dependabot to upload coverage. To do this, please add your `CODECOV_TOKEN` as a Dependabot Secret. For more information, see ["Configuring access to private registries for Dependabot."](https://docs.github.com/en/code-security/dependabot/working-with-dependabot/configuring-access-to-private-registries-for-dependabot#storing-credentials-for-dependabot-to-use)

`v3` versions and below will not have access to CLI features (e.g. global upload token, ATS).

## Usage

To integrate Codecov with your Actions pipeline, specify the name of this repository with a tag number (`@v1` is recommended) as a `step` within your `workflow.yml` file.
To integrate Codecov with your Actions pipeline, specify the name of this repository with a tag number (`@v4` is recommended) as a `step` within your `workflow.yml` file.

If you have a *private repository*, this Action also requires you to [provide an upload token](https://docs.codecov.io/docs/frequently-asked-questions#section-where-is-the-repository-upload-token-found-) from [codecov.io](https://www.codecov.io) (tip: in order to avoid exposing your token, store it as a `secret`). Optionally, you can choose to include up to four additional inputs to customize the upload context. **For public repositories, no token is needed**
This Action also requires you to [provide an upload token](https://docs.codecov.io/docs/frequently-asked-questions#section-where-is-the-repository-upload-token-found-) from [codecov.io](https://www.codecov.io) (tip: in order to avoid exposing your token, [store it](https://docs.codecov.com/docs/adding-the-codecov-token#github-actions) as a `secret`).

Currently, the Action will identify linux, macos, and windows runners. However, the Action may misidentify other architectures. The OS can be specified as
- alpine
- alpine-arm64
- linux
- linux-arm64
- macos
- windows

Inside your `.github/workflows/workflow.yml` file:

```yaml
steps:
  - uses: actions/checkout@master
  - uses: codecov/codecov-action@v1
  - uses: actions/checkout@main
  - uses: codecov/codecov-action@v4
    with:
      token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos
      fail_ci_if_error: true # optional (default = false)
      files: ./coverage1.xml,./coverage2.xml # optional
      flags: unittests # optional
      name: codecov-umbrella # optional
      fail_ci_if_error: true # optional (default = false)
      token: ${{ secrets.CODECOV_TOKEN }} # required
      verbose: true # optional (default = false)
```
>**Note**: This assumes that you've set your Codecov token inside *Settings > Secrets* as `CODECOV_TOKEN`. If not, you can [get an upload token](https://docs.codecov.io/docs/frequently-asked-questions#section-where-is-the-repository-upload-token-found-) for your specific repo on [codecov.io](https://www.codecov.io). Keep in mind that secrets are *not* available to forks of repositories.

The Codecov token can also be passed in via environment variables:

```yaml
steps:
  - uses: actions/checkout@main
  - uses: codecov/codecov-action@v4
    with:
      fail_ci_if_error: true # optional (default = false)
      files: ./coverage1.xml,./coverage2.xml # optional
      flags: unittests # optional
      name: codecov-umbrella # optional
      verbose: true # optional (default = false)
    env:
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
```
> [!NOTE]
> This assumes that you've set your Codecov token inside *Settings > Secrets* as `CODECOV_TOKEN`. If not, you can [get an upload token](https://docs.codecov.io/docs/frequently-asked-questions#section-where-is-the-repository-upload-token-found-) for your specific repo on [codecov.io](https://www.codecov.io). Keep in mind that secrets are *not* available to forks of repositories.

### Using OIDC
For users with [OpenID Connect(OIDC) enabled](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect), the Codecov token is not necessary. You can use OIDC with the `use_oidc` argument as following.

```yaml
- uses: codecov/codecov-action@v4
  with:
    use_oidc: true
```

Any token supplied will be ignored, as Codecov will default to the OIDC token for verification.
````
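One detail the OIDC snippet above leaves implicit: GitHub only issues an OIDC ID token to jobs that are permitted to request one, so the workflow needs `id-token: write`. A minimal sketch of such a job (the job name and checkout step are illustrative assumptions):

```yaml
jobs:
  upload-coverage:
    runs-on: ubuntu-latest
    # Required so the job can request the OIDC ID token that Codecov verifies.
    permissions:
      id-token: write
      contents: read
    steps:
      - uses: actions/checkout@v4
      - uses: codecov/codecov-action@v4
        with:
          use_oidc: true
```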
````markdown
## Arguments

Codecov's Action currently supports five inputs from the user: `token`, `file`, `flags`,`name`, and `fail_ci_if_error`. These inputs, along with their descriptions and usage contexts, are listed in the table below:
Codecov's Action supports inputs from the user. These inputs, along with their descriptions and usage contexts, are listed in the table below:

| Input | Description | Usage |
| :---: | :---: | :---: |
| `token` | Used to authorize coverage report uploads | *Required for private repos* |
| `files` | Comma-separated paths to the coverage report(s) | Optional
| Input | Description | Required |
| :--- | :--- | :---: |
| `token` | Repository Codecov token. Used to authorize report uploads | *Required
| `codecov_yml_path` | Specify the path to the Codecov YML | Optional
| `commit_parent` | Override to specify the parent commit SHA | Optional
| `directory` | Directory to search for coverage reports. | Optional
| `flags` | Flag the upload to group coverage metrics (unittests, uitests, etc.). Multiple flags are separated by a comma (ui,chrome) | Optional
| | |
| `aws_curl_args` | Extra curl arguments to communicate with AWS. | Optional
| `codecov_curl_args` | Extra curl arguments to communicate with Codecov. e.g., -U "--proxy http://http-proxy" | Optional
| `commit_parent` | The commit SHA of the parent for which you are uploading coverage. If not present, the parent will be determined using the API of your repository provider. When using the repository provider's API, the parent is determined via finding the closest ancestor to the commit. | Optional
| `env_vars` | Environment variables to tag the upload with. Multiple env variables can be separated with commas (e.g. `OS,PYTHON`) | Optional
| `fail_ci_if_error` | Specify if CI pipeline should fail when Codecov runs into errors during upload. *Defaults to **false*** | Optional
| `functionalities` | Toggle functionalities | Optional
| | `coveragepy` Disable python coverage |
| | `fix` Disable report fixing |
| | `gcov` Disable gcov |
| | `gcovout` Disable gcov output |
| | `html` Enable coverage for HTML files |
| | `network` Disable uploading the file network |
| | `recursesubs` Enable recurse submodules in git projects when searching for source files | |
| | `search` Disable searching for reports |
| | `xcode` Disable xcode processing |
| `gcov_path_include` | Paths to include during gcov gathering (as a glob) | Optional
| `gcov_args` | extra arguments to pass to gcov | Optional
| `gcov_executable` | gcov executable to run. Defaults to 'gcov' | Optional
| `gcov_path_exclude` | Paths to ignore during gcov gathering (as a glob) | Optional
| `gcov_prefix` | Prefix filepaths to help resolve path fixing | Optional
| `gcov_root_dir` | Project root directory, also used when preparing gcov | Optional
| `move_coverage_to_trash` | Move discovered coverage reports to the trash | Optional
| `name` | Custom defined name for the upload | Optional
| `disable_search` | Disable search for coverage files. This is helpful when specifying what files you want to upload with the --file option. | Optional
| `disable_file_fixes` | Disable file fixes to ignore common lines from coverage (e.g. blank lines or empty brackets) | Optional
| `dry_run` | Don't upload files to Codecov | Optional
| `env_vars` | Environment variables to tag the upload with (e.g. PYTHON \| OS,PYTHON) | Optional
| `exclude` | Folders to exclude from search | Optional
| `fail_ci_if_error` | Specify whether or not CI build should fail if Codecov runs into an error during upload | Optional
| `file` | Path to coverage file to upload | Optional
| `files` | Comma-separated list of files to upload | Optional
| `flags` | Flag upload to group coverage metrics (e.g. unittests \| integration \| ui,chrome) | Optional
| `handle_no_reports_found` | Raise no exceptions when no coverage reports found | Optional
| `job_code` | The job code | Optional
| `name` | User defined upload name. Visible in Codecov UI | Optional
| `os` | Override the assumed OS. Options are linux \| macos \| windows \| . | Optional
| `override_branch` | Specify the branch name | Optional
| `override_build` | Specify the build number | Optional
| `override_build_url` | The URL of the build where this is running | Optional
| `override_commit` | Specify the commit SHA | Optional
| `override_pr` | Specify the pull request number | Optional
| `override_tag` | Specify the git tag | Optional
| `path_to_write_report` | Write upload file to path before uploading | Optional
| `root_dir` | Used when not in git/hg project to identify project root directory | Optional
| `plugin` | plugins to run. Options: xcode, gcov, pycoverage. The default behavior runs them all. | Optional
| `plugins` | Comma-separated list of plugins for use during upload. | Optional
| `report_code` | The code of the report. If unsure, do not include | Optional
| `root_dir` | Used to specify the location of your .git root to identify project root directory | Optional
| `slug` | Specify the slug manually (Enterprise use) | Optional
| `url` | Specify the base url to upload (Enterprise use) | Optional
| `use_legacy_upload_endpoint` | Use the legacy upload endpoint | Optional
| `use_oidc` | Use OpenID Connect for verification instead of token. This will ignore any token supplied. Please see [GitHub documentation](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect) for details.
| `verbose` | Specify whether the Codecov output should be verbose | Optional
| `working-directory` | Directory in which to execute `codecov.sh` | Optional
| `xcode_derived_data` | Custom Derived Data Path for Coverage.profdata and gcov processing | Optional
| `xcode_package` | Specify packages to build coverage. Uploader will only build these packages. This can significantly reduces time to build coverage reports. -J 'MyAppName' Will match "MyAppName" and "MyAppNameTests" -J '^ExampleApp$' Will match only "ExampleApp" not "ExampleAppTests" | Optional
| `version` | Specify which version of the Codecov CLI should be used. Defaults to `latest` | Optional
| `working-directory` | Directory in which to execute codecov.sh | Optional

### Example `workflow.yml` with Codecov Action

@@ -88,29 +128,28 @@ jobs:
      os: [ubuntu-latest, macos-latest, windows-latest]
    env:
      OS: ${{ matrix.os }}
      PYTHON: '3.7'
      PYTHON: '3.10'
    steps:
    - uses: actions/checkout@master
    - uses: actions/checkout@main
    - name: Setup Python
      uses: actions/setup-python@master
      uses: actions/setup-python@main
      with:
        python-version: 3.7
        python-version: 3.10
    - name: Generate coverage report
      run: |
        pip install pytest
        pip install pytest-cov
        pytest --cov=./ --cov-report=xml
    - name: Upload coverage to Codecov
      uses: codecov/codecov-action@v1
      uses: codecov/codecov-action@v4
      with:
        token: ${{ secrets.CODECOV_TOKEN }}
        files: ./coverage1.xml,./coverage2.xml
        directory: ./coverage/reports/
        flags: unittests
        env_vars: OS,PYTHON
        name: codecov-umbrella
        fail_ci_if_error: true
        path_to_write_report: ./coverage/codecov_report.txt
        files: ./coverage1.xml,./coverage2.xml,!./cache
        flags: unittests
        name: codecov-umbrella
        token: ${{ secrets.CODECOV_TOKEN }}
        verbose: true
```
## Contributing

@@ -121,5 +160,4 @@ Contributions are welcome! Check out the [Contribution Guide](CONTRIBUTING.md).

The code in this project is released under the [MIT License](LICENSE).


[](https://app.fossa.com/projects/git%2Bgithub.com%2Fcodecov%2Fcodecov-action?ref=badge_large)
````
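The argument table in the README above documents several CLI-era inputs that the example workflow does not exercise. A short, hedged sketch combining a few of them; the report paths and the pinned CLI version are illustrative assumptions, not values from the repository:

```yaml
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v4
        with:
          # Upload only the listed reports and skip the automatic report search.
          disable_search: true
          files: ./coverage/unit.xml,./coverage/integration.xml
          flags: unittests
          # Pin a specific Codecov CLI release instead of the default `latest`.
          version: v0.6.0
          token: ${{ secrets.CODECOV_TOKEN }}
```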
**action.yml** (102 changes; the compare view flattens the old and new versions, so replaced inputs appear back to back)

```yaml
@@ -3,64 +3,67 @@ description: 'GitHub Action that uploads coverage reports for your repository to
author: 'Ibrahim Ali <@ibrahim0814> & Thomas Hu <@thomasrockhu> | Codecov'
inputs:
  token:
    description: 'Repository upload token - get it from codecov.io. Required only for private repositories'
    description: 'Repository Codecov token. Used to authorize report uploads'
    required: false
  files:
    description: 'Comma-separated list of files to upload'
  codecov_yml_path:
    description: 'Specify the path to the Codecov YML'
    required: false
  commit_parent:
    description: 'Override to specify the parent commit SHA'
    required: false
  directory:
    description: 'Directory to search for coverage reports.'
    required: false
  flags:
    description: 'Flag upload to group coverage metrics (e.g. unittests | integration | ui,chrome)'
  disable_file_fixes:
    description: 'Disable file fixes to ignore common lines from coverage (e.g. blank lines or empty brackets)'
    required: false
  aws_curl_args:
    description: 'Extra curl arguments to communicate with AWS.'
  disable_search:
    description: 'Disable search for coverage files. This is helpful when specifying what files you want to upload with the --file option.'
    required: false
  codecov_curl_args:
    description: 'Extra curl arguments to communicate with Codecov. e.g., -U "--proxy http://http-proxy"'
  disable_safe_directory:
    description: 'Disable setting safe directory. Set to true to disable.'
    required: false
  commit_parent:
    description: 'The commit SHA of the parent for which you are uploading coverage. If not present, the parent will be determined using the API of your repository provider. When using the repository providers API, the parent is determined via finding the closest ancestor to the commit.'
  dry_run:
    description: "Don't upload files to Codecov"
    required: false
  env_vars:
    description: 'Environment variables to tag the upload with (e.g. PYTHON | OS,PYTHON)'
    required: false
  exclude:
    description: 'Folders to exclude from search'
    required: false
  fail_ci_if_error:
    description: 'Specify whether or not CI build should fail if Codecov runs into an error during upload'
    required: false
  file:
    description: 'Path to coverage file to upload'
    required: false
  functionalities:
    description: 'Comma-separated list, see the README for options and their usage'
  files:
    description: 'Comma-separated list of files to upload'
    required: false
  gcov_args:
    description: 'extra arguments to pass to gcov'
  flags:
    description: 'Flag upload to group coverage metrics (e.g. unittests | integration | ui,chrome)'
    required: false
  gcov_executable:
    description: 'gcov executable to run. Defaults to gcov'
  git_service:
    description: 'Override the git_service (e.g. github_enterprise)'
    required: false
  gcov_path_exclude:
    description: 'Paths to ignore during gcov gathering (as a glob)'
  handle_no_reports_found:
    description: 'Raise no exceptions when no coverage reports found'
    required: false
  gcov_path_include:
    description: 'Paths to include during gcov gathering (as a glob)'
    required: false
  gcov_prefix:
    description: 'Prefix filepaths to help resolve path fixing'
    required: false
  gcov_root_dir:
    description: 'Project root directory, also used when preparing gcov'
    required: false
  move_coverage_to_trash:
    description: 'Move discovered coverage reports to the trash'
  job_code:
    description: 'The job code'
    required: false
  name:
    description: 'User defined upload name. Visible in Codecov UI'
    required: false
  network_filter:
    description: 'Used to restrict the set of git/hg files that can be matched with filenames in the coverage report. This is useful for monorepos or other setups where a full filepath may not be specified in the coverage report, and that shortened filepath may appear multiple times in a directory structure (e.g. __init__.py)'
    description: 'Specify a filter on the files listed in the network section of the Codecov report. This will only add files whose path begin with the specified filter. Useful for upload-specific path fixing'
    required: false
  network_prefix:
    description: 'Specify a prefix on files listed in the network section of the Codecov report. Useful to help resolve path fixing'
    required: false
  os:
    description: 'Override the assumed OS. Options are linux | macos | windows.'
    required: false
  override_branch:
    description: 'Specify the branch name'
@@ -68,36 +71,51 @@ inputs:
  override_build:
    description: 'Specify the build number'
    required: false
  override_build_url:
    description: 'The URL of the build where this is running'
    required: false
  override_commit:
    description: 'Specify the commit SHA'
    required: false
  override_pr:
    description: 'Specify the pull request number'
    required: false
  override_tag:
    description: 'Specify the git tag'
  plugin:
    description: 'plugins to run. Options: xcode, gcov, pycoverage. The default behavior runs them all.'
    required: false
  path_to_write_report:
    description: 'Write upload file to path before uploading'
  plugins:
    description: 'Comma-separated list of plugins for use during upload.'
    required: false
  report_code:
    description: 'The code of the report. If unsure, do not include'
    required: false
  root_dir:
    description: 'Used when not in git/hg project to identify project root directory'
    required: false
  slug:
    description: 'Specify the slug manually (Enterprise use)'
    required: false
  url:
    description: 'Specify the base url to upload (Enterprise use)'
    required: false
  use_legacy_upload_endpoint:
    description: 'Use the legacy upload endpoint'
    required: false
  use_oidc:
    description: 'Use OIDC instead of token. This will ignore any token supplied'
    required: false
  verbose:
    description: 'Specify whether the Codecov output should be verbose'
    required: false
  version:
    description: 'Specify which version of the Codecov CLI should be used. Defaults to `latest`'
    required: false
  working-directory:
    description: 'Directory in which to execute codecov.sh'
    required: false
  xcode_derived_data:
    description: 'Custom Derived Data Path for Coverage.profdata and gcov processing'
    required: false
  xcode_package:
    description: 'Specify packages to build coverage. Uploader will only build these packages'
    required: false
branding:
  color: 'red'
  icon: 'umbrella'
runs:
  using: 'node12'
  using: 'node20'
  main: 'dist/index.js'
```
**Calculator tests**

```diff
@@ -1,11 +1,11 @@
-import Calculator from './calculator'
+import Calculator from './calculator';
 
 test('adds 2 + 3 to equal 5', () => {
-  const calc = new Calculator()
+  const calc = new Calculator();
   expect(calc.add(2, 3)).toBe(5);
 });
 
 test('subtracts 2 - 3 to equal -1', () => {
-  const calc = new Calculator()
+  const calc = new Calculator();
   expect(calc.subtract(2, 3)).toBe(-1);
 });
```
**Coverage tests**

```diff
@@ -1,4 +1,4 @@
-import Coverage from "./coverage";
+import Coverage from './coverage';
 
 test('test uncovered if', () => {
   const coverageObj = new Coverage();
```
**dist/37.index.js** (new vendored file, 453 lines; excerpt)

```js
"use strict";
exports.id = 37;
exports.ids = [37];
exports.modules = {

/***/ 4037:
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {

__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */   "toFormData": () => (/* binding */ toFormData)
/* harmony export */ });
/* harmony import */ var fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(2777);
/* harmony import */ var formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(8010);


let s = 0;
const S = {
  START_BOUNDARY: s++,
  HEADER_FIELD_START: s++,
  HEADER_FIELD: s++,
  HEADER_VALUE_START: s++,
  HEADER_VALUE: s++,
  HEADER_VALUE_ALMOST_DONE: s++,
  HEADERS_ALMOST_DONE: s++,
  PART_DATA_START: s++,
  PART_DATA: s++,
  END: s++
};

let f = 1;
const F = {
  PART_BOUNDARY: f,
  LAST_BOUNDARY: f *= 2
};

const LF = 10;
const CR = 13;
const SPACE = 32;
const HYPHEN = 45;
const COLON = 58;
const A = 97;
const Z = 122;

const lower = c => c | 0x20;

const noop = () => {};

class MultipartParser {
  /**
   * @param {string} boundary
   */
  constructor(boundary) {
    this.index = 0;
    this.flags = 0;

    this.onHeaderEnd = noop;
    this.onHeaderField = noop;
    this.onHeadersEnd = noop;
    this.onHeaderValue = noop;
    this.onPartBegin = noop;
    this.onPartData = noop;
    this.onPartEnd = noop;

    this.boundaryChars = {};

    boundary = '\r\n--' + boundary;
    const ui8a = new Uint8Array(boundary.length);
    for (let i = 0; i < boundary.length; i++) {
      ui8a[i] = boundary.charCodeAt(i);
      this.boundaryChars[ui8a[i]] = true;
    }

    this.boundary = ui8a;
    this.lookbehind = new Uint8Array(this.boundary.length + 8);
    this.state = S.START_BOUNDARY;
  }

  /**
   * @param {Uint8Array} data
   */
  write(data) {
    let i = 0;
    const length_ = data.length;
    let previousIndex = this.index;
    let {lookbehind, boundary, boundaryChars, index, state, flags} = this;
    const boundaryLength = this.boundary.length;
    const boundaryEnd = boundaryLength - 1;
    const bufferLength = data.length;
    let c;
    let cl;

    const mark = name => {
      this[name + 'Mark'] = i;
    };

    const clear = name => {
      delete this[name + 'Mark'];
    };

    const callback = (callbackSymbol, start, end, ui8a) => {
      if (start === undefined || start !== end) {
        this[callbackSymbol](ui8a && ui8a.subarray(start, end));
      }
    };

    const dataCallback = (name, clear) => {
      const markSymbol = name + 'Mark';
      if (!(markSymbol in this)) {
        return;
      }

      if (clear) {
        callback(name, this[markSymbol], i, data);
        delete this[markSymbol];
      } else {
        callback(name, this[markSymbol], data.length, data);
        this[markSymbol] = 0;
      }
    };

    for (i = 0; i < length_; i++) {
      c = data[i];

      switch (state) {
        case S.START_BOUNDARY:
          if (index === boundary.length - 2) {
```
|
||||
if (c === HYPHEN) {
|
||||
flags |= F.LAST_BOUNDARY;
|
||||
} else if (c !== CR) {
|
||||
return;
|
||||
}
|
||||
|
||||
index++;
|
||||
break;
|
||||
} else if (index - 1 === boundary.length - 2) {
|
||||
if (flags & F.LAST_BOUNDARY && c === HYPHEN) {
|
||||
state = S.END;
|
||||
flags = 0;
|
||||
} else if (!(flags & F.LAST_BOUNDARY) && c === LF) {
|
||||
index = 0;
|
||||
callback('onPartBegin');
|
||||
state = S.HEADER_FIELD_START;
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
if (c !== boundary[index + 2]) {
|
||||
index = -2;
|
||||
}
|
||||
|
||||
if (c === boundary[index + 2]) {
|
||||
index++;
|
||||
}
|
||||
|
||||
break;
|
||||
case S.HEADER_FIELD_START:
|
||||
state = S.HEADER_FIELD;
|
||||
mark('onHeaderField');
|
||||
index = 0;
|
||||
// falls through
|
||||
case S.HEADER_FIELD:
|
||||
if (c === CR) {
|
||||
clear('onHeaderField');
|
||||
state = S.HEADERS_ALMOST_DONE;
|
||||
break;
|
||||
}
|
||||
|
||||
index++;
|
||||
if (c === HYPHEN) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (c === COLON) {
|
||||
if (index === 1) {
|
||||
// empty header field
|
||||
return;
|
||||
}
|
||||
|
||||
dataCallback('onHeaderField', true);
|
||||
state = S.HEADER_VALUE_START;
|
||||
break;
|
||||
}
|
||||
|
||||
cl = lower(c);
|
||||
if (cl < A || cl > Z) {
|
||||
return;
|
||||
}
|
||||
|
||||
break;
|
||||
case S.HEADER_VALUE_START:
|
||||
if (c === SPACE) {
|
||||
break;
|
||||
}
|
||||
|
||||
mark('onHeaderValue');
|
||||
state = S.HEADER_VALUE;
|
||||
// falls through
|
||||
case S.HEADER_VALUE:
|
||||
if (c === CR) {
|
||||
dataCallback('onHeaderValue', true);
|
||||
callback('onHeaderEnd');
|
||||
state = S.HEADER_VALUE_ALMOST_DONE;
|
||||
}
|
||||
|
||||
break;
|
||||
case S.HEADER_VALUE_ALMOST_DONE:
|
||||
if (c !== LF) {
|
||||
return;
|
||||
}
|
||||
|
||||
state = S.HEADER_FIELD_START;
|
||||
break;
|
||||
case S.HEADERS_ALMOST_DONE:
|
||||
if (c !== LF) {
|
||||
return;
|
||||
}
|
||||
|
||||
callback('onHeadersEnd');
|
||||
state = S.PART_DATA_START;
|
||||
break;
|
||||
case S.PART_DATA_START:
|
||||
state = S.PART_DATA;
|
||||
mark('onPartData');
|
||||
// falls through
|
||||
case S.PART_DATA:
|
||||
previousIndex = index;
|
||||
|
||||
if (index === 0) {
|
||||
// Boyer-Moore derived algorithm to safely skip non-boundary data
|
||||
i += boundaryEnd;
|
||||
while (i < bufferLength && !(data[i] in boundaryChars)) {
|
||||
i += boundaryLength;
|
||||
}
|
||||
|
||||
i -= boundaryEnd;
|
||||
c = data[i];
|
||||
}
|
||||
|
||||
if (index < boundary.length) {
|
||||
if (boundary[index] === c) {
|
||||
if (index === 0) {
|
||||
dataCallback('onPartData', true);
|
||||
}
|
||||
|
||||
index++;
|
||||
} else {
|
||||
index = 0;
|
||||
}
|
||||
} else if (index === boundary.length) {
|
||||
index++;
|
||||
if (c === CR) {
|
||||
// CR = part boundary
|
||||
flags |= F.PART_BOUNDARY;
|
||||
} else if (c === HYPHEN) {
|
||||
// HYPHEN = end boundary
|
||||
flags |= F.LAST_BOUNDARY;
|
||||
} else {
|
||||
index = 0;
|
||||
}
|
||||
} else if (index - 1 === boundary.length) {
|
||||
if (flags & F.PART_BOUNDARY) {
|
||||
index = 0;
|
||||
if (c === LF) {
|
||||
// unset the PART_BOUNDARY flag
|
||||
flags &= ~F.PART_BOUNDARY;
|
||||
callback('onPartEnd');
|
||||
callback('onPartBegin');
|
||||
state = S.HEADER_FIELD_START;
|
||||
break;
|
||||
}
|
||||
} else if (flags & F.LAST_BOUNDARY) {
|
||||
if (c === HYPHEN) {
|
||||
callback('onPartEnd');
|
||||
state = S.END;
|
||||
flags = 0;
|
||||
} else {
|
||||
index = 0;
|
||||
}
|
||||
} else {
|
||||
index = 0;
|
||||
}
|
||||
}
|
||||
|
||||
if (index > 0) {
|
||||
// when matching a possible boundary, keep a lookbehind reference
|
||||
// in case it turns out to be a false lead
|
||||
lookbehind[index - 1] = c;
|
||||
} else if (previousIndex > 0) {
|
||||
// if our boundary turned out to be rubbish, the captured lookbehind
|
||||
// belongs to partData
|
||||
const _lookbehind = new Uint8Array(lookbehind.buffer, lookbehind.byteOffset, lookbehind.byteLength);
|
||||
callback('onPartData', 0, previousIndex, _lookbehind);
|
||||
previousIndex = 0;
|
||||
mark('onPartData');
|
||||
|
||||
// reconsider the current character even though it interrupted the sequence
|
||||
// it could be the beginning of a new sequence
|
||||
i--;
|
||||
}
|
||||
|
||||
break;
|
||||
case S.END:
|
||||
break;
|
||||
default:
|
||||
throw new Error(`Unexpected state entered: ${state}`);
|
||||
}
|
||||
}
|
||||
|
||||
dataCallback('onHeaderField');
|
||||
dataCallback('onHeaderValue');
|
||||
dataCallback('onPartData');
|
||||
|
||||
// Update properties for the next call
|
||||
this.index = index;
|
||||
this.state = state;
|
||||
this.flags = flags;
|
||||
}
|
||||
|
||||
end() {
|
||||
if ((this.state === S.HEADER_FIELD_START && this.index === 0) ||
|
||||
(this.state === S.PART_DATA && this.index === this.boundary.length)) {
|
||||
this.onPartEnd();
|
||||
} else if (this.state !== S.END) {
|
||||
throw new Error('MultipartParser.end(): stream ended unexpectedly');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function _fileName(headerValue) {
|
||||
// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
|
||||
const m = headerValue.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i);
|
||||
if (!m) {
|
||||
return;
|
||||
}
|
||||
|
||||
const match = m[2] || m[3] || '';
|
||||
let filename = match.slice(match.lastIndexOf('\\') + 1);
|
||||
filename = filename.replace(/%22/g, '"');
|
||||
filename = filename.replace(/&#(\d{4});/g, (m, code) => {
|
||||
return String.fromCharCode(code);
|
||||
});
|
||||
return filename;
|
||||
}
|
||||
|
||||
async function toFormData(Body, ct) {
|
||||
if (!/multipart/i.test(ct)) {
|
||||
throw new TypeError('Failed to fetch');
|
||||
}
|
||||
|
||||
const m = ct.match(/boundary=(?:"([^"]+)"|([^;]+))/i);
|
||||
|
||||
if (!m) {
|
||||
throw new TypeError('no or bad content-type header, no multipart boundary');
|
||||
}
|
||||
|
||||
const parser = new MultipartParser(m[1] || m[2]);
|
||||
|
||||
let headerField;
|
||||
let headerValue;
|
||||
let entryValue;
|
||||
let entryName;
|
||||
let contentType;
|
||||
let filename;
|
||||
const entryChunks = [];
|
||||
const formData = new formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__/* .FormData */ .Ct();
|
||||
|
||||
const onPartData = ui8a => {
|
||||
entryValue += decoder.decode(ui8a, {stream: true});
|
||||
};
|
||||
|
||||
const appendToFile = ui8a => {
|
||||
entryChunks.push(ui8a);
|
||||
};
|
||||
|
||||
const appendFileToFormData = () => {
|
||||
const file = new fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__/* .File */ .$B(entryChunks, filename, {type: contentType});
|
||||
formData.append(entryName, file);
|
||||
};
|
||||
|
||||
const appendEntryToFormData = () => {
|
||||
formData.append(entryName, entryValue);
|
||||
};
|
||||
|
||||
const decoder = new TextDecoder('utf-8');
|
||||
decoder.decode();
|
||||
|
||||
parser.onPartBegin = function () {
|
||||
parser.onPartData = onPartData;
|
||||
parser.onPartEnd = appendEntryToFormData;
|
||||
|
||||
headerField = '';
|
||||
headerValue = '';
|
||||
entryValue = '';
|
||||
entryName = '';
|
||||
contentType = '';
|
||||
filename = null;
|
||||
entryChunks.length = 0;
|
||||
};
|
||||
|
||||
parser.onHeaderField = function (ui8a) {
|
||||
headerField += decoder.decode(ui8a, {stream: true});
|
||||
};
|
||||
|
||||
parser.onHeaderValue = function (ui8a) {
|
||||
headerValue += decoder.decode(ui8a, {stream: true});
|
||||
};
|
||||
|
||||
parser.onHeaderEnd = function () {
|
||||
headerValue += decoder.decode();
|
||||
headerField = headerField.toLowerCase();
|
||||
|
||||
if (headerField === 'content-disposition') {
|
||||
// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
|
||||
const m = headerValue.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i);
|
||||
|
||||
if (m) {
|
||||
entryName = m[2] || m[3] || '';
|
||||
}
|
||||
|
||||
filename = _fileName(headerValue);
|
||||
|
||||
if (filename) {
|
||||
parser.onPartData = appendToFile;
|
||||
parser.onPartEnd = appendFileToFormData;
|
||||
}
|
||||
} else if (headerField === 'content-type') {
|
||||
contentType = headerValue;
|
||||
}
|
||||
|
||||
headerValue = '';
|
||||
headerField = '';
|
||||
};
|
||||
|
||||
for await (const chunk of Body) {
|
||||
parser.write(chunk);
|
||||
}
|
||||
|
||||
parser.end();
|
||||
|
||||
return formData;
|
||||
}
|
||||
|
||||
|
||||
/***/ })
|
||||
|
||||
};
|
||||
;
|
||||
//# sourceMappingURL=37.index.js.map
|
||||
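The bundled chunk above ends by exporting toFormData(Body, ct), a streaming multipart/form-data parser (apparently vendored from node-fetch). A minimal sketch of how such a parser is driven, assuming direct access to the exported function; the boundary and field name here are made up for illustration:

// Body may be any async iterable of Uint8Array chunks; ct is the Content-Type
// header value that carries the multipart boundary.
async function* body(): AsyncGenerator<Uint8Array> {
  yield new TextEncoder().encode(
      '--BOUNDARY\r\n' +
      'Content-Disposition: form-data; name="field"\r\n' +
      '\r\n' +
      'value\r\n' +
      '--BOUNDARY--\r\n');
}

// const form = await toFormData(body(), 'multipart/form-data; boundary=BOUNDARY');
// form.get('field') === 'value'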
dist/37.index.js.map (vendored, new file, 1 line)
File diff suppressed because one or more lines are too long
dist/629.index.js (vendored, new file, 453 lines)
@@ -0,0 +1,453 @@
|
||||
exports.id = 629;
|
||||
exports.ids = [629];
|
||||
exports.modules = {
|
||||
|
||||
/***/ 6629:
|
||||
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
|
||||
|
||||
"use strict";
|
||||
__webpack_require__.r(__webpack_exports__);
|
||||
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
|
||||
/* harmony export */ "toFormData": () => (/* binding */ toFormData)
|
||||
/* harmony export */ });
|
||||
/* harmony import */ var fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(4818);
|
||||
/* harmony import */ var formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(1402);
|
||||
|
||||
|
||||
|
||||
let s = 0;
|
||||
const S = {
|
||||
START_BOUNDARY: s++,
|
||||
HEADER_FIELD_START: s++,
|
||||
HEADER_FIELD: s++,
|
||||
HEADER_VALUE_START: s++,
|
||||
HEADER_VALUE: s++,
|
||||
HEADER_VALUE_ALMOST_DONE: s++,
|
||||
HEADERS_ALMOST_DONE: s++,
|
||||
PART_DATA_START: s++,
|
||||
PART_DATA: s++,
|
||||
END: s++
|
||||
};
|
||||
|
||||
let f = 1;
|
||||
const F = {
|
||||
PART_BOUNDARY: f,
|
||||
LAST_BOUNDARY: f *= 2
|
||||
};
|
||||
|
||||
const LF = 10;
|
||||
const CR = 13;
|
||||
const SPACE = 32;
|
||||
const HYPHEN = 45;
|
||||
const COLON = 58;
|
||||
const A = 97;
|
||||
const Z = 122;
|
||||
|
||||
const lower = c => c | 0x20;
|
||||
|
||||
const noop = () => {};
|
||||
|
||||
class MultipartParser {
|
||||
/**
|
||||
* @param {string} boundary
|
||||
*/
|
||||
constructor(boundary) {
|
||||
this.index = 0;
|
||||
this.flags = 0;
|
||||
|
||||
this.onHeaderEnd = noop;
|
||||
this.onHeaderField = noop;
|
||||
this.onHeadersEnd = noop;
|
||||
this.onHeaderValue = noop;
|
||||
this.onPartBegin = noop;
|
||||
this.onPartData = noop;
|
||||
this.onPartEnd = noop;
|
||||
|
||||
this.boundaryChars = {};
|
||||
|
||||
boundary = '\r\n--' + boundary;
|
||||
const ui8a = new Uint8Array(boundary.length);
|
||||
for (let i = 0; i < boundary.length; i++) {
|
||||
ui8a[i] = boundary.charCodeAt(i);
|
||||
this.boundaryChars[ui8a[i]] = true;
|
||||
}
|
||||
|
||||
this.boundary = ui8a;
|
||||
this.lookbehind = new Uint8Array(this.boundary.length + 8);
|
||||
this.state = S.START_BOUNDARY;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} data
|
||||
*/
|
||||
write(data) {
|
||||
let i = 0;
|
||||
const length_ = data.length;
|
||||
let previousIndex = this.index;
|
||||
let {lookbehind, boundary, boundaryChars, index, state, flags} = this;
|
||||
const boundaryLength = this.boundary.length;
|
||||
const boundaryEnd = boundaryLength - 1;
|
||||
const bufferLength = data.length;
|
||||
let c;
|
||||
let cl;
|
||||
|
||||
const mark = name => {
|
||||
this[name + 'Mark'] = i;
|
||||
};
|
||||
|
||||
const clear = name => {
|
||||
delete this[name + 'Mark'];
|
||||
};
|
||||
|
||||
const callback = (callbackSymbol, start, end, ui8a) => {
|
||||
if (start === undefined || start !== end) {
|
||||
this[callbackSymbol](ui8a && ui8a.subarray(start, end));
|
||||
}
|
||||
};
|
||||
|
||||
const dataCallback = (name, clear) => {
|
||||
const markSymbol = name + 'Mark';
|
||||
if (!(markSymbol in this)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (clear) {
|
||||
callback(name, this[markSymbol], i, data);
|
||||
delete this[markSymbol];
|
||||
} else {
|
||||
callback(name, this[markSymbol], data.length, data);
|
||||
this[markSymbol] = 0;
|
||||
}
|
||||
};
|
||||
|
||||
for (i = 0; i < length_; i++) {
|
||||
c = data[i];
|
||||
|
||||
switch (state) {
|
||||
case S.START_BOUNDARY:
|
||||
if (index === boundary.length - 2) {
|
||||
if (c === HYPHEN) {
|
||||
flags |= F.LAST_BOUNDARY;
|
||||
} else if (c !== CR) {
|
||||
return;
|
||||
}
|
||||
|
||||
index++;
|
||||
break;
|
||||
} else if (index - 1 === boundary.length - 2) {
|
||||
if (flags & F.LAST_BOUNDARY && c === HYPHEN) {
|
||||
state = S.END;
|
||||
flags = 0;
|
||||
} else if (!(flags & F.LAST_BOUNDARY) && c === LF) {
|
||||
index = 0;
|
||||
callback('onPartBegin');
|
||||
state = S.HEADER_FIELD_START;
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
if (c !== boundary[index + 2]) {
|
||||
index = -2;
|
||||
}
|
||||
|
||||
if (c === boundary[index + 2]) {
|
||||
index++;
|
||||
}
|
||||
|
||||
break;
|
||||
case S.HEADER_FIELD_START:
|
||||
state = S.HEADER_FIELD;
|
||||
mark('onHeaderField');
|
||||
index = 0;
|
||||
// falls through
|
||||
case S.HEADER_FIELD:
|
||||
if (c === CR) {
|
||||
clear('onHeaderField');
|
||||
state = S.HEADERS_ALMOST_DONE;
|
||||
break;
|
||||
}
|
||||
|
||||
index++;
|
||||
if (c === HYPHEN) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (c === COLON) {
|
||||
if (index === 1) {
|
||||
// empty header field
|
||||
return;
|
||||
}
|
||||
|
||||
dataCallback('onHeaderField', true);
|
||||
state = S.HEADER_VALUE_START;
|
||||
break;
|
||||
}
|
||||
|
||||
cl = lower(c);
|
||||
if (cl < A || cl > Z) {
|
||||
return;
|
||||
}
|
||||
|
||||
break;
|
||||
case S.HEADER_VALUE_START:
|
||||
if (c === SPACE) {
|
||||
break;
|
||||
}
|
||||
|
||||
mark('onHeaderValue');
|
||||
state = S.HEADER_VALUE;
|
||||
// falls through
|
||||
case S.HEADER_VALUE:
|
||||
if (c === CR) {
|
||||
dataCallback('onHeaderValue', true);
|
||||
callback('onHeaderEnd');
|
||||
state = S.HEADER_VALUE_ALMOST_DONE;
|
||||
}
|
||||
|
||||
break;
|
||||
case S.HEADER_VALUE_ALMOST_DONE:
|
||||
if (c !== LF) {
|
||||
return;
|
||||
}
|
||||
|
||||
state = S.HEADER_FIELD_START;
|
||||
break;
|
||||
case S.HEADERS_ALMOST_DONE:
|
||||
if (c !== LF) {
|
||||
return;
|
||||
}
|
||||
|
||||
callback('onHeadersEnd');
|
||||
state = S.PART_DATA_START;
|
||||
break;
|
||||
case S.PART_DATA_START:
|
||||
state = S.PART_DATA;
|
||||
mark('onPartData');
|
||||
// falls through
|
||||
case S.PART_DATA:
|
||||
previousIndex = index;
|
||||
|
||||
if (index === 0) {
|
||||
// Boyer-Moore derived algorithm to safely skip non-boundary data
|
||||
i += boundaryEnd;
|
||||
while (i < bufferLength && !(data[i] in boundaryChars)) {
|
||||
i += boundaryLength;
|
||||
}
|
||||
|
||||
i -= boundaryEnd;
|
||||
c = data[i];
|
||||
}
|
||||
|
||||
if (index < boundary.length) {
|
||||
if (boundary[index] === c) {
|
||||
if (index === 0) {
|
||||
dataCallback('onPartData', true);
|
||||
}
|
||||
|
||||
index++;
|
||||
} else {
|
||||
index = 0;
|
||||
}
|
||||
} else if (index === boundary.length) {
|
||||
index++;
|
||||
if (c === CR) {
|
||||
// CR = part boundary
|
||||
flags |= F.PART_BOUNDARY;
|
||||
} else if (c === HYPHEN) {
|
||||
// HYPHEN = end boundary
|
||||
flags |= F.LAST_BOUNDARY;
|
||||
} else {
|
||||
index = 0;
|
||||
}
|
||||
} else if (index - 1 === boundary.length) {
|
||||
if (flags & F.PART_BOUNDARY) {
|
||||
index = 0;
|
||||
if (c === LF) {
|
||||
// unset the PART_BOUNDARY flag
|
||||
flags &= ~F.PART_BOUNDARY;
|
||||
callback('onPartEnd');
|
||||
callback('onPartBegin');
|
||||
state = S.HEADER_FIELD_START;
|
||||
break;
|
||||
}
|
||||
} else if (flags & F.LAST_BOUNDARY) {
|
||||
if (c === HYPHEN) {
|
||||
callback('onPartEnd');
|
||||
state = S.END;
|
||||
flags = 0;
|
||||
} else {
|
||||
index = 0;
|
||||
}
|
||||
} else {
|
||||
index = 0;
|
||||
}
|
||||
}
|
||||
|
||||
if (index > 0) {
|
||||
// when matching a possible boundary, keep a lookbehind reference
|
||||
// in case it turns out to be a false lead
|
||||
lookbehind[index - 1] = c;
|
||||
} else if (previousIndex > 0) {
|
||||
// if our boundary turned out to be rubbish, the captured lookbehind
|
||||
// belongs to partData
|
||||
const _lookbehind = new Uint8Array(lookbehind.buffer, lookbehind.byteOffset, lookbehind.byteLength);
|
||||
callback('onPartData', 0, previousIndex, _lookbehind);
|
||||
previousIndex = 0;
|
||||
mark('onPartData');
|
||||
|
||||
// reconsider the current character even though it interrupted the sequence
|
||||
// it could be the beginning of a new sequence
|
||||
i--;
|
||||
}
|
||||
|
||||
break;
|
||||
case S.END:
|
||||
break;
|
||||
default:
|
||||
throw new Error(`Unexpected state entered: ${state}`);
|
||||
}
|
||||
}
|
||||
|
||||
dataCallback('onHeaderField');
|
||||
dataCallback('onHeaderValue');
|
||||
dataCallback('onPartData');
|
||||
|
||||
// Update properties for the next call
|
||||
this.index = index;
|
||||
this.state = state;
|
||||
this.flags = flags;
|
||||
}
|
||||
|
||||
end() {
|
||||
if ((this.state === S.HEADER_FIELD_START && this.index === 0) ||
|
||||
(this.state === S.PART_DATA && this.index === this.boundary.length)) {
|
||||
this.onPartEnd();
|
||||
} else if (this.state !== S.END) {
|
||||
throw new Error('MultipartParser.end(): stream ended unexpectedly');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function _fileName(headerValue) {
|
||||
// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
|
||||
const m = headerValue.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i);
|
||||
if (!m) {
|
||||
return;
|
||||
}
|
||||
|
||||
const match = m[2] || m[3] || '';
|
||||
let filename = match.slice(match.lastIndexOf('\\') + 1);
|
||||
filename = filename.replace(/%22/g, '"');
|
||||
filename = filename.replace(/&#(\d{4});/g, (m, code) => {
|
||||
return String.fromCharCode(code);
|
||||
});
|
||||
return filename;
|
||||
}
|
||||
|
||||
async function toFormData(Body, ct) {
|
||||
if (!/multipart/i.test(ct)) {
|
||||
throw new TypeError('Failed to fetch');
|
||||
}
|
||||
|
||||
const m = ct.match(/boundary=(?:"([^"]+)"|([^;]+))/i);
|
||||
|
||||
if (!m) {
|
||||
throw new TypeError('no or bad content-type header, no multipart boundary');
|
||||
}
|
||||
|
||||
const parser = new MultipartParser(m[1] || m[2]);
|
||||
|
||||
let headerField;
|
||||
let headerValue;
|
||||
let entryValue;
|
||||
let entryName;
|
||||
let contentType;
|
||||
let filename;
|
||||
const entryChunks = [];
|
||||
const formData = new formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__/* .FormData */ .Ct();
|
||||
|
||||
const onPartData = ui8a => {
|
||||
entryValue += decoder.decode(ui8a, {stream: true});
|
||||
};
|
||||
|
||||
const appendToFile = ui8a => {
|
||||
entryChunks.push(ui8a);
|
||||
};
|
||||
|
||||
const appendFileToFormData = () => {
|
||||
const file = new fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__/* .File */ .$B(entryChunks, filename, {type: contentType});
|
||||
formData.append(entryName, file);
|
||||
};
|
||||
|
||||
const appendEntryToFormData = () => {
|
||||
formData.append(entryName, entryValue);
|
||||
};
|
||||
|
||||
const decoder = new TextDecoder('utf-8');
|
||||
decoder.decode();
|
||||
|
||||
parser.onPartBegin = function () {
|
||||
parser.onPartData = onPartData;
|
||||
parser.onPartEnd = appendEntryToFormData;
|
||||
|
||||
headerField = '';
|
||||
headerValue = '';
|
||||
entryValue = '';
|
||||
entryName = '';
|
||||
contentType = '';
|
||||
filename = null;
|
||||
entryChunks.length = 0;
|
||||
};
|
||||
|
||||
parser.onHeaderField = function (ui8a) {
|
||||
headerField += decoder.decode(ui8a, {stream: true});
|
||||
};
|
||||
|
||||
parser.onHeaderValue = function (ui8a) {
|
||||
headerValue += decoder.decode(ui8a, {stream: true});
|
||||
};
|
||||
|
||||
parser.onHeaderEnd = function () {
|
||||
headerValue += decoder.decode();
|
||||
headerField = headerField.toLowerCase();
|
||||
|
||||
if (headerField === 'content-disposition') {
|
||||
// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
|
||||
const m = headerValue.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i);
|
||||
|
||||
if (m) {
|
||||
entryName = m[2] || m[3] || '';
|
||||
}
|
||||
|
||||
filename = _fileName(headerValue);
|
||||
|
||||
if (filename) {
|
||||
parser.onPartData = appendToFile;
|
||||
parser.onPartEnd = appendFileToFormData;
|
||||
}
|
||||
} else if (headerField === 'content-type') {
|
||||
contentType = headerValue;
|
||||
}
|
||||
|
||||
headerValue = '';
|
||||
headerField = '';
|
||||
};
|
||||
|
||||
for await (const chunk of Body) {
|
||||
parser.write(chunk);
|
||||
}
|
||||
|
||||
parser.end();
|
||||
|
||||
return formData;
|
||||
}
|
||||
|
||||
|
||||
/***/ })
|
||||
|
||||
};
|
||||
;
|
||||
//# sourceMappingURL=629.index.js.map
|
||||
dist/629.index.js.map (vendored, new file, 1 line)
File diff suppressed because one or more lines are too long
dist/codecov (vendored, 1883 lines)
File diff suppressed because it is too large
dist/index.js (vendored, 37742 lines, Normal file → Executable file)
File diff suppressed because one or more lines are too long
dist/index.js.map (vendored, new file, 1 line)
File diff suppressed because one or more lines are too long
dist/pgp_keys.asc (vendored, new file, 52 lines)
@@ -0,0 +1,52 @@
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
|
||||
mQINBGCsMn0BEACiCKZOhkbhUjb+obvhH49p3ShjJzU5b/GqAXSDhRhdXUq7ZoGq
|
||||
KEKCd7sQHrCf16Pi5UVacGIyE9hS93HwY15kMlLwM+lNeAeCglEscOjpCly1qUIr
|
||||
sN1wjkd2cwDXS6zHBJTqJ7wSOiXbZfTAeKhd6DuLEpmA+Rz4Yc+4qZP+fVxVG3Pv
|
||||
2v06m+E5CP/JQVQPO8HYi+S36hJImTh+zaDspu+VujSai5KzJ6YKmgwslVNIp5X5
|
||||
GnEr2uAh5w6UTnt9UQUjFFliAvQ3lPLWzm7DWs6AP9hslYxSWzwbzVF5qbOIjUJL
|
||||
KfoUpvCYDs2ObgRn8WUQO0ndkRCBIxhlF3HGGYWKQaCEsiom7lyi8VbAszmUCDjw
|
||||
HdbQHFmm5yHLpTXJbg+iaxQzKnhWVXzye5/x92IJmJswW81Ky346VxYdC1XFL/+Y
|
||||
zBaj9oMmV7WfRpdch09Gf4TgosMzWf3NjJbtKE5xkaghJckIgxwzcrRmF/RmCJue
|
||||
IMqZ8A5qUUlK7NBzj51xmAQ4BtkUa2bcCBRV/vP+rk9wcBWz2LiaW+7Mwlfr/C/Q
|
||||
Swvv/JW2LsQ4iWc1BY7m7ksn9dcdypEq/1JbIzVLCRDG7pbMj9yLgYmhe5TtjOM3
|
||||
ygk25584EhXSgUA3MZw+DIqhbHQBYgrKndTr2N/wuBQY62zZg1YGQByD4QARAQAB
|
||||
tEpDb2RlY292IFVwbG9hZGVyIChDb2RlY292IFVwbG9hZGVyIFZlcmlmaWNhdGlv
|
||||
biBLZXkpIDxzZWN1cml0eUBjb2RlY292LmlvPokCTgQTAQoAOBYhBCcDTn/bhQ4L
|
||||
vCxi/4Brsortd5hpBQJgrDJ9AhsDBQsJCAcCBhUKCQgLAgQWAgMBAh4BAheAAAoJ
|
||||
EIBrsortd5hpxLMP/3Fbgx5EG7zUUOqPZ+Ya9z8JlZFIkh3FxYMfMFE8jH9Es26F
|
||||
V2ZTJLO259MxM+5N0XzObi3h4XqIzBn42pDRfwtojY5wl2STJ9Bzu+ykPog7OB1u
|
||||
yfWXDRKcqPTUIxI1/WdU+c0/WNE6wjyzK+lRc1YUlp4pdNU7l+j2vKN+jGi2b6nV
|
||||
PTPRsMcwy3B90fKf5h2wNMNqO+KX/rjgpG9Uhej+xyFWkGM1tZDQQYFj+ugQUj61
|
||||
BMsQrUmxOnaVVnix21cHnACDCaxqgQZH3iZyEOKPNMsRFRP+0fLEnUMP+DVnQE6J
|
||||
Brk1Z+XhtjGI9PISQVx5KKDKscreS/D5ae2Cw/FUlQMf57kir6mkbZVhz2khtccz
|
||||
atD0r59WomNywIDyk1QfAKV0+O0WeJg8A69/Jk6yegsrUb5qEfkih/I38vvI0OVL
|
||||
BYve/mQIHuQo5ziBptNytCrN5TXHXzguX9GOW1V1+3DR+w/vXcnz67sjlYDysf1f
|
||||
JUZv9edZ2RGKW7agbrgOw2hB+zuWZ10tjoEcsaSGOLtKRGFDfmu/dBxzl8yopUpa
|
||||
Tn79QKOieleRm5+uCcKCPTeKV0GbhDntCZJ+Yiw6ZPmrpcjDowAoMQ9kiMVa10+Q
|
||||
WwwoaRWuqhf+dL6Q2OLFOxlyCDKVSyW0YF4Vrf3fKGyxKJmszAL+NS1mVcdxuQIN
|
||||
BGCsMn0BEADLrIesbpfdAfWRvUFDN+PoRfa0ROwa/JOMhEgVsowQuk9No8yRva/X
|
||||
VyiA6oCq6na7IvZXMxT7di4FWDjDtw5xHjbtFg336IJTGBcnzm7WIsjvyyw8kKfB
|
||||
8cvG7D2OkzAUF8SVXLarJ1zdBP/Dr1Nz6F/gJsx5+BM8wGHEz4DsdMRV7ZMTVh6b
|
||||
PaGuPZysPjSEw62R8MFJ1fSyDGCKJYwMQ/sKFzseNaY/kZVR5lq0dmhiYjNVQeG9
|
||||
HJ6ZCGSGT5PKNOwx/UEkT6jhvzWgfr2eFVGJTcdwSLEgIrJIDzP7myHGxuOiuCmJ
|
||||
ENgL1f7mzGkJ/hYXq1RWqsn1Fh2I9KZMHggqu4a+s3RiscmNcbIlIhJLXoE1bxZ/
|
||||
TfYZ9Aod6Bd5TsSMTZNwV2am9zelhDiFF60FWww/5nEbhm/X4suC9W86qWBxs3Kh
|
||||
vk1dxhElRjtgwUEHA5OFOO48ERHfR7COH719D/YmqLU3EybBgJbGoC/yjlGJxv0R
|
||||
kOMAiG2FneNKEZZihReh8A5Jt6jYrSoHFRwL6oJIZfLezB7Rdajx1uH7uYcUyIaE
|
||||
SiDWlkDw/IFM315NYFA8c1TCSIfnabUYaAxSLNFRmXnt+GQpm44qAK1x8EGhY633
|
||||
e5B4FWorIXx0tTmsVM4rkQ6IgAodeywKG+c2Ikd+5dQLFmb7dW/6CwARAQABiQI2
|
||||
BBgBCgAgFiEEJwNOf9uFDgu8LGL/gGuyiu13mGkFAmCsMn0CGwwACgkQgGuyiu13
|
||||
mGkYWxAAkzF64SVpYvY9nY/QSYikL8UHlyyqirs6eFZ3Mj9lMRpHM2Spn9a3c701
|
||||
0Ge4wDbRP2oftCyPP+p9pdUA77ifMTlRcoMYX8oXAuyE5RT2emBDiWvSR6hQQ8bZ
|
||||
WFNXal+bUPpaRiruCCUPD2b8Od1ftzLqbYOosxr/m5Du0uahgOuGw6zlGBJCVOo7
|
||||
UB2Y++oZ8P7oDGF722opepWQ+bl2a6TRMLNWWlj4UANknyjlhyZZ7PKhWLjoC6MU
|
||||
dAKcwQUdp+XYLc/3b00bvgju0e99QgHZMX2fN3d3ktdN5Q2fqiAi5R6BmCCO4ISF
|
||||
o5j10gGU/sdqGHvNhv5C21ibun7HEzMtxBhnhGmytfBJzrsj7GOReePsfTLoCoUq
|
||||
dFMOAVUDciVfRtL2m8cv42ZJOXtPfDjsFOf8AKJk40/tc8mMMqZP7RVBr9RWOoq5
|
||||
y9D37NfI6UB8rPZ6qs0a1Vfm8lIh2/k1AFECduXgftMDTsmmXOgXXS37HukGW7AL
|
||||
QKWiWJQF/XopkXwkyAYpyuyRMZ77oF7nuqLFnl5VVEiRo0Fwu45erebc6ccSwYZU
|
||||
8pmeSx7s0aJtxCZPSZEKZ3mn0BXOR32Cgs48CjzFWf6PKucTwOy/YO0/4Gt/upNJ
|
||||
3DyeINcYcKyD08DEIF9f5tLyoiD4xz+N23ltTBoMPyv4f3X/wCQ=
|
||||
=ch7z
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
||||
dist/sourcemap-register.js (vendored, new file, 1 line)
File diff suppressed because one or more lines are too long
@@ -1,8 +1,9 @@
#!/usr/bin/env bash

npm i --package-lock-only
npm run lint --fix
set -e

npm install
npm run lint
npm run build
git add src/
git add dist/
git add package-lock.json

@@ -1,4 +1,4 @@
module.exports = {
preset: 'ts-jest',
testEnvironment: 'node',
}
};
package-lock.json (generated, 12419 lines)
File diff suppressed because it is too large
package.json (42 lines)
@@ -1,45 +1,43 @@
{
"name": "codecov-action",
"version": "1.5.2",
"version": "4.5.0",
"description": "Upload coverage reports to Codecov from GitHub Actions",
"main": "index.js",
"scripts": {
"lint": "eslint src/*.*",
"test": "yarn run test-script && yarn run test-calculator && yarn run test-coverage",
"build": "ncc build src/index.ts --source-map",
"lint": "eslint src/**/*.ts",
"test": "npm run test-script && npm run test-calculator && npm run test-coverage",
"test-calculator": "jest --testPathPattern=demo/calculator/ --coverage --coverageDirectory=coverage/calculator",
"test-coverage": "jest --testPathPattern=demo/coverage-test/ --coverage --coverageDirectory=coverage/coverage-test",
"test-script": "jest --testPathPattern=src/ --coverage --coverageDirectory=coverage/script",
"build": "ncc build src/index.ts"
"test-script": "jest --testPathPattern=src/ --coverage --coverageDirectory=coverage/script"
},
"repository": {
"type": "git",
"url": "git+https://github.com/codecov/codecov-action.git"
},
"keywords": [],
"author": "Ibrahim Ali",
"author": "Codecov",
"license": "MIT",
"bugs": {
"url": "https://github.com/codecov/codecov-action/issues"
},
"homepage": "https://github.com/codecov/codecov-action#readme",
"dependencies": {
"@actions/core": "^1.4.0",
"@actions/exec": "^1.1.0",
"@actions/github": "^5.0.0",
"@types/jest": "^26.0.23",
"@zeit/ncc": "^0.22.3",
"fs": "0.0.1-security",
"jest": "^26.6.3",
"jest-junit": "^12.1.0",
"request": "^2.88.2",
"ts-jest": "^26.5.6",
"typescript": "^4.3.2",
"yarn": "^1.22.10"
"@actions/core": "^1.10.1",
"@actions/exec": "^1.1.1",
"@actions/github": "^6.0.0",
"undici": "5.28.4"
},
"devDependencies": {
"@typescript-eslint/eslint-plugin": "^4.26.1",
"@typescript-eslint/parser": "^4.26.1",
"eslint": "^7.28.0",
"eslint-config-google": "^0.14.0"
"@types/jest": "^29.5.12",
"@typescript-eslint/eslint-plugin": "^7.13.0",
"@typescript-eslint/parser": "^7.13.0",
"@vercel/ncc": "^0.38.1",
"eslint": "^8.57.0",
"eslint-config-google": "^0.14.0",
"jest": "^29.7.0",
"jest-junit": "^16.0.0",
"ts-jest": "^29.1.4",
"typescript": "^5.4.5"
}
}
@@ -1,88 +1,119 @@
|
||||
import buildExec from './buildExec';
|
||||
const github = require('@actions/github');
|
||||
import * as github from '@actions/github';
|
||||
|
||||
import VERSION from './version';
|
||||
import {
|
||||
buildCommitExec,
|
||||
buildGeneralExec,
|
||||
buildReportExec,
|
||||
buildUploadExec,
|
||||
} from './buildExec';
|
||||
|
||||
const context = github.context;
|
||||
|
||||
test('no arguments', () => {
|
||||
const {execArgs, filepath, failCi} = buildExec();
|
||||
let OLDOS = process.env.RUNNER_OS;
|
||||
|
||||
const args = [
|
||||
'codecov.sh',
|
||||
'-n',
|
||||
'',
|
||||
'-F',
|
||||
'',
|
||||
'-Q',
|
||||
`github-action-${VERSION}`,
|
||||
];
|
||||
if (context.eventName == 'pull_request') {
|
||||
args.push('-C', `${context.payload.pull_request.head.sha}`);
|
||||
}
|
||||
expect(execArgs).toEqual(args);
|
||||
expect(filepath).toEqual('codecov.sh');
|
||||
expect(failCi).toBeFalsy();
|
||||
beforeEach(() => {
|
||||
jest.resetModules();
|
||||
OLDOS = process.env.RUNNER_OS;
|
||||
});
|
||||
|
||||
test('all arguments', () => {
|
||||
const envs = {
|
||||
'move_coverage_to_trash': 'true',
|
||||
'commit_parent': '83231650328f11695dfb754ca0f540516f188d27',
|
||||
'aws_curl_args': '--timeout 1',
|
||||
'codecov_curl_args': '--timeout 2',
|
||||
'env_vars': 'OS,PYTHON',
|
||||
'fail_ci_if_error': 'true',
|
||||
'file': 'coverage.xml',
|
||||
'files': 'dir1/coverage.xml,dir2/coverage.xml',
|
||||
'flags': 'test',
|
||||
'functionalities':
|
||||
'gcov,coveragepy,fix,search,code,network,gcovout,html,recursesubs',
|
||||
'gcov_args': '--timeout 3',
|
||||
'gcov_root_dr': 'gcov_dir/',
|
||||
'gcov_path_exclude': '**/exclude-dir/*.*',
|
||||
'gcov_executable': 'gcov',
|
||||
'gcov_path_include': '**/include-dir/*.*',
|
||||
'gcov_prefix': 'demo',
|
||||
'name': 'codecov',
|
||||
'network_filter': 'dir1',
|
||||
'override_branch': 'thomasrockhu/test',
|
||||
'override_build': '1',
|
||||
'override_commit': '9caabca5474b49de74ef5667deabaf74cdacc244',
|
||||
'override_pr': '2',
|
||||
'override_tag': 'v1.2',
|
||||
'root_dir': 'root/',
|
||||
'directory': 'coverage/',
|
||||
'token': 'd3859757-ab80-4664-924d-aef22fa7557b',
|
||||
'verbose': 't',
|
||||
'working-directory': 'src',
|
||||
'path_to_write_report': 'codecov/',
|
||||
'xcode_derived_data': '~/Library/Developer/Xcode/DerivedData',
|
||||
'xcode_package': 'MyApp',
|
||||
};
|
||||
afterAll(() => {
|
||||
process.env.RUNNER_OS = OLDOS;
|
||||
});
|
||||
|
||||
test('general args', async () => {
|
||||
const envs = {
|
||||
codecov_yml_path: 'dev/codecov.yml',
|
||||
url: 'https://codecov.enterprise.com',
|
||||
verbose: 't',
|
||||
};
|
||||
for (const env of Object.keys(envs)) {
|
||||
process.env['INPUT_' + env.toUpperCase()] = envs[env];
|
||||
}
|
||||
|
||||
const {execArgs, filepath, failCi} = buildExec();
|
||||
expect(execArgs).toEqual([
|
||||
'src/codecov.sh',
|
||||
'-n',
|
||||
'codecov',
|
||||
'-F',
|
||||
'test',
|
||||
'-Q',
|
||||
`github-action-${VERSION}`,
|
||||
'-c',
|
||||
'-N',
|
||||
'83231650328f11695dfb754ca0f540516f188d27',
|
||||
'-A',
|
||||
'--timeout 1',
|
||||
'-U',
|
||||
'--timeout 2',
|
||||
const {args, verbose} = await buildGeneralExec();
|
||||
|
||||
expect(args).toEqual(
|
||||
expect.arrayContaining([
|
||||
'--codecov-yml-path',
|
||||
'dev/codecov.yml',
|
||||
'--enterprise-url',
|
||||
'https://codecov.enterprise.com',
|
||||
'-v',
|
||||
]));
|
||||
expect(verbose).toBeTruthy();
|
||||
for (const env of Object.keys(envs)) {
|
||||
delete process.env['INPUT_' + env.toUpperCase()];
|
||||
}
|
||||
});
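The setup loop in the test above works because @actions/core resolves core.getInput('x') from an INPUT_X environment variable. A minimal sketch of that mechanism using one of the same inputs (illustrative, not part of this diff):

import * as core from '@actions/core';

// what the test setup loop does:
process.env['INPUT_CODECOV_YML_PATH'] = 'dev/codecov.yml';

// what buildGeneralExec() then sees:
const ymlPath = core.getInput('codecov_yml_path');  // -> 'dev/codecov.yml'
core.info(`codecov_yml_path=${ymlPath}`);

// and what the teardown loop undoes:
delete process.env['INPUT_CODECOV_YML_PATH'];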
|
||||
|
||||
test('upload args using context', async () => {
|
||||
const expectedArgs = [
|
||||
'--git-service',
|
||||
'github',
|
||||
];
|
||||
const {uploadExecArgs, uploadCommand} = await buildUploadExec();
|
||||
if (context.eventName == 'pull_request') {
|
||||
expectedArgs.push('-C', `${context.payload.pull_request?.head.sha}`);
|
||||
}
|
||||
if (context.eventName == 'pull_request_target') {
|
||||
expectedArgs.push('-P', `${context.payload.number}`);
|
||||
}
|
||||
|
||||
expect(uploadExecArgs).toEqual(expectedArgs);
|
||||
expect(uploadCommand).toEqual('do-upload');
|
||||
});
|
||||
|
||||
test('upload args', async () => {
|
||||
const envs = {
|
||||
'codecov_yml_path': 'dev/codecov.yml',
|
||||
'commit_parent': 'fakeparentcommit',
|
||||
'directory': 'coverage/',
|
||||
'disable_file_fixes': 'true',
|
||||
'disable_search': 'true',
|
||||
'dry_run': 'true',
|
||||
'env_vars': 'OS,PYTHON',
|
||||
'exclude': 'node_modules/',
|
||||
'fail_ci_if_error': 'true',
|
||||
'file': 'coverage.xml',
|
||||
'files': 'dir1/coverage.xml,dir2/coverage.xml,',
|
||||
'flags': 'test,test2',
|
||||
'git_service': 'github_enterprise',
|
||||
'handle_no_reports_found': 'true',
|
||||
'job_code': '32',
|
||||
'name': 'codecov',
|
||||
'os': 'macos',
|
||||
'override_branch': 'thomasrockhu/test',
|
||||
'override_build': '1',
|
||||
'override_build_url': 'https://example.com/build/2',
|
||||
'override_commit': '9caabca5474b49de74ef5667deabaf74cdacc244',
|
||||
'override_pr': '2',
|
||||
'network_filter': 'subA/',
|
||||
'network_prefix': 'forA/',
|
||||
'plugin': 'xcode',
|
||||
'plugins': 'pycoverage,compress-pycoverage',
|
||||
'report_code': 'testCode',
|
||||
'root_dir': 'root/',
|
||||
'slug': 'fakeOwner/fakeRepo',
|
||||
'token': 'd3859757-ab80-4664-924d-aef22fa7557b',
|
||||
'url': 'https://enterprise.example.com',
|
||||
'use_legacy_upload_endpoint': 'true',
|
||||
'verbose': 'true',
|
||||
'version': '0.1.2',
|
||||
'working-directory': 'src',
|
||||
};
|
||||
for (const env of Object.keys(envs)) {
|
||||
process.env['INPUT_' + env.toUpperCase()] = envs[env];
|
||||
}
|
||||
|
||||
const {uploadExecArgs, uploadCommand} = await buildUploadExec();
|
||||
const expectedArgs = [
|
||||
'--disable-file-fixes',
|
||||
'--disable-search',
|
||||
'-d',
|
||||
'-e',
|
||||
'OS,PYTHON',
|
||||
'--exclude',
|
||||
'node_modules/',
|
||||
'-Z',
|
||||
'-f',
|
||||
'coverage.xml',
|
||||
@@ -90,62 +121,189 @@ test('all arguments', () => {
|
||||
'dir1/coverage.xml',
|
||||
'-f',
|
||||
'dir2/coverage.xml',
|
||||
'-X',
|
||||
'gcov',
|
||||
'-X',
|
||||
'coveragepy',
|
||||
'-X',
|
||||
'fix',
|
||||
'-X',
|
||||
'search',
|
||||
'-X',
|
||||
'code',
|
||||
'-X',
|
||||
'network',
|
||||
'-X',
|
||||
'gcovout',
|
||||
'-X',
|
||||
'html',
|
||||
'-X',
|
||||
'recursesubs',
|
||||
'-a',
|
||||
'--timeout 3',
|
||||
'-g',
|
||||
'**/exclude-dir/*.*',
|
||||
'-x',
|
||||
'gcov',
|
||||
'-G',
|
||||
'**/include-dir/*.*',
|
||||
'-k',
|
||||
'demo',
|
||||
'-i',
|
||||
'dir1',
|
||||
'-F',
|
||||
'test',
|
||||
'-F',
|
||||
'test2',
|
||||
'--git-service',
|
||||
'github_enterprise',
|
||||
'--handle-no-reports-found',
|
||||
'--job-code',
|
||||
'32',
|
||||
'-n',
|
||||
'codecov',
|
||||
'--network-filter',
|
||||
'subA/',
|
||||
'--network-prefix',
|
||||
'forA/',
|
||||
'-B',
|
||||
'thomasrockhu/test',
|
||||
'-b',
|
||||
'1',
|
||||
'--build-url',
|
||||
'https://example.com/build/2',
|
||||
'-C',
|
||||
'9caabca5474b49de74ef5667deabaf74cdacc244',
|
||||
'-P',
|
||||
'2',
|
||||
'-T',
|
||||
'v1.2',
|
||||
'-R',
|
||||
'--plugin',
|
||||
'xcode',
|
||||
'--plugin',
|
||||
'pycoverage',
|
||||
'--plugin',
|
||||
'compress-pycoverage',
|
||||
'--report-code',
|
||||
'testCode',
|
||||
'--network-root-folder',
|
||||
'root/',
|
||||
'-s',
|
||||
'coverage/',
|
||||
'-v',
|
||||
'-q',
|
||||
'codecov/',
|
||||
'-D',
|
||||
'~/Library/Developer/Xcode/DerivedData',
|
||||
'-J',
|
||||
'MyApp',
|
||||
]);
|
||||
expect(filepath).toEqual('src/codecov.sh');
|
||||
expect(failCi).toBeTruthy();
|
||||
'-r',
|
||||
'fakeOwner/fakeRepo',
|
||||
'--legacy',
|
||||
];
|
||||
|
||||
expect(uploadExecArgs).toEqual(expectedArgs);
|
||||
expect(uploadCommand).toEqual('do-upload');
|
||||
for (const env of Object.keys(envs)) {
|
||||
delete process.env['INPUT_' + env.toUpperCase()];
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
test('report args', async () => {
|
||||
const envs = {
|
||||
git_service: 'github_enterprise',
|
||||
override_commit: '9caabca5474b49de74ef5667deabaf74cdacc244',
|
||||
override_pr: 'fakePR',
|
||||
slug: 'fakeOwner/fakeRepo',
|
||||
token: 'd3859757-ab80-4664-924d-aef22fa7557b',
|
||||
fail_ci_if_error: 'true',
|
||||
};
|
||||
for (const env of Object.keys(envs)) {
|
||||
process.env['INPUT_' + env.toUpperCase()] = envs[env];
|
||||
}
|
||||
|
||||
const {reportExecArgs, reportCommand} = await buildReportExec();
|
||||
|
||||
const expectedArgs = [
|
||||
'--git-service',
|
||||
'github_enterprise',
|
||||
'-C',
|
||||
'9caabca5474b49de74ef5667deabaf74cdacc244',
|
||||
'-P',
|
||||
'fakePR',
|
||||
'--slug',
|
||||
'fakeOwner/fakeRepo',
|
||||
'-Z',
|
||||
];
|
||||
|
||||
expect(reportExecArgs).toEqual(expectedArgs);
|
||||
expect(reportCommand).toEqual('create-report');
|
||||
for (const env of Object.keys(envs)) {
|
||||
delete process.env['INPUT_' + env.toUpperCase()];
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
test('report args using context', async () => {
|
||||
const envs = {
|
||||
token: 'd3859757-ab80-4664-924d-aef22fa7557b',
|
||||
};
|
||||
for (const env of Object.keys(envs)) {
|
||||
process.env['INPUT_' + env.toUpperCase()] = envs[env];
|
||||
}
|
||||
const expectedArgs : string[] = [
|
||||
'--git-service',
|
||||
'github',
|
||||
];
|
||||
if (context.eventName == 'pull_request') {
|
||||
expectedArgs.push('-C', `${context.payload.pull_request?.head.sha}`);
|
||||
}
|
||||
|
||||
const {reportExecArgs, reportCommand} = await buildReportExec();
|
||||
|
||||
expect(reportExecArgs).toEqual(expectedArgs);
|
||||
expect(reportCommand).toEqual('create-report');
|
||||
for (const env of Object.keys(envs)) {
|
||||
delete process.env['INPUT_' + env.toUpperCase()];
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
test('commit args', async () => {
|
||||
const envs = {
|
||||
git_service: 'github_enterprise',
|
||||
commit_parent: '83231650328f11695dfb754ca0f540516f188d27',
|
||||
override_branch: 'thomasrockhu/test',
|
||||
override_commit: '9caabca5474b49de74ef5667deabaf74cdacc244',
|
||||
override_pr: '2',
|
||||
slug: 'fakeOwner/fakeRepo',
|
||||
token: 'd3859757-ab80-4664-924d-aef22fa7557b',
|
||||
fail_ci_if_error: 'true',
|
||||
};
|
||||
for (const env of Object.keys(envs)) {
|
||||
process.env['INPUT_' + env.toUpperCase()] = envs[env];
|
||||
}
|
||||
|
||||
const {commitExecArgs, commitCommand} = await buildCommitExec();
|
||||
const expectedArgs = [
|
||||
'--parent-sha',
|
||||
'83231650328f11695dfb754ca0f540516f188d27',
|
||||
'--git-service',
|
||||
'github_enterprise',
|
||||
'-B',
|
||||
'thomasrockhu/test',
|
||||
'-C',
|
||||
'9caabca5474b49de74ef5667deabaf74cdacc244',
|
||||
'--pr',
|
||||
'2',
|
||||
'--slug',
|
||||
'fakeOwner/fakeRepo',
|
||||
'-Z',
|
||||
];
|
||||
|
||||
expect(commitExecArgs).toEqual(expectedArgs);
|
||||
expect(commitCommand).toEqual('create-commit');
|
||||
for (const env of Object.keys(envs)) {
|
||||
delete process.env['INPUT_' + env.toUpperCase()];
|
||||
}
|
||||
});
|
||||
|
||||
test('commit args using context', async () => {
|
||||
const expectedArgs :string[] = [
|
||||
'--git-service',
|
||||
'github',
|
||||
];
|
||||
|
||||
const {commitExecArgs, commitCommand} = await buildCommitExec();
|
||||
if (context.eventName == 'pull_request') {
|
||||
expectedArgs.push('-C', `${context.payload.pull_request?.head.sha}`);
|
||||
}
|
||||
if (context.eventName == 'pull_request_target') {
|
||||
expectedArgs.push('-P', `${context.payload.number}`);
|
||||
}
|
||||
|
||||
expect(commitExecArgs).toEqual(expectedArgs);
|
||||
expect(commitCommand).toEqual('create-commit');
|
||||
});
|
||||
|
||||
test('commit args using github server url', async () => {
|
||||
const expectedArgs :string[] = [
|
||||
'--git-service',
|
||||
'github_enterprise',
|
||||
];
|
||||
|
||||
process.env.GITHUB_SERVER_URL = 'https://example.com';
|
||||
|
||||
const {commitExecArgs, commitCommand} = await buildCommitExec();
|
||||
if (context.eventName == 'pull_request') {
|
||||
expectedArgs.push('-C', `${context.payload.pull_request?.head.sha}`);
|
||||
}
|
||||
if (context.eventName == 'pull_request_target') {
|
||||
expectedArgs.push('-P', `${context.payload.number}`);
|
||||
}
|
||||
|
||||
expect(commitExecArgs).toEqual(expectedArgs);
|
||||
expect(commitCommand).toEqual('create-commit');
|
||||
});
|
||||
|
||||
src/buildExec.ts (412 lines)
@@ -1,11 +1,13 @@
|
||||
const core = require('@actions/core');
|
||||
const github = require('@actions/github');
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
|
||||
import VERSION from './version';
|
||||
import * as core from '@actions/core';
|
||||
import * as github from '@actions/github';
|
||||
|
||||
import {setFailure} from './helpers';
|
||||
|
||||
const context = github.context;
|
||||
|
||||
const isTrue = (variable) => {
|
||||
const isTrue = (variable: string): boolean => {
|
||||
const lowercase = variable.toLowerCase();
|
||||
return (
|
||||
lowercase === '1' ||
|
||||
@@ -16,54 +18,258 @@ const isTrue = (variable) => {
|
||||
);
|
||||
};
|
||||
|
||||
const buildExec = () => {
|
||||
const clean = core.getInput('move_coverage_to_trash');
|
||||
const getGitService = (): string => {
|
||||
const overrideGitService = core.getInput('git_service');
|
||||
const serverUrl = process.env.GITHUB_SERVER_URL;
|
||||
if (overrideGitService) {
|
||||
return overrideGitService;
|
||||
} else if (serverUrl !== undefined && serverUrl !== 'https://github.com') {
|
||||
return 'github_enterprise';
|
||||
}
|
||||
return 'github';
|
||||
};
|
||||
|
||||
const isPullRequestFromFork = (): boolean => {
|
||||
core.info(`eventName: ${context.eventName}`);
|
||||
if (
|
||||
`${context.eventName}` !== 'pull_request' &&
|
||||
`${context.eventName}` !== 'pull_request_target'
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const baseLabel = context.payload.pull_request.base.label;
|
||||
const headLabel = context.payload.pull_request.head.label;
|
||||
|
||||
core.info(`baseRef: ${baseLabel} | headRef: ${headLabel}`);
|
||||
return (baseLabel.split(':')[0] !== headLabel.split(':')[0]);
|
||||
};
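A fork is detected purely by comparing the owner prefix of the base and head labels; a small worked example (owner and branch names made up):

// pull request labels have the form '<owner>:<branch>'
const baseLabel = 'codecov:main';          // where the PR merges into
const headLabel = 'some-user:fix-typo';    // where the PR comes from

const fromFork = baseLabel.split(':')[0] !== headLabel.split(':')[0];
console.log(fromFork);  // true, so getToken() below falls back to tokenless upload when no token is set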
|
||||
|
||||
const getToken = async (): Promise<string> => {
|
||||
let token = core.getInput('token');
|
||||
if (!token && isPullRequestFromFork()) {
|
||||
core.info('==> Fork detected, tokenless uploading used');
|
||||
process.env['TOKENLESS'] = context.payload.pull_request.head.label;
|
||||
return Promise.resolve('');
|
||||
}
|
||||
let url = core.getInput('url');
|
||||
const useOIDC = isTrue(core.getInput('use_oidc'));
|
||||
if (useOIDC) {
|
||||
if (!url) {
|
||||
url = 'https://codecov.io';
|
||||
}
|
||||
try {
|
||||
token = await core.getIDToken(url);
|
||||
return token;
|
||||
} catch (err) {
|
||||
setFailure(
|
||||
`Codecov: Failed to get OIDC token with url: ${url}. ${err.message}`,
|
||||
true,
|
||||
);
|
||||
}
|
||||
}
|
||||
return token;
|
||||
};
|
||||
|
||||
const buildCommitExec = async (): Promise<{
|
||||
commitExecArgs: any[];
|
||||
commitOptions: any;
|
||||
commitCommand: string;
|
||||
}> => {
|
||||
const commitParent = core.getInput('commit_parent');
|
||||
const curlAwsArgs = core.getInput('aws_curl_args');
|
||||
const curlCodecovArgs = core.getInput('codecov_curl_args');
|
||||
const gitService = getGitService();
|
||||
const overrideBranch = core.getInput('override_branch');
|
||||
const overrideCommit = core.getInput('override_commit');
|
||||
const overridePr = core.getInput('override_pr');
|
||||
const slug = core.getInput('slug');
|
||||
const token = await getToken();
|
||||
const failCi = isTrue(core.getInput('fail_ci_if_error'));
|
||||
const workingDir = core.getInput('working-directory');
|
||||
|
||||
const commitCommand = 'create-commit';
|
||||
const commitExecArgs = [];
|
||||
|
||||
const commitOptions:any = {};
|
||||
commitOptions.env = Object.assign(process.env, {
|
||||
GITHUB_ACTION: process.env.GITHUB_ACTION,
|
||||
GITHUB_RUN_ID: process.env.GITHUB_RUN_ID,
|
||||
GITHUB_REF: process.env.GITHUB_REF,
|
||||
GITHUB_REPOSITORY: process.env.GITHUB_REPOSITORY,
|
||||
GITHUB_SHA: process.env.GITHUB_SHA,
|
||||
GITHUB_HEAD_REF: process.env.GITHUB_HEAD_REF || '',
|
||||
});
|
||||
|
||||
|
||||
if (token) {
|
||||
commitOptions.env.CODECOV_TOKEN = token;
|
||||
}
|
||||
if (commitParent) {
|
||||
commitExecArgs.push('--parent-sha', `${commitParent}`);
|
||||
}
|
||||
commitExecArgs.push('--git-service', `${gitService}`);
|
||||
|
||||
if (overrideBranch) {
|
||||
commitExecArgs.push('-B', `${overrideBranch}`);
|
||||
}
|
||||
if (overrideCommit) {
|
||||
commitExecArgs.push('-C', `${overrideCommit}`);
|
||||
} else if (
|
||||
`${context.eventName}` == 'pull_request' ||
|
||||
`${context.eventName}` == 'pull_request_target'
|
||||
) {
|
||||
commitExecArgs.push('-C', `${context.payload.pull_request.head.sha}`);
|
||||
}
|
||||
if (overridePr) {
|
||||
commitExecArgs.push('--pr', `${overridePr}`);
|
||||
} else if (
|
||||
`${context.eventName}` == 'pull_request_target'
|
||||
) {
|
||||
commitExecArgs.push('--pr', `${context.payload.number}`);
|
||||
}
|
||||
if (slug) {
|
||||
commitExecArgs.push('--slug', `${slug}`);
|
||||
}
|
||||
if (failCi) {
|
||||
commitExecArgs.push('-Z');
|
||||
}
|
||||
if (workingDir) {
|
||||
commitOptions.cwd = workingDir;
|
||||
}
|
||||
|
||||
|
||||
return {commitExecArgs, commitOptions, commitCommand};
|
||||
};
|
||||
|
||||
const buildGeneralExec = (): {
|
||||
args: any[];
|
||||
verbose: boolean;
|
||||
} => {
|
||||
const codecovYmlPath = core.getInput('codecov_yml_path');
|
||||
const url = core.getInput('url');
|
||||
const verbose = isTrue(core.getInput('verbose'));
|
||||
const args = [];
|
||||
|
||||
if (codecovYmlPath) {
|
||||
args.push('--codecov-yml-path', `${codecovYmlPath}`);
|
||||
}
|
||||
if (url) {
|
||||
args.push('--enterprise-url', `${url}`);
|
||||
}
|
||||
if (verbose) {
|
||||
args.push('-v');
|
||||
}
|
||||
return {args, verbose};
|
||||
};
|
||||
|
||||
const buildReportExec = async (): Promise<{
|
||||
reportExecArgs: any[];
|
||||
reportOptions: any;
|
||||
reportCommand: string;
|
||||
}> => {
|
||||
const gitService = getGitService();
|
||||
const overrideCommit = core.getInput('override_commit');
|
||||
const overridePr = core.getInput('override_pr');
|
||||
const slug = core.getInput('slug');
|
||||
const token = await getToken();
|
||||
const failCi = isTrue(core.getInput('fail_ci_if_error'));
|
||||
const workingDir = core.getInput('working-directory');
|
||||
|
||||
|
||||
const reportCommand = 'create-report';
|
||||
const reportExecArgs = [];
|
||||
|
||||
const reportOptions:any = {};
|
||||
reportOptions.env = Object.assign(process.env, {
|
||||
GITHUB_ACTION: process.env.GITHUB_ACTION,
|
||||
GITHUB_RUN_ID: process.env.GITHUB_RUN_ID,
|
||||
GITHUB_REF: process.env.GITHUB_REF,
|
||||
GITHUB_REPOSITORY: process.env.GITHUB_REPOSITORY,
|
||||
GITHUB_SHA: process.env.GITHUB_SHA,
|
||||
GITHUB_HEAD_REF: process.env.GITHUB_HEAD_REF || '',
|
||||
});
|
||||
|
||||
|
||||
if (token) {
|
||||
reportOptions.env.CODECOV_TOKEN = token;
|
||||
}
|
||||
reportExecArgs.push('--git-service', `${gitService}`);
|
||||
|
||||
if (overrideCommit) {
|
||||
reportExecArgs.push('-C', `${overrideCommit}`);
|
||||
} else if (
|
||||
`${context.eventName}` == 'pull_request' ||
|
||||
`${context.eventName}` == 'pull_request_target'
|
||||
) {
|
||||
reportExecArgs.push('-C', `${context.payload.pull_request.head.sha}`);
|
||||
}
|
||||
if (overridePr) {
|
||||
reportExecArgs.push('-P', `${overridePr}`);
|
||||
} else if (
|
||||
`${context.eventName}` == 'pull_request_target'
|
||||
) {
|
||||
reportExecArgs.push('-P', `${context.payload.number}`);
|
||||
}
|
||||
if (slug) {
|
||||
reportExecArgs.push('--slug', `${slug}`);
|
||||
}
|
||||
if (failCi) {
|
||||
reportExecArgs.push('-Z');
|
||||
}
|
||||
if (workingDir) {
|
||||
reportOptions.cwd = workingDir;
|
||||
}
|
||||
|
||||
return {reportExecArgs, reportOptions, reportCommand};
|
||||
};
|
||||
|
||||
const buildUploadExec = async (): Promise<{
|
||||
uploadExecArgs: any[];
|
||||
uploadOptions: any;
|
||||
disableSafeDirectory: boolean;
|
||||
failCi: boolean;
|
||||
os: string;
|
||||
uploaderVersion: string;
|
||||
uploadCommand: string;
|
||||
}> => {
|
||||
const disableFileFixes = isTrue(core.getInput('disable_file_fixes'));
|
||||
const disableSafeDirectory = isTrue(core.getInput('disable_safe_directory'));
|
||||
const disableSearch = isTrue(core.getInput('disable_search'));
|
||||
const dryRun = isTrue(core.getInput('dry_run'));
|
||||
const envVars = core.getInput('env_vars');
|
||||
const exclude = core.getInput('exclude');
|
||||
const failCi = isTrue(core.getInput('fail_ci_if_error'));
|
||||
const file = core.getInput('file');
|
||||
const files = core.getInput('files');
|
||||
const flags = core.getInput('flags');
|
||||
const functionalities = core.getInput('functionalities');
|
||||
const gcovArgs = core.getInput('gcov_args');
|
||||
const gcovDir = core.getInput('gcov_root_dir');
|
||||
const gcovExclude = core.getInput('gcov_path_exclude');
|
||||
const gcovExec = core.getInput('gcov_executable');
|
||||
const gcovInclude = core.getInput('gcov_path_include');
|
||||
const gcovPrefix = core.getInput('gcov_prefix');
|
||||
const gitService = getGitService();
|
||||
const handleNoReportsFound = isTrue(core.getInput('handle_no_reports_found'));
|
||||
const jobCode = core.getInput('job_code');
|
||||
const name = core.getInput('name');
|
||||
const networkFilter = core.getInput('network_filter');
|
||||
const networkPrefix = core.getInput('network_prefix');
|
||||
const os = core.getInput('os');
|
||||
const overrideBranch = core.getInput('override_branch');
|
||||
const overrideBuild = core.getInput('override_build');
|
||||
const overrideBuildUrl = core.getInput('override_build_url');
|
||||
const overrideCommit = core.getInput('override_commit');
|
||||
const overridePr = core.getInput('override_pr');
|
||||
const overrideTag = core.getInput('override_tag');
|
||||
const plugin = core.getInput('plugin');
|
||||
const plugins = core.getInput('plugins');
|
||||
const reportCode = core.getInput('report_code');
|
||||
const rootDir = core.getInput('root_dir');
|
||||
const searchDir = core.getInput('directory');
|
||||
const token = core.getInput('token');
|
||||
const verbose = isTrue(core.getInput('verbose'));
|
||||
const workingDir = core.getInput('working-directory');
|
||||
const writePath = core.getInput('path_to_write_report');
|
||||
const xcodeDerivedData = core.getInput('xcode_derived_data');
|
||||
const xcodePackage = core.getInput('xcode_package');
|
||||
|
||||
const filepath = workingDir ?
|
||||
workingDir + '/codecov.sh' : 'codecov.sh';
|
||||
|
||||
const execArgs = [filepath];
|
||||
execArgs.push(
|
||||
'-n',
|
||||
`${name}`,
|
||||
'-F',
|
||||
`${flags}`,
|
||||
'-Q',
|
||||
`github-action-${VERSION}`,
|
||||
const slug = core.getInput('slug');
|
||||
const token = await getToken();
|
||||
let uploaderVersion = core.getInput('version');
|
||||
const useLegacyUploadEndpoint = isTrue(
|
||||
core.getInput('use_legacy_upload_endpoint'),
|
||||
);
|
||||
const workingDir = core.getInput('working-directory');
|
||||
|
||||
const options:any = {};
|
||||
options.env = Object.assign(process.env, {
|
||||
const uploadExecArgs = [];
|
||||
const uploadCommand = 'do-upload';
|
||||
const uploadOptions:any = {};
|
||||
uploadOptions.env = Object.assign(process.env, {
|
||||
GITHUB_ACTION: process.env.GITHUB_ACTION,
|
||||
GITHUB_RUN_ID: process.env.GITHUB_RUN_ID,
|
||||
GITHUB_REF: process.env.GITHUB_REF,
|
||||
@@ -76,113 +282,131 @@ const buildExec = () => {
for (const envVar of envVars.split(',')) {
const envVarClean = envVar.trim();
if (envVarClean) {
options.env[envVarClean] = process.env[envVarClean];
uploadOptions.env[envVarClean] = process.env[envVarClean];
envVarsArg.push(envVarClean);
}
}

if (token) {
options.env.CODECOV_TOKEN = token;
uploadOptions.env.CODECOV_TOKEN = token;
}
if (clean) {
execArgs.push('-c');
if (disableFileFixes) {
uploadExecArgs.push('--disable-file-fixes');
}
if (commitParent) {
execArgs.push('-N', `${commitParent}`);
if (disableSearch) {
uploadExecArgs.push('--disable-search');
}
if (curlAwsArgs) {
execArgs.push('-A', `${curlAwsArgs}`);
}
if (curlCodecovArgs) {
execArgs.push('-U', `${curlCodecovArgs}`);
if (dryRun) {
uploadExecArgs.push('-d');
}
if (envVarsArg.length) {
execArgs.push('-e', envVarsArg.join(','));
uploadExecArgs.push('-e', envVarsArg.join(','));
}
if (exclude) {
uploadExecArgs.push('--exclude', `${exclude}`);
}
if (failCi) {
execArgs.push('-Z');
uploadExecArgs.push('-Z');
}
if (file) {
execArgs.push('-f', `${file}`);
uploadExecArgs.push('-f', `${file}`);
}
if (files) {
files.split(',').forEach((f) => {
execArgs.push('-f', `${f}`);
files.split(',').map((f) => f.trim()).forEach((f) => {
if (f.length > 0) { // this handles trailing commas
uploadExecArgs.push('-f', `${f}`);
}
});
}
if (functionalities) {
functionalities.split(',').forEach((f) => {
execArgs.push('-X', `${f}`);
if (flags) {
flags.split(',').map((f) => f.trim()).forEach((f) => {
uploadExecArgs.push('-F', `${f}`);
});
}
if (gcovArgs) {
execArgs.push('-a', `${gcovArgs}`);
uploadExecArgs.push('--git-service', `${gitService}`);
if (handleNoReportsFound) {
uploadExecArgs.push('--handle-no-reports-found');
}
if (gcovDir) {
execArgs.push('-p', `${gcovDir}`);
if (jobCode) {
uploadExecArgs.push('--job-code', `${jobCode}`);
}
if (gcovExclude) {
execArgs.push('-g', `${gcovExclude}`);
}
if (gcovExec) {
execArgs.push('-x', `${gcovExec}`);
}
if (gcovInclude) {
execArgs.push('-G', `${gcovInclude}`);
}
if (gcovPrefix) {
execArgs.push('-k', `${gcovPrefix}`);
if (name) {
uploadExecArgs.push('-n', `${name}`);
}
if (networkFilter) {
execArgs.push('-i', `${networkFilter}`);
uploadExecArgs.push('--network-filter', `${networkFilter}`);
}
if (networkPrefix) {
uploadExecArgs.push('--network-prefix', `${networkPrefix}`);
}
if (overrideBranch) {
execArgs.push('-B', `${overrideBranch}`);
uploadExecArgs.push('-B', `${overrideBranch}`);
}
if (overrideBuild) {
execArgs.push('-b', `${overrideBuild}`);
uploadExecArgs.push('-b', `${overrideBuild}`);
}
if (overrideBuildUrl) {
uploadExecArgs.push('--build-url', `${overrideBuildUrl}`);
}
if (overrideCommit) {
execArgs.push('-C', `${overrideCommit}`);
uploadExecArgs.push('-C', `${overrideCommit}`);
} else if (
`${context.eventName}` == 'pull_request' ||
`${context.eventName}` == 'pull_request_target'
) {
execArgs.push('-C', `${context.payload.pull_request.head.sha}`);
uploadExecArgs.push('-C', `${context.payload.pull_request.head.sha}`);
}
if (overridePr) {
execArgs.push('-P', `${overridePr}`);
uploadExecArgs.push('-P', `${overridePr}`);
} else if (
`${context.eventName}` == 'pull_request_target'
) {
execArgs.push('-P', `${context.payload.number}`);
uploadExecArgs.push('-P', `${context.payload.number}`);
}
if (overrideTag) {
execArgs.push('-T', `${overrideTag}`);
if (plugin) {
uploadExecArgs.push('--plugin', `${plugin}`);
}
if (plugins) {
plugins.split(',').map((p) => p.trim()).forEach((p) => {
uploadExecArgs.push('--plugin', `${p}`);
});
}
if (reportCode) {
uploadExecArgs.push('--report-code', `${reportCode}`);
}
if (rootDir) {
execArgs.push('-R', `${rootDir}`);
uploadExecArgs.push('--network-root-folder', `${rootDir}`);
}
if (searchDir) {
execArgs.push('-s', `${searchDir}`);
uploadExecArgs.push('-s', `${searchDir}`);
}
if (verbose) {
execArgs.push('-v');
if (slug) {
uploadExecArgs.push('-r', `${slug}`);
}
if (workingDir) {
options.cwd = workingDir;
uploadOptions.cwd = workingDir;
}
if (writePath) {
execArgs.push('-q', `${writePath}`);
if (uploaderVersion == '') {
uploaderVersion = 'latest';
}
if (xcodeDerivedData) {
execArgs.push('-D', `${xcodeDerivedData}`);
}
if (xcodePackage) {
execArgs.push('-J', `${xcodePackage}`);
if (useLegacyUploadEndpoint) {
uploadExecArgs.push('--legacy');
}

return {execArgs, options, filepath, failCi};
return {
uploadExecArgs,
uploadOptions,
disableSafeDirectory,
failCi,
os,
uploaderVersion,
uploadCommand,
};
};

export default buildExec;

export {
buildCommitExec,
buildGeneralExec,
buildReportExec,
buildUploadExec,
};
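An illustrative sketch, not part of the diff above: the trimmed comma-splitting added for the files and flags inputs drops empty entries, so a value with stray spaces or a trailing comma still expands cleanly into repeated arguments. The sample input value below is hypothetical.

// Sketch of the splitting behaviour shown in the new files/flags handling.
const files = 'coverage/unit.xml, coverage/e2e.xml,'; // hypothetical input value
const uploadExecArgs: string[] = [];
files.split(',').map((f) => f.trim()).forEach((f) => {
  if (f.length > 0) { // skips the empty entry left by the trailing comma
    uploadExecArgs.push('-f', f);
  }
});
// uploadExecArgs: ['-f', 'coverage/unit.xml', '-f', 'coverage/e2e.xml']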
1883  src/codecov  (file diff suppressed because it is too large)

96  src/helpers.test.ts  (new file)
@@ -0,0 +1,96 @@
import * as exec from '@actions/exec';

import {
PLATFORMS,
getBaseUrl,
getCommand,
getPlatform,
isValidPlatform,
isWindows,
setSafeDirectory,
} from './helpers';

let OLDOS = process.env.RUNNER_OS;

beforeEach(() => {
jest.resetModules();
OLDOS = process.env.RUNNER_OS;
});

afterAll(() => {
process.env.RUNNER_OS = OLDOS;
});

test('getPlatform', () => {
expect(getPlatform('linux')).toBe('linux');
expect(getPlatform('windows')).toBe('windows');

const defaultPlatform =
process.env.RUNNER_OS ? process.env.RUNNER_OS.toLowerCase() : 'linux';
expect(getPlatform('fakeos')).toBe(defaultPlatform);
expect(getPlatform()).toBe(defaultPlatform);

process.env.RUNNER_OS = 'macos';
expect(getPlatform('fakeos')).toBe('macos');
expect(getPlatform()).toBe('macos');

process.env.RUNNER_OS = 'alsofakeos';
expect(getPlatform()).toBe('linux');
expect(getPlatform('fakeos')).toBe('linux');
});

test('getBaseUrl', () => {
expect(PLATFORMS.map((platform) => {
return getBaseUrl(platform, 'latest');
})).toEqual([
'https://cli.codecov.io/latest/linux/codecov',
'https://cli.codecov.io/latest/macos/codecov',
'https://cli.codecov.io/latest/windows/codecov.exe',
'https://cli.codecov.io/latest/alpine/codecov',
'https://cli.codecov.io/latest/linux-arm64/codecov',
'https://cli.codecov.io/latest/alpine-arm64/codecov',
]);

expect(PLATFORMS.map((platform) => {
return getBaseUrl(platform, 'v0.1.0_8880');
})).toEqual([
'https://cli.codecov.io/v0.1.0_8880/linux/codecov',
'https://cli.codecov.io/v0.1.0_8880/macos/codecov',
'https://cli.codecov.io/v0.1.0_8880/windows/codecov.exe',
'https://cli.codecov.io/v0.1.0_8880/alpine/codecov',
'https://cli.codecov.io/v0.1.0_8880/linux-arm64/codecov',
'https://cli.codecov.io/v0.1.0_8880/alpine-arm64/codecov',
]);
});

test('isWindows', () => {
expect(PLATFORMS.map((platform) => {
return isWindows(platform);
})).toEqual([false, false, true, false, false, false]);
});

test('isValidPlatform', () => {
expect(PLATFORMS.map((platform) => {
return isValidPlatform(platform);
})).toEqual([true, true, true, true, true, true]);

expect(isValidPlatform('fakeos')).toBeFalsy();
});

test('getCommand', () => {
expect(getCommand('path', ['-v', '-x'], 'do-upload'))
.toEqual(['path', '-v', '-x', 'do-upload']);
});

test('setSafeDirectory', async () => {
process.env.GITHUB_WORKSPACE = 'testOrg/testRepo';
await setSafeDirectory();
const testSafeDirectory = ([
'git',
'config',
'--get',
'safe.directory',
]).join(' ');
const safeDirectory = await exec.getExecOutput(testSafeDirectory);
expect(safeDirectory.stdout).toBe('testOrg/testRepo\n');
});
92  src/helpers.ts  (new file)
@@ -0,0 +1,92 @@
import * as core from '@actions/core';
import * as exec from '@actions/exec';

const PLATFORMS = [
'linux',
'macos',
'windows',
'alpine',
'linux-arm64',
'alpine-arm64',
] as const;
type Platform = typeof PLATFORMS[number];

const setFailure = (message: string, failCi: boolean): void => {
failCi ? core.setFailed(message) : core.warning(message);
if (failCi) {
process.exit();
}
};

const getUploaderName = (platform: string): string => {
if (isWindows(platform)) {
return 'codecov.exe';
} else {
return 'codecov';
}
};

const isValidPlatform = (platform: string): platform is Platform => {
return PLATFORMS.includes(platform as Platform);
};

const isWindows = (platform: string): boolean => {
return platform === 'windows';
};

const getPlatform = (os?: string): string => {
if (isValidPlatform(os)) {
core.info(`==> ${os} OS provided`);
return os;
}

const platform = process.env.RUNNER_OS?.toLowerCase();
if (isValidPlatform(platform)) {
core.info(`==> ${platform} OS detected`);
return platform;
}

core.info(
'==> Could not detect OS or provided OS is invalid. Defaulting to linux',
);
return 'linux';
};

const getBaseUrl = (platform: string, version: string): string => {
return `https://cli.codecov.io/${version}/${platform}/${getUploaderName(platform)}`;
};

const getCommand = (
filename: string,
generalArgs:string[],
command: string,
): string[] => {
const fullCommand = [filename, ...generalArgs, command];
core.info(`==> Running command '${fullCommand.join(' ')}'`);
return fullCommand;
};

const setSafeDirectory = async () => {
const command = ([
'git',
'config',
'--global',
'--add',
'safe.directory',
`${process.env['GITHUB_WORKSPACE']}`,
].join(' '));
core.info(`==> Running ${command}`);
await exec.exec(command);
};

export {
PLATFORMS,
getBaseUrl,
getPlatform,
getUploaderName,
isValidPlatform,
isWindows,
setFailure,
setSafeDirectory,
getCommand,
};
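A brief usage sketch, assuming only the helpers defined above (it is not part of the diff): getBaseUrl builds the CLI download URL for a platform and version, and getCommand prepends the binary path and general arguments to a subcommand such as 'do-upload'.

import {getBaseUrl, getCommand} from './helpers';

// URL the action would download for a Linux runner using the latest CLI.
const url = getBaseUrl('linux', 'latest');
// 'https://cli.codecov.io/latest/linux/codecov'

// Full argv for the upload step; getCommand also logs it via core.info.
const argv = getCommand('./codecov', ['-v'], 'do-upload');
// ['./codecov', '-v', 'do-upload']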
160  src/index.ts
@@ -1,51 +1,129 @@
const core = require('@actions/core');
const exec = require('@actions/exec');
import * as fs from 'node:fs';
import * as https from 'node:https';
import * as path from 'node:path';

const fs = require('fs');
import * as exec from '@actions/exec';

import buildExec from './buildExec';
import {
buildCommitExec,
buildGeneralExec,
buildReportExec,
buildUploadExec,
} from './buildExec';
import {
getBaseUrl,
getCommand,
getPlatform,
getUploaderName,
setFailure,
setSafeDirectory,
} from './helpers';

const codecovScript = fs.readFileSync(__dirname + '/codecov');
import verify from './validate';
import versionInfo from './version';

let failCi;
try {
const {execArgs, options, filepath, failCi} = buildExec();

fs.writeFile(filepath, codecovScript, (err) => {
if (err && failCi) {
throw err;
} else if (err) {
core.warning(`Codecov warning: ${err.message}`);
}
const run = async (): Promise<void> => {
try {
const {commitExecArgs, commitOptions, commitCommand} = await buildCommitExec();
const {reportExecArgs, reportOptions, reportCommand} = await buildReportExec();
const {
uploadExecArgs,
uploadOptions,
disableSafeDirectory,
failCi,
os,
uploaderVersion,
uploadCommand,
} = await buildUploadExec();
const {args, verbose} = buildGeneralExec();

exec.exec('bash', execArgs, options)
.catch((err) => {
if (failCi) {
core.setFailed(
`Codecov failed with the following error: ${err.message}`,
const platform = getPlatform(os);

const filename = path.join( __dirname, getUploaderName(platform));
https.get(getBaseUrl(platform, uploaderVersion), (res) => {
// Image will be stored at this path
const filePath = fs.createWriteStream(filename);
res.pipe(filePath);
filePath
.on('error', (err) => {
setFailure(
`Codecov: Failed to write uploader binary: ${err.message}`,
true,
);
} else {
core.warning(`Codecov warning: ${err.message}`);
}
})
.then(() => {
unlinkFile();
});
}).on('finish', async () => {
filePath.close();

const unlinkFile = () => {
fs.unlink(filepath, (err) => {
if (err && failCi) {
throw err;
} else if (err) {
core.warning(`Codecov warning: ${err.message}`);
}
});
};
});
} catch (error) {
if (failCi) {
core.setFailed(`Codecov failed with the following error: ${error.message}`);
} else {
core.warning(`Codecov warning: ${error.message}`);
await verify(filename, platform, uploaderVersion, verbose, failCi);
await versionInfo(platform, uploaderVersion);
await fs.chmodSync(filename, '777');
if (!disableSafeDirectory) {
await setSafeDirectory();
}

const unlink = (): void => {
fs.unlink(filename, (err) => {
if (err) {
setFailure(
`Codecov: Could not unlink uploader: ${err.message}`,
failCi,
);
}
});
};
const doUpload = async (): Promise<void> => {
await exec.exec(getCommand(filename, args, uploadCommand).join(' '),
uploadExecArgs,
uploadOptions)
.catch((err) => {
setFailure(
`Codecov:
Failed to properly upload report: ${err.message}`,
failCi,
);
});
};
const createReport = async (): Promise<void> => {
await exec.exec(
getCommand(filename, args, reportCommand).join(' '),
reportExecArgs,
reportOptions)
.then(async (exitCode) => {
if (exitCode == 0) {
await doUpload();
}
}).catch((err) => {
setFailure(
`Codecov:
Failed to properly create report: ${err.message}`,
failCi,
);
});
};
await exec.exec(
getCommand(
filename,
args,
commitCommand,
).join(' '),
commitExecArgs, commitOptions)
.then(async (exitCode) => {
if (exitCode == 0) {
await createReport();
}
unlink();
}).catch((err) => {
setFailure(
`Codecov: Failed to properly create commit: ${err.message}`,
failCi,
);
});
});
});
} catch (err) {
setFailure(`Codecov: Encountered an unexpected error ${err.message}`, failCi);
}
}
};

run();
52  src/pgp_keys.asc  (new file)
@@ -0,0 +1,52 @@
-----BEGIN PGP PUBLIC KEY BLOCK-----

mQINBGCsMn0BEACiCKZOhkbhUjb+obvhH49p3ShjJzU5b/GqAXSDhRhdXUq7ZoGq
KEKCd7sQHrCf16Pi5UVacGIyE9hS93HwY15kMlLwM+lNeAeCglEscOjpCly1qUIr
sN1wjkd2cwDXS6zHBJTqJ7wSOiXbZfTAeKhd6DuLEpmA+Rz4Yc+4qZP+fVxVG3Pv
2v06m+E5CP/JQVQPO8HYi+S36hJImTh+zaDspu+VujSai5KzJ6YKmgwslVNIp5X5
GnEr2uAh5w6UTnt9UQUjFFliAvQ3lPLWzm7DWs6AP9hslYxSWzwbzVF5qbOIjUJL
KfoUpvCYDs2ObgRn8WUQO0ndkRCBIxhlF3HGGYWKQaCEsiom7lyi8VbAszmUCDjw
HdbQHFmm5yHLpTXJbg+iaxQzKnhWVXzye5/x92IJmJswW81Ky346VxYdC1XFL/+Y
zBaj9oMmV7WfRpdch09Gf4TgosMzWf3NjJbtKE5xkaghJckIgxwzcrRmF/RmCJue
IMqZ8A5qUUlK7NBzj51xmAQ4BtkUa2bcCBRV/vP+rk9wcBWz2LiaW+7Mwlfr/C/Q
Swvv/JW2LsQ4iWc1BY7m7ksn9dcdypEq/1JbIzVLCRDG7pbMj9yLgYmhe5TtjOM3
ygk25584EhXSgUA3MZw+DIqhbHQBYgrKndTr2N/wuBQY62zZg1YGQByD4QARAQAB
tEpDb2RlY292IFVwbG9hZGVyIChDb2RlY292IFVwbG9hZGVyIFZlcmlmaWNhdGlv
biBLZXkpIDxzZWN1cml0eUBjb2RlY292LmlvPokCTgQTAQoAOBYhBCcDTn/bhQ4L
vCxi/4Brsortd5hpBQJgrDJ9AhsDBQsJCAcCBhUKCQgLAgQWAgMBAh4BAheAAAoJ
EIBrsortd5hpxLMP/3Fbgx5EG7zUUOqPZ+Ya9z8JlZFIkh3FxYMfMFE8jH9Es26F
V2ZTJLO259MxM+5N0XzObi3h4XqIzBn42pDRfwtojY5wl2STJ9Bzu+ykPog7OB1u
yfWXDRKcqPTUIxI1/WdU+c0/WNE6wjyzK+lRc1YUlp4pdNU7l+j2vKN+jGi2b6nV
PTPRsMcwy3B90fKf5h2wNMNqO+KX/rjgpG9Uhej+xyFWkGM1tZDQQYFj+ugQUj61
BMsQrUmxOnaVVnix21cHnACDCaxqgQZH3iZyEOKPNMsRFRP+0fLEnUMP+DVnQE6J
Brk1Z+XhtjGI9PISQVx5KKDKscreS/D5ae2Cw/FUlQMf57kir6mkbZVhz2khtccz
atD0r59WomNywIDyk1QfAKV0+O0WeJg8A69/Jk6yegsrUb5qEfkih/I38vvI0OVL
BYve/mQIHuQo5ziBptNytCrN5TXHXzguX9GOW1V1+3DR+w/vXcnz67sjlYDysf1f
JUZv9edZ2RGKW7agbrgOw2hB+zuWZ10tjoEcsaSGOLtKRGFDfmu/dBxzl8yopUpa
Tn79QKOieleRm5+uCcKCPTeKV0GbhDntCZJ+Yiw6ZPmrpcjDowAoMQ9kiMVa10+Q
WwwoaRWuqhf+dL6Q2OLFOxlyCDKVSyW0YF4Vrf3fKGyxKJmszAL+NS1mVcdxuQIN
BGCsMn0BEADLrIesbpfdAfWRvUFDN+PoRfa0ROwa/JOMhEgVsowQuk9No8yRva/X
VyiA6oCq6na7IvZXMxT7di4FWDjDtw5xHjbtFg336IJTGBcnzm7WIsjvyyw8kKfB
8cvG7D2OkzAUF8SVXLarJ1zdBP/Dr1Nz6F/gJsx5+BM8wGHEz4DsdMRV7ZMTVh6b
PaGuPZysPjSEw62R8MFJ1fSyDGCKJYwMQ/sKFzseNaY/kZVR5lq0dmhiYjNVQeG9
HJ6ZCGSGT5PKNOwx/UEkT6jhvzWgfr2eFVGJTcdwSLEgIrJIDzP7myHGxuOiuCmJ
ENgL1f7mzGkJ/hYXq1RWqsn1Fh2I9KZMHggqu4a+s3RiscmNcbIlIhJLXoE1bxZ/
TfYZ9Aod6Bd5TsSMTZNwV2am9zelhDiFF60FWww/5nEbhm/X4suC9W86qWBxs3Kh
vk1dxhElRjtgwUEHA5OFOO48ERHfR7COH719D/YmqLU3EybBgJbGoC/yjlGJxv0R
kOMAiG2FneNKEZZihReh8A5Jt6jYrSoHFRwL6oJIZfLezB7Rdajx1uH7uYcUyIaE
SiDWlkDw/IFM315NYFA8c1TCSIfnabUYaAxSLNFRmXnt+GQpm44qAK1x8EGhY633
e5B4FWorIXx0tTmsVM4rkQ6IgAodeywKG+c2Ikd+5dQLFmb7dW/6CwARAQABiQI2
BBgBCgAgFiEEJwNOf9uFDgu8LGL/gGuyiu13mGkFAmCsMn0CGwwACgkQgGuyiu13
mGkYWxAAkzF64SVpYvY9nY/QSYikL8UHlyyqirs6eFZ3Mj9lMRpHM2Spn9a3c701
0Ge4wDbRP2oftCyPP+p9pdUA77ifMTlRcoMYX8oXAuyE5RT2emBDiWvSR6hQQ8bZ
WFNXal+bUPpaRiruCCUPD2b8Od1ftzLqbYOosxr/m5Du0uahgOuGw6zlGBJCVOo7
UB2Y++oZ8P7oDGF722opepWQ+bl2a6TRMLNWWlj4UANknyjlhyZZ7PKhWLjoC6MU
dAKcwQUdp+XYLc/3b00bvgju0e99QgHZMX2fN3d3ktdN5Q2fqiAi5R6BmCCO4ISF
o5j10gGU/sdqGHvNhv5C21ibun7HEzMtxBhnhGmytfBJzrsj7GOReePsfTLoCoUq
dFMOAVUDciVfRtL2m8cv42ZJOXtPfDjsFOf8AKJk40/tc8mMMqZP7RVBr9RWOoq5
y9D37NfI6UB8rPZ6qs0a1Vfm8lIh2/k1AFECduXgftMDTsmmXOgXXS37HukGW7AL
QKWiWJQF/XopkXwkyAYpyuyRMZ77oF7nuqLFnl5VVEiRo0Fwu45erebc6ccSwYZU
8pmeSx7s0aJtxCZPSZEKZ3mn0BXOR32Cgs48CjzFWf6PKucTwOy/YO0/4Gt/upNJ
3DyeINcYcKyD08DEIF9f5tLyoiD4xz+N23ltTBoMPyv4f3X/wCQ=
=ch7z
-----END PGP PUBLIC KEY BLOCK-----
120  src/validate.ts  (new file)
@@ -0,0 +1,120 @@
import {execSync} from 'node:child_process';
import * as crypto from 'node:crypto';
import * as fs from 'node:fs';
import * as path from 'node:path';

import * as core from '@actions/core';
import {request} from 'undici';

import {
getBaseUrl,
getUploaderName,
setFailure,
} from './helpers';

const verify = async (
filename: string,
platform: string,
version: string,
verbose: boolean,
failCi: boolean,
): Promise<void> => {
try {
const uploaderName = getUploaderName(platform);

// Get SHASUM and SHASUM signature files
console.log(`${getBaseUrl(platform, version)}.SHA256SUM`);
const shasumRes = await request(
`${getBaseUrl(platform, version)}.SHA256SUM`,
);
const shasum = await shasumRes.body.text();
if (verbose) {
console.log(`Received SHA256SUM ${shasum}`);
}
await fs.writeFileSync(
path.join(__dirname, `${uploaderName}.SHA256SUM`),
shasum,
);

const shaSigRes = await request(
`${getBaseUrl(platform, version)}.SHA256SUM.sig`,
);
const shaSig = await shaSigRes.body.text();
if (verbose) {
console.log(`Received SHA256SUM signature ${shaSig}`);
}
await fs.writeFileSync(
path.join(__dirname, `${uploaderName}.SHA256SUM.sig`),
shaSig,
);

const validateSha = async () => {
const calculateHash = async (filename: string) => {
const stream = fs.createReadStream(filename);
const uploaderSha = crypto.createHash(`sha256`);
stream.pipe(uploaderSha);

return new Promise((resolve, reject) => {
stream.on('end', () => resolve(
`${uploaderSha.digest('hex')} ${uploaderName}`,
));
stream.on('error', reject);
});
};

const hash = await calculateHash(
path.join(__dirname, `${uploaderName}`),
);
if (hash === shasum) {
core.info(`==> Uploader SHASUM verified (${hash})`);
} else {
setFailure(
'Codecov: Uploader shasum does not match -- ' +
`uploader hash: ${hash}, public hash: ${shasum}`,
failCi,
);
}
};

const verifySignature = async () => {
const command = [
'gpg',
'--logger-fd',
'1',
'--verify',
path.join(__dirname, `${uploaderName}.SHA256SUM.sig`),
path.join(__dirname, `${uploaderName}.SHA256SUM`),
].join(' ');

try {
await execSync(command, {stdio: 'inherit'});
} catch (err) {
setFailure(`Codecov: Error verifying gpg signature: ${err.message}`, failCi);
}
};

const importKey = async () => {
const command = [
'gpg',
'--logger-fd',
'1',
'--no-default-keyring',
'--import',
path.join(__dirname, 'pgp_keys.asc'),
].join(' ');

try {
await execSync(command, {stdio: 'inherit'});
} catch (err) {
setFailure(`Codecov: Error importing gpg key: ${err.message}`, failCi);
}
};

await importKey();
await verifySignature();
await validateSha();
} catch (err) {
setFailure(`Codecov: Error validating uploader: ${err.message}`, failCi);
}
};
export default verify;
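A minimal sketch of the checksum comparison performed by validateSha above, assuming a downloaded './codecov' binary in the working directory (the path is hypothetical and this is not part of the diff): the local SHA-256 digest is formatted as '<hex digest> <uploader name>' so it can be compared directly against the text of the published SHA256SUM file.

import * as crypto from 'node:crypto';
import * as fs from 'node:fs';

// Recompute the same '<hex digest> <name>' string that validateSha compares.
const digest = crypto.createHash('sha256')
    .update(fs.readFileSync('./codecov'))
    .digest('hex');
console.log(`${digest} codecov`);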
src/version.ts
@@ -1,3 +1,20 @@
const VERSION = 'v1.5.2';
import * as core from '@actions/core';
import {request} from 'undici';

export default VERSION;
const versionInfo = async (
platform: string,
version: string,
): Promise<void> => {
core.info(`==> Running version ${version}`);

try {
const metadataRes = await request(`https://cli.codecov.io/${platform}/${version}`, {
headers: {'Accept': 'application/json'},
});
const metadata = await metadataRes.body.json();
core.info(`==> Running version ${metadata['version']}`);
} catch (err) {
core.info(`Could not pull latest version information: ${err}`);
}
};
export default versionInfo;
tsconfig.json
@@ -1,5 +1,17 @@
{
"compilerOptions": {
"esModuleInterop": true,
"moduleResolution": "node",
"outDir": "dist/",
"resolveJsonModule": true,
"rootDir": ".",
"sourceMap": true,
"target": "es2015"
},
"include": [
"src"
"src/**/*.ts"
],
"exclude": [
"src/**/*.test.ts"
]
}