13 Commits
v2.0 ... v3.3

Author SHA1 Message Date
b5b231a831 v3 new release (#89) 2022-07-25 14:01:37 -04:00
84b304dfb7 v3 new release (#87)
add token
2022-07-25 13:23:27 -04:00
e4f3964f67 v3 new release (#84)
swap to graphql
2022-07-11 13:48:02 -04:00
20d2b4f98d v3 new release (#80) 2022-06-27 14:17:15 -04:00
a767c8d3a1 fix jest version (#76) 2022-06-27 14:00:33 -04:00
97c3a3f138 upgraded to Node16 (#74)
Co-authored-by: Vidya Reddy <vidyareddy@microsoft.com>
2022-06-16 15:17:55 -04:00
0a86c98d61 Fix codeowners (#71) 2022-04-11 11:05:10 -04:00
39f78708c2 add version support message (#69) 2022-04-11 11:05:03 -04:00
fa870ea9a2 run to index (#67)
* run to index

* action.yaml route

* move to dev dep
2022-02-09 11:26:07 -05:00
e00756a00e Version fix (#66)
* Added version validation check

* Added check for latest

* Changed Helm 3.5.0 test to also test lack of v in version

* Pushing integration tests

* Removed push integration test

* Added more context to integration test

* Addressing comment
2022-02-08 17:07:21 -05:00
2998c83e16 Updated workflows, codeowner, .gitignore (#65) 2022-02-04 13:04:30 -05:00
5876560d6c Add arm64 support (#64)
* add arm64 support
2022-02-04 09:45:03 -05:00
7e6f48e5b4 master to main rename (#61) 2022-02-03 11:29:11 -05:00
2174 changed files with 141200 additions and 103139 deletions

.github/CODEOWNERS

@ -1 +1 @@
-* @Azure/aksatlanta
+* @Azure/aks-atlanta


@ -4,7 +4,4 @@ about: Create a report to help us improve
title: ''
labels: need-to-triage
assignees: ''
---


@ -1,36 +1,35 @@
name: setting-default-labels
# Controls when the action will run.
on:
  schedule:
-    - cron: "0 0/3 * * *"
+    - cron: '0 0/3 * * *'
# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  build:
    # The type of runner that the job will run on
    runs-on: ubuntu-latest
    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      - uses: actions/stale@v3
        name: Setting issue as idle
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          stale-issue-message: 'This issue is idle because it has been open for 14 days with no activity.'
          stale-issue-label: 'idle'
          days-before-stale: 14
          days-before-close: -1
          operations-per-run: 100
          exempt-issue-labels: 'backlog'
      - uses: actions/stale@v3
        name: Setting PR as idle
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          stale-pr-message: 'This PR is idle because it has been open for 14 days with no activity.'
          stale-pr-label: 'idle'
          days-before-stale: 14
          days-before-close: -1
          operations-per-run: 100


@ -1,65 +1,65 @@
name: "Trigger Integration tests" name: 'Trigger Integration tests'
on: on:
pull_request: pull_request:
branches: branches:
- master - main
- 'releases/*' - 'releases/*'
jobs: jobs:
trigger-integration-tests: trigger-integration-tests:
name: Trigger Integration tests name: Trigger Integration tests
runs-on: ubuntu-latest runs-on: ubuntu-latest
env: env:
HELM_3_8_0: "v3.8.0" HELM_3_8_0: 'v3.8.0'
HELM_3_7_2: "v3.7.2" HELM_3_7_2: 'v3.7.2'
HELM_3_5_0: "v3.5.0" HELM_NO_V: '3.5.0'
PR_BASE_REF: ${{ github.event.pull_request.base.ref }} PR_BASE_REF: ${{ github.event.pull_request.base.ref }}
steps: steps:
- name: Check out repository - name: Check out repository
uses: actions/checkout@v2 uses: actions/checkout@v2
- name: npm install and build - name: npm install and build
id: action-npm-build id: action-npm-build
run: | run: |
echo $PR_BASE_REF echo $PR_BASE_REF
if [[ $PR_BASE_REF != releases/* ]]; then if [[ $PR_BASE_REF != releases/* ]]; then
npm install npm install
npm run build npm run build
fi fi
- name: Setup helm - name: Setup helm
uses: ./ uses: ./
with: with:
version: ${{ env.HELM_3_8_0 }} version: ${{ env.HELM_3_8_0 }}
- name: Validate helm 3.8.0 - name: Validate helm 3.8.0
run: | run: |
if [[ $(helm version) != *$HELM_3_8_0* ]]; then if [[ $(helm version) != *$HELM_3_8_0* ]]; then
echo "HELM VERSION INCORRECT: HELM VERSION DOES NOT CONTAIN v3.8.0" echo "HELM VERSION INCORRECT: HELM VERSION DOES NOT CONTAIN v3.8.0"
echo "HELM VERSION OUTPUT: $(helm version)" echo "HELM VERSION OUTPUT: $(helm version)"
exit 1 exit 1
else else
echo "HELM VERSION $HELM_3_8_0 INSTALLED SUCCESSFULLY" echo "HELM VERSION $HELM_3_8_0 INSTALLED SUCCESSFULLY"
fi fi
- name: Setup helm 3.7.2 - name: Setup helm 3.7.2
uses: ./ uses: ./
with: with:
version: ${{ env.HELM_3_7_2 }} version: ${{ env.HELM_3_7_2 }}
- name: Validate 3.7.2 - name: Validate 3.7.2
run: | run: |
if [[ $(helm version) != *$HELM_3_7_2* ]]; then if [[ $(helm version) != *$HELM_3_7_2* ]]; then
echo "HELM VERSION INCORRECT: HELM VERSION DOES NOT CONTAIN v3.7.2" echo "HELM VERSION INCORRECT: HELM VERSION DOES NOT CONTAIN v3.7.2"
echo "HELM VERSION OUTPUT: $(helm version)" echo "HELM VERSION OUTPUT: $(helm version)"
exit 1 exit 1
else else
echo "HELM VERSION $HELM_3_7_2 INSTALLED SUCCESSFULLY" echo "HELM VERSION $HELM_3_7_2 INSTALLED SUCCESSFULLY"
fi fi
- name: Setup helm 3.5.0 - name: Setup helm 3.5.0 with no v in version
uses: ./ uses: ./
with: with:
version: ${{ env.HELM_3_5_0 }} version: ${{ env.HELM_NO_V }}
- name: Validate 3.5.0 - name: Validate 3.5.0 without v in version
run: | run: |
if [[ $(helm version) != *$HELM_3_5_0* ]]; then if [[ $(helm version) != *$HELM_NO_V* ]]; then
echo "HELM VERSION INCORRECT: HELM VERSION DOES NOT CONTAIN v3.5.0" echo "HELM VERSION INCORRECT: HELM VERSION DOES NOT CONTAIN v3.5.0"
echo "HELM VERSION OUTPUT: $(helm version)" echo "HELM VERSION OUTPUT: $(helm version)"
exit 1 exit 1
else else
echo "HELM VERSION $HELM_3_5_0 INSTALLED SUCCESSFULLY" echo "HELM VERSION $HELM_3_5_0 INSTALLED SUCCESSFULLY"
fi fi

.github/workflows/prettify-code.yml

@ -0,0 +1,18 @@
name: 'Run prettify'
on:
  pull_request:
  push:
    branches: [main]
jobs:
  prettier:
    name: Prettier Check
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Repository
        uses: actions/checkout@v2
      - name: Enforce Prettier
        uses: actionsx/prettier@v2
        with:
          args: --check .


@ -1,55 +1,14 @@
name: "Create release PR" name: Create release PR
on: on:
workflow_dispatch: workflow_dispatch:
inputs: inputs:
release: release:
description: "Define release version (ex: v1, v2, v3)" description: 'Define release version (ex: v1, v2, v3)'
required: true required: true
jobs: jobs:
createPullRequest: release-pr:
runs-on: ubuntu-latest uses: OliverMKing/javascript-release-workflow/.github/workflows/release-pr.yml@main
steps: with:
- uses: actions/checkout@v2 release: ${{ github.event.inputs.release }}
with:
fetch-depth: 0
- name: Check if remote branch exists
env:
BRANCH: releases/${{ github.event.inputs.release }}
run: |
echo "##[set-output name=exists;]$(echo $(if [[ -z $(git ls-remote --heads origin ${BRANCH}) ]]; then echo false; else echo true; fi;))"
id: extract-branch-status
# these two only need to occur if the branch exists
- name: Checkout proper branch
if: ${{ steps.extract-branch-status.outputs.exists == 'true' }}
env:
BRANCH: releases/${{ github.event.inputs.release }}
run: git checkout ${BRANCH}
- name: Reset promotion branch
if: ${{ steps.extract-branch-status.outputs.exists == 'true' }}
run: |
git fetch origin master:master
git reset --hard master
- name: Install packages
run: |
rm -rf node_modules/
npm install --no-bin-links
npm run build
- name: Remove node_modules from gitignore
run: |
sed -i '/node_modules/d' ./.gitignore
- name: Create branch
uses: peterjgrainger/action-create-branch@v2.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
branch: releases/${{ github.event.inputs.release }}
- name: Create pull request
uses: peter-evans/create-pull-request@v3
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: Add node modules and new code for release
title: ${{ github.event.inputs.release }} new release
base: releases/${{ github.event.inputs.release }}
branch: create-release

.github/workflows/tag-and-draft.yml

@ -0,0 +1,10 @@
name: Tag and create release draft
on:
  push:
    branches:
      - releases/*
jobs:
  tag-and-release:
    uses: OliverMKing/javascript-release-workflow/.github/workflows/tag-and-release.yml@main


@ -1,77 +0,0 @@
name: "Tag and create release draft"
on:
push:
branches:
- releases/*
jobs:
gh_tagged_release:
runs-on: "ubuntu-latest"
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Test release
run: |
sudo npm install n
sudo n latest
npm test
- name: Get branch ending
run: echo "##[set-output name=branch;]$(echo ${GITHUB_REF##*/} | sed 's:.*/::')"
id: extract-branch
- name: Get tags
run: |
echo "##[set-output name=tags;]$(echo $(git tag))"
id: extract-tags
- name: Get latest tag
uses: actions/github-script@v5
env:
TAGS: ${{ steps.extract-tags.outputs.tags }}
BRANCH: ${{ steps.extract-branch.outputs.branch }}
with:
script: |
const tags = process.env["TAGS"]
.split(" ")
.map((x) => x.trim());
const branch = process.env["BRANCH"];
const splitTag = (x) =>
x
.substring(branch.length + 1)
.split(".")
.map((x) => Number(x));
function compareTags(nums1, nums2, position = 0) {
if (nums1.length < position && nums2.length < position) return nums2;
const num1 = splitTag(nums1)[position] || 0;
const num2 = splitTag(nums2)[position] || 0;
if (num1 === num2) return compareTags(nums1, nums2, position + 1);
else if (num1 > num2) return nums1;
else return nums2;
}
const branchTags = tags.filter((tag) => tag.startsWith(branch));
if (branchTags.length < 1) return branch + ".-1"
return branchTags.reduce((prev, curr) => compareTags(prev, curr));
result-encoding: string
id: get-latest-tag
- name: Get new tag
uses: actions/github-script@v5
env:
PREV: ${{ steps.get-latest-tag.outputs.result }}
with:
script: |
let version = process.env["PREV"]
if (!version.includes(".")) version += ".0"; // case of v1 or v2
const prefix = /^([a-zA-Z]+)/.exec(version)[0];
const numbers = version.substring(prefix.length);
let split = numbers.split(".");
split[split.length - 1] = parseInt(split[split.length - 1]) + 1;
return prefix + split.join(".");
result-encoding: string
id: get-new-tag
- uses: "marvinpinto/action-automatic-releases@v1.2.1"
with:
title: ${{ steps.get-new-tag.outputs.result }} release
automatic_release_tag: ${{ steps.get-new-tag.outputs.result }}
repo_token: "${{ secrets.GITHUB_TOKEN }}"
draft: true


@ -1,21 +1,21 @@
name: "Run unit tests." name: 'Run unit tests.'
on: # rebuild any PRs and main branch changes on: # rebuild any PRs and main branch changes
pull_request: pull_request:
branches: branches:
- master - main
- 'releases/*' - 'releases/*'
push: push:
branches: branches:
- master - main
- 'releases/*' - 'releases/*'
jobs: jobs:
build: # make sure build/ci works properly build: # make sure build/ci works properly
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v1
- name: Run L0 tests. - name: Run L0 tests.
run: | run: |
npm install npm install
npm test npm test

.gitignore

@ -11,8 +11,6 @@ pids
*.seed
*.pid.lock
-# Directory for instrumented libs generated by jscoverage/JSCover
-lib-cov
# Coverage directory used by tools like istanbul
coverage
@ -60,3 +58,5 @@ typings/
.next
coverage
+# Transpiled JS

.prettierignore

@ -0,0 +1,4 @@
# dependencies
/node_modules
coverage
/lib

.prettierrc.json

@ -0,0 +1,8 @@
{
"trailingComma": "none",
"bracketSpacing": false,
"semi": false,
"tabWidth": 3,
"singleQuote": true,
"printWidth": 80
}


@ -1,9 +1,9 @@
# Microsoft Open Source Code of Conduct

This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).

Resources:

- [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/)
- [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
- Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concerns


@ -1,6 +1,6 @@
# Contributing

This project welcomes contributions and suggestions. Most contributions require you to agree to a
Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.


@ -1,29 +1,34 @@
# Setup Helm

Install a specific version of helm binary on the runner.

## Example

-Acceptable values are latest or any semantic version string like v2.16.7 Use this action in workflow to define which version of helm will be used.
+Acceptable values are latest or any semantic version string like v3.5.0 Use this action in workflow to define which version of helm will be used. v2 and v3 of this action only support Helm3.

```yaml
-- uses: azure/setup-helm@v1
+- uses: azure/setup-helm@v3
  with:
-    version: '<version>' # default is latest stable
+    version: '<version>' # default is latest (stable)
+    token: ${{ secrets.GITHUB_TOKEN }} # only needed if version is 'latest'
  id: install
```

+> Note: When using latest version you might hit the GitHub GraphQL API hourly rate limit of 5,000. The action will then return the hardcoded default stable version (currently v3.9.0). If you rely on a certain version higher than the default, you should use that version instead of latest.

The cached helm binary path is prepended to the PATH environment variable as well as stored in the helm-path output variable.
Refer to the action metadata file for details about all the inputs https://github.com/Azure/setup-helm/blob/master/action.yml

-# Contributing
+## Contributing

This project welcomes contributions and suggestions. Most contributions require you to agree to a
Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.

When you submit a pull request, a CLA bot will automatically determine whether you need to provide
a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions
provided by the bot. You will only need to do this once across all repos using our CLA.

This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
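To tie the README changes above together, here is a minimal workflow sketch of how the updated action is typically consumed, using the documented `version` and `token` inputs and the `helm-path` output. The workflow, job, and step names are illustrative assumptions, not files from this repository.

```yaml
# Illustrative usage sketch only; names here are hypothetical.
name: example-helm-setup
on: push
jobs:
  helm-example:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Setup helm
        uses: azure/setup-helm@v3
        id: install
        with:
          version: 'v3.8.0' # pin a version, or use 'latest'
          token: ${{ secrets.GITHUB_TOKEN }} # only needed when version is 'latest'
      # The binary is prepended to PATH and its full path is exposed as the helm-path output.
      - name: Use helm
        run: |
          helm version
          echo "helm binary: ${{ steps.install.outputs.helm-path }}"
```

Pinning `version` avoids the `latest` lookup path that can hit the GraphQL rate limit mentioned in the README note.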


@ -1,41 +1,41 @@
<!-- BEGIN MICROSOFT SECURITY.MD V0.0.2 BLOCK -->

## Security

Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [many more](https://opensource.microsoft.com/).

-If you believe you have found a security vulnerability in any Microsoft-owned repository that meets Microsoft's [definition](https://docs.microsoft.com/en-us/previous-versions/tn-archive/cc751383(v=technet.10)) of a security vulnerability, please report it to us as described below.
+If you believe you have found a security vulnerability in any Microsoft-owned repository that meets Microsoft's [definition](<https://docs.microsoft.com/en-us/previous-versions/tn-archive/cc751383(v=technet.10)>) of a security vulnerability, please report it to us as described below.

## Reporting Security Issues

**Please do not report security vulnerabilities through public GitHub issues.**

Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://msrc.microsoft.com/create-report).

If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/en-us/msrc/pgp-key-msrc).

You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc).

Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:

-* Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
-* Full paths of source file(s) related to the manifestation of the issue
-* The location of the affected source code (tag/branch/commit or direct URL)
-* Any special configuration required to reproduce the issue
-* Step-by-step instructions to reproduce the issue
-* Proof-of-concept or exploit code (if possible)
-* Impact of the issue, including how an attacker might exploit the issue
+- Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
+- Full paths of source file(s) related to the manifestation of the issue
+- The location of the affected source code (tag/branch/commit or direct URL)
+- Any special configuration required to reproduce the issue
+- Step-by-step instructions to reproduce the issue
+- Proof-of-concept or exploit code (if possible)
+- Impact of the issue, including how an attacker might exploit the issue

This information will help us triage your report more quickly.

If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://microsoft.com/msrc/bounty) page for more details about our active programs.

## Preferred Languages

We prefer all communications to be in English.

## Policy

Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://www.microsoft.com/en-us/msrc/cvd).

<!-- END MICROSOFT SECURITY.MD BLOCK -->


@ -1,174 +0,0 @@
import * as run from '../src/run'
import * as os from 'os';
import * as toolCache from '@actions/tool-cache';
import * as fs from 'fs';
import * as path from 'path';
import * as core from '@actions/core';
describe('run.ts', () => {
test('getExecutableExtension() - return .exe when os is Windows', () => {
jest.spyOn(os, 'type').mockReturnValue('Windows_NT');
expect(run.getExecutableExtension()).toBe('.exe');
expect(os.type).toBeCalled();
});
test('getExecutableExtension() - return empty string for non-windows OS', () => {
jest.spyOn(os, 'type').mockReturnValue('Darwin');
expect(run.getExecutableExtension()).toBe('');
expect(os.type).toBeCalled();
});
test('getHelmDownloadURL() - return the URL to download helm for Linux', () => {
jest.spyOn(os, 'type').mockReturnValue('Linux');
const kubectlLinuxUrl = 'https://get.helm.sh/helm-v3.8.0-linux-amd64.zip'
expect(run.getHelmDownloadURL('v3.8.0')).toBe(kubectlLinuxUrl);
expect(os.type).toBeCalled();
});
test('getHelmDownloadURL() - return the URL to download helm for Darwin', () => {
jest.spyOn(os, 'type').mockReturnValue('Darwin');
const kubectlDarwinUrl = 'https://get.helm.sh/helm-v3.8.0-darwin-amd64.zip'
expect(run.getHelmDownloadURL('v3.8.0')).toBe(kubectlDarwinUrl);
expect(os.type).toBeCalled();
});
test('getHelmDownloadURL() - return the URL to download helm for Windows', () => {
jest.spyOn(os, 'type').mockReturnValue('Windows_NT');
const kubectlWindowsUrl = 'https://get.helm.sh/helm-v3.8.0-windows-amd64.zip'
expect(run.getHelmDownloadURL('v3.8.0')).toBe(kubectlWindowsUrl);
expect(os.type).toBeCalled();
});
test('getLatestHelmVersion() - return the latest version of HELM', async () => {
try{
expect(await run.getLatestHelmVersion()).toBe("v3.8.0");
} catch (e){
return e;
}
});
test('walkSync() - return path to the all files matching fileToFind in dir', () => {
jest.spyOn(fs, 'readdirSync').mockImplementation((file, _) => {
if (file == 'mainFolder') return ['file1' as unknown as fs.Dirent, 'file2' as unknown as fs.Dirent, 'folder1' as unknown as fs.Dirent, 'folder2' as unknown as fs.Dirent];
if (file == path.join('mainFolder', 'folder1')) return ['file11' as unknown as fs.Dirent, 'file12' as unknown as fs.Dirent];
if (file == path.join('mainFolder', 'folder2')) return ['file21' as unknown as fs.Dirent, 'file22' as unknown as fs.Dirent];
});
jest.spyOn(core, 'debug').mockImplementation();
jest.spyOn(fs, 'statSync').mockImplementation((file) => {
const isDirectory = (file as string).toLowerCase().indexOf('file') == -1 ? true: false
return { isDirectory: () => isDirectory } as fs.Stats;
});
expect(run.walkSync('mainFolder', null, 'file21')).toEqual([path.join('mainFolder', 'folder2', 'file21')]);
expect(fs.readdirSync).toBeCalledTimes(3);
expect(fs.statSync).toBeCalledTimes(8);
});
test('walkSync() - return empty array if no file with name fileToFind exists', () => {
jest.spyOn(fs, 'readdirSync').mockImplementation((file, _) => {
if (file == 'mainFolder') return ['file1' as unknown as fs.Dirent, 'file2' as unknown as fs.Dirent, 'folder1' as unknown as fs.Dirent, 'folder2' as unknown as fs.Dirent];
if (file == path.join('mainFolder', 'folder1')) return ['file11' as unknown as fs.Dirent, 'file12' as unknown as fs.Dirent];
if (file == path.join('mainFolder', 'folder2')) return ['file21' as unknown as fs.Dirent, 'file22' as unknown as fs.Dirent];
});
jest.spyOn(core, 'debug').mockImplementation();
jest.spyOn(fs, 'statSync').mockImplementation((file) => {
const isDirectory = (file as string).toLowerCase().indexOf('file') == -1 ? true: false
return { isDirectory: () => isDirectory } as fs.Stats;
});
expect(run.walkSync('mainFolder', null, 'helm.exe')).toEqual([]);
expect(fs.readdirSync).toBeCalledTimes(3);
expect(fs.statSync).toBeCalledTimes(8);
});
test('findHelm() - change access permissions and find the helm in given directory', () => {
jest.spyOn(fs, 'chmodSync').mockImplementation(() => {});
jest.spyOn(fs, 'readdirSync').mockImplementation((file, _) => {
if (file == 'mainFolder') return ['helm.exe' as unknown as fs.Dirent];
});
jest.spyOn(fs, 'statSync').mockImplementation((file) => {
const isDirectory = (file as string).indexOf('folder') == -1 ? false: true
return { isDirectory: () => isDirectory } as fs.Stats;
});
jest.spyOn(os, 'type').mockReturnValue('Windows_NT');
expect(run.findHelm('mainFolder')).toBe(path.join('mainFolder', 'helm.exe'));
});
test('findHelm() - throw error if executable not found', () => {
jest.spyOn(fs, 'chmodSync').mockImplementation(() => {});
jest.spyOn(fs, 'readdirSync').mockImplementation((file, _) => {
if (file == 'mainFolder') return [];
});
jest.spyOn(fs, 'statSync').mockImplementation((file) => { return { isDirectory: () => true } as fs.Stats});
jest.spyOn(os, 'type').mockReturnValue('Windows_NT');
expect(() => run.findHelm('mainFolder')).toThrow('Helm executable not found in path mainFolder');
});
test('downloadHelm() - download helm and return path to it', async () => {
jest.spyOn(toolCache, 'find').mockReturnValue('');
jest.spyOn(toolCache, 'downloadTool').mockResolvedValue('pathToTool');
const response = JSON.stringify([{'tag_name': 'v4.0.0'}]);
jest.spyOn(fs, 'readFileSync').mockReturnValue(response);
jest.spyOn(os, 'type').mockReturnValue('Windows_NT');
jest.spyOn(fs, 'chmodSync').mockImplementation(() => {});
jest.spyOn(toolCache, 'extractZip').mockResolvedValue('pathToUnzippedHelm');
jest.spyOn(toolCache, 'cacheDir').mockResolvedValue('pathToCachedDir');
jest.spyOn(fs, 'readdirSync').mockImplementation((file, _) => ['helm.exe' as unknown as fs.Dirent]);
jest.spyOn(fs, 'statSync').mockImplementation((file) => {
const isDirectory = (file as string).indexOf('folder') == -1 ? false: true
return { isDirectory: () => isDirectory } as fs.Stats;
});
expect(await run.downloadHelm("v4.0.0")).toBe(path.join('pathToCachedDir', 'helm.exe'));
expect(toolCache.find).toBeCalledWith('helm', 'v4.0.0');
expect(toolCache.downloadTool).toBeCalledWith('https://get.helm.sh/helm-v4.0.0-windows-amd64.zip');
expect(fs.chmodSync).toBeCalledWith('pathToTool', '777');
expect(toolCache.extractZip).toBeCalledWith('pathToTool');
expect(fs.chmodSync).toBeCalledWith(path.join('pathToCachedDir', 'helm.exe'), '777');
});
test('downloadHelm() - throw error if unable to download', async () => {
jest.spyOn(toolCache, 'find').mockReturnValue('');
jest.spyOn(toolCache, 'downloadTool').mockImplementation(async () => { throw 'Unable to download'});
jest.spyOn(os, 'type').mockReturnValue('Windows_NT');
await expect(run.downloadHelm('v3.2.1')).rejects.toThrow('Failed to download Helm from location https://get.helm.sh/helm-v3.2.1-windows-amd64.zip');
expect(toolCache.find).toBeCalledWith('helm', 'v3.2.1');
expect(toolCache.downloadTool).toBeCalledWith('https://get.helm.sh/helm-v3.2.1-windows-amd64.zip');
});
test('downloadHelm() - return path to helm tool with same version from toolCache', async () => {
jest.spyOn(toolCache, 'find').mockReturnValue('pathToCachedDir');
jest.spyOn(fs, 'chmodSync').mockImplementation(() => {});
expect(await run.downloadHelm('v3.2.1')).toBe(path.join('pathToCachedDir', 'helm.exe'));
expect(toolCache.find).toBeCalledWith('helm', 'v3.2.1');
expect(fs.chmodSync).toBeCalledWith(path.join('pathToCachedDir', 'helm.exe'), '777');
});
test('downloadHelm() - throw error is helm is not found in path', async () => {
jest.spyOn(toolCache, 'find').mockReturnValue('');
jest.spyOn(toolCache, 'downloadTool').mockResolvedValue('pathToTool');
jest.spyOn(os, 'type').mockReturnValue('Windows_NT');
jest.spyOn(fs, 'chmodSync').mockImplementation();
jest.spyOn(toolCache, 'extractZip').mockResolvedValue('pathToUnzippedHelm');
jest.spyOn(toolCache, 'cacheDir').mockResolvedValue('pathToCachedDir');
jest.spyOn(fs, 'readdirSync').mockImplementation((file, _) => []);
jest.spyOn(fs, 'statSync').mockImplementation((file) => {
const isDirectory = (file as string).indexOf('folder') == -1 ? false: true
return { isDirectory: () => isDirectory } as fs.Stats;
});
await expect(run.downloadHelm('v3.2.1')).rejects.toThrow('Helm executable not found in path pathToCachedDir');
expect(toolCache.find).toBeCalledWith('helm', 'v3.2.1');
expect(toolCache.downloadTool).toBeCalledWith('https://get.helm.sh/helm-v3.2.1-windows-amd64.zip');
expect(fs.chmodSync).toBeCalledWith('pathToTool', '777');
expect(toolCache.extractZip).toBeCalledWith('pathToTool');
});
});


@ -1,15 +1,18 @@
name: 'Helm tool installer'
description: 'Install a specific version of helm binary. Acceptable values are latest or any semantic version string like 1.15.0'
inputs:
  version:
    description: 'Version of helm'
    required: true
    default: 'latest'
+  token:
+    description: GitHub token. Required only if 'version' == 'latest'
+    required: false
outputs:
  helm-path:
    description: 'Path to the cached helm binary'
branding:
  color: 'blue'
runs:
-  using: 'node12'
-  main: 'lib/run.js'
+  using: 'node16'
+  main: 'lib/index.js'


@ -1,18 +1,18 @@
module.exports = {
  clearMocks: true,
  moduleFileExtensions: ['js', 'ts'],
  testEnvironment: 'node',
  testMatch: ['**/*.test.ts'],
  transform: {
    '^.+\\.ts$': 'ts-jest'
  },
  verbose: true,
  coverageThreshold: {
-    "global": {
-      "branches": 0,
-      "functions": 14,
-      "lines": 27,
-      "statements": 27
+    global: {
+      branches: 0,
+      functions: 14,
+      lines: 27,
+      statements: 27
    }
  }
}

lib/index.js: file diff suppressed because one or more lines are too long.


@ -1,143 +0,0 @@
"use strict";
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.walkSync = exports.findHelm = exports.downloadHelm = exports.getHelmDownloadURL = exports.getExecutableExtension = exports.getLatestHelmVersion = exports.run = void 0;
const os = require("os");
const path = require("path");
const util = require("util");
const fs = require("fs");
const toolCache = require("@actions/tool-cache");
const core = require("@actions/core");
const helmToolName = 'helm';
const stableHelmVersion = 'v3.8.0';
const helmAllReleasesUrl = 'https://api.github.com/repos/helm/helm/releases';
function run() {
return __awaiter(this, void 0, void 0, function* () {
let version = core.getInput('version', { 'required': true });
if (version.toLocaleLowerCase() === 'latest') {
version = yield getLatestHelmVersion();
}
core.debug(util.format("Downloading %s", version));
let cachedPath = yield downloadHelm(version);
try {
if (!process.env['PATH'].startsWith(path.dirname(cachedPath))) {
core.addPath(path.dirname(cachedPath));
}
}
catch (_a) {
//do nothing, set as output variable
}
console.log(`Helm tool version: '${version}' has been cached at ${cachedPath}`);
core.setOutput('helm-path', cachedPath);
});
}
exports.run = run;
// Downloads the helm releases JSON and parses all the recent versions of helm from it.
// Defaults to sending stable helm version if none are valid or if it fails
function getLatestHelmVersion() {
return __awaiter(this, void 0, void 0, function* () {
const helmJSONPath = yield toolCache.downloadTool(helmAllReleasesUrl);
try {
const helmJSON = JSON.parse(fs.readFileSync(helmJSONPath, 'utf-8'));
for (let i in helmJSON) {
if (isValidVersion(helmJSON[i].tag_name)) {
return helmJSON[i].tag_name;
}
}
}
catch (err) {
core.warning(util.format("Error while fetching the latest Helm release. Error: %s. Using default Helm version %s", err.toString(), stableHelmVersion));
return stableHelmVersion;
}
return stableHelmVersion;
});
}
exports.getLatestHelmVersion = getLatestHelmVersion;
// isValidVersion checks if verison is a stable release
function isValidVersion(version) {
return version.indexOf('rc') == -1;
}
function getExecutableExtension() {
if (os.type().match(/^Win/)) {
return '.exe';
}
return '';
}
exports.getExecutableExtension = getExecutableExtension;
function getHelmDownloadURL(version) {
switch (os.type()) {
case 'Linux':
return util.format('https://get.helm.sh/helm-%s-linux-amd64.zip', version);
case 'Darwin':
return util.format('https://get.helm.sh/helm-%s-darwin-amd64.zip', version);
case 'Windows_NT':
default:
return util.format('https://get.helm.sh/helm-%s-windows-amd64.zip', version);
}
}
exports.getHelmDownloadURL = getHelmDownloadURL;
function downloadHelm(version) {
return __awaiter(this, void 0, void 0, function* () {
let cachedToolpath = toolCache.find(helmToolName, version);
if (!cachedToolpath) {
let helmDownloadPath;
try {
helmDownloadPath = yield toolCache.downloadTool(getHelmDownloadURL(version));
}
catch (exception) {
throw new Error(util.format("Failed to download Helm from location", getHelmDownloadURL(version)));
}
fs.chmodSync(helmDownloadPath, '777');
const unzipedHelmPath = yield toolCache.extractZip(helmDownloadPath);
cachedToolpath = yield toolCache.cacheDir(unzipedHelmPath, helmToolName, version);
}
const helmpath = findHelm(cachedToolpath);
if (!helmpath) {
throw new Error(util.format("Helm executable not found in path", cachedToolpath));
}
fs.chmodSync(helmpath, '777');
return helmpath;
});
}
exports.downloadHelm = downloadHelm;
function findHelm(rootFolder) {
fs.chmodSync(rootFolder, '777');
var filelist = [];
(0, exports.walkSync)(rootFolder, filelist, helmToolName + getExecutableExtension());
if (!filelist || filelist.length == 0) {
throw new Error(util.format("Helm executable not found in path", rootFolder));
}
else {
return filelist[0];
}
}
exports.findHelm = findHelm;
var walkSync = function (dir, filelist, fileToFind) {
var files = fs.readdirSync(dir);
filelist = filelist || [];
files.forEach(function (file) {
if (fs.statSync(path.join(dir, file)).isDirectory()) {
filelist = (0, exports.walkSync)(path.join(dir, file), filelist, fileToFind);
}
else {
core.debug(file);
if (file == fileToFind) {
filelist.push(path.join(dir, file));
}
}
});
return filelist;
};
exports.walkSync = walkSync;
run().catch(core.setFailed);

node_modules/.package-lock.json: file diff suppressed because it is too large.

node_modules/@actions/core/README.md

@ -309,4 +309,27 @@ outputs:
runs:
  using: 'node12'
  main: 'dist/index.js'
```

#### Filesystem path helpers

You can use these methods to manipulate file paths across operating systems.

The `toPosixPath` function converts input paths to Posix-style (Linux) paths.
The `toWin32Path` function converts input paths to Windows-style paths. These
functions work independently of the underlying runner operating system.

```js
toPosixPath('\\foo\\bar') // => /foo/bar
toWin32Path('/foo/bar') // => \foo\bar
```

The `toPlatformPath` function converts input paths to the expected value on the runner's operating system.

```js
// On a Windows runner.
toPlatformPath('/foo/bar') // => \foo\bar

// On a Linux runner.
toPlatformPath('\\foo\\bar') // => /foo/bar
```


@ -184,3 +184,15 @@ export declare function saveState(name: string, value: any): void;
 */
export declare function getState(name: string): string;
export declare function getIDToken(aud?: string): Promise<string>;
/**
* Summary exports
*/
export { summary } from './summary';
/**
* @deprecated use core.summary
*/
export { markdownSummary } from './summary';
/**
* Path exports
*/
export { toPosixPath, toWin32Path, toPlatformPath } from './path-utils';


@ -309,4 +309,21 @@ function getIDToken(aud) {
});
}
exports.getIDToken = getIDToken;
/**
* Summary exports
*/
var summary_1 = require("./summary");
Object.defineProperty(exports, "summary", { enumerable: true, get: function () { return summary_1.summary; } });
/**
* @deprecated use core.summary
*/
var summary_2 = require("./summary");
Object.defineProperty(exports, "markdownSummary", { enumerable: true, get: function () { return summary_2.markdownSummary; } });
/**
* Path exports
*/
var path_utils_1 = require("./path-utils");
Object.defineProperty(exports, "toPosixPath", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });
Object.defineProperty(exports, "toWin32Path", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });
Object.defineProperty(exports, "toPlatformPath", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });
//# sourceMappingURL=core.js.map



@ -11,7 +11,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
Object.defineProperty(exports, "__esModule", { value: true });
exports.OidcClient = void 0;
const http_client_1 = require("@actions/http-client");
-const auth_1 = require("@actions/http-client/auth");
+const auth_1 = require("@actions/http-client/lib/auth");
const core_1 = require("./core");
class OidcClient {
    static createHttpClient(allowRetry = true, maxRetry = 10) {


@ -1 +1 @@
{"version":3,"file":"oidc-utils.js","sourceRoot":"","sources":["../src/oidc-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AAGA,sDAA+C;AAC/C,oDAAiE;AACjE,iCAAuC;AAKvC,MAAa,UAAU;IACb,MAAM,CAAC,gBAAgB,CAC7B,UAAU,GAAG,IAAI,EACjB,QAAQ,GAAG,EAAE;QAEb,MAAM,cAAc,GAAoB;YACtC,YAAY,EAAE,UAAU;YACxB,UAAU,EAAE,QAAQ;SACrB,CAAA;QAED,OAAO,IAAI,wBAAU,CACnB,qBAAqB,EACrB,CAAC,IAAI,8BAAuB,CAAC,UAAU,CAAC,eAAe,EAAE,CAAC,CAAC,EAC3D,cAAc,CACf,CAAA;IACH,CAAC;IAEO,MAAM,CAAC,eAAe;QAC5B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,gCAAgC,CAAC,CAAA;QAC3D,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,IAAI,KAAK,CACb,2DAA2D,CAC5D,CAAA;SACF;QACD,OAAO,KAAK,CAAA;IACd,CAAC;IAEO,MAAM,CAAC,aAAa;QAC1B,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,8BAA8B,CAAC,CAAA;QAC9D,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAA;SAC3E;QACD,OAAO,UAAU,CAAA;IACnB,CAAC;IAEO,MAAM,CAAO,OAAO,CAAC,YAAoB;;;YAC/C,MAAM,UAAU,GAAG,UAAU,CAAC,gBAAgB,EAAE,CAAA;YAEhD,MAAM,GAAG,GAAG,MAAM,UAAU;iBACzB,OAAO,CAAgB,YAAY,CAAC;iBACpC,KAAK,CAAC,KAAK,CAAC,EAAE;gBACb,MAAM,IAAI,KAAK,CACb;uBACa,KAAK,CAAC,UAAU;yBACd,KAAK,CAAC,MAAM,CAAC,OAAO,EAAE,CACtC,CAAA;YACH,CAAC,CAAC,CAAA;YAEJ,MAAM,QAAQ,SAAG,GAAG,CAAC,MAAM,0CAAE,KAAK,CAAA;YAClC,IAAI,CAAC,QAAQ,EAAE;gBACb,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAA;aACjE;YACD,OAAO,QAAQ,CAAA;;KAChB;IAED,MAAM,CAAO,UAAU,CAAC,QAAiB;;YACvC,IAAI;gBACF,gDAAgD;gBAChD,IAAI,YAAY,GAAW,UAAU,CAAC,aAAa,EAAE,CAAA;gBACrD,IAAI,QAAQ,EAAE;oBACZ,MAAM,eAAe,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAA;oBACpD,YAAY,GAAG,GAAG,YAAY,aAAa,eAAe,EAAE,CAAA;iBAC7D;gBAED,YAAK,CAAC,mBAAmB,YAAY,EAAE,CAAC,CAAA;gBAExC,MAAM,QAAQ,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,YAAY,CAAC,CAAA;gBACvD,gBAAS,CAAC,QAAQ,CAAC,CAAA;gBACnB,OAAO,QAAQ,CAAA;aAChB;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CAAC,kBAAkB,KAAK,CAAC,OAAO,EAAE,CAAC,CAAA;aACnD;QACH,CAAC;KAAA;CACF;AAzED,gCAyEC"} {"version":3,"file":"oidc-utils.js","sourceRoot":"","sources":["../src/oidc-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AAGA,sDAA+C;AAC/C,wDAAqE;AACrE,iCAAuC;AAKvC,MAAa,UAAU;IACb,MAAM,CAAC,gBAAgB,CAC7B,UAAU,GAAG,IAAI,EACjB,QAAQ,GAAG,EAAE;QAEb,MAAM,cAAc,GAAmB;YACrC,YAAY,EAAE,UAAU;YACxB,UAAU,EAAE,QAAQ;SACrB,CAAA;QAED,OAAO,IAAI,wBAAU,CACnB,qBAAqB,EACrB,CAAC,IAAI,8BAAuB,CAAC,UAAU,CAAC,eAAe,EAAE,CAAC,CAAC,EAC3D,cAAc,CACf,CAAA;IACH,CAAC;IAEO,MAAM,CAAC,eAAe;QAC5B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,gCAAgC,CAAC,CAAA;QAC3D,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,IAAI,KAAK,CACb,2DAA2D,CAC5D,CAAA;SACF;QACD,OAAO,KAAK,CAAA;IACd,CAAC;IAEO,MAAM,CAAC,aAAa;QAC1B,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,8BAA8B,CAAC,CAAA;QAC9D,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAA;SAC3E;QACD,OAAO,UAAU,CAAA;IACnB,CAAC;IAEO,MAAM,CAAO,OAAO,CAAC,YAAoB;;;YAC/C,MAAM,UAAU,GAAG,UAAU,CAAC,gBAAgB,EAAE,CAAA;YAEhD,MAAM,GAAG,GAAG,MAAM,UAAU;iBACzB,OAAO,CAAgB,YAAY,CAAC;iBACpC,KAAK,CAAC,KAAK,CAAC,EAAE;gBACb,MAAM,IAAI,KAAK,CACb;uBACa,KAAK,CAAC,UAAU;yBACd,KAAK,CAAC,MAAM,CAAC,OAAO,EAAE,CACtC,CAAA;YACH,CAAC,CAAC,CAAA;YAEJ,MAAM,QAAQ,SAAG,GAAG,CAAC,MAAM,0CAAE,KAAK,CAAA;YAClC,IAAI,CAAC,QAAQ,EAAE;gBACb,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAA;aACjE;YACD,OAAO,QAAQ,CAAA;;KAChB;IAED,MAAM,CAAO,UAAU,CAAC,QAAiB;;YACvC,IAAI;gBACF,gDAAgD;gBAChD,IAAI,YAAY,GAAW,UAAU,CAAC,aAAa,EAAE,CAAA;gBACrD,IAAI,QAAQ,EAAE;oBACZ,MAAM,eAAe,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAA;oBACpD,YAAY,GAAG,GAAG,YAAY,aAAa,eAAe,EAAE,CAAA;iBAC7D;gBAED,YAAK,CAAC,mBAAmB,YAAY,EAAE,CAAC,CAAA;gBAExC,MAAM,QAAQ,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,YAAY,CAAC,CAAA;gBACvD,gBAAS,CAAC,QAAQ,CAAC,CAAA;gBACnB,OAAO,QAAQ,CAAA;aAChB;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CAAC,kBAAkB,KAAK,CAAC,OAAO,EAAE,CAAC,CAAA;aACnD;QACH,CAAC;KAAA;CACF;AAzED,gCAyEC"}

node_modules/@actions/core/lib/path-utils.d.ts

@ -0,0 +1,25 @@
/**
* toPosixPath converts the given path to the posix form. On Windows, \\ will be
* replaced with /.
*
* @param pth. Path to transform.
* @return string Posix path.
*/
export declare function toPosixPath(pth: string): string;
/**
* toWin32Path converts the given path to the win32 form. On Linux, / will be
* replaced with \\.
*
* @param pth. Path to transform.
* @return string Win32 path.
*/
export declare function toWin32Path(pth: string): string;
/**
* toPlatformPath converts the given path to a platform-specific path. It does
* this by replacing instances of / and \ with the platform-specific path
* separator.
*
* @param pth The path to platformize.
* @return string The platform-specific path.
*/
export declare function toPlatformPath(pth: string): string;

node_modules/@actions/core/lib/path-utils.js

@ -0,0 +1,58 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;
const path = __importStar(require("path"));
/**
* toPosixPath converts the given path to the posix form. On Windows, \\ will be
* replaced with /.
*
* @param pth. Path to transform.
* @return string Posix path.
*/
function toPosixPath(pth) {
return pth.replace(/[\\]/g, '/');
}
exports.toPosixPath = toPosixPath;
/**
* toWin32Path converts the given path to the win32 form. On Linux, / will be
* replaced with \\.
*
* @param pth. Path to transform.
* @return string Win32 path.
*/
function toWin32Path(pth) {
return pth.replace(/[/]/g, '\\');
}
exports.toWin32Path = toWin32Path;
/**
* toPlatformPath converts the given path to a platform-specific path. It does
* this by replacing instances of / and \ with the platform-specific path
* separator.
*
* @param pth The path to platformize.
* @return string The platform-specific path.
*/
function toPlatformPath(pth) {
return pth.replace(/[/\\]/g, path.sep);
}
exports.toPlatformPath = toPlatformPath;
//# sourceMappingURL=path-utils.js.map

node_modules/@actions/core/lib/path-utils.js.map

@ -0,0 +1 @@
{"version":3,"file":"path-utils.js","sourceRoot":"","sources":["../src/path-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA4B;AAE5B;;;;;;GAMG;AACH,SAAgB,WAAW,CAAC,GAAW;IACrC,OAAO,GAAG,CAAC,OAAO,CAAC,OAAO,EAAE,GAAG,CAAC,CAAA;AAClC,CAAC;AAFD,kCAEC;AAED;;;;;;GAMG;AACH,SAAgB,WAAW,CAAC,GAAW;IACrC,OAAO,GAAG,CAAC,OAAO,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;AAClC,CAAC;AAFD,kCAEC;AAED;;;;;;;GAOG;AACH,SAAgB,cAAc,CAAC,GAAW;IACxC,OAAO,GAAG,CAAC,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC,GAAG,CAAC,CAAA;AACxC,CAAC;AAFD,wCAEC"}

node_modules/@actions/core/lib/summary.d.ts

@ -0,0 +1,202 @@
export declare const SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
export declare const SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
export declare type SummaryTableRow = (SummaryTableCell | string)[];
export interface SummaryTableCell {
/**
* Cell content
*/
data: string;
/**
* Render cell as header
* (optional) default: false
*/
header?: boolean;
/**
* Number of columns the cell extends
* (optional) default: '1'
*/
colspan?: string;
/**
* Number of rows the cell extends
* (optional) default: '1'
*/
rowspan?: string;
}
export interface SummaryImageOptions {
/**
* The width of the image in pixels. Must be an integer without a unit.
* (optional)
*/
width?: string;
/**
* The height of the image in pixels. Must be an integer without a unit.
* (optional)
*/
height?: string;
}
export interface SummaryWriteOptions {
/**
* Replace all existing content in summary file with buffer contents
* (optional) default: false
*/
overwrite?: boolean;
}
declare class Summary {
private _buffer;
private _filePath?;
constructor();
/**
* Finds the summary file path from the environment, rejects if env var is not found or file does not exist
* Also checks r/w permissions.
*
* @returns step summary file path
*/
private filePath;
/**
* Wraps content in an HTML tag, adding any HTML attributes
*
* @param {string} tag HTML tag to wrap
* @param {string | null} content content within the tag
* @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
*
* @returns {string} content wrapped in HTML element
*/
private wrap;
/**
* Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
*
* @param {SummaryWriteOptions} [options] (optional) options for write operation
*
* @returns {Promise<Summary>} summary instance
*/
write(options?: SummaryWriteOptions): Promise<Summary>;
/**
* Clears the summary buffer and wipes the summary file
*
* @returns {Summary} summary instance
*/
clear(): Promise<Summary>;
/**
* Returns the current summary buffer as a string
*
* @returns {string} string of summary buffer
*/
stringify(): string;
/**
* If the summary buffer is empty
*
* @returns {boolen} true if the buffer is empty
*/
isEmptyBuffer(): boolean;
/**
* Resets the summary buffer without writing to summary file
*
* @returns {Summary} summary instance
*/
emptyBuffer(): Summary;
/**
* Adds raw text to the summary buffer
*
* @param {string} text content to add
* @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
*
* @returns {Summary} summary instance
*/
addRaw(text: string, addEOL?: boolean): Summary;
/**
* Adds the operating system-specific end-of-line marker to the buffer
*
* @returns {Summary} summary instance
*/
addEOL(): Summary;
/**
* Adds an HTML codeblock to the summary buffer
*
* @param {string} code content to render within fenced code block
* @param {string} lang (optional) language to syntax highlight code
*
* @returns {Summary} summary instance
*/
addCodeBlock(code: string, lang?: string): Summary;
/**
* Adds an HTML list to the summary buffer
*
* @param {string[]} items list of items to render
* @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
*
* @returns {Summary} summary instance
*/
addList(items: string[], ordered?: boolean): Summary;
/**
* Adds an HTML table to the summary buffer
*
* @param {SummaryTableCell[]} rows table rows
*
* @returns {Summary} summary instance
*/
addTable(rows: SummaryTableRow[]): Summary;
/**
* Adds a collapsable HTML details element to the summary buffer
*
* @param {string} label text for the closed state
* @param {string} content collapsable content
*
* @returns {Summary} summary instance
*/
addDetails(label: string, content: string): Summary;
/**
* Adds an HTML image tag to the summary buffer
*
* @param {string} src path to the image you to embed
* @param {string} alt text description of the image
* @param {SummaryImageOptions} options (optional) addition image attributes
*
* @returns {Summary} summary instance
*/
addImage(src: string, alt: string, options?: SummaryImageOptions): Summary;
/**
* Adds an HTML section heading element
*
* @param {string} text heading text
* @param {number | string} [level=1] (optional) the heading level, default: 1
*
* @returns {Summary} summary instance
*/
addHeading(text: string, level?: number | string): Summary;
/**
* Adds an HTML thematic break (<hr>) to the summary buffer
*
* @returns {Summary} summary instance
*/
addSeparator(): Summary;
/**
* Adds an HTML line break (<br>) to the summary buffer
*
* @returns {Summary} summary instance
*/
addBreak(): Summary;
/**
* Adds an HTML blockquote to the summary buffer
*
* @param {string} text quote text
* @param {string} cite (optional) citation url
*
* @returns {Summary} summary instance
*/
addQuote(text: string, cite?: string): Summary;
/**
* Adds an HTML anchor tag to the summary buffer
*
* @param {string} text link text/content
* @param {string} href hyperlink
*
* @returns {Summary} summary instance
*/
addLink(text: string, href: string): Summary;
}
/**
* @deprecated use `core.summary`
*/
export declare const markdownSummary: Summary;
export declare const summary: Summary;
export {};

node_modules/@actions/core/lib/summary.js

@ -0,0 +1,283 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;
const os_1 = require("os");
const fs_1 = require("fs");
const { access, appendFile, writeFile } = fs_1.promises;
exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
class Summary {
constructor() {
this._buffer = '';
}
/**
* Finds the summary file path from the environment, rejects if env var is not found or file does not exist
* Also checks r/w permissions.
*
* @returns step summary file path
*/
filePath() {
return __awaiter(this, void 0, void 0, function* () {
if (this._filePath) {
return this._filePath;
}
const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];
if (!pathFromEnv) {
throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
try {
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
}
catch (_a) {
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
this._filePath = pathFromEnv;
return this._filePath;
});
}
/**
* Wraps content in an HTML tag, adding any HTML attributes
*
* @param {string} tag HTML tag to wrap
* @param {string | null} content content within the tag
* @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
*
* @returns {string} content wrapped in HTML element
*/
wrap(tag, content, attrs = {}) {
const htmlAttrs = Object.entries(attrs)
.map(([key, value]) => ` ${key}="${value}"`)
.join('');
if (!content) {
return `<${tag}${htmlAttrs}>`;
}
return `<${tag}${htmlAttrs}>${content}</${tag}>`;
}
/**
* Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
*
* @param {SummaryWriteOptions} [options] (optional) options for write operation
*
* @returns {Promise<Summary>} summary instance
*/
write(options) {
return __awaiter(this, void 0, void 0, function* () {
const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
const filePath = yield this.filePath();
const writeFunc = overwrite ? writeFile : appendFile;
yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });
return this.emptyBuffer();
});
}
/**
* Clears the summary buffer and wipes the summary file
*
* @returns {Summary} summary instance
*/
clear() {
return __awaiter(this, void 0, void 0, function* () {
return this.emptyBuffer().write({ overwrite: true });
});
}
/**
* Returns the current summary buffer as a string
*
* @returns {string} string of summary buffer
*/
stringify() {
return this._buffer;
}
/**
     * Checks whether the summary buffer is empty
     *
     * @returns {boolean} true if the buffer is empty
*/
isEmptyBuffer() {
return this._buffer.length === 0;
}
/**
* Resets the summary buffer without writing to summary file
*
* @returns {Summary} summary instance
*/
emptyBuffer() {
this._buffer = '';
return this;
}
/**
* Adds raw text to the summary buffer
*
* @param {string} text content to add
* @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
*
* @returns {Summary} summary instance
*/
addRaw(text, addEOL = false) {
this._buffer += text;
return addEOL ? this.addEOL() : this;
}
/**
* Adds the operating system-specific end-of-line marker to the buffer
*
* @returns {Summary} summary instance
*/
addEOL() {
return this.addRaw(os_1.EOL);
}
/**
* Adds an HTML codeblock to the summary buffer
*
* @param {string} code content to render within fenced code block
* @param {string} lang (optional) language to syntax highlight code
*
* @returns {Summary} summary instance
*/
addCodeBlock(code, lang) {
const attrs = Object.assign({}, (lang && { lang }));
const element = this.wrap('pre', this.wrap('code', code), attrs);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML list to the summary buffer
*
* @param {string[]} items list of items to render
* @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
*
* @returns {Summary} summary instance
*/
addList(items, ordered = false) {
const tag = ordered ? 'ol' : 'ul';
const listItems = items.map(item => this.wrap('li', item)).join('');
const element = this.wrap(tag, listItems);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML table to the summary buffer
*
* @param {SummaryTableCell[]} rows table rows
*
* @returns {Summary} summary instance
*/
addTable(rows) {
const tableBody = rows
.map(row => {
const cells = row
.map(cell => {
if (typeof cell === 'string') {
return this.wrap('td', cell);
}
const { header, data, colspan, rowspan } = cell;
const tag = header ? 'th' : 'td';
const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));
return this.wrap(tag, data, attrs);
})
.join('');
return this.wrap('tr', cells);
})
.join('');
const element = this.wrap('table', tableBody);
return this.addRaw(element).addEOL();
}
/**
     * Adds a collapsible HTML details element to the summary buffer
     *
     * @param {string} label text for the closed state
     * @param {string} content collapsible content
*
* @returns {Summary} summary instance
*/
addDetails(label, content) {
const element = this.wrap('details', this.wrap('summary', label) + content);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML image tag to the summary buffer
*
     * @param {string} src path to the image you want to embed
     * @param {string} alt text description of the image
     * @param {SummaryImageOptions} options (optional) additional image attributes
*
* @returns {Summary} summary instance
*/
addImage(src, alt, options) {
const { width, height } = options || {};
const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));
const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML section heading element
*
* @param {string} text heading text
* @param {number | string} [level=1] (optional) the heading level, default: 1
*
* @returns {Summary} summary instance
*/
addHeading(text, level) {
const tag = `h${level}`;
const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)
? tag
: 'h1';
const element = this.wrap(allowedTag, text);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML thematic break (<hr>) to the summary buffer
*
* @returns {Summary} summary instance
*/
addSeparator() {
const element = this.wrap('hr', null);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML line break (<br>) to the summary buffer
*
* @returns {Summary} summary instance
*/
addBreak() {
const element = this.wrap('br', null);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML blockquote to the summary buffer
*
* @param {string} text quote text
* @param {string} cite (optional) citation url
*
* @returns {Summary} summary instance
*/
addQuote(text, cite) {
const attrs = Object.assign({}, (cite && { cite }));
const element = this.wrap('blockquote', text, attrs);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML anchor tag to the summary buffer
*
* @param {string} text link text/content
* @param {string} href hyperlink
*
* @returns {Summary} summary instance
*/
addLink(text, href) {
const element = this.wrap('a', text, { href });
return this.addRaw(element).addEOL();
}
}
const _summary = new Summary();
/**
* @deprecated use `core.summary`
*/
exports.markdownSummary = _summary;
exports.summary = _summary;
//# sourceMappingURL=summary.js.map
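
The `addTable` implementation above accepts rows of plain strings or `SummaryTableCell` objects (`data`, `header`, `colspan`, `rowspan`). A minimal sketch of that shape, with invented row contents, might look like this:

```typescript
import * as core from '@actions/core'

async function publishTable(): Promise<void> {
  await core.summary
    .addTable([
      [
        {data: 'Chart', header: true},
        {data: 'Version', header: true},
        {data: 'Status', header: true}
      ],
      ['nginx-ingress', '4.1.0', 'deployed'],
      ['cert-manager', '1.8.2', 'skipped']
    ])
    .addSeparator()
    .write({overwrite: true}) // overwrite replaces the summary file instead of appending
}
```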

1
node_modules/@actions/core/lib/summary.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

View File

@ -1,6 +1,6 @@
{ {
"name": "@actions/core", "name": "@actions/core",
"version": "1.6.0", "version": "1.9.0",
"description": "Actions core lib", "description": "Actions core lib",
"keywords": [ "keywords": [
"github", "github",
@ -36,7 +36,7 @@
"url": "https://github.com/actions/toolkit/issues" "url": "https://github.com/actions/toolkit/issues"
}, },
"dependencies": { "dependencies": {
"@actions/http-client": "^1.0.11" "@actions/http-client": "^2.0.1"
}, },
"devDependencies": { "devDependencies": {
"@types/node": "^12.0.2" "@types/node": "^12.0.2"

View File

@ -1,6 +1,6 @@
{ {
"name": "@actions/exec", "name": "@actions/exec",
"version": "1.1.0", "version": "1.1.1",
"description": "Actions exec lib", "description": "Actions exec lib",
"keywords": [ "keywords": [
"github", "github",

View File

@ -1,18 +1,11 @@
# `@actions/http-client`
<p align="center"> A lightweight HTTP client optimized for building actions.
<img src="actions.png">
</p>
# Actions Http-Client
[![Http Status](https://github.com/actions/http-client/workflows/http-tests/badge.svg)](https://github.com/actions/http-client/actions)
A lightweight HTTP client optimized for use with actions, TypeScript with generics and async await.
## Features ## Features
- HTTP client with TypeScript generics and async/await/Promises - HTTP client with TypeScript generics and async/await/Promises
- Typings included so no need to acquire separately (great for intellisense and no versioning drift) - Typings included!
- [Proxy support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners#using-a-proxy-server-with-self-hosted-runners) just works with actions and the runner - [Proxy support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners#using-a-proxy-server-with-self-hosted-runners) just works with actions and the runner
- Targets ES2019 (runner runs actions with node 12+). Only supported on node 12+. - Targets ES2019 (runner runs actions with node 12+). Only supported on node 12+.
- Basic, Bearer and PAT Support out of the box. Extensible handlers for others. - Basic, Bearer and PAT Support out of the box. Extensible handlers for others.
@ -28,7 +21,7 @@ npm install @actions/http-client --save
## Samples ## Samples
See the [HTTP](./__tests__) tests for detailed examples. See the [tests](./__tests__) for detailed examples.
## Errors ## Errors
@ -39,13 +32,13 @@ The HTTP client does not throw unless truly exceptional.
* A request that successfully executes resulting in a 404, 500 etc... will return a response object with a status code and a body. * A request that successfully executes resulting in a 404, 500 etc... will return a response object with a status code and a body.
* Redirects (3xx) will be followed by default. * Redirects (3xx) will be followed by default.
See [HTTP tests](./__tests__) for detailed examples. See the [tests](./__tests__) for detailed examples.
## Debugging ## Debugging
To enable detailed console logging of all HTTP requests and responses, set the NODE_DEBUG environment variable: To enable detailed console logging of all HTTP requests and responses, set the NODE_DEBUG environment variable:
``` ```shell
export NODE_DEBUG=http export NODE_DEBUG=http
``` ```
@ -63,17 +56,18 @@ We welcome PRs. Please create an issue and if applicable, a design before proce
once: once:
```bash ```
$ npm install npm install
``` ```
To build: To build:
```bash ```
$ npm run build npm run build
``` ```
To run all tests: To run all tests:
```bash
$ npm test ```
npm test
``` ```
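
As the Errors section of this README notes, the client resolves 4xx/5xx responses instead of throwing, so callers inspect the status code themselves. A small sketch under those assumptions (the URL is a placeholder):

```typescript
import {HttpClient, HttpCodes} from '@actions/http-client'

async function checkEndpoint(): Promise<void> {
  const client = new HttpClient('my-user-agent')
  const res = await client.get('https://example.com/health')
  const body = await res.readBody()

  // Non-2xx responses are returned, not thrown, so check statusCode explicitly.
  if (res.message.statusCode !== HttpCodes.OK) {
    console.log(`request failed (${res.message.statusCode}): ${body}`)
    return
  }
  console.log(body)
}
```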

View File

@ -1,26 +0,0 @@
## Releases
## 1.0.10
Contains a bug fix where proxy is defined without a user and password. see [PR here](https://github.com/actions/http-client/pull/42)
## 1.0.9
Throw HttpClientError instead of a generic Error from the \<verb>Json() helper methods when the server responds with a non-successful status code.
## 1.0.8
Fixed security issue where a redirect (e.g. 302) to another domain would pass headers. The fix was to strip the authorization header if the hostname was different. More [details in PR #27](https://github.com/actions/http-client/pull/27)
## 1.0.7
Update NPM dependencies and add 429 to the list of HttpCodes
## 1.0.6
Automatically sends Content-Type and Accept application/json headers for \<verb>Json() helper methods if not set in the client or parameters.
## 1.0.5
Adds \<verb>Json() helper methods for json over http scenarios.
## 1.0.4
Started to add \<verb>Json() helper methods. Do not use this release for that. Use >= 1.0.5 since there was an issue with types.
## 1.0.1 to 1.0.3
Adds proxy support.

Binary file not shown. (Before: 33 KiB)

View File

@ -1,23 +0,0 @@
import ifm = require('./interfaces');
export declare class BasicCredentialHandler implements ifm.IRequestHandler {
username: string;
password: string;
constructor(username: string, password: string);
prepareRequest(options: any): void;
canHandleAuthentication(response: ifm.IHttpClientResponse): boolean;
handleAuthentication(httpClient: ifm.IHttpClient, requestInfo: ifm.IRequestInfo, objs: any): Promise<ifm.IHttpClientResponse>;
}
export declare class BearerCredentialHandler implements ifm.IRequestHandler {
token: string;
constructor(token: string);
prepareRequest(options: any): void;
canHandleAuthentication(response: ifm.IHttpClientResponse): boolean;
handleAuthentication(httpClient: ifm.IHttpClient, requestInfo: ifm.IRequestInfo, objs: any): Promise<ifm.IHttpClientResponse>;
}
export declare class PersonalAccessTokenCredentialHandler implements ifm.IRequestHandler {
token: string;
constructor(token: string);
prepareRequest(options: any): void;
canHandleAuthentication(response: ifm.IHttpClientResponse): boolean;
handleAuthentication(httpClient: ifm.IHttpClient, requestInfo: ifm.IRequestInfo, objs: any): Promise<ifm.IHttpClientResponse>;
}

View File

@ -1,58 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
class BasicCredentialHandler {
constructor(username, password) {
this.username = username;
this.password = password;
}
prepareRequest(options) {
options.headers['Authorization'] =
'Basic ' +
Buffer.from(this.username + ':' + this.password).toString('base64');
}
// This handler cannot handle 401
canHandleAuthentication(response) {
return false;
}
handleAuthentication(httpClient, requestInfo, objs) {
return null;
}
}
exports.BasicCredentialHandler = BasicCredentialHandler;
class BearerCredentialHandler {
constructor(token) {
this.token = token;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest(options) {
options.headers['Authorization'] = 'Bearer ' + this.token;
}
// This handler cannot handle 401
canHandleAuthentication(response) {
return false;
}
handleAuthentication(httpClient, requestInfo, objs) {
return null;
}
}
exports.BearerCredentialHandler = BearerCredentialHandler;
class PersonalAccessTokenCredentialHandler {
constructor(token) {
this.token = token;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest(options) {
options.headers['Authorization'] =
'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
}
// This handler cannot handle 401
canHandleAuthentication(response) {
return false;
}
handleAuthentication(httpClient, requestInfo, objs) {
return null;
}
}
exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;

View File

@ -1,537 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const http = require("http");
const https = require("https");
const pm = require("./proxy");
let tunnel;
var HttpCodes;
(function (HttpCodes) {
HttpCodes[HttpCodes["OK"] = 200] = "OK";
HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
var Headers;
(function (Headers) {
Headers["Accept"] = "accept";
Headers["ContentType"] = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
var MediaTypes;
(function (MediaTypes) {
MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
/**
* Returns the proxy URL, depending upon the supplied url and proxy environment variables.
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
*/
function getProxyUrl(serverUrl) {
let proxyUrl = pm.getProxyUrl(new URL(serverUrl));
return proxyUrl ? proxyUrl.href : '';
}
exports.getProxyUrl = getProxyUrl;
const HttpRedirectCodes = [
HttpCodes.MovedPermanently,
HttpCodes.ResourceMoved,
HttpCodes.SeeOther,
HttpCodes.TemporaryRedirect,
HttpCodes.PermanentRedirect
];
const HttpResponseRetryCodes = [
HttpCodes.BadGateway,
HttpCodes.ServiceUnavailable,
HttpCodes.GatewayTimeout
];
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
class HttpClientError extends Error {
constructor(message, statusCode) {
super(message);
this.name = 'HttpClientError';
this.statusCode = statusCode;
Object.setPrototypeOf(this, HttpClientError.prototype);
}
}
exports.HttpClientError = HttpClientError;
class HttpClientResponse {
constructor(message) {
this.message = message;
}
readBody() {
return new Promise(async (resolve, reject) => {
let output = Buffer.alloc(0);
this.message.on('data', (chunk) => {
output = Buffer.concat([output, chunk]);
});
this.message.on('end', () => {
resolve(output.toString());
});
});
}
}
exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) {
let parsedUrl = new URL(requestUrl);
return parsedUrl.protocol === 'https:';
}
exports.isHttps = isHttps;
class HttpClient {
constructor(userAgent, handlers, requestOptions) {
this._ignoreSslError = false;
this._allowRedirects = true;
this._allowRedirectDowngrade = false;
this._maxRedirects = 50;
this._allowRetries = false;
this._maxRetries = 1;
this._keepAlive = false;
this._disposed = false;
this.userAgent = userAgent;
this.handlers = handlers || [];
this.requestOptions = requestOptions;
if (requestOptions) {
if (requestOptions.ignoreSslError != null) {
this._ignoreSslError = requestOptions.ignoreSslError;
}
this._socketTimeout = requestOptions.socketTimeout;
if (requestOptions.allowRedirects != null) {
this._allowRedirects = requestOptions.allowRedirects;
}
if (requestOptions.allowRedirectDowngrade != null) {
this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
}
if (requestOptions.maxRedirects != null) {
this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
}
if (requestOptions.keepAlive != null) {
this._keepAlive = requestOptions.keepAlive;
}
if (requestOptions.allowRetries != null) {
this._allowRetries = requestOptions.allowRetries;
}
if (requestOptions.maxRetries != null) {
this._maxRetries = requestOptions.maxRetries;
}
}
}
options(requestUrl, additionalHeaders) {
return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
}
get(requestUrl, additionalHeaders) {
return this.request('GET', requestUrl, null, additionalHeaders || {});
}
del(requestUrl, additionalHeaders) {
return this.request('DELETE', requestUrl, null, additionalHeaders || {});
}
post(requestUrl, data, additionalHeaders) {
return this.request('POST', requestUrl, data, additionalHeaders || {});
}
patch(requestUrl, data, additionalHeaders) {
return this.request('PATCH', requestUrl, data, additionalHeaders || {});
}
put(requestUrl, data, additionalHeaders) {
return this.request('PUT', requestUrl, data, additionalHeaders || {});
}
head(requestUrl, additionalHeaders) {
return this.request('HEAD', requestUrl, null, additionalHeaders || {});
}
sendStream(verb, requestUrl, stream, additionalHeaders) {
return this.request(verb, requestUrl, stream, additionalHeaders);
}
/**
* Gets a typed object from an endpoint
* Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
*/
async getJson(requestUrl, additionalHeaders = {}) {
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
let res = await this.get(requestUrl, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
async postJson(requestUrl, obj, additionalHeaders = {}) {
let data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
let res = await this.post(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
async putJson(requestUrl, obj, additionalHeaders = {}) {
let data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
let res = await this.put(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
async patchJson(requestUrl, obj, additionalHeaders = {}) {
let data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
let res = await this.patch(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
/**
* Makes a raw http request.
* All other methods such as get, post, patch, and request ultimately call this.
* Prefer get, del, post and patch
*/
async request(verb, requestUrl, data, headers) {
if (this._disposed) {
throw new Error('Client has already been disposed.');
}
let parsedUrl = new URL(requestUrl);
let info = this._prepareRequest(verb, parsedUrl, headers);
// Only perform retries on reads since writes may not be idempotent.
let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
? this._maxRetries + 1
: 1;
let numTries = 0;
let response;
while (numTries < maxTries) {
response = await this.requestRaw(info, data);
// Check if it's an authentication challenge
if (response &&
response.message &&
response.message.statusCode === HttpCodes.Unauthorized) {
let authenticationHandler;
for (let i = 0; i < this.handlers.length; i++) {
if (this.handlers[i].canHandleAuthentication(response)) {
authenticationHandler = this.handlers[i];
break;
}
}
if (authenticationHandler) {
return authenticationHandler.handleAuthentication(this, info, data);
}
else {
// We have received an unauthorized response but have no handlers to handle it.
// Let the response return to the caller.
return response;
}
}
let redirectsRemaining = this._maxRedirects;
while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
this._allowRedirects &&
redirectsRemaining > 0) {
const redirectUrl = response.message.headers['location'];
if (!redirectUrl) {
// if there's no location to redirect to, we won't
break;
}
let parsedRedirectUrl = new URL(redirectUrl);
if (parsedUrl.protocol == 'https:' &&
parsedUrl.protocol != parsedRedirectUrl.protocol &&
!this._allowRedirectDowngrade) {
throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
}
// we need to finish reading the response before reassigning response
// which will leak the open socket.
await response.readBody();
// strip authorization header if redirected to a different hostname
if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
for (let header in headers) {
// header names are case insensitive
if (header.toLowerCase() === 'authorization') {
delete headers[header];
}
}
}
// let's make the request with the new redirectUrl
info = this._prepareRequest(verb, parsedRedirectUrl, headers);
response = await this.requestRaw(info, data);
redirectsRemaining--;
}
if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
// If not a retry code, return immediately instead of retrying
return response;
}
numTries += 1;
if (numTries < maxTries) {
await response.readBody();
await this._performExponentialBackoff(numTries);
}
}
return response;
}
/**
* Needs to be called if keepAlive is set to true in request options.
*/
dispose() {
if (this._agent) {
this._agent.destroy();
}
this._disposed = true;
}
/**
* Raw request.
* @param info
* @param data
*/
requestRaw(info, data) {
return new Promise((resolve, reject) => {
let callbackForResult = function (err, res) {
if (err) {
reject(err);
}
resolve(res);
};
this.requestRawWithCallback(info, data, callbackForResult);
});
}
/**
* Raw request with callback.
* @param info
* @param data
* @param onResult
*/
requestRawWithCallback(info, data, onResult) {
let socket;
if (typeof data === 'string') {
info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
}
let callbackCalled = false;
let handleResult = (err, res) => {
if (!callbackCalled) {
callbackCalled = true;
onResult(err, res);
}
};
let req = info.httpModule.request(info.options, (msg) => {
let res = new HttpClientResponse(msg);
handleResult(null, res);
});
req.on('socket', sock => {
socket = sock;
});
// If we ever get disconnected, we want the socket to timeout eventually
req.setTimeout(this._socketTimeout || 3 * 60000, () => {
if (socket) {
socket.end();
}
handleResult(new Error('Request timeout: ' + info.options.path), null);
});
req.on('error', function (err) {
// err has statusCode property
// res should have headers
handleResult(err, null);
});
if (data && typeof data === 'string') {
req.write(data, 'utf8');
}
if (data && typeof data !== 'string') {
data.on('close', function () {
req.end();
});
data.pipe(req);
}
else {
req.end();
}
}
/**
* Gets an http agent. This function is useful when you need an http agent that handles
* routing through a proxy server - depending upon the url and proxy environment variables.
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
*/
getAgent(serverUrl) {
let parsedUrl = new URL(serverUrl);
return this._getAgent(parsedUrl);
}
_prepareRequest(method, requestUrl, headers) {
const info = {};
info.parsedUrl = requestUrl;
const usingSsl = info.parsedUrl.protocol === 'https:';
info.httpModule = usingSsl ? https : http;
const defaultPort = usingSsl ? 443 : 80;
info.options = {};
info.options.host = info.parsedUrl.hostname;
info.options.port = info.parsedUrl.port
? parseInt(info.parsedUrl.port)
: defaultPort;
info.options.path =
(info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
info.options.method = method;
info.options.headers = this._mergeHeaders(headers);
if (this.userAgent != null) {
info.options.headers['user-agent'] = this.userAgent;
}
info.options.agent = this._getAgent(info.parsedUrl);
// gives handlers an opportunity to participate
if (this.handlers) {
this.handlers.forEach(handler => {
handler.prepareRequest(info.options);
});
}
return info;
}
_mergeHeaders(headers) {
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
if (this.requestOptions && this.requestOptions.headers) {
return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
}
return lowercaseKeys(headers || {});
}
_getExistingOrDefaultHeader(additionalHeaders, header, _default) {
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
let clientHeader;
if (this.requestOptions && this.requestOptions.headers) {
clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
}
return additionalHeaders[header] || clientHeader || _default;
}
_getAgent(parsedUrl) {
let agent;
let proxyUrl = pm.getProxyUrl(parsedUrl);
let useProxy = proxyUrl && proxyUrl.hostname;
if (this._keepAlive && useProxy) {
agent = this._proxyAgent;
}
if (this._keepAlive && !useProxy) {
agent = this._agent;
}
// if agent is already assigned use that agent.
if (!!agent) {
return agent;
}
const usingSsl = parsedUrl.protocol === 'https:';
let maxSockets = 100;
if (!!this.requestOptions) {
maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
}
if (useProxy) {
// If using proxy, need tunnel
if (!tunnel) {
tunnel = require('tunnel');
}
const agentOptions = {
maxSockets: maxSockets,
keepAlive: this._keepAlive,
proxy: {
...((proxyUrl.username || proxyUrl.password) && {
proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
}),
host: proxyUrl.hostname,
port: proxyUrl.port
}
};
let tunnelAgent;
const overHttps = proxyUrl.protocol === 'https:';
if (usingSsl) {
tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
}
else {
tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
}
agent = tunnelAgent(agentOptions);
this._proxyAgent = agent;
}
// if reusing agent across request and tunneling agent isn't assigned create a new agent
if (this._keepAlive && !agent) {
const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
this._agent = agent;
}
// if not using private agent and tunnel agent isn't setup then use global agent
if (!agent) {
agent = usingSsl ? https.globalAgent : http.globalAgent;
}
if (usingSsl && this._ignoreSslError) {
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
// we have to cast it to any and change it directly
agent.options = Object.assign(agent.options || {}, {
rejectUnauthorized: false
});
}
return agent;
}
_performExponentialBackoff(retryNumber) {
retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
return new Promise(resolve => setTimeout(() => resolve(), ms));
}
static dateTimeDeserializer(key, value) {
if (typeof value === 'string') {
let a = new Date(value);
if (!isNaN(a.valueOf())) {
return a;
}
}
return value;
}
async _processResponse(res, options) {
return new Promise(async (resolve, reject) => {
const statusCode = res.message.statusCode;
const response = {
statusCode: statusCode,
result: null,
headers: {}
};
// not found leads to null obj returned
if (statusCode == HttpCodes.NotFound) {
resolve(response);
}
let obj;
let contents;
// get the result from the body
try {
contents = await res.readBody();
if (contents && contents.length > 0) {
if (options && options.deserializeDates) {
obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
}
else {
obj = JSON.parse(contents);
}
response.result = obj;
}
response.headers = res.message.headers;
}
catch (err) {
// Invalid resource (contents not json); leaving result obj null
}
// note that 3xx redirects are handled by the http layer.
if (statusCode > 299) {
let msg;
// if exception/error in body, attempt to get better error
if (obj && obj.message) {
msg = obj.message;
}
else if (contents && contents.length > 0) {
// it may be the case that the exception is in the body message as string
msg = contents;
}
else {
msg = 'Failed request: (' + statusCode + ')';
}
let err = new HttpClientError(msg, statusCode);
err.result = response.result;
reject(err);
}
else {
resolve(response);
}
});
}
}
exports.HttpClient = HttpClient;

View File

@ -1,49 +0,0 @@
/// <reference types="node" />
import http = require('http');
export interface IHeaders {
[key: string]: any;
}
export interface IHttpClient {
options(requestUrl: string, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
get(requestUrl: string, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
del(requestUrl: string, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
post(requestUrl: string, data: string, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
patch(requestUrl: string, data: string, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
put(requestUrl: string, data: string, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream, headers: IHeaders): Promise<IHttpClientResponse>;
requestRaw(info: IRequestInfo, data: string | NodeJS.ReadableStream): Promise<IHttpClientResponse>;
requestRawWithCallback(info: IRequestInfo, data: string | NodeJS.ReadableStream, onResult: (err: any, res: IHttpClientResponse) => void): void;
}
export interface IRequestHandler {
prepareRequest(options: http.RequestOptions): void;
canHandleAuthentication(response: IHttpClientResponse): boolean;
handleAuthentication(httpClient: IHttpClient, requestInfo: IRequestInfo, objs: any): Promise<IHttpClientResponse>;
}
export interface IHttpClientResponse {
message: http.IncomingMessage;
readBody(): Promise<string>;
}
export interface IRequestInfo {
options: http.RequestOptions;
parsedUrl: URL;
httpModule: any;
}
export interface IRequestOptions {
headers?: IHeaders;
socketTimeout?: number;
ignoreSslError?: boolean;
allowRedirects?: boolean;
allowRedirectDowngrade?: boolean;
maxRedirects?: number;
maxSockets?: number;
keepAlive?: boolean;
deserializeDates?: boolean;
allowRetries?: boolean;
maxRetries?: number;
}
export interface ITypedResponse<T> {
statusCode: number;
result: T | null;
headers: Object;
}
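
The removed `IRequestOptions` interface above (carried forward as `RequestOptions` in the new typings) is what the third `HttpClient` constructor argument accepts. A hedged sketch with arbitrary example values:

```typescript
import {HttpClient} from '@actions/http-client'

const client = new HttpClient('helm-action', [], {
  allowRetries: true, // retries only idempotent verbs (OPTIONS/GET/DELETE/HEAD) on 502/503/504
  maxRetries: 3,
  keepAlive: true, // reuse sockets across requests
  socketTimeout: 30 * 1000 // milliseconds; falls back to 3 minutes when unset
})

// ...issue requests...
client.dispose() // required once finished because keepAlive is true
```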

26
node_modules/@actions/http-client/lib/auth.d.ts generated vendored Normal file
View File

@ -0,0 +1,26 @@
/// <reference types="node" />
import * as http from 'http';
import * as ifm from './interfaces';
import { HttpClientResponse } from './index';
export declare class BasicCredentialHandler implements ifm.RequestHandler {
username: string;
password: string;
constructor(username: string, password: string);
prepareRequest(options: http.RequestOptions): void;
canHandleAuthentication(): boolean;
handleAuthentication(): Promise<HttpClientResponse>;
}
export declare class BearerCredentialHandler implements ifm.RequestHandler {
token: string;
constructor(token: string);
prepareRequest(options: http.RequestOptions): void;
canHandleAuthentication(): boolean;
handleAuthentication(): Promise<HttpClientResponse>;
}
export declare class PersonalAccessTokenCredentialHandler implements ifm.RequestHandler {
token: string;
constructor(token: string);
prepareRequest(options: http.RequestOptions): void;
canHandleAuthentication(): boolean;
handleAuthentication(): Promise<HttpClientResponse>;
}
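
The handlers declared above are passed to the `HttpClient` constructor, which calls `prepareRequest` on every outgoing request. An illustrative snippet (the token source and the `lib/auth` deep-import path are assumptions based on the 2.x layout shown in this diff):

```typescript
import {HttpClient} from '@actions/http-client'
import {BearerCredentialHandler} from '@actions/http-client/lib/auth'

// Token source is up to the caller; GITHUB_TOKEN here is just an example.
const token = process.env.GITHUB_TOKEN || ''
const client = new HttpClient('helm-action', [new BearerCredentialHandler(token)])
// Each request now carries an `Authorization: Bearer <token>` header.
```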

81
node_modules/@actions/http-client/lib/auth.js generated vendored Normal file
View File

@ -0,0 +1,81 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;
class BasicCredentialHandler {
constructor(username, password) {
this.username = username;
this.password = password;
}
prepareRequest(options) {
if (!options.headers) {
throw Error('The request has no headers');
}
options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;
}
// This handler cannot handle 401
canHandleAuthentication() {
return false;
}
handleAuthentication() {
return __awaiter(this, void 0, void 0, function* () {
throw new Error('not implemented');
});
}
}
exports.BasicCredentialHandler = BasicCredentialHandler;
class BearerCredentialHandler {
constructor(token) {
this.token = token;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest(options) {
if (!options.headers) {
throw Error('The request has no headers');
}
options.headers['Authorization'] = `Bearer ${this.token}`;
}
// This handler cannot handle 401
canHandleAuthentication() {
return false;
}
handleAuthentication() {
return __awaiter(this, void 0, void 0, function* () {
throw new Error('not implemented');
});
}
}
exports.BearerCredentialHandler = BearerCredentialHandler;
class PersonalAccessTokenCredentialHandler {
constructor(token) {
this.token = token;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest(options) {
if (!options.headers) {
throw Error('The request has no headers');
}
options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;
}
// This handler cannot handle 401
canHandleAuthentication() {
return false;
}
handleAuthentication() {
return __awaiter(this, void 0, void 0, function* () {
throw new Error('not implemented');
});
}
}
exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
//# sourceMappingURL=auth.js.map

1
node_modules/@actions/http-client/lib/auth.js.map generated vendored Normal file
View File

@ -0,0 +1 @@
{"version":3,"file":"auth.js","sourceRoot":"","sources":["../src/auth.ts"],"names":[],"mappings":";;;;;;;;;;;;AAIA,MAAa,sBAAsB;IAIjC,YAAY,QAAgB,EAAE,QAAgB;QAC5C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;IAC1B,CAAC;IAED,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,EAAE,CACpC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA1BD,wDA0BC;AAED,MAAa,uBAAuB;IAGlC,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,IAAI,CAAC,KAAK,EAAE,CAAA;IAC3D,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AAxBD,0DAwBC;AAED,MAAa,oCAAoC;IAI/C,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,OAAO,IAAI,CAAC,KAAK,EAAE,CACpB,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA3BD,oFA2BC"}

View File

@ -1,6 +1,6 @@
/// <reference types="node" /> /// <reference types="node" />
import http = require('http'); import * as http from 'http';
import ifm = require('./interfaces'); import * as ifm from './interfaces';
export declare enum HttpCodes { export declare enum HttpCodes {
OK = 200, OK = 200,
MultipleChoices = 300, MultipleChoices = 300,
@ -47,7 +47,7 @@ export declare class HttpClientError extends Error {
statusCode: number; statusCode: number;
result?: any; result?: any;
} }
export declare class HttpClientResponse implements ifm.IHttpClientResponse { export declare class HttpClientResponse {
constructor(message: http.IncomingMessage); constructor(message: http.IncomingMessage);
message: http.IncomingMessage; message: http.IncomingMessage;
readBody(): Promise<string>; readBody(): Promise<string>;
@ -55,8 +55,8 @@ export declare class HttpClientResponse implements ifm.IHttpClientResponse {
export declare function isHttps(requestUrl: string): boolean; export declare function isHttps(requestUrl: string): boolean;
export declare class HttpClient { export declare class HttpClient {
userAgent: string | undefined; userAgent: string | undefined;
handlers: ifm.IRequestHandler[]; handlers: ifm.RequestHandler[];
requestOptions: ifm.IRequestOptions; requestOptions: ifm.RequestOptions | undefined;
private _ignoreSslError; private _ignoreSslError;
private _socketTimeout; private _socketTimeout;
private _allowRedirects; private _allowRedirects;
@ -68,29 +68,29 @@ export declare class HttpClient {
private _proxyAgent; private _proxyAgent;
private _keepAlive; private _keepAlive;
private _disposed; private _disposed;
constructor(userAgent?: string, handlers?: ifm.IRequestHandler[], requestOptions?: ifm.IRequestOptions); constructor(userAgent?: string, handlers?: ifm.RequestHandler[], requestOptions?: ifm.RequestOptions);
options(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>; options(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
get(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>; get(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
del(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>; del(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
post(requestUrl: string, data: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>; post(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
patch(requestUrl: string, data: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>; patch(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
put(requestUrl: string, data: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>; put(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
head(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>; head(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>; sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
/** /**
* Gets a typed object from an endpoint * Gets a typed object from an endpoint
* Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
*/ */
getJson<T>(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>; getJson<T>(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>;
postJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>; postJson<T>(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>;
putJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>; putJson<T>(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>;
patchJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>; patchJson<T>(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>;
/** /**
* Makes a raw http request. * Makes a raw http request.
* All other methods such as get, post, patch, and request ultimately call this. * All other methods such as get, post, patch, and request ultimately call this.
* Prefer get, del, post and patch * Prefer get, del, post and patch
*/ */
request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream, headers: ifm.IHeaders): Promise<ifm.IHttpClientResponse>; request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream | null, headers?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
/** /**
* Needs to be called if keepAlive is set to true in request options. * Needs to be called if keepAlive is set to true in request options.
*/ */
@ -100,14 +100,14 @@ export declare class HttpClient {
* @param info * @param info
* @param data * @param data
*/ */
requestRaw(info: ifm.IRequestInfo, data: string | NodeJS.ReadableStream): Promise<ifm.IHttpClientResponse>; requestRaw(info: ifm.RequestInfo, data: string | NodeJS.ReadableStream | null): Promise<HttpClientResponse>;
/** /**
* Raw request with callback. * Raw request with callback.
* @param info * @param info
* @param data * @param data
* @param onResult * @param onResult
*/ */
requestRawWithCallback(info: ifm.IRequestInfo, data: string | NodeJS.ReadableStream, onResult: (err: any, res: ifm.IHttpClientResponse) => void): void; requestRawWithCallback(info: ifm.RequestInfo, data: string | NodeJS.ReadableStream | null, onResult: (err?: Error, res?: HttpClientResponse) => void): void;
/** /**
* Gets an http agent. This function is useful when you need an http agent that handles * Gets an http agent. This function is useful when you need an http agent that handles
* routing through a proxy server - depending upon the url and proxy environment variables. * routing through a proxy server - depending upon the url and proxy environment variables.
@ -119,6 +119,5 @@ export declare class HttpClient {
private _getExistingOrDefaultHeader; private _getExistingOrDefaultHeader;
private _getAgent; private _getAgent;
private _performExponentialBackoff; private _performExponentialBackoff;
private static dateTimeDeserializer;
private _processResponse; private _processResponse;
} }
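
The retyped `getJson<T>` above now takes `http.OutgoingHttpHeaders` and resolves to `ifm.TypedResponse<T>`. A sketch under assumed types (the `Release` shape and endpoint are illustrative only):

```typescript
import {HttpClient} from '@actions/http-client'

interface Release {
  tag_name: string
}

async function latestHelmTag(): Promise<string | null> {
  const client = new HttpClient('helm-action')
  const res = await client.getJson<Release>(
    'https://api.github.com/repos/helm/helm/releases/latest'
  )
  // A 404 resolves with result === null; other 4xx/5xx reject with HttpClientError.
  return res.result ? res.result.tag_name : null
}
```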

605
node_modules/@actions/http-client/lib/index.js generated vendored Normal file
View File

@ -0,0 +1,605 @@
"use strict";
/* eslint-disable @typescript-eslint/no-explicit-any */
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
const http = __importStar(require("http"));
const https = __importStar(require("https"));
const pm = __importStar(require("./proxy"));
const tunnel = __importStar(require("tunnel"));
var HttpCodes;
(function (HttpCodes) {
HttpCodes[HttpCodes["OK"] = 200] = "OK";
HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
var Headers;
(function (Headers) {
Headers["Accept"] = "accept";
Headers["ContentType"] = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
var MediaTypes;
(function (MediaTypes) {
MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
/**
* Returns the proxy URL, depending upon the supplied url and proxy environment variables.
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
*/
function getProxyUrl(serverUrl) {
const proxyUrl = pm.getProxyUrl(new URL(serverUrl));
return proxyUrl ? proxyUrl.href : '';
}
exports.getProxyUrl = getProxyUrl;
const HttpRedirectCodes = [
HttpCodes.MovedPermanently,
HttpCodes.ResourceMoved,
HttpCodes.SeeOther,
HttpCodes.TemporaryRedirect,
HttpCodes.PermanentRedirect
];
const HttpResponseRetryCodes = [
HttpCodes.BadGateway,
HttpCodes.ServiceUnavailable,
HttpCodes.GatewayTimeout
];
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
class HttpClientError extends Error {
constructor(message, statusCode) {
super(message);
this.name = 'HttpClientError';
this.statusCode = statusCode;
Object.setPrototypeOf(this, HttpClientError.prototype);
}
}
exports.HttpClientError = HttpClientError;
class HttpClientResponse {
constructor(message) {
this.message = message;
}
readBody() {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
let output = Buffer.alloc(0);
this.message.on('data', (chunk) => {
output = Buffer.concat([output, chunk]);
});
this.message.on('end', () => {
resolve(output.toString());
});
}));
});
}
}
exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) {
const parsedUrl = new URL(requestUrl);
return parsedUrl.protocol === 'https:';
}
exports.isHttps = isHttps;
class HttpClient {
constructor(userAgent, handlers, requestOptions) {
this._ignoreSslError = false;
this._allowRedirects = true;
this._allowRedirectDowngrade = false;
this._maxRedirects = 50;
this._allowRetries = false;
this._maxRetries = 1;
this._keepAlive = false;
this._disposed = false;
this.userAgent = userAgent;
this.handlers = handlers || [];
this.requestOptions = requestOptions;
if (requestOptions) {
if (requestOptions.ignoreSslError != null) {
this._ignoreSslError = requestOptions.ignoreSslError;
}
this._socketTimeout = requestOptions.socketTimeout;
if (requestOptions.allowRedirects != null) {
this._allowRedirects = requestOptions.allowRedirects;
}
if (requestOptions.allowRedirectDowngrade != null) {
this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
}
if (requestOptions.maxRedirects != null) {
this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
}
if (requestOptions.keepAlive != null) {
this._keepAlive = requestOptions.keepAlive;
}
if (requestOptions.allowRetries != null) {
this._allowRetries = requestOptions.allowRetries;
}
if (requestOptions.maxRetries != null) {
this._maxRetries = requestOptions.maxRetries;
}
}
}
options(requestUrl, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
});
}
get(requestUrl, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('GET', requestUrl, null, additionalHeaders || {});
});
}
del(requestUrl, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('DELETE', requestUrl, null, additionalHeaders || {});
});
}
post(requestUrl, data, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('POST', requestUrl, data, additionalHeaders || {});
});
}
patch(requestUrl, data, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('PATCH', requestUrl, data, additionalHeaders || {});
});
}
put(requestUrl, data, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('PUT', requestUrl, data, additionalHeaders || {});
});
}
head(requestUrl, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request('HEAD', requestUrl, null, additionalHeaders || {});
});
}
sendStream(verb, requestUrl, stream, additionalHeaders) {
return __awaiter(this, void 0, void 0, function* () {
return this.request(verb, requestUrl, stream, additionalHeaders);
});
}
/**
* Gets a typed object from an endpoint
* Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
*/
getJson(requestUrl, additionalHeaders = {}) {
return __awaiter(this, void 0, void 0, function* () {
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
const res = yield this.get(requestUrl, additionalHeaders);
return this._processResponse(res, this.requestOptions);
});
}
postJson(requestUrl, obj, additionalHeaders = {}) {
return __awaiter(this, void 0, void 0, function* () {
const data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
const res = yield this.post(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
});
}
putJson(requestUrl, obj, additionalHeaders = {}) {
return __awaiter(this, void 0, void 0, function* () {
const data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
const res = yield this.put(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
});
}
patchJson(requestUrl, obj, additionalHeaders = {}) {
return __awaiter(this, void 0, void 0, function* () {
const data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
const res = yield this.patch(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
});
}
/**
* Makes a raw http request.
* All other methods such as get, post, patch, and request ultimately call this.
* Prefer get, del, post and patch
*/
request(verb, requestUrl, data, headers) {
return __awaiter(this, void 0, void 0, function* () {
if (this._disposed) {
throw new Error('Client has already been disposed.');
}
const parsedUrl = new URL(requestUrl);
let info = this._prepareRequest(verb, parsedUrl, headers);
// Only perform retries on reads since writes may not be idempotent.
const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)
? this._maxRetries + 1
: 1;
let numTries = 0;
let response;
do {
response = yield this.requestRaw(info, data);
// Check if it's an authentication challenge
if (response &&
response.message &&
response.message.statusCode === HttpCodes.Unauthorized) {
let authenticationHandler;
for (const handler of this.handlers) {
if (handler.canHandleAuthentication(response)) {
authenticationHandler = handler;
break;
}
}
if (authenticationHandler) {
return authenticationHandler.handleAuthentication(this, info, data);
}
else {
// We have received an unauthorized response but have no handlers to handle it.
// Let the response return to the caller.
return response;
}
}
let redirectsRemaining = this._maxRedirects;
while (response.message.statusCode &&
HttpRedirectCodes.includes(response.message.statusCode) &&
this._allowRedirects &&
redirectsRemaining > 0) {
const redirectUrl = response.message.headers['location'];
if (!redirectUrl) {
// if there's no location to redirect to, we won't
break;
}
const parsedRedirectUrl = new URL(redirectUrl);
if (parsedUrl.protocol === 'https:' &&
parsedUrl.protocol !== parsedRedirectUrl.protocol &&
!this._allowRedirectDowngrade) {
throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
}
// we need to finish reading the response before reassigning it;
// otherwise the open socket would be leaked.
yield response.readBody();
// strip authorization header if redirected to a different hostname
if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
for (const header in headers) {
// header names are case insensitive
if (header.toLowerCase() === 'authorization') {
delete headers[header];
}
}
}
// let's make the request with the new redirectUrl
info = this._prepareRequest(verb, parsedRedirectUrl, headers);
response = yield this.requestRaw(info, data);
redirectsRemaining--;
}
if (!response.message.statusCode ||
!HttpResponseRetryCodes.includes(response.message.statusCode)) {
// If not a retry code, return immediately instead of retrying
return response;
}
numTries += 1;
if (numTries < maxTries) {
yield response.readBody();
yield this._performExponentialBackoff(numTries);
}
} while (numTries < maxTries);
return response;
});
}
/**
* Needs to be called if keepAlive is set to true in request options.
*/
dispose() {
if (this._agent) {
this._agent.destroy();
}
this._disposed = true;
}
/**
* Raw request.
* @param info
* @param data
*/
requestRaw(info, data) {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve, reject) => {
function callbackForResult(err, res) {
if (err) {
reject(err);
}
else if (!res) {
// If `err` is not passed, then `res` must be passed.
reject(new Error('Unknown error'));
}
else {
resolve(res);
}
}
this.requestRawWithCallback(info, data, callbackForResult);
});
});
}
/**
* Raw request with callback.
* @param info
* @param data
* @param onResult
*/
requestRawWithCallback(info, data, onResult) {
if (typeof data === 'string') {
if (!info.options.headers) {
info.options.headers = {};
}
info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
}
let callbackCalled = false;
function handleResult(err, res) {
if (!callbackCalled) {
callbackCalled = true;
onResult(err, res);
}
}
const req = info.httpModule.request(info.options, (msg) => {
const res = new HttpClientResponse(msg);
handleResult(undefined, res);
});
let socket;
req.on('socket', sock => {
socket = sock;
});
// If we ever get disconnected, we want the socket to timeout eventually
req.setTimeout(this._socketTimeout || 3 * 60000, () => {
if (socket) {
socket.end();
}
handleResult(new Error(`Request timeout: ${info.options.path}`));
});
req.on('error', function (err) {
// err has statusCode property
// res should have headers
handleResult(err);
});
if (data && typeof data === 'string') {
req.write(data, 'utf8');
}
if (data && typeof data !== 'string') {
data.on('close', function () {
req.end();
});
data.pipe(req);
}
else {
req.end();
}
}
/**
* Gets an http agent. This function is useful when you need an http agent that handles
* routing through a proxy server - depending upon the url and proxy environment variables.
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
*/
getAgent(serverUrl) {
const parsedUrl = new URL(serverUrl);
return this._getAgent(parsedUrl);
}
_prepareRequest(method, requestUrl, headers) {
const info = {};
info.parsedUrl = requestUrl;
const usingSsl = info.parsedUrl.protocol === 'https:';
info.httpModule = usingSsl ? https : http;
const defaultPort = usingSsl ? 443 : 80;
info.options = {};
info.options.host = info.parsedUrl.hostname;
info.options.port = info.parsedUrl.port
? parseInt(info.parsedUrl.port)
: defaultPort;
info.options.path =
(info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
info.options.method = method;
info.options.headers = this._mergeHeaders(headers);
if (this.userAgent != null) {
info.options.headers['user-agent'] = this.userAgent;
}
info.options.agent = this._getAgent(info.parsedUrl);
// gives handlers an opportunity to participate
if (this.handlers) {
for (const handler of this.handlers) {
handler.prepareRequest(info.options);
}
}
return info;
}
_mergeHeaders(headers) {
if (this.requestOptions && this.requestOptions.headers) {
return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));
}
return lowercaseKeys(headers || {});
}
_getExistingOrDefaultHeader(additionalHeaders, header, _default) {
let clientHeader;
if (this.requestOptions && this.requestOptions.headers) {
clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
}
return additionalHeaders[header] || clientHeader || _default;
}
_getAgent(parsedUrl) {
let agent;
const proxyUrl = pm.getProxyUrl(parsedUrl);
const useProxy = proxyUrl && proxyUrl.hostname;
if (this._keepAlive && useProxy) {
agent = this._proxyAgent;
}
if (this._keepAlive && !useProxy) {
agent = this._agent;
}
// if agent is already assigned use that agent.
if (agent) {
return agent;
}
const usingSsl = parsedUrl.protocol === 'https:';
let maxSockets = 100;
if (this.requestOptions) {
maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
}
// This is `useProxy` again, but we need to check `proxyUrl` directly for TypeScript's flow analysis.
if (proxyUrl && proxyUrl.hostname) {
const agentOptions = {
maxSockets,
keepAlive: this._keepAlive,
proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {
proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
})), { host: proxyUrl.hostname, port: proxyUrl.port })
};
let tunnelAgent;
const overHttps = proxyUrl.protocol === 'https:';
if (usingSsl) {
tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
}
else {
tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
}
agent = tunnelAgent(agentOptions);
this._proxyAgent = agent;
}
// if reusing agent across request and tunneling agent isn't assigned create a new agent
if (this._keepAlive && !agent) {
const options = { keepAlive: this._keepAlive, maxSockets };
agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
this._agent = agent;
}
// if not using private agent and tunnel agent isn't setup then use global agent
if (!agent) {
agent = usingSsl ? https.globalAgent : http.globalAgent;
}
if (usingSsl && this._ignoreSslError) {
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect requests for the entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
// we have to cast it to any and change it directly
agent.options = Object.assign(agent.options || {}, {
rejectUnauthorized: false
});
}
return agent;
}
_performExponentialBackoff(retryNumber) {
return __awaiter(this, void 0, void 0, function* () {
retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
return new Promise(resolve => setTimeout(() => resolve(), ms));
});
}
_processResponse(res, options) {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
const statusCode = res.message.statusCode || 0;
const response = {
statusCode,
result: null,
headers: {}
};
// not found leads to null obj returned
if (statusCode === HttpCodes.NotFound) {
resolve(response);
}
// get the result from the body
function dateTimeDeserializer(key, value) {
if (typeof value === 'string') {
const a = new Date(value);
if (!isNaN(a.valueOf())) {
return a;
}
}
return value;
}
let obj;
let contents;
try {
contents = yield res.readBody();
if (contents && contents.length > 0) {
if (options && options.deserializeDates) {
obj = JSON.parse(contents, dateTimeDeserializer);
}
else {
obj = JSON.parse(contents);
}
response.result = obj;
}
response.headers = res.message.headers;
}
catch (err) {
// Invalid resource (contents not json); leaving result obj null
}
// note that 3xx redirects are handled by the http layer.
if (statusCode > 299) {
let msg;
// if exception/error in body, attempt to get better error
if (obj && obj.message) {
msg = obj.message;
}
else if (contents && contents.length > 0) {
// it may be the case that the exception is in the body message as string
msg = contents;
}
else {
msg = `Failed request: (${statusCode})`;
}
const err = new HttpClientError(msg, statusCode);
err.result = response.result;
reject(err);
}
else {
resolve(response);
}
}));
});
}
}
exports.HttpClient = HttpClient;
const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
//# sourceMappingURL=index.js.map
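The `HttpClient` class exported above provides retries, redirect handling, proxy-aware agents, and typed JSON helpers. A minimal usage sketch follows; it assumes the published constructor signature `new HttpClient(userAgent, handlers, requestOptions)`, and the user-agent string and GitHub URL are purely illustrative.

```js
const { HttpClient } = require('@actions/http-client');

async function main() {
  // allowRetries/maxRetries only apply to idempotent verbs (see RetryableHttpVerbs above);
  // keepAlive reuses the underlying agent until dispose() is called.
  const client = new HttpClient('example-user-agent', [], {
    allowRetries: true,
    maxRetries: 3,
    keepAlive: true
  });
  try {
    // getJson resolves to { statusCode, result, headers }; a 404 resolves with result: null,
    // while other 4xx/5xx responses reject with an HttpClientError.
    const res = await client.getJson('https://api.github.com/repos/helm/helm/releases/latest');
    console.log(res.statusCode, res.result && res.result.tag_name);
  } finally {
    // Required when keepAlive is true (see dispose() above).
    client.dispose();
  }
}

main();
```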

1
node_modules/@actions/http-client/lib/index.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

44
node_modules/@actions/http-client/lib/interfaces.d.ts generated vendored Normal file
View File

@ -0,0 +1,44 @@
/// <reference types="node" />
import * as http from 'http';
import * as https from 'https';
import { HttpClientResponse } from './index';
export interface HttpClient {
options(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
get(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
del(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
post(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
patch(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
put(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream, headers: http.OutgoingHttpHeaders): Promise<HttpClientResponse>;
requestRaw(info: RequestInfo, data: string | NodeJS.ReadableStream): Promise<HttpClientResponse>;
requestRawWithCallback(info: RequestInfo, data: string | NodeJS.ReadableStream, onResult: (err?: Error, res?: HttpClientResponse) => void): void;
}
export interface RequestHandler {
prepareRequest(options: http.RequestOptions): void;
canHandleAuthentication(response: HttpClientResponse): boolean;
handleAuthentication(httpClient: HttpClient, requestInfo: RequestInfo, data: string | NodeJS.ReadableStream | null): Promise<HttpClientResponse>;
}
export interface RequestInfo {
options: http.RequestOptions;
parsedUrl: URL;
httpModule: typeof http | typeof https;
}
export interface RequestOptions {
headers?: http.OutgoingHttpHeaders;
socketTimeout?: number;
ignoreSslError?: boolean;
allowRedirects?: boolean;
allowRedirectDowngrade?: boolean;
maxRedirects?: number;
maxSockets?: number;
keepAlive?: boolean;
deserializeDates?: boolean;
allowRetries?: boolean;
maxRetries?: number;
}
export interface TypedResponse<T> {
statusCode: number;
result: T | null;
headers: http.IncomingHttpHeaders;
}
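The `RequestHandler` interface above is what `HttpClient.request()` consults when a response comes back 401. Below is a minimal sketch of a hypothetical token handler; the class name and behaviour are illustrative and not one of the handlers shipped with the package.

```js
// Hypothetical handler implementing the RequestHandler interface declared above.
class TokenHandler {
  constructor(token) {
    this.token = token;
  }
  // Called from _prepareRequest() for every outgoing request.
  prepareRequest(options) {
    options.headers = options.headers || {};
    options.headers['authorization'] = `Bearer ${this.token}`;
  }
  // Returning false means request() simply hands a 401 response back to the caller.
  canHandleAuthentication() {
    return false;
  }
  // Only invoked when canHandleAuthentication() returns true for a 401 response.
  async handleAuthentication() {
    throw new Error('Challenge handling not implemented in this sketch.');
  }
}

// const client = new HttpClient('example-user-agent', [new TokenHandler(process.env.GITHUB_TOKEN)]);
```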

3
node_modules/@actions/http-client/lib/interfaces.js generated vendored Normal file
View File

@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=interfaces.js.map

View File

@ -0,0 +1 @@
{"version":3,"file":"interfaces.js","sourceRoot":"","sources":["../src/interfaces.ts"],"names":[],"mappings":""}

View File

@ -1,29 +1,32 @@
"use strict"; "use strict";
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
exports.checkBypass = exports.getProxyUrl = void 0;
function getProxyUrl(reqUrl) { function getProxyUrl(reqUrl) {
let usingSsl = reqUrl.protocol === 'https:'; const usingSsl = reqUrl.protocol === 'https:';
let proxyUrl;
if (checkBypass(reqUrl)) { if (checkBypass(reqUrl)) {
return proxyUrl; return undefined;
} }
let proxyVar; const proxyVar = (() => {
if (usingSsl) { if (usingSsl) {
proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; return process.env['https_proxy'] || process.env['HTTPS_PROXY'];
}
else {
return process.env['http_proxy'] || process.env['HTTP_PROXY'];
}
})();
if (proxyVar) {
return new URL(proxyVar);
} }
else { else {
proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; return undefined;
} }
if (proxyVar) {
proxyUrl = new URL(proxyVar);
}
return proxyUrl;
} }
exports.getProxyUrl = getProxyUrl; exports.getProxyUrl = getProxyUrl;
function checkBypass(reqUrl) { function checkBypass(reqUrl) {
if (!reqUrl.hostname) { if (!reqUrl.hostname) {
return false; return false;
} }
let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
if (!noProxy) { if (!noProxy) {
return false; return false;
} }
@ -39,12 +42,12 @@ function checkBypass(reqUrl) {
reqPort = 443; reqPort = 443;
} }
// Format the request hostname and hostname with port // Format the request hostname and hostname with port
let upperReqHosts = [reqUrl.hostname.toUpperCase()]; const upperReqHosts = [reqUrl.hostname.toUpperCase()];
if (typeof reqPort === 'number') { if (typeof reqPort === 'number') {
upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
} }
// Compare request host against noproxy // Compare request host against noproxy
for (let upperNoProxyItem of noProxy for (const upperNoProxyItem of noProxy
.split(',') .split(',')
.map(x => x.trim().toUpperCase()) .map(x => x.trim().toUpperCase())
.filter(x => x)) { .filter(x => x)) {
@ -55,3 +58,4 @@ function checkBypass(reqUrl) {
return false; return false;
} }
exports.checkBypass = checkBypass; exports.checkBypass = checkBypass;
//# sourceMappingURL=proxy.js.map
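A short sketch of how the two helpers above resolve proxies. It assumes the deep `@actions/http-client/lib/proxy` require path works as published, and the environment values are purely illustrative.

```js
const { getProxyUrl, checkBypass } = require('@actions/http-client/lib/proxy');

process.env['https_proxy'] = 'http://127.0.0.1:8888';
process.env['no_proxy'] = 'example.com';

// Bypassed: NO_PROXY entries are compared (case-insensitively) against the
// request hostname and hostname:port, so no proxy URL is returned.
console.log(checkBypass(new URL('https://example.com/path'))); // true
console.log(getProxyUrl(new URL('https://example.com/path'))); // undefined

// Not bypassed: the https_proxy/HTTPS_PROXY variable is parsed into a URL.
console.log(getProxyUrl(new URL('https://api.github.com')));   // URL for http://127.0.0.1:8888
```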

1
node_modules/@actions/http-client/lib/proxy.js.map generated vendored Normal file
View File

@ -0,0 +1 @@
{"version":3,"file":"proxy.js","sourceRoot":"","sources":["../src/proxy.ts"],"names":[],"mappings":";;;AAAA,SAAgB,WAAW,CAAC,MAAW;IACrC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,KAAK,QAAQ,CAAA;IAE7C,IAAI,WAAW,CAAC,MAAM,CAAC,EAAE;QACvB,OAAO,SAAS,CAAA;KACjB;IAED,MAAM,QAAQ,GAAG,CAAC,GAAG,EAAE;QACrB,IAAI,QAAQ,EAAE;YACZ,OAAO,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAA;SAChE;aAAM;YACL,OAAO,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAA;SAC9D;IACH,CAAC,CAAC,EAAE,CAAA;IAEJ,IAAI,QAAQ,EAAE;QACZ,OAAO,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAA;KACzB;SAAM;QACL,OAAO,SAAS,CAAA;KACjB;AACH,CAAC;AApBD,kCAoBC;AAED,SAAgB,WAAW,CAAC,MAAW;IACrC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;QACpB,OAAO,KAAK,CAAA;KACb;IAED,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,EAAE,CAAA;IACxE,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,KAAK,CAAA;KACb;IAED,6BAA6B;IAC7B,IAAI,OAA2B,CAAA;IAC/B,IAAI,MAAM,CAAC,IAAI,EAAE;QACf,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;KAC9B;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,OAAO,EAAE;QACtC,OAAO,GAAG,EAAE,CAAA;KACb;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QACvC,OAAO,GAAG,GAAG,CAAA;KACd;IAED,qDAAqD;IACrD,MAAM,aAAa,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAA;IACrD,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC/B,aAAa,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,OAAO,EAAE,CAAC,CAAA;KACrD;IAED,uCAAuC;IACvC,KAAK,MAAM,gBAAgB,IAAI,OAAO;SACnC,KAAK,CAAC,GAAG,CAAC;SACV,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;SAChC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;QACjB,IAAI,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,gBAAgB,CAAC,EAAE;YACnD,OAAO,IAAI,CAAA;SACZ;KACF;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AArCD,kCAqCC"}

View File

@ -1,39 +1,48 @@
{
  "name": "@actions/http-client",
-  "version": "1.0.11",
+  "version": "2.0.1",
  "description": "Actions Http Client",
-  "main": "index.js",
-  "scripts": {
-    "build": "rm -Rf ./_out && tsc && cp package*.json ./_out && cp *.md ./_out && cp LICENSE ./_out && cp actions.png ./_out",
-    "test": "jest",
-    "format": "prettier --write *.ts && prettier --write **/*.ts",
-    "format-check": "prettier --check *.ts && prettier --check **/*.ts",
-    "audit-check": "npm audit --audit-level=moderate"
+  "keywords": [
+    "github",
+    "actions",
+    "http"
+  ],
+  "homepage": "https://github.com/actions/toolkit/tree/main/packages/http-client",
+  "license": "MIT",
+  "main": "lib/index.js",
+  "types": "lib/index.d.ts",
+  "directories": {
+    "lib": "lib",
+    "test": "__tests__"
+  },
+  "files": [
+    "lib",
+    "!.DS_Store"
+  ],
+  "publishConfig": {
+    "access": "public"
  },
  "repository": {
    "type": "git",
-    "url": "git+https://github.com/actions/http-client.git"
+    "url": "git+https://github.com/actions/toolkit.git",
+    "directory": "packages/http-client"
+  },
+  "scripts": {
+    "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json",
+    "test": "echo \"Error: run tests from root\" && exit 1",
+    "build": "tsc",
+    "format": "prettier --write **/*.ts",
+    "format-check": "prettier --check **/*.ts",
+    "tsc": "tsc"
  },
-  "keywords": [
-    "Actions",
-    "Http"
-  ],
-  "author": "GitHub, Inc.",
-  "license": "MIT",
  "bugs": {
-    "url": "https://github.com/actions/http-client/issues"
+    "url": "https://github.com/actions/toolkit/issues"
  },
-  "homepage": "https://github.com/actions/http-client#readme",
  "devDependencies": {
-    "@types/jest": "^25.1.4",
-    "@types/node": "^12.12.31",
-    "jest": "^25.1.0",
-    "prettier": "^2.0.4",
-    "proxy": "^1.0.1",
-    "ts-jest": "^25.2.1",
-    "typescript": "^3.8.3"
+    "@types/tunnel": "0.0.3",
+    "proxy": "^1.0.1"
  },
  "dependencies": {
-    "tunnel": "0.0.6"
+    "tunnel": "^0.0.6"
  }
}

View File

@ -1,6 +1,6 @@
{
  "name": "@actions/io",
-  "version": "1.1.1",
+  "version": "1.1.2",
  "description": "Actions io lib",
  "keywords": [
    "github",

202
node_modules/@ampproject/remapping/LICENSE generated vendored Normal file
View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2019 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

218
node_modules/@ampproject/remapping/README.md generated vendored Normal file
View File

@ -0,0 +1,218 @@
# @ampproject/remapping
> Remap sequential sourcemaps through transformations to point at the original source code
Remapping allows you to take the sourcemaps generated through transforming your code and "remap"
them to the original source locations. Think "my minified code, transformed with babel and bundled
with webpack", all pointing to the correct location in your original source code.
With remapping, none of your source code transformations need to be aware of the input's sourcemap,
they only need to generate an output sourcemap. This greatly simplifies building custom
transformations (think a find-and-replace).
## Installation
```sh
npm install @ampproject/remapping
```
## Usage
```typescript
function remapping(
map: SourceMap | SourceMap[],
loader: (file: string, ctx: LoaderContext) => (SourceMap | null | undefined),
options?: { excludeContent: boolean, decodedMappings: boolean }
): SourceMap;
// LoaderContext gives the loader the importing sourcemap, tree depth, the ability to override the
// "source" location (where child sources are resolved relative to, or the location of original
// source), and the ability to override the "content" of an original source for inclusion in the
// output sourcemap.
type LoaderContext = {
readonly importer: string;
readonly depth: number;
source: string;
content: string | null | undefined;
}
```
`remapping` takes the final output sourcemap, and a `loader` function. For every source file pointer
in the sourcemap, the `loader` will be called with the resolved path. If the path itself represents
a transformed file (it has a sourcemap associated with it), then the `loader` should return that
sourcemap. If not, the path will be treated as original, untransformed source code.
```js
// Babel transformed "helloworld.js" into "transformed.js"
const transformedMap = JSON.stringify({
file: 'transformed.js',
// 1st column of 2nd line of output file translates into the 1st source
// file, line 3, column 2
mappings: ';CAEE',
sources: ['helloworld.js'],
version: 3,
});
// Uglify minified "transformed.js" into "transformed.min.js"
const minifiedTransformedMap = JSON.stringify({
file: 'transformed.min.js',
// 0th column of 1st line of output file translates into the 1st source
// file, line 2, column 1.
mappings: 'AACC',
names: [],
sources: ['transformed.js'],
version: 3,
});
const remapped = remapping(
minifiedTransformedMap,
(file, ctx) => {
// The "transformed.js" file is an transformed file.
if (file === 'transformed.js') {
// The root importer is empty.
console.assert(ctx.importer === '');
// The depth in the sourcemap tree we're currently loading.
// The root `minifiedTransformedMap` is depth 0, and its source children are depth 1, etc.
console.assert(ctx.depth === 1);
return transformedMap;
}
// Loader will be called to load transformedMap's source file pointers as well.
console.assert(file === 'helloworld.js');
// `transformed.js`'s sourcemap points into `helloworld.js`.
console.assert(ctx.importer === 'transformed.js');
// This is a source child of `transformed`, which is a source child of `minifiedTransformedMap`.
console.assert(ctx.depth === 2);
return null;
}
);
console.log(remapped);
// {
// file: 'transpiled.min.js',
// mappings: 'AAEE',
// sources: ['helloworld.js'],
// version: 3,
// };
```
In this example, `loader` will be called twice:
1. `"transformed.js"`, the first source file pointer in the `minifiedTransformedMap`. We return the
associated sourcemap for it (it's a transformed file, after all) so that sourcemap locations can
be traced through it into the source files it represents.
2. `"helloworld.js"`, our original, unmodified source code. This file does not have a sourcemap, so
we return `null`.
The `remapped` sourcemap now points from `transformed.min.js` into locations in `helloworld.js`. If
you were to read the `mappings`, it says "0th column of the 1st output line points to the 1st
column of the 2nd line of the file `helloworld.js`".
### Multiple transformations of a file
As a convenience, if you have multiple single-source transformations of a file, you may pass an
array of sourcemap files in the order of most-recent transformation sourcemap first. Note that this
changes the `importer` and `depth` of each call to our loader. So our above example could have been
written as:
```js
const remapped = remapping(
[minifiedTransformedMap, transformedMap],
() => null
);
console.log(remapped);
// {
// file: 'transpiled.min.js',
// mappings: 'AAEE',
// sources: ['helloworld.js'],
// version: 3,
// };
```
### Advanced control of the loading graph
#### `source`
The `source` property can be overridden to any value to change the location of the current load. Eg,
for an original source file, it allows us to change the location to the original source regardless
of what the sourcemap source entry says. And for transformed files, it allows us to change the
relative resolving location for child sources of the loaded sourcemap.
```js
const remapped = remapping(
minifiedTransformedMap,
(file, ctx) => {
if (file === 'transformed.js') {
// We pretend the transformed.js file actually exists in the 'src/' directory. When the nested
// source files are loaded, they will now be relative to `src/`.
ctx.source = 'src/transformed.js';
return transformedMap;
}
console.assert(file === 'src/helloworld.js');
// We could further change the source of this original file, eg, to be inside a nested directory
// itself. This will be reflected in the remapped sourcemap.
ctx.source = 'src/nested/transformed.js';
return null;
}
);
console.log(remapped);
// {
// …,
// sources: ['src/nested/helloworld.js'],
// };
```
#### `content`
The `content` property can be overridden when we encounter an original source file. Eg, this allows
you to manually provide the source content of the original file regardless of whether the
`sourcesContent` field is present in the parent sourcemap. It can also be set to `null` to remove
the source content.
```js
const remapped = remapping(
minifiedTransformedMap,
(file, ctx) => {
if (file === 'transformed.js') {
// transformedMap does not include a `sourcesContent` field, so usually the remapped sourcemap
// would not include any `sourcesContent` values.
return transformedMap;
}
console.assert(file === 'helloworld.js');
// We can read the file to provide the source content.
ctx.content = fs.readFileSync(file, 'utf8');
return null;
}
);
console.log(remapped);
// {
// …,
// sourcesContent: [
// 'console.log("Hello world!")',
// ],
// };
```
### Options
#### excludeContent
By default, `excludeContent` is `false`. Passing `{ excludeContent: true }` will exclude the
`sourcesContent` field from the returned sourcemap. This is mainly useful when you want to reduce
the size of the sourcemap.
#### decodedMappings
By default, `decodedMappings` is `false`. Passing `{ decodedMappings: true }` will leave the
`mappings` field in a [decoded state](https://github.com/rich-harris/sourcemap-codec) instead of
encoding into a VLQ string.
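For example, both options can be combined in a single call (a minimal sketch reusing `minifiedTransformedMap` from the earlier examples):

```js
const remapped = remapping(
  minifiedTransformedMap,
  () => null, // treat every source as an original file, as in the earlier examples
  { excludeContent: true, decodedMappings: true }
);

console.log(Array.isArray(remapped.mappings)); // true: decoded segments, not a VLQ string
console.log('sourcesContent' in remapped);     // false: source content was excluded
```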

204
node_modules/@ampproject/remapping/dist/remapping.mjs generated vendored Normal file
View File

@ -0,0 +1,204 @@
import { decodedMappings, traceSegment, TraceMap } from '@jridgewell/trace-mapping';
import { GenMapping, addSegment, setSourceContent, decodedMap, encodedMap } from '@jridgewell/gen-mapping';
const SOURCELESS_MAPPING = {
source: null,
column: null,
line: null,
name: null,
content: null,
};
const EMPTY_SOURCES = [];
function Source(map, sources, source, content) {
return {
map,
sources,
source,
content,
};
}
/**
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
* (which may themselves be SourceMapTrees).
*/
function MapSource(map, sources) {
return Source(map, sources, '', null);
}
/**
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
* segment tracing ends at the `OriginalSource`.
*/
function OriginalSource(source, content) {
return Source(null, EMPTY_SOURCES, source, content);
}
/**
* traceMappings is only called on the root level SourceMapTree, and begins the process of
* resolving each mapping in terms of the original source files.
*/
function traceMappings(tree) {
const gen = new GenMapping({ file: tree.map.file });
const { sources: rootSources, map } = tree;
const rootNames = map.names;
const rootMappings = decodedMappings(map);
for (let i = 0; i < rootMappings.length; i++) {
const segments = rootMappings[i];
let lastSource = null;
let lastSourceLine = null;
let lastSourceColumn = null;
for (let j = 0; j < segments.length; j++) {
const segment = segments[j];
const genCol = segment[0];
let traced = SOURCELESS_MAPPING;
// 1-length segments only move the current generated column, there's no source information
// to gather from it.
if (segment.length !== 1) {
const source = rootSources[segment[1]];
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
// respective segment into an original source.
if (traced == null)
continue;
}
// So we traced a segment down into its original source file. Now push a
// new segment pointing to this location.
const { column, line, name, content, source } = traced;
if (line === lastSourceLine && column === lastSourceColumn && source === lastSource) {
continue;
}
lastSourceLine = line;
lastSourceColumn = column;
lastSource = source;
// Sigh, TypeScript can't figure out source/line/column are either all null, or all non-null...
addSegment(gen, i, genCol, source, line, column, name);
if (content != null)
setSourceContent(gen, source, content);
}
}
return gen;
}
/**
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
* child SourceMapTrees, until we find the original source map.
*/
function originalPositionFor(source, line, column, name) {
if (!source.map) {
return { column, line, name, source: source.source, content: source.content };
}
const segment = traceSegment(source.map, line, column);
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
if (segment == null)
return null;
// 1-length segments only move the current generated column, there's no source information
// to gather from it.
if (segment.length === 1)
return SOURCELESS_MAPPING;
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
}
function asArray(value) {
if (Array.isArray(value))
return value;
return [value];
}
/**
* Recursively builds a tree structure out of sourcemap files, with each node
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
* `OriginalSource`s and `SourceMapTree`s.
*
* Every sourcemap is composed of a collection of source files and mappings
* into locations of those source files. When we generate a `SourceMapTree` for
* the sourcemap, we attempt to load each source file's own sourcemap. If it
* does not have an associated sourcemap, it is considered an original,
* unmodified source file.
*/
function buildSourceMapTree(input, loader) {
const maps = asArray(input).map((m) => new TraceMap(m, ''));
const map = maps.pop();
for (let i = 0; i < maps.length; i++) {
if (maps[i].sources.length > 1) {
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
'Did you specify these with the most recent transformation maps first?');
}
}
let tree = build(map, loader, '', 0);
for (let i = maps.length - 1; i >= 0; i--) {
tree = MapSource(maps[i], [tree]);
}
return tree;
}
function build(map, loader, importer, importerDepth) {
const { resolvedSources, sourcesContent } = map;
const depth = importerDepth + 1;
const children = resolvedSources.map((sourceFile, i) => {
// The loading context gives the loader more information about why this file is being loaded
// (eg, from which importer). It also allows the loader to override the location of the loaded
// sourcemap/original source, or to override the content in the sourcesContent field if it's
// an unmodified source file.
const ctx = {
importer,
depth,
source: sourceFile || '',
content: undefined,
};
// Use the provided loader callback to retrieve the file's sourcemap.
// TODO: We should eventually support async loading of sourcemap files.
const sourceMap = loader(ctx.source, ctx);
const { source, content } = ctx;
// If there is a sourcemap, then we need to recurse into it to load its source files.
if (sourceMap)
return build(new TraceMap(sourceMap, source), loader, source, depth);
// Else, it's an unmodified source file.
// The contents of this unmodified source file can be overridden via the loader context,
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
// the importing sourcemap's `sourcesContent` field.
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
return OriginalSource(source, sourceContent);
});
return MapSource(map, children);
}
/**
* A SourceMap v3 compatible sourcemap, which only includes fields that were
* provided to it.
*/
class SourceMap {
constructor(map, options) {
const out = options.decodedMappings ? decodedMap(map) : encodedMap(map);
this.version = out.version; // SourceMap spec says this should be first.
this.file = out.file;
this.mappings = out.mappings;
this.names = out.names;
this.sourceRoot = out.sourceRoot;
this.sources = out.sources;
if (!options.excludeContent) {
this.sourcesContent = out.sourcesContent;
}
}
toString() {
return JSON.stringify(this);
}
}
/**
* Traces through all the mappings in the root sourcemap, through the sources
* (and their sourcemaps), all the way back to the original source location.
*
* `loader` will be called every time we encounter a source file. If it returns
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
* it returns a falsey value, that source file is treated as an original,
* unmodified source file.
*
* Pass `excludeContent` to exclude any self-containing source file content
* from the output sourcemap.
*
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
* VLQ encoded) mappings.
*/
function remapping(input, loader, options) {
const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
const tree = buildSourceMapTree(input, loader);
return new SourceMap(traceMappings(tree), opts);
}
export { remapping as default };
//# sourceMappingURL=remapping.mjs.map

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,209 @@
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('@jridgewell/trace-mapping'), require('@jridgewell/gen-mapping')) :
typeof define === 'function' && define.amd ? define(['@jridgewell/trace-mapping', '@jridgewell/gen-mapping'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.remapping = factory(global.traceMapping, global.genMapping));
})(this, (function (traceMapping, genMapping) { 'use strict';
const SOURCELESS_MAPPING = {
source: null,
column: null,
line: null,
name: null,
content: null,
};
const EMPTY_SOURCES = [];
function Source(map, sources, source, content) {
return {
map,
sources,
source,
content,
};
}
/**
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
* (which may themselves be SourceMapTrees).
*/
function MapSource(map, sources) {
return Source(map, sources, '', null);
}
/**
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
* segment tracing ends at the `OriginalSource`.
*/
function OriginalSource(source, content) {
return Source(null, EMPTY_SOURCES, source, content);
}
/**
* traceMappings is only called on the root level SourceMapTree, and begins the process of
* resolving each mapping in terms of the original source files.
*/
function traceMappings(tree) {
const gen = new genMapping.GenMapping({ file: tree.map.file });
const { sources: rootSources, map } = tree;
const rootNames = map.names;
const rootMappings = traceMapping.decodedMappings(map);
for (let i = 0; i < rootMappings.length; i++) {
const segments = rootMappings[i];
let lastSource = null;
let lastSourceLine = null;
let lastSourceColumn = null;
for (let j = 0; j < segments.length; j++) {
const segment = segments[j];
const genCol = segment[0];
let traced = SOURCELESS_MAPPING;
// 1-length segments only move the current generated column, there's no source information
// to gather from it.
if (segment.length !== 1) {
const source = rootSources[segment[1]];
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
// respective segment into an original source.
if (traced == null)
continue;
}
// So we traced a segment down into its original source file. Now push a
// new segment pointing to this location.
const { column, line, name, content, source } = traced;
if (line === lastSourceLine && column === lastSourceColumn && source === lastSource) {
continue;
}
lastSourceLine = line;
lastSourceColumn = column;
lastSource = source;
// Sigh, TypeScript can't figure out source/line/column are either all null, or all non-null...
genMapping.addSegment(gen, i, genCol, source, line, column, name);
if (content != null)
genMapping.setSourceContent(gen, source, content);
}
}
return gen;
}
/**
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
* child SourceMapTrees, until we find the original source map.
*/
function originalPositionFor(source, line, column, name) {
if (!source.map) {
return { column, line, name, source: source.source, content: source.content };
}
const segment = traceMapping.traceSegment(source.map, line, column);
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
if (segment == null)
return null;
// 1-length segments only move the current generated column, there's no source information
// to gather from it.
if (segment.length === 1)
return SOURCELESS_MAPPING;
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
}
function asArray(value) {
if (Array.isArray(value))
return value;
return [value];
}
/**
* Recursively builds a tree structure out of sourcemap files, with each node
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
* `OriginalSource`s and `SourceMapTree`s.
*
* Every sourcemap is composed of a collection of source files and mappings
* into locations of those source files. When we generate a `SourceMapTree` for
* the sourcemap, we attempt to load each source file's own sourcemap. If it
* does not have an associated sourcemap, it is considered an original,
* unmodified source file.
*/
function buildSourceMapTree(input, loader) {
const maps = asArray(input).map((m) => new traceMapping.TraceMap(m, ''));
const map = maps.pop();
for (let i = 0; i < maps.length; i++) {
if (maps[i].sources.length > 1) {
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
'Did you specify these with the most recent transformation maps first?');
}
}
let tree = build(map, loader, '', 0);
for (let i = maps.length - 1; i >= 0; i--) {
tree = MapSource(maps[i], [tree]);
}
return tree;
}
function build(map, loader, importer, importerDepth) {
const { resolvedSources, sourcesContent } = map;
const depth = importerDepth + 1;
const children = resolvedSources.map((sourceFile, i) => {
// The loading context gives the loader more information about why this file is being loaded
// (eg, from which importer). It also allows the loader to override the location of the loaded
// sourcemap/original source, or to override the content in the sourcesContent field if it's
// an unmodified source file.
const ctx = {
importer,
depth,
source: sourceFile || '',
content: undefined,
};
// Use the provided loader callback to retrieve the file's sourcemap.
// TODO: We should eventually support async loading of sourcemap files.
const sourceMap = loader(ctx.source, ctx);
const { source, content } = ctx;
// If there is a sourcemap, then we need to recurse into it to load its source files.
if (sourceMap)
return build(new traceMapping.TraceMap(sourceMap, source), loader, source, depth);
// Else, it's an unmodified source file.
// The contents of this unmodified source file can be overridden via the loader context,
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
// the importing sourcemap's `sourcesContent` field.
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
return OriginalSource(source, sourceContent);
});
return MapSource(map, children);
}
/**
* A SourceMap v3 compatible sourcemap, which only includes fields that were
* provided to it.
*/
class SourceMap {
constructor(map, options) {
const out = options.decodedMappings ? genMapping.decodedMap(map) : genMapping.encodedMap(map);
this.version = out.version; // SourceMap spec says this should be first.
this.file = out.file;
this.mappings = out.mappings;
this.names = out.names;
this.sourceRoot = out.sourceRoot;
this.sources = out.sources;
if (!options.excludeContent) {
this.sourcesContent = out.sourcesContent;
}
}
toString() {
return JSON.stringify(this);
}
}
/**
* Traces through all the mappings in the root sourcemap, through the sources
* (and their sourcemaps), all the way back to the original source location.
*
* `loader` will be called every time we encounter a source file. If it returns
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
* it returns a falsey value, that source file is treated as an original,
* unmodified source file.
*
* Pass `excludeContent` to exclude any self-containing source file content
* from the output sourcemap.
*
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
* VLQ encoded) mappings.
*/
function remapping(input, loader, options) {
const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
const tree = buildSourceMapTree(input, loader);
return new SourceMap(traceMappings(tree), opts);
}
return remapping;
}));
//# sourceMappingURL=remapping.umd.js.map

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,14 @@
import type { MapSource as MapSourceType } from './source-map-tree';
import type { SourceMapInput, SourceMapLoader } from './types';
/**
* Recursively builds a tree structure out of sourcemap files, with each node
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
* `OriginalSource`s and `SourceMapTree`s.
*
* Every sourcemap is composed of a collection of source files and mappings
* into locations of those source files. When we generate a `SourceMapTree` for
* the sourcemap, we attempt to load each source file's own sourcemap. If it
* does not have an associated sourcemap, it is considered an original,
* unmodified source file.
*/
export default function buildSourceMapTree(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader): MapSourceType;

View File

@ -0,0 +1,19 @@
import SourceMap from './source-map';
import type { SourceMapInput, SourceMapLoader, Options } from './types';
export type { SourceMapSegment, EncodedSourceMap, EncodedSourceMap as RawSourceMap, DecodedSourceMap, SourceMapInput, SourceMapLoader, LoaderContext, Options, } from './types';
/**
* Traces through all the mappings in the root sourcemap, through the sources
* (and their sourcemaps), all the way back to the original source location.
*
* `loader` will be called every time we encounter a source file. If it returns
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
* it returns a falsey value, that source file is treated as an original,
* unmodified source file.
*
* Pass `excludeContent` to exclude any self-containing source file content
* from the output sourcemap.
*
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
* VLQ encoded) mappings.
*/
export default function remapping(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader, options?: boolean | Options): SourceMap;

View File

@ -0,0 +1,48 @@
import { GenMapping } from '@jridgewell/gen-mapping';
import type { TraceMap } from '@jridgewell/trace-mapping';
export declare type SourceMapSegmentObject = {
column: number;
line: number;
name: string;
source: string;
content: string | null;
} | {
column: null;
line: null;
name: null;
source: null;
content: null;
};
export declare type OriginalSource = {
map: TraceMap;
sources: Sources[];
source: string;
content: string | null;
};
export declare type MapSource = {
map: TraceMap;
sources: Sources[];
source: string;
content: string | null;
};
export declare type Sources = OriginalSource | MapSource;
/**
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
* (which may themselves be SourceMapTrees).
*/
export declare function MapSource(map: TraceMap, sources: Sources[]): MapSource;
/**
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
* segment tracing ends at the `OriginalSource`.
*/
export declare function OriginalSource(source: string, content: string | null): OriginalSource;
/**
* traceMappings is only called on the root level SourceMapTree, and begins the process of
* resolving each mapping in terms of the original source files.
*/
export declare function traceMappings(tree: MapSource): GenMapping;
/**
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
* child SourceMapTrees, until we find the original source map.
*/
export declare function originalPositionFor(source: Sources, line: number, column: number, name: string): SourceMapSegmentObject | null;

View File

@ -0,0 +1,17 @@
import type { GenMapping } from '@jridgewell/gen-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Options } from './types';
/**
* A SourceMap v3 compatible sourcemap, which only includes fields that were
* provided to it.
*/
export default class SourceMap {
file?: string | null;
mappings: EncodedSourceMap['mappings'] | DecodedSourceMap['mappings'];
sourceRoot?: string;
names: string[];
sources: (string | null)[];
sourcesContent?: (string | null)[];
version: 3;
constructor(map: GenMapping, options: Options);
toString(): string;
}

View File

@ -0,0 +1,14 @@
import type { SourceMapInput } from '@jridgewell/trace-mapping';
export type { SourceMapSegment, DecodedSourceMap, EncodedSourceMap, } from '@jridgewell/trace-mapping';
export type { SourceMapInput };
export declare type LoaderContext = {
readonly importer: string;
readonly depth: number;
source: string;
content: string | null | undefined;
};
export declare type SourceMapLoader = (file: string, ctx: LoaderContext) => SourceMapInput | null | undefined | void;
export declare type Options = {
excludeContent?: boolean;
decodedMappings?: boolean;
};

63
node_modules/@ampproject/remapping/package.json generated vendored Normal file
View File

@ -0,0 +1,63 @@
{
"name": "@ampproject/remapping",
"version": "2.2.0",
"description": "Remap sequential sourcemaps through transformations to point at the original source code",
"keywords": [
"source",
"map",
"remap"
],
"main": "dist/remapping.umd.js",
"module": "dist/remapping.mjs",
"typings": "dist/types/remapping.d.ts",
"files": [
"dist"
],
"author": "Justin Ridgewell <jridgewell@google.com>",
"repository": {
"type": "git",
"url": "git+https://github.com/ampproject/remapping.git"
},
"license": "Apache-2.0",
"engines": {
"node": ">=6.0.0"
},
"scripts": {
"build": "run-s -n build:*",
"build:rollup": "rollup -c rollup.config.js",
"build:ts": "tsc --project tsconfig.build.json",
"lint": "run-s -n lint:*",
"lint:prettier": "npm run test:lint:prettier -- --write",
"lint:ts": "npm run test:lint:ts -- --fix",
"prebuild": "rm -rf dist",
"prepublishOnly": "npm run preversion",
"preversion": "run-s test build",
"test": "run-s -n test:lint test:only",
"test:debug": "node --inspect-brk node_modules/.bin/jest --runInBand",
"test:lint": "run-s -n test:lint:*",
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
"test:only": "jest --coverage",
"test:watch": "jest --coverage --watch"
},
"devDependencies": {
"@rollup/plugin-typescript": "8.3.2",
"@types/jest": "27.4.1",
"@typescript-eslint/eslint-plugin": "5.20.0",
"@typescript-eslint/parser": "5.20.0",
"eslint": "8.14.0",
"eslint-config-prettier": "8.5.0",
"jest": "27.5.1",
"jest-config": "27.5.1",
"npm-run-all": "4.1.5",
"prettier": "2.6.2",
"rollup": "2.70.2",
"ts-jest": "27.1.4",
"tslib": "2.4.0",
"typescript": "4.6.3"
},
"dependencies": {
"@jridgewell/gen-mapping": "^0.1.0",
"@jridgewell/trace-mapping": "^0.3.9"
}
}


@ -1,6 +1,6 @@
{ {
"name": "@babel/code-frame", "name": "@babel/code-frame",
"version": "7.16.7", "version": "7.18.6",
"description": "Generate errors that contain a code frame that point to source locations.", "description": "Generate errors that contain a code frame that point to source locations.",
"author": "The Babel Team (https://babel.dev/team)", "author": "The Babel Team (https://babel.dev/team)",
"homepage": "https://babel.dev/docs/en/next/babel-code-frame", "homepage": "https://babel.dev/docs/en/next/babel-code-frame",
@ -16,7 +16,7 @@
}, },
"main": "./lib/index.js", "main": "./lib/index.js",
"dependencies": { "dependencies": {
"@babel/highlight": "^7.16.7" "@babel/highlight": "^7.18.6"
}, },
"devDependencies": { "devDependencies": {
"@types/chalk": "^2.0.0", "@types/chalk": "^2.0.0",
@ -25,5 +25,6 @@
}, },
"engines": { "engines": {
"node": ">=6.9.0" "node": ">=6.9.0"
} },
"type": "commonjs"
} }


@ -17,11 +17,11 @@
"edge": "12", "edge": "12",
"firefox": "2", "firefox": "2",
"safari": "3.1", "safari": "3.1",
"node": "0.10", "node": "0.4",
"ie": "9", "ie": "9",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"
@ -90,11 +90,11 @@
"edge": "12", "edge": "12",
"firefox": "2", "firefox": "2",
"safari": "3.1", "safari": "3.1",
"node": "0.10", "node": "0.4",
"ie": "9", "ie": "9",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"
@ -114,7 +114,7 @@
"chrome": "47", "chrome": "47",
"opera": "34", "opera": "34",
"edge": "14", "edge": "14",
"firefox": "43", "firefox": "102",
"safari": "10", "safari": "10",
"node": "6", "node": "6",
"ios": "10", "ios": "10",
@ -127,11 +127,11 @@
"edge": "12", "edge": "12",
"firefox": "2", "firefox": "2",
"safari": "3.1", "safari": "3.1",
"node": "0.10", "node": "0.4",
"ie": "9", "ie": "9",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"
@ -142,11 +142,11 @@
"edge": "12", "edge": "12",
"firefox": "4", "firefox": "4",
"safari": "4", "safari": "4",
"node": "0.10", "node": "0.4",
"ie": "9", "ie": "9",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"
@ -169,11 +169,11 @@
"edge": "12", "edge": "12",
"firefox": "2", "firefox": "2",
"safari": "3.1", "safari": "3.1",
"node": "0.10", "node": "0.4",
"ie": "9", "ie": "9",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"
@ -207,11 +207,11 @@
"edge": "12", "edge": "12",
"firefox": "3", "firefox": "3",
"safari": "4", "safari": "4",
"node": "0.10", "node": "0.4",
"ie": "9", "ie": "9",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"
@ -222,11 +222,11 @@
"edge": "12", "edge": "12",
"firefox": "3", "firefox": "3",
"safari": "4", "safari": "4",
"node": "0.10", "node": "0.4",
"ie": "9", "ie": "9",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"
@ -248,11 +248,11 @@
"edge": "12", "edge": "12",
"firefox": "2", "firefox": "2",
"safari": "3.1", "safari": "3.1",
"node": "0.10", "node": "0.4",
"ie": "9", "ie": "9",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"
@ -287,11 +287,11 @@
"edge": "12", "edge": "12",
"firefox": "2", "firefox": "2",
"safari": "4", "safari": "4",
"node": "0.10", "node": "0.4",
"ie": "9", "ie": "9",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"
@ -302,11 +302,11 @@
"edge": "12", "edge": "12",
"firefox": "3.5", "firefox": "3.5",
"safari": "4", "safari": "4",
"node": "0.10", "node": "0.4",
"ie": "9", "ie": "9",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"
@ -317,7 +317,7 @@
"edge": "12", "edge": "12",
"firefox": "4", "firefox": "4",
"safari": "10", "safari": "10",
"node": "0.10", "node": "0.4",
"ie": "9", "ie": "9",
"android": "4", "android": "4",
"ios": "10", "ios": "10",
@ -342,11 +342,11 @@
"edge": "12", "edge": "12",
"firefox": "2", "firefox": "2",
"safari": "3.1", "safari": "3.1",
"node": "0.10", "node": "0.4",
"ie": "10", "ie": "10",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"
@ -357,11 +357,11 @@
"edge": "12", "edge": "12",
"firefox": "4", "firefox": "4",
"safari": "5.1", "safari": "5.1",
"node": "0.10", "node": "0.4",
"ie": "9", "ie": "9",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"
@ -383,10 +383,10 @@
"edge": "14", "edge": "14",
"firefox": "2", "firefox": "2",
"safari": "4", "safari": "4",
"node": "0.10", "node": "0.4",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"
@ -628,6 +628,7 @@
"node": "0.12", "node": "0.12",
"ios": "9", "ios": "9",
"samsung": "2", "samsung": "2",
"rhino": "1.7.14",
"electron": "0.20" "electron": "0.20"
}, },
"es6.number.is-finite": { "es6.number.is-finite": {
@ -636,7 +637,7 @@
"edge": "12", "edge": "12",
"firefox": "16", "firefox": "16",
"safari": "9", "safari": "9",
"node": "0.12", "node": "0.8",
"android": "4.1", "android": "4.1",
"ios": "9", "ios": "9",
"samsung": "1.5", "samsung": "1.5",
@ -661,7 +662,7 @@
"edge": "12", "edge": "12",
"firefox": "15", "firefox": "15",
"safari": "9", "safari": "9",
"node": "0.12", "node": "0.8",
"android": "4.1", "android": "4.1",
"ios": "9", "ios": "9",
"samsung": "1.5", "samsung": "1.5",
@ -713,6 +714,7 @@
"node": "0.12", "node": "0.12",
"ios": "9", "ios": "9",
"samsung": "2", "samsung": "2",
"rhino": "1.7.14",
"electron": "0.20" "electron": "0.20"
}, },
"es6.number.parse-int": { "es6.number.parse-int": {
@ -724,6 +726,7 @@
"node": "0.12", "node": "0.12",
"ios": "9", "ios": "9",
"samsung": "2", "samsung": "2",
"rhino": "1.7.14",
"electron": "0.20" "electron": "0.20"
}, },
"es6.object.assign": { "es6.object.assign": {
@ -743,11 +746,11 @@
"edge": "12", "edge": "12",
"firefox": "4", "firefox": "4",
"safari": "4", "safari": "4",
"node": "0.10", "node": "0.4",
"ie": "9", "ie": "9",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"
@ -780,11 +783,11 @@
"edge": "12", "edge": "12",
"firefox": "4", "firefox": "4",
"safari": "5.1", "safari": "5.1",
"node": "0.10", "node": "0.4",
"ie": "9", "ie": "9",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"
@ -795,11 +798,11 @@
"edge": "12", "edge": "12",
"firefox": "4", "firefox": "4",
"safari": "4", "safari": "4",
"node": "0.10", "node": "0.4",
"ie": "9", "ie": "9",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"
@ -813,6 +816,7 @@
"node": "7", "node": "7",
"ios": "10.3", "ios": "10.3",
"samsung": "6", "samsung": "6",
"rhino": "1.7.14",
"electron": "1.4" "electron": "1.4"
}, },
"es6.object.freeze": { "es6.object.freeze": {
@ -925,7 +929,7 @@
"edge": "12", "edge": "12",
"firefox": "22", "firefox": "22",
"safari": "9", "safari": "9",
"node": "0.12", "node": "0.8",
"android": "4.1", "android": "4.1",
"ios": "9", "ios": "9",
"samsung": "1.5", "samsung": "1.5",
@ -1014,6 +1018,7 @@
"node": "7", "node": "7",
"ios": "10.3", "ios": "10.3",
"samsung": "6", "samsung": "6",
"rhino": "1.7.14",
"electron": "1.4" "electron": "1.4"
}, },
"es6.promise": { "es6.promise": {
@ -1299,11 +1304,12 @@
"edge": "12", "edge": "12",
"firefox": "17", "firefox": "17",
"safari": "6", "safari": "6",
"node": "0.10", "node": "0.4",
"android": "4", "android": "4",
"ios": "7", "ios": "7",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.14",
"electron": "0.20" "electron": "0.20"
}, },
"es6.string.big": { "es6.string.big": {
@ -1312,11 +1318,12 @@
"edge": "12", "edge": "12",
"firefox": "17", "firefox": "17",
"safari": "6", "safari": "6",
"node": "0.10", "node": "0.4",
"android": "4", "android": "4",
"ios": "7", "ios": "7",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.14",
"electron": "0.20" "electron": "0.20"
}, },
"es6.string.blink": { "es6.string.blink": {
@ -1325,11 +1332,12 @@
"edge": "12", "edge": "12",
"firefox": "17", "firefox": "17",
"safari": "6", "safari": "6",
"node": "0.10", "node": "0.4",
"android": "4", "android": "4",
"ios": "7", "ios": "7",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.14",
"electron": "0.20" "electron": "0.20"
}, },
"es6.string.bold": { "es6.string.bold": {
@ -1338,11 +1346,12 @@
"edge": "12", "edge": "12",
"firefox": "17", "firefox": "17",
"safari": "6", "safari": "6",
"node": "0.10", "node": "0.4",
"android": "4", "android": "4",
"ios": "7", "ios": "7",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.14",
"electron": "0.20" "electron": "0.20"
}, },
"es6.string.code-point-at": { "es6.string.code-point-at": {
@ -1375,11 +1384,12 @@
"edge": "12", "edge": "12",
"firefox": "17", "firefox": "17",
"safari": "6", "safari": "6",
"node": "0.10", "node": "0.4",
"android": "4", "android": "4",
"ios": "7", "ios": "7",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.14",
"electron": "0.20" "electron": "0.20"
}, },
"es6.string.fontcolor": { "es6.string.fontcolor": {
@ -1388,11 +1398,12 @@
"edge": "12", "edge": "12",
"firefox": "17", "firefox": "17",
"safari": "6", "safari": "6",
"node": "0.10", "node": "0.4",
"android": "4", "android": "4",
"ios": "7", "ios": "7",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.14",
"electron": "0.20" "electron": "0.20"
}, },
"es6.string.fontsize": { "es6.string.fontsize": {
@ -1401,11 +1412,12 @@
"edge": "12", "edge": "12",
"firefox": "17", "firefox": "17",
"safari": "6", "safari": "6",
"node": "0.10", "node": "0.4",
"android": "4", "android": "4",
"ios": "7", "ios": "7",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.14",
"electron": "0.20" "electron": "0.20"
}, },
"es6.string.from-code-point": { "es6.string.from-code-point": {
@ -1438,11 +1450,12 @@
"edge": "12", "edge": "12",
"firefox": "17", "firefox": "17",
"safari": "6", "safari": "6",
"node": "0.10", "node": "0.4",
"android": "4", "android": "4",
"ios": "7", "ios": "7",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.14",
"electron": "0.20" "electron": "0.20"
}, },
"es6.string.iterator": { "es6.string.iterator": {
@ -1463,11 +1476,12 @@
"edge": "12", "edge": "12",
"firefox": "17", "firefox": "17",
"safari": "6", "safari": "6",
"node": "0.10", "node": "0.4",
"android": "4", "android": "4",
"ios": "7", "ios": "7",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.14",
"electron": "0.20" "electron": "0.20"
}, },
"es7.string.pad-start": { "es7.string.pad-start": {
@ -1503,6 +1517,7 @@
"node": "4", "node": "4",
"ios": "9", "ios": "9",
"samsung": "3.4", "samsung": "3.4",
"rhino": "1.7.14",
"electron": "0.21" "electron": "0.21"
}, },
"es6.string.repeat": { "es6.string.repeat": {
@ -1523,11 +1538,12 @@
"edge": "12", "edge": "12",
"firefox": "17", "firefox": "17",
"safari": "6", "safari": "6",
"node": "0.10", "node": "0.4",
"android": "4", "android": "4",
"ios": "7", "ios": "7",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.14",
"electron": "0.20" "electron": "0.20"
}, },
"es6.string.starts-with": { "es6.string.starts-with": {
@ -1548,11 +1564,12 @@
"edge": "12", "edge": "12",
"firefox": "17", "firefox": "17",
"safari": "6", "safari": "6",
"node": "0.10", "node": "0.4",
"android": "4", "android": "4",
"ios": "7", "ios": "7",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.14",
"electron": "0.20" "electron": "0.20"
}, },
"es6.string.sub": { "es6.string.sub": {
@ -1561,11 +1578,12 @@
"edge": "12", "edge": "12",
"firefox": "17", "firefox": "17",
"safari": "6", "safari": "6",
"node": "0.10", "node": "0.4",
"android": "4", "android": "4",
"ios": "7", "ios": "7",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.14",
"electron": "0.20" "electron": "0.20"
}, },
"es6.string.sup": { "es6.string.sup": {
@ -1574,11 +1592,12 @@
"edge": "12", "edge": "12",
"firefox": "17", "firefox": "17",
"safari": "6", "safari": "6",
"node": "0.10", "node": "0.4",
"android": "4", "android": "4",
"ios": "7", "ios": "7",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.14",
"electron": "0.20" "electron": "0.20"
}, },
"es6.string.trim": { "es6.string.trim": {
@ -1587,11 +1606,11 @@
"edge": "12", "edge": "12",
"firefox": "3.5", "firefox": "3.5",
"safari": "4", "safari": "4",
"node": "0.10", "node": "0.4",
"ie": "9", "ie": "9",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"
@ -1637,11 +1656,11 @@
"edge": "12", "edge": "12",
"firefox": "15", "firefox": "15",
"safari": "5.1", "safari": "5.1",
"node": "0.10", "node": "0.4",
"ie": "10", "ie": "10",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"


@ -1,15 +1,4 @@
{ {
"transform-async-to-generator": {
"chrome": "55",
"opera": "42",
"edge": "15",
"firefox": "52",
"safari": "10.1",
"node": "7.6",
"ios": "10.3",
"samsung": "6",
"electron": "1.6"
},
"bugfix/transform-async-arrows-in-class": { "bugfix/transform-async-arrows-in-class": {
"chrome": "55", "chrome": "55",
"opera": "42", "opera": "42",
@ -21,17 +10,6 @@
"samsung": "6", "samsung": "6",
"electron": "1.6" "electron": "1.6"
}, },
"transform-parameters": {
"chrome": "49",
"opera": "36",
"edge": "15",
"firefox": "53",
"safari": "10",
"node": "6",
"ios": "10",
"samsung": "5",
"electron": "0.37"
},
"bugfix/transform-edge-default-parameters": { "bugfix/transform-edge-default-parameters": {
"chrome": "49", "chrome": "49",
"opera": "36", "opera": "36",
@ -43,17 +21,6 @@
"samsung": "5", "samsung": "5",
"electron": "0.37" "electron": "0.37"
}, },
"transform-function-name": {
"chrome": "51",
"opera": "38",
"edge": "14",
"firefox": "53",
"safari": "10",
"node": "6.5",
"ios": "10",
"samsung": "5",
"electron": "1.2"
},
"bugfix/transform-edge-function-name": { "bugfix/transform-edge-function-name": {
"chrome": "51", "chrome": "51",
"opera": "38", "opera": "38",
@ -65,17 +32,6 @@
"samsung": "5", "samsung": "5",
"electron": "1.2" "electron": "1.2"
}, },
"transform-block-scoping": {
"chrome": "49",
"opera": "36",
"edge": "14",
"firefox": "51",
"safari": "10",
"node": "6",
"ios": "10",
"samsung": "5",
"electron": "0.37"
},
"bugfix/transform-safari-block-shadowing": { "bugfix/transform-safari-block-shadowing": {
"chrome": "49", "chrome": "49",
"opera": "36", "opera": "36",
@ -108,20 +64,8 @@
"firefox": "2", "firefox": "2",
"node": "6", "node": "6",
"samsung": "5", "samsung": "5",
"rhino": "1.7.13",
"electron": "0.37" "electron": "0.37"
}, },
"transform-template-literals": {
"chrome": "41",
"opera": "28",
"edge": "13",
"firefox": "34",
"safari": "9",
"node": "4",
"ios": "9",
"samsung": "3.4",
"electron": "0.21"
},
"bugfix/transform-tagged-template-caching": { "bugfix/transform-tagged-template-caching": {
"chrome": "41", "chrome": "41",
"opera": "28", "opera": "28",
@ -131,8 +75,19 @@
"node": "4", "node": "4",
"ios": "13", "ios": "13",
"samsung": "3.4", "samsung": "3.4",
"rhino": "1.7.14",
"electron": "0.21" "electron": "0.21"
}, },
"bugfix/transform-v8-spread-parameters-in-optional-chaining": {
"chrome": "91",
"opera": "77",
"edge": "91",
"firefox": "74",
"safari": "13.1",
"node": "16.9",
"ios": "13.4",
"electron": "13.0"
},
"proposal-optional-chaining": { "proposal-optional-chaining": {
"chrome": "80", "chrome": "80",
"opera": "67", "opera": "67",
@ -144,14 +99,59 @@
"samsung": "13", "samsung": "13",
"electron": "8.0" "electron": "8.0"
}, },
"bugfix/transform-v8-spread-parameters-in-optional-chaining": { "transform-parameters": {
"chrome": "91", "chrome": "49",
"opera": "77", "opera": "36",
"edge": "91", "edge": "15",
"firefox": "74", "firefox": "53",
"safari": "13.1", "safari": "10",
"node": "16.9", "node": "6",
"ios": "13.4", "ios": "10",
"electron": "13.0" "samsung": "5",
"electron": "0.37"
},
"transform-async-to-generator": {
"chrome": "55",
"opera": "42",
"edge": "15",
"firefox": "52",
"safari": "10.1",
"node": "7.6",
"ios": "10.3",
"samsung": "6",
"electron": "1.6"
},
"transform-template-literals": {
"chrome": "41",
"opera": "28",
"edge": "13",
"firefox": "34",
"safari": "9",
"node": "4",
"ios": "9",
"samsung": "3.4",
"electron": "0.21"
},
"transform-function-name": {
"chrome": "51",
"opera": "38",
"edge": "14",
"firefox": "53",
"safari": "10",
"node": "6.5",
"ios": "10",
"samsung": "5",
"electron": "1.2"
},
"transform-block-scoping": {
"chrome": "49",
"opera": "36",
"edge": "14",
"firefox": "51",
"safari": "10",
"node": "6",
"ios": "10",
"samsung": "5",
"electron": "0.37"
} }
} }


@ -4,7 +4,8 @@
"opera": "80", "opera": "80",
"edge": "94", "edge": "94",
"firefox": "93", "firefox": "93",
"node": "16.11" "node": "16.11",
"electron": "15.0"
}, },
"proposal-private-property-in-object": { "proposal-private-property-in-object": {
"chrome": "91", "chrome": "91",
@ -47,6 +48,7 @@
"node": "12.5", "node": "12.5",
"ios": "13", "ios": "13",
"samsung": "11", "samsung": "11",
"rhino": "1.7.14",
"electron": "6.0" "electron": "6.0"
}, },
"proposal-logical-assignment-operators": { "proposal-logical-assignment-operators": {
@ -90,6 +92,7 @@
"node": "10", "node": "10",
"ios": "12", "ios": "12",
"samsung": "9", "samsung": "9",
"rhino": "1.7.14",
"electron": "3.0" "electron": "3.0"
}, },
"proposal-optional-catch-binding": { "proposal-optional-catch-binding": {
@ -187,6 +190,7 @@
"node": "7", "node": "7",
"ios": "10.3", "ios": "10.3",
"samsung": "6", "samsung": "6",
"rhino": "1.7.14",
"electron": "1.3" "electron": "1.3"
}, },
"transform-template-literals": { "transform-template-literals": {
@ -277,6 +281,7 @@
"node": "4", "node": "4",
"ios": "9", "ios": "9",
"samsung": "4", "samsung": "4",
"rhino": "1.7.14",
"electron": "0.27" "electron": "0.27"
}, },
"transform-duplicate-keys": { "transform-duplicate-keys": {
@ -418,11 +423,11 @@
"edge": "12", "edge": "12",
"firefox": "2", "firefox": "2",
"safari": "5.1", "safari": "5.1",
"node": "0.10", "node": "0.4",
"ie": "9", "ie": "9",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"
@ -433,11 +438,11 @@
"edge": "12", "edge": "12",
"firefox": "2", "firefox": "2",
"safari": "5.1", "safari": "5.1",
"node": "0.10", "node": "0.4",
"ie": "9", "ie": "9",
"android": "4", "android": "4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"
@ -448,11 +453,11 @@
"edge": "12", "edge": "12",
"firefox": "2", "firefox": "2",
"safari": "3.1", "safari": "3.1",
"node": "0.10", "node": "0.6",
"ie": "9", "ie": "9",
"android": "4.4", "android": "4.4",
"ios": "6", "ios": "6",
"phantom": "2", "phantom": "1.9",
"samsung": "1", "samsung": "1",
"rhino": "1.7.13", "rhino": "1.7.13",
"electron": "0.20" "electron": "0.20"


@ -1,6 +1,6 @@
{ {
"name": "@babel/compat-data", "name": "@babel/compat-data",
"version": "7.16.8", "version": "7.18.6",
"author": "The Babel Team (https://babel.dev/team)", "author": "The Babel Team (https://babel.dev/team)",
"license": "MIT", "license": "MIT",
"description": "", "description": "",
@ -30,10 +30,11 @@
], ],
"devDependencies": { "devDependencies": {
"@mdn/browser-compat-data": "^4.0.10", "@mdn/browser-compat-data": "^4.0.10",
"core-js-compat": "^3.20.2", "core-js-compat": "^3.22.1",
"electron-to-chromium": "^1.3.893" "electron-to-chromium": "^1.4.113"
}, },
"engines": { "engines": {
"node": ">=6.9.0" "node": ">=6.9.0"
} },
"type": "commonjs"
} }


@ -0,0 +1 @@
0 && 0;


@ -322,4 +322,6 @@ class Lock {
this._resolve(value); this._resolve(value);
} }
} }
0 && 0;


@ -561,4 +561,6 @@ function matchPattern(pattern, dirname, pathToTest, context) {
} }
return pattern.test(pathToTest); return pattern.test(pathToTest);
} }
0 && 0;


@ -241,4 +241,6 @@ function assertNoDuplicates(items) {
nameMap.add(item.name); nameMap.add(item.name);
} }
} }
0 && 0;


@ -355,4 +355,6 @@ module.exports = function(api) {
// Return the value that will be cached. // Return the value that will be cached.
return { }; return { };
};`); };`);
} }
0 && 0;


@ -38,4 +38,6 @@ function _resolve() {
return (yield importMetaResolveP)(specifier, parent); return (yield importMetaResolveP)(specifier, parent);
}); });
return _resolve.apply(this, arguments); return _resolve.apply(this, arguments);
} }
0 && 0;


@ -7,4 +7,6 @@ exports.default = import_;
function import_(filepath) { function import_(filepath) {
return import(filepath); return import(filepath);
} }
0 && 0;


@ -64,4 +64,6 @@ function loadPlugin(name, dirname) {
function loadPreset(name, dirname) { function loadPreset(name, dirname) {
throw new Error(`Cannot load preset ${name} relative to ${dirname} in a browser`); throw new Error(`Cannot load preset ${name} relative to ${dirname} in a browser`);
} }
0 && 0;


@ -83,4 +83,5 @@ exports.resolvePlugin = resolvePlugin;
const resolvePreset = _gensync()(plugins.resolvePreset).sync; const resolvePreset = _gensync()(plugins.resolvePreset).sync;
exports.resolvePreset = resolvePreset; exports.resolvePreset = resolvePreset;
0 && 0;


@ -38,6 +38,16 @@ function _module() {
return data; return data;
} }
function _semver() {
const data = require("semver");
_semver = function () {
return data;
};
return data;
}
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
@ -48,7 +58,8 @@ try {
import_ = require("./import").default; import_ = require("./import").default;
} catch (_unused) {} } catch (_unused) {}
const supportsESM = !!import_; const supportsESM = _semver().satisfies(process.versions.node, "^12.17 || >=13.2");
exports.supportsESM = supportsESM; exports.supportsESM = supportsESM;
function* loadCjsOrMjsDefault(filepath, asyncError, fallbackToTranspiledModule = false) { function* loadCjsOrMjsDefault(filepath, asyncError, fallbackToTranspiledModule = false) {
@ -105,4 +116,6 @@ function _loadMjsDefault() {
return module.default; return module.default;
}); });
return _loadMjsDefault.apply(this, arguments); return _loadMjsDefault.apply(this, arguments);
} }
0 && 0;
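
Read in isolation, the right-hand change above replaces the feature probe with a plain version-range test. A standalone sketch of the same check, assuming the semver package:

import semver from 'semver';

// Equivalent standalone check: ESM-based config loading is only attempted on
// Node versions whose dynamic import() support is considered stable.
const supportsESM = semver.satisfies(process.versions.node, '^12.17 || >=13.2');
console.log(`node ${process.versions.node} supportsESM=${supportsESM}`);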


@ -73,4 +73,5 @@ const readConfigPackage = (0, _utils.makeStaticFileCache)((filepath, content) =>
dirname: _path().dirname(filepath), dirname: _path().dirname(filepath),
options options
}; };
}); });
0 && 0;


@ -187,7 +187,7 @@ function _tryImportMetaResolve() {
return _tryImportMetaResolve.apply(this, arguments); return _tryImportMetaResolve.apply(this, arguments);
} }
function resolveStandardizedNameForRequrie(type, name, dirname) { function resolveStandardizedNameForRequire(type, name, dirname) {
const it = resolveAlternativesHelper(type, name); const it = resolveAlternativesHelper(type, name);
let res = it.next(); let res = it.next();
@ -221,20 +221,20 @@ function _resolveStandardizedNameForImport() {
const resolveStandardizedName = _gensync()({ const resolveStandardizedName = _gensync()({
sync(type, name, dirname = process.cwd()) { sync(type, name, dirname = process.cwd()) {
return resolveStandardizedNameForRequrie(type, name, dirname); return resolveStandardizedNameForRequire(type, name, dirname);
}, },
async(type, name, dirname = process.cwd()) { async(type, name, dirname = process.cwd()) {
return _asyncToGenerator(function* () { return _asyncToGenerator(function* () {
if (!_moduleTypes.supportsESM) { if (!_moduleTypes.supportsESM) {
return resolveStandardizedNameForRequrie(type, name, dirname); return resolveStandardizedNameForRequire(type, name, dirname);
} }
try { try {
return yield resolveStandardizedNameForImport(type, name, dirname); return yield resolveStandardizedNameForImport(type, name, dirname);
} catch (e) { } catch (e) {
try { try {
return resolveStandardizedNameForRequrie(type, name, dirname); return resolveStandardizedNameForRequire(type, name, dirname);
} catch (e2) { } catch (e2) {
if (e.type === "MODULE_NOT_FOUND") throw e; if (e.type === "MODULE_NOT_FOUND") throw e;
if (e2.type === "MODULE_NOT_FOUND") throw e2; if (e2.type === "MODULE_NOT_FOUND") throw e2;
@ -270,4 +270,6 @@ function* requireModule(type, name) {
LOADING_MODULES.delete(name); LOADING_MODULES.delete(name);
} }
} }
} }
0 && 0;


@ -0,0 +1 @@
0 && 0;


@ -41,4 +41,6 @@ function fileMtime(filepath) {
} }
return null; return null;
} }
0 && 0;


@ -27,6 +27,8 @@ var _item = require("./item");
var _configChain = require("./config-chain"); var _configChain = require("./config-chain");
var _deepArray = require("./helpers/deep-array");
function _traverse() { function _traverse() {
const data = require("@babel/traverse"); const data = require("@babel/traverse");
@ -96,6 +98,7 @@ var _default = _gensync()(function* loadFullConfig(inputOpts) {
const initialPluginsDescriptors = plugins.map(toDescriptor); const initialPluginsDescriptors = plugins.map(toDescriptor);
const pluginDescriptorsByPass = [[]]; const pluginDescriptorsByPass = [[]];
const passes = []; const passes = [];
const externalDependencies = [];
const ignored = yield* enhanceError(context, function* recursePresetDescriptors(rawPresets, pluginDescriptorsPass) { const ignored = yield* enhanceError(context, function* recursePresetDescriptors(rawPresets, pluginDescriptorsPass) {
const presets = []; const presets = [];
@ -104,17 +107,7 @@ var _default = _gensync()(function* loadFullConfig(inputOpts) {
if (descriptor.options !== false) { if (descriptor.options !== false) {
try { try {
if (descriptor.ownPass) { var preset = yield* loadPresetDescriptor(descriptor, presetContext);
presets.push({
preset: yield* loadPresetDescriptor(descriptor, presetContext),
pass: []
});
} else {
presets.unshift({
preset: yield* loadPresetDescriptor(descriptor, presetContext),
pass: pluginDescriptorsPass
});
}
} catch (e) { } catch (e) {
if (e.code === "BABEL_UNKNOWN_OPTION") { if (e.code === "BABEL_UNKNOWN_OPTION") {
(0, _options.checkNoUnwrappedItemOptionPairs)(rawPresets, i, "preset", e); (0, _options.checkNoUnwrappedItemOptionPairs)(rawPresets, i, "preset", e);
@ -122,6 +115,20 @@ var _default = _gensync()(function* loadFullConfig(inputOpts) {
throw e; throw e;
} }
externalDependencies.push(preset.externalDependencies);
if (descriptor.ownPass) {
presets.push({
preset: preset.chain,
pass: []
});
} else {
presets.unshift({
preset: preset.chain,
pass: pluginDescriptorsPass
});
}
} }
} }
@ -160,7 +167,7 @@ var _default = _gensync()(function* loadFullConfig(inputOpts) {
if (descriptor.options !== false) { if (descriptor.options !== false) {
try { try {
pass.push(yield* loadPluginDescriptor(descriptor, pluginContext)); var plugin = yield* loadPluginDescriptor(descriptor, pluginContext);
} catch (e) { } catch (e) {
if (e.code === "BABEL_UNKNOWN_PLUGIN_PROPERTY") { if (e.code === "BABEL_UNKNOWN_PLUGIN_PROPERTY") {
(0, _options.checkNoUnwrappedItemOptionPairs)(descs, i, "plugin", e); (0, _options.checkNoUnwrappedItemOptionPairs)(descs, i, "plugin", e);
@ -168,6 +175,9 @@ var _default = _gensync()(function* loadFullConfig(inputOpts) {
throw e; throw e;
} }
pass.push(plugin);
externalDependencies.push(plugin.externalDependencies);
} }
} }
} }
@ -179,7 +189,8 @@ var _default = _gensync()(function* loadFullConfig(inputOpts) {
opts.passPerPreset = opts.presets.length > 0; opts.passPerPreset = opts.presets.length > 0;
return { return {
options: opts, options: opts,
passes: passes passes: passes,
externalDependencies: (0, _deepArray.finalize)(externalDependencies)
}; };
}); });
@ -207,11 +218,12 @@ const makeDescriptorLoader = apiFactory => (0, _caching.makeWeakCache)(function*
}, cache) { }, cache) {
if (options === false) throw new Error("Assertion failure"); if (options === false) throw new Error("Assertion failure");
options = options || {}; options = options || {};
const externalDependencies = [];
let item = value; let item = value;
if (typeof value === "function") { if (typeof value === "function") {
const factory = (0, _async.maybeAsync)(value, `You appear to be using an async plugin/preset, but Babel has been called synchronously`); const factory = (0, _async.maybeAsync)(value, `You appear to be using an async plugin/preset, but Babel has been called synchronously`);
const api = Object.assign({}, context, apiFactory(cache)); const api = Object.assign({}, context, apiFactory(cache, externalDependencies));
try { try {
item = yield* factory(api, options, dirname); item = yield* factory(api, options, dirname);
@ -233,11 +245,25 @@ const makeDescriptorLoader = apiFactory => (0, _caching.makeWeakCache)(function*
throw new Error(`You appear to be using a promise as a plugin, ` + `which your current version of Babel does not support. ` + `If you're using a published plugin, ` + `you may need to upgrade your @babel/core version. ` + `As an alternative, you can prefix the promise with "await". ` + `(While processing: ${JSON.stringify(alias)})`); throw new Error(`You appear to be using a promise as a plugin, ` + `which your current version of Babel does not support. ` + `If you're using a published plugin, ` + `you may need to upgrade your @babel/core version. ` + `As an alternative, you can prefix the promise with "await". ` + `(While processing: ${JSON.stringify(alias)})`);
} }
if (externalDependencies.length > 0 && (!cache.configured() || cache.mode() === "forever")) {
let error = `A plugin/preset has external untracked dependencies ` + `(${externalDependencies[0]}), but the cache `;
if (!cache.configured()) {
error += `has not been configured to be invalidated when the external dependencies change. `;
} else {
error += ` has been configured to never be invalidated. `;
}
error += `Plugins/presets should configure their cache to be invalidated when the external ` + `dependencies change, for example using \`api.cache.invalidate(() => ` + `statSync(filepath).mtimeMs)\` or \`api.cache.never()\`\n` + `(While processing: ${JSON.stringify(alias)})`;
throw new Error(error);
}
return { return {
value: item, value: item,
options, options,
dirname, dirname,
alias alias,
externalDependencies: (0, _deepArray.finalize)(externalDependencies)
}; };
}); });
@ -260,7 +286,8 @@ const instantiatePlugin = (0, _caching.makeWeakCache)(function* ({
value, value,
options, options,
dirname, dirname,
alias alias,
externalDependencies
}, cache) { }, cache) {
const pluginObj = (0, _plugins.validatePluginObject)(value); const pluginObj = (0, _plugins.validatePluginObject)(value);
const plugin = Object.assign({}, pluginObj); const plugin = Object.assign({}, pluginObj);
@ -284,15 +311,23 @@ const instantiatePlugin = (0, _caching.makeWeakCache)(function* ({
plugin.post = chain(inherits.post, plugin.post); plugin.post = chain(inherits.post, plugin.post);
plugin.manipulateOptions = chain(inherits.manipulateOptions, plugin.manipulateOptions); plugin.manipulateOptions = chain(inherits.manipulateOptions, plugin.manipulateOptions);
plugin.visitor = _traverse().default.visitors.merge([inherits.visitor || {}, plugin.visitor || {}]); plugin.visitor = _traverse().default.visitors.merge([inherits.visitor || {}, plugin.visitor || {}]);
if (inherits.externalDependencies.length > 0) {
if (externalDependencies.length === 0) {
externalDependencies = inherits.externalDependencies;
} else {
externalDependencies = (0, _deepArray.finalize)([externalDependencies, inherits.externalDependencies]);
}
}
} }
return new _plugin.default(plugin, options, alias); return new _plugin.default(plugin, options, alias, externalDependencies);
}); });
const validateIfOptionNeedsFilename = (options, descriptor) => { const validateIfOptionNeedsFilename = (options, descriptor) => {
if (options.test || options.include || options.exclude) { if (options.test || options.include || options.exclude) {
const formattedPresetName = descriptor.name ? `"${descriptor.name}"` : "/* your preset */"; const formattedPresetName = descriptor.name ? `"${descriptor.name}"` : "/* your preset */";
throw new Error([`Preset ${formattedPresetName} requires a filename to be set when babel is called directly,`, `\`\`\``, `babel.transform(code, { filename: 'file.ts', presets: [${formattedPresetName}] });`, `\`\`\``, `See https://babeljs.io/docs/en/options#filename for more information.`].join("\n")); throw new Error([`Preset ${formattedPresetName} requires a filename to be set when babel is called directly,`, `\`\`\``, `babel.transformSync(code, { filename: 'file.ts', presets: [${formattedPresetName}] });`, `\`\`\``, `See https://babeljs.io/docs/en/options#filename for more information.`].join("\n"));
} }
}; };
@ -312,18 +347,23 @@ const validatePreset = (preset, context, descriptor) => {
function* loadPresetDescriptor(descriptor, context) { function* loadPresetDescriptor(descriptor, context) {
const preset = instantiatePreset(yield* presetDescriptorLoader(descriptor, context)); const preset = instantiatePreset(yield* presetDescriptorLoader(descriptor, context));
validatePreset(preset, context, descriptor); validatePreset(preset, context, descriptor);
return yield* (0, _configChain.buildPresetChain)(preset, context); return {
chain: yield* (0, _configChain.buildPresetChain)(preset, context),
externalDependencies: preset.externalDependencies
};
} }
const instantiatePreset = (0, _caching.makeWeakCacheSync)(({ const instantiatePreset = (0, _caching.makeWeakCacheSync)(({
value, value,
dirname, dirname,
alias alias,
externalDependencies
}) => { }) => {
return { return {
options: (0, _options.validate)("preset", value), options: (0, _options.validate)("preset", value),
alias, alias,
dirname dirname,
externalDependencies
}; };
}); });
@ -335,4 +375,6 @@ function chain(a, b) {
fn.apply(this, args); fn.apply(this, args);
} }
}; };
} }
0 && 0;


@ -31,8 +31,7 @@ function makeConfigAPI(cache) {
return (0, _caching.assertSimpleType)(value(data.envName)); return (0, _caching.assertSimpleType)(value(data.envName));
} }
if (!Array.isArray(value)) value = [value]; return (Array.isArray(value) ? value : [value]).some(entry => {
return value.some(entry => {
if (typeof entry !== "string") { if (typeof entry !== "string") {
throw new Error("Unexpected non-string value"); throw new Error("Unexpected non-string value");
} }
@ -53,18 +52,23 @@ function makeConfigAPI(cache) {
}; };
} }
function makePresetAPI(cache) { function makePresetAPI(cache, externalDependencies) {
const targets = () => JSON.parse(cache.using(data => JSON.stringify(data.targets))); const targets = () => JSON.parse(cache.using(data => JSON.stringify(data.targets)));
const addExternalDependency = ref => {
externalDependencies.push(ref);
};
return Object.assign({}, makeConfigAPI(cache), { return Object.assign({}, makeConfigAPI(cache), {
targets targets,
addExternalDependency
}); });
} }
function makePluginAPI(cache) { function makePluginAPI(cache, externalDependencies) {
const assumption = name => cache.using(data => data.assumptions[name]); const assumption = name => cache.using(data => data.assumptions[name]);
return Object.assign({}, makePresetAPI(cache), { return Object.assign({}, makePresetAPI(cache, externalDependencies), {
assumption assumption
}); });
} }
@ -100,4 +104,6 @@ function assertVersion(range) {
version: _.version, version: _.version,
range range
}); });
} }
0 && 0;
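
The addExternalDependency hook added here pairs with the cache-invalidation requirement enforced in config/full.js above. A hedged sketch of a plugin that satisfies both; the plugin name, option name, and banner file are invented, and it assumes @babel/traverse's NodePath#addComment:

import { statSync, readFileSync } from 'node:fs';

// Hypothetical plugin: prepends the contents of an external banner file to
// every compiled program. It registers that file as an external dependency and
// keys the config cache on the file's mtime, as the error message above asks.
export default function injectBanner(api: any, options: { bannerPath?: string } = {}) {
  const bannerPath = options.bannerPath ?? './banner.txt';  // invented option name
  api.addExternalDependency(bannerPath);
  api.cache.invalidate(() => statSync(bannerPath).mtimeMs);
  const banner = readFileSync(bannerPath, 'utf8');

  return {
    name: 'inject-banner',
    visitor: {
      Program(path: any) {
        path.addComment('leading', ` ${banner.trim()} `);
      },
    },
  };
}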


@ -0,0 +1,26 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.finalize = finalize;
exports.flattenToSet = flattenToSet;
function finalize(deepArr) {
return Object.freeze(deepArr);
}
function flattenToSet(arr) {
const result = new Set();
const stack = [arr];
while (stack.length > 0) {
for (const el of stack.pop()) {
if (Array.isArray(el)) stack.push(el);else result.add(el);
}
}
return result;
}
0 && 0;
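
For clarity, a re-typed sketch of the new helper's behavior with a small worked call (a standalone re-implementation for illustration, not an import of the internal module):

// Nested dependency arrays collapse into one Set, so duplicates registered by
// different plugins/presets coalesce.
type DeepArray<T> = ReadonlyArray<T | DeepArray<T>>;

function flattenToSet<T>(arr: DeepArray<T>): Set<T> {
  const result = new Set<T>();
  const stack: DeepArray<T>[] = [arr];
  while (stack.length > 0) {
    for (const el of stack.pop()!) {
      if (Array.isArray(el)) stack.push(el as DeepArray<T>);
      else result.add(el as T);
    }
  }
  return result;
}

flattenToSet([['a.txt'], ['b.txt', ['a.txt', 'c.txt']]]);
// => Set { 'a.txt', 'b.txt', 'c.txt' }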


@ -7,4 +7,6 @@ exports.getEnv = getEnv;
function getEnv(defaultValue = "development") { function getEnv(defaultValue = "development") {
return process.env.BABEL_ENV || process.env.NODE_ENV || defaultValue; return process.env.BABEL_ENV || process.env.NODE_ENV || defaultValue;
} }
0 && 0;


@ -72,4 +72,6 @@ function createConfigItem(target, options, callback) {
} else { } else {
return createConfigItemRunner.sync(target, options); return createConfigItemRunner.sync(target, options);
} }
} }
0 && 0;


@ -73,4 +73,5 @@ class ConfigItem {
} }
Object.freeze(ConfigItem.prototype); Object.freeze(ConfigItem.prototype);
0 && 0;


@ -194,4 +194,5 @@ class PartialConfig {
} }
Object.freeze(PartialConfig.prototype); Object.freeze(PartialConfig.prototype);
0 && 0;


@ -41,4 +41,6 @@ function pathToPattern(pattern, dirname) {
return escapeRegExp(part) + (last ? endSep : sep); return escapeRegExp(part) + (last ? endSep : sep);
})].join("")); })].join(""));
} }
0 && 0;


@ -5,8 +5,10 @@ Object.defineProperty(exports, "__esModule", {
}); });
exports.default = void 0; exports.default = void 0;
var _deepArray = require("./helpers/deep-array");
class Plugin { class Plugin {
constructor(plugin, options, key) { constructor(plugin, options, key, externalDependencies = (0, _deepArray.finalize)([])) {
this.key = void 0; this.key = void 0;
this.manipulateOptions = void 0; this.manipulateOptions = void 0;
this.post = void 0; this.post = void 0;
@ -15,6 +17,7 @@ class Plugin {
this.parserOverride = void 0; this.parserOverride = void 0;
this.generatorOverride = void 0; this.generatorOverride = void 0;
this.options = void 0; this.options = void 0;
this.externalDependencies = void 0;
this.key = plugin.name || key; this.key = plugin.name || key;
this.manipulateOptions = plugin.manipulateOptions; this.manipulateOptions = plugin.manipulateOptions;
this.post = plugin.post; this.post = plugin.post;
@ -23,8 +26,10 @@ class Plugin {
this.parserOverride = plugin.parserOverride; this.parserOverride = plugin.parserOverride;
this.generatorOverride = plugin.generatorOverride; this.generatorOverride = plugin.generatorOverride;
this.options = options; this.options = options;
this.externalDependencies = externalDependencies;
} }
} }
exports.default = Plugin; exports.default = Plugin;
0 && 0;


@ -81,7 +81,7 @@ function descriptorToConfig(d) {
if (typeof d.value === "object") { if (typeof d.value === "object") {
name = d.value; name = d.value;
} else if (typeof d.value === "function") { } else if (typeof d.value === "function") {
name = `[Function: ${d.value.toString().substr(0, 50)} ... ]`; name = `[Function: ${d.value.toString().slice(0, 50)} ... ]`;
} }
} }
@ -136,4 +136,5 @@ class ConfigPrinter {
} }
exports.ConfigPrinter = ConfigPrinter; exports.ConfigPrinter = ConfigPrinter;
0 && 0;

View File

@ -21,22 +21,27 @@ function resolveBrowserslistConfigFile(browserslistConfigFile, configFilePath) {
} }
function resolveTargets(options, root) { function resolveTargets(options, root) {
let targets = options.targets; const optTargets = options.targets;
let targets;
if (typeof targets === "string" || Array.isArray(targets)) { if (typeof optTargets === "string" || Array.isArray(optTargets)) {
targets = { targets = {
browsers: targets browsers: optTargets
}; };
} } else if (optTargets) {
if ("esmodules" in optTargets) {
if (targets && targets.esmodules) { targets = Object.assign({}, optTargets, {
targets = Object.assign({}, targets, { esmodules: "intersect"
esmodules: "intersect" });
}); } else {
targets = optTargets;
}
} }
return (0, _helperCompilationTargets().default)(targets, { return (0, _helperCompilationTargets().default)(targets, {
ignoreBrowserslistConfig: true, ignoreBrowserslistConfig: true,
browserslistEnv: options.browserslistEnv browserslistEnv: options.browserslistEnv
}); });
} }
0 && 0;


@ -33,18 +33,21 @@ function resolveBrowserslistConfigFile(browserslistConfigFile, configFileDir) {
} }
function resolveTargets(options, root) { function resolveTargets(options, root) {
let targets = options.targets; const optTargets = options.targets;
let targets;
if (typeof targets === "string" || Array.isArray(targets)) { if (typeof optTargets === "string" || Array.isArray(optTargets)) {
targets = { targets = {
browsers: targets browsers: optTargets
}; };
} } else if (optTargets) {
if ("esmodules" in optTargets) {
if (targets && targets.esmodules) { targets = Object.assign({}, optTargets, {
targets = Object.assign({}, targets, { esmodules: "intersect"
esmodules: "intersect" });
}); } else {
targets = optTargets;
}
} }
const { const {
@ -65,4 +68,6 @@ function resolveTargets(options, root) {
configPath: root, configPath: root,
browserslistEnv: options.browserslistEnv browserslistEnv: options.browserslistEnv
}); });
} }
0 && 0;
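
Both resolve-targets variants now share the same normalization of options.targets. A standalone sketch of that shape-handling (an illustration of the logic, not Babel's exported API):

// string/array targets become a browserslist query, and an explicit `esmodules`
// key is rewritten to the "intersect" mode before the result is handed to
// @babel/helper-compilation-targets.
type InputTargets = string | string[] | { [key: string]: unknown; esmodules?: boolean };

function normalizeTargets(optTargets?: InputTargets) {
  if (typeof optTargets === 'string' || Array.isArray(optTargets)) {
    return { browsers: optTargets };
  }
  if (optTargets && 'esmodules' in optTargets) {
    return { ...optTargets, esmodules: 'intersect' as const };
  }
  return optTargets;
}

normalizeTargets('> 0.5%, not dead');                 // { browsers: '> 0.5%, not dead' }
normalizeTargets({ chrome: '80', esmodules: true });  // { chrome: '80', esmodules: 'intersect' }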

Some files were not shown because too many files have changed in this diff.