Compare commits


3 Commits

27 changed files with 744 additions and 848 deletions

View File

@ -22,12 +22,12 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4.1.6 - uses: actions/checkout@v3
- name: Set Node.js 24.x - name: Set Node.js 20.x
uses: actions/setup-node@v4 uses: actions/setup-node@v4
with: with:
node-version: 24.x node-version: 20.x
- name: Install dependencies - name: Install dependencies
run: npm ci run: npm ci

View File

@ -39,7 +39,7 @@ jobs:
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4.1.6 uses: actions/checkout@v3
- name: Initialize CodeQL - name: Initialize CodeQL
uses: github/codeql-action/init@v3 uses: github/codeql-action/init@v3

View File

@ -9,6 +9,6 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
name: Check licenses name: Check licenses
steps: steps:
- uses: actions/checkout@v4.1.6 - uses: actions/checkout@v3
- run: npm ci - run: npm ci
- run: npm run licensed-check - run: npm run licensed-check

View File

@ -1,20 +0,0 @@
name: 'Publish Immutable Action Version'
on:
release:
types: [published]
jobs:
publish:
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
packages: write
steps:
- name: Checking out
uses: actions/checkout@v4
- name: Publish
id: publish
uses: actions/publish-immutable-action@0.0.3

View File

@ -18,8 +18,8 @@ jobs:
steps: steps:
- uses: actions/setup-node@v4 - uses: actions/setup-node@v4
with: with:
node-version: 24.x node-version: 20.x
- uses: actions/checkout@v4.1.6 - uses: actions/checkout@v3
- run: npm ci - run: npm ci
- run: npm run build - run: npm run build
- run: npm run format-check - run: npm run format-check
@ -37,7 +37,7 @@ jobs:
steps: steps:
# Clone this repo # Clone this repo
- name: Checkout - name: Checkout
uses: actions/checkout@v4.1.6 uses: actions/checkout@v3
# Basic checkout # Basic checkout
- name: Checkout basic - name: Checkout basic
@ -202,7 +202,7 @@ jobs:
steps: steps:
# Clone this repo # Clone this repo
- name: Checkout - name: Checkout
uses: actions/checkout@v4.1.6 uses: actions/checkout@v3
# Basic checkout using git # Basic checkout using git
- name: Checkout basic - name: Checkout basic
@ -234,7 +234,7 @@ jobs:
steps: steps:
# Clone this repo # Clone this repo
- name: Checkout - name: Checkout
uses: actions/checkout@v4.1.6 uses: actions/checkout@v3
# Basic checkout using git # Basic checkout using git
- name: Checkout basic - name: Checkout basic
@ -264,13 +264,13 @@ jobs:
steps: steps:
# Clone this repo # Clone this repo
- name: Checkout - name: Checkout
uses: actions/checkout@v4.1.6 uses: actions/checkout@v3
with: with:
path: localClone path: v3
# Basic checkout using git # Basic checkout using git
- name: Checkout basic - name: Checkout basic
uses: ./localClone uses: ./v3
with: with:
ref: test-data/v2/basic ref: test-data/v2/basic
- name: Verify basic - name: Verify basic
@ -291,41 +291,7 @@ jobs:
git fetch --no-tags --depth=1 origin +refs/heads/main:refs/remotes/origin/main git fetch --no-tags --depth=1 origin +refs/heads/main:refs/remotes/origin/main
# needed to make checkout post cleanup succeed # needed to make checkout post cleanup succeed
- name: Fix Checkout v4 - name: Fix Checkout v3
uses: actions/checkout@v4.1.6 uses: actions/checkout@v3
with: with:
path: localClone path: v3
test-output:
runs-on: ubuntu-latest
steps:
# Clone this repo
- name: Checkout
uses: actions/checkout@v4.1.6
# Basic checkout using git
- name: Checkout basic
id: checkout
uses: ./
with:
ref: test-data/v2/basic
# Verify output
- name: Verify output
run: |
echo "Commit: ${{ steps.checkout.outputs.commit }}"
echo "Ref: ${{ steps.checkout.outputs.ref }}"
if [ "${{ steps.checkout.outputs.ref }}" != "test-data/v2/basic" ]; then
echo "Expected ref to be test-data/v2/basic"
exit 1
fi
if [ "${{ steps.checkout.outputs.commit }}" != "82f71901cf8c021332310dcc8cdba84c4193ff5d" ]; then
echo "Expected commit to be 82f71901cf8c021332310dcc8cdba84c4193ff5d"
exit 1
fi
# needed to make checkout post cleanup succeed
- name: Fix Checkout
uses: actions/checkout@v4.1.6

View File

@ -11,7 +11,6 @@ on:
type: choice type: choice
description: The major version to update description: The major version to update
options: options:
- v5
- v4 - v4
- v3 - v3
- v2 - v2
@ -22,14 +21,14 @@ jobs:
steps: steps:
# Note this update workflow can also be used as a rollback tool. # Note this update workflow can also be used as a rollback tool.
# For that reason, it's best to pin `actions/checkout` to a known, stable version # For that reason, it's best to pin `actions/checkout` to a known, stable version
# (typically, about two releases back). # (typically, the previous major version -- see https://github.com/actions/checkout/pull/1705).
- uses: actions/checkout@v4.1.6 - uses: actions/checkout@v3
with: with:
fetch-depth: 0 fetch-depth: 0
- name: Git config - name: Git config
run: | run: |
git config user.name "github-actions[bot]" git config user.name github-actions
git config user.email "41898282+github-actions[bot]@users.noreply.github.com" git config user.email github-actions@github.com
- name: Tag new target - name: Tag new target
run: git tag -f ${{ github.event.inputs.major_version }} ${{ github.event.inputs.target }} run: git tag -f ${{ github.event.inputs.major_version }} ${{ github.event.inputs.target }}
- name: Push new tag - name: Push new tag

View File

@ -31,7 +31,7 @@ jobs:
# Use `docker/login-action` to log in to GHCR.io. # Use `docker/login-action` to log in to GHCR.io.
# Once published, the packages are scoped to the account defined here. # Once published, the packages are scoped to the account defined here.
- name: Log in to the ghcr.io container registry - name: Log in to the ghcr.io container registry
uses: docker/login-action@v3.3.0 uses: docker/login-action@v3.1.0
with: with:
registry: ${{ env.REGISTRY }} registry: ${{ env.REGISTRY }}
username: ${{ github.actor }} username: ${{ github.actor }}
@ -48,7 +48,7 @@ jobs:
# Use `docker/build-push-action` to build (and optionally publish) the image. # Use `docker/build-push-action` to build (and optionally publish) the image.
- name: Build Docker Image (with optional Push) - name: Build Docker Image (with optional Push)
uses: docker/build-push-action@v6.5.0 uses: docker/build-push-action@v5.3.0
with: with:
context: . context: .
file: images/test-ubuntu-git.Dockerfile file: images/test-ubuntu-git.Dockerfile

View File

@ -1,6 +1,6 @@
--- ---
name: "@octokit/endpoint" name: "@octokit/endpoint"
version: 9.0.6 version: 9.0.5
type: npm type: npm
summary: Turns REST API endpoints into generic request options summary: Turns REST API endpoints into generic request options
homepage: homepage:

View File

@ -1,6 +1,6 @@
--- ---
name: "@octokit/plugin-paginate-rest" name: "@octokit/plugin-paginate-rest"
version: 9.2.2 version: 9.2.1
type: npm type: npm
summary: Octokit plugin to paginate REST API endpoint responses summary: Octokit plugin to paginate REST API endpoint responses
homepage: homepage:

View File

@ -1,6 +1,6 @@
--- ---
name: "@octokit/request-error" name: "@octokit/request-error"
version: 5.1.1 version: 5.1.0
type: npm type: npm
summary: Error class for Octokit request errors summary: Error class for Octokit request errors
homepage: homepage:

View File

@ -1,6 +1,6 @@
--- ---
name: "@octokit/request" name: "@octokit/request"
version: 8.4.1 version: 8.4.0
type: npm type: npm
summary: Send parameterized requests to GitHub's APIs with sensible defaults in browsers summary: Send parameterized requests to GitHub's APIs with sensible defaults in browsers
and Node and Node

View File

@ -1,6 +1,6 @@
--- ---
name: undici name: undici
version: 5.29.0 version: 5.28.4
type: npm type: npm
summary: An HTTP/1.1 client, written from scratch for Node.js summary: An HTTP/1.1 client, written from scratch for Node.js
homepage: https://undici.nodejs.org homepage: https://undici.nodejs.org

View File

@ -1,46 +1,5 @@
# Changelog # Changelog
## V5.0.0
* Update actions checkout to use node 24 by @salmanmkc in https://github.com/actions/checkout/pull/2226
## V4.3.0
* docs: update README.md by @motss in https://github.com/actions/checkout/pull/1971
* Add internal repos for checking out multiple repositories by @mouismail in https://github.com/actions/checkout/pull/1977
* Documentation update - add recommended permissions to Readme by @benwells in https://github.com/actions/checkout/pull/2043
* Adjust positioning of user email note and permissions heading by @joshmgross in https://github.com/actions/checkout/pull/2044
* Update README.md by @nebuk89 in https://github.com/actions/checkout/pull/2194
* Update CODEOWNERS for actions by @TingluoHuang in https://github.com/actions/checkout/pull/2224
* Update package dependencies by @salmanmkc in https://github.com/actions/checkout/pull/2236
## v4.2.2
* `url-helper.ts` now leverages well-known environment variables by @jww3 in https://github.com/actions/checkout/pull/1941
* Expand unit test coverage for `isGhes` by @jww3 in https://github.com/actions/checkout/pull/1946
## v4.2.1
* Check out other refs/* by commit if provided, fall back to ref by @orhantoy in https://github.com/actions/checkout/pull/1924
## v4.2.0
* Add Ref and Commit outputs by @lucacome in https://github.com/actions/checkout/pull/1180
* Dependency updates by @dependabot - https://github.com/actions/checkout/pull/1777, https://github.com/actions/checkout/pull/1872
## v4.1.7
* Bump the minor-npm-dependencies group across 1 directory with 4 updates by @dependabot in https://github.com/actions/checkout/pull/1739
* Bump actions/checkout from 3 to 4 by @dependabot in https://github.com/actions/checkout/pull/1697
* Check out other refs/* by commit by @orhantoy in https://github.com/actions/checkout/pull/1774
* Pin actions/checkout's own workflows to a known, good, stable version. by @jww3 in https://github.com/actions/checkout/pull/1776
## v4.1.6
* Check platform to set archive extension appropriately by @cory-miller in https://github.com/actions/checkout/pull/1732
## v4.1.5
* Update NPM dependencies by @cory-miller in https://github.com/actions/checkout/pull/1703
* Bump github/codeql-action from 2 to 3 by @dependabot in https://github.com/actions/checkout/pull/1694
* Bump actions/setup-node from 1 to 4 by @dependabot in https://github.com/actions/checkout/pull/1696
* Bump actions/upload-artifact from 2 to 4 by @dependabot in https://github.com/actions/checkout/pull/1695
* README: Suggest `user.email` to be `41898282+github-actions[bot]@users.noreply.github.com` by @cory-miller in https://github.com/actions/checkout/pull/1707
## v4.1.4 ## v4.1.4
- Disable `extensions.worktreeConfig` when disabling `sparse-checkout` by @jww3 in https://github.com/actions/checkout/pull/1692 - Disable `extensions.worktreeConfig` when disabling `sparse-checkout` by @jww3 in https://github.com/actions/checkout/pull/1692
- Add dependabot config by @cory-miller in https://github.com/actions/checkout/pull/1688 - Add dependabot config by @cory-miller in https://github.com/actions/checkout/pull/1688

View File

@ -1 +1 @@
* @actions/actions-runtime * @actions/actions-launch

README.md
View File

@ -1,9 +1,5 @@
[![Build and Test](https://github.com/actions/checkout/actions/workflows/test.yml/badge.svg)](https://github.com/actions/checkout/actions/workflows/test.yml) [![Build and Test](https://github.com/actions/checkout/actions/workflows/test.yml/badge.svg)](https://github.com/actions/checkout/actions/workflows/test.yml)
# Checkout V5
Checkout v5 now supports Node.js 24
# Checkout V4 # Checkout V4
This action checks-out your repository under `$GITHUB_WORKSPACE`, so your workflow can access it. This action checks-out your repository under `$GITHUB_WORKSPACE`, so your workflow can access it.
@ -14,24 +10,6 @@ The auth token is persisted in the local git config. This enables your scripts t
When Git 2.18 or higher is not in your PATH, falls back to the REST API to download the files. When Git 2.18 or higher is not in your PATH, falls back to the REST API to download the files.
### Note
Thank you for your interest in this GitHub action, however, right now we are not taking contributions.
We continue to focus our resources on strategic areas that help our customers be successful while making developers' lives easier. While GitHub Actions remains a key part of this vision, we are allocating resources towards other areas of Actions and are not taking contributions to this repository at this time. The GitHub public roadmap is the best place to follow along for any updates on features we're working on and what stage they're in.
We are taking the following steps to better direct requests related to GitHub Actions, including:
1. We will be directing questions and support requests to our [Community Discussions area](https://github.com/orgs/community/discussions/categories/actions)
2. High Priority bugs can be reported through Community Discussions or you can report these to our support team https://support.github.com/contact/bug-report.
3. Security Issues should be handled as per our [security.md](security.md)
We will still provide security updates for this project and fix major breaking changes during this time.
You are welcome to still raise bugs in this repo.
# What's new # What's new
Please refer to the [release page](https://github.com/actions/checkout/releases/latest) for the latest release notes. Please refer to the [release page](https://github.com/actions/checkout/releases/latest) for the latest release notes.
@ -40,7 +18,7 @@ Please refer to the [release page](https://github.com/actions/checkout/releases/
<!-- start usage --> <!-- start usage -->
```yaml ```yaml
- uses: actions/checkout@v5 - uses: actions/checkout@v4
with: with:
# Repository name with owner. For example, actions/checkout # Repository name with owner. For example, actions/checkout
# Default: ${{ github.repository }} # Default: ${{ github.repository }}
@ -153,32 +131,23 @@ Please refer to the [release page](https://github.com/actions/checkout/releases/
# Scenarios # Scenarios
- [Checkout V5](#checkout-v5) - [Fetch only the root files](#Fetch-only-the-root-files)
- [Checkout V4](#checkout-v4) - [Fetch only the root files and `.github` and `src` folder](#Fetch-only-the-root-files-and-github-and-src-folder)
- [Note](#note) - [Fetch only a single file](#Fetch-only-a-single-file)
- [What's new](#whats-new) - [Fetch all history for all tags and branches](#Fetch-all-history-for-all-tags-and-branches)
- [Usage](#usage) - [Checkout a different branch](#Checkout-a-different-branch)
- [Scenarios](#scenarios) - [Checkout HEAD^](#Checkout-HEAD)
- [Fetch only the root files](#fetch-only-the-root-files) - [Checkout multiple repos (side by side)](#Checkout-multiple-repos-side-by-side)
- [Fetch only the root files and `.github` and `src` folder](#fetch-only-the-root-files-and-github-and-src-folder) - [Checkout multiple repos (nested)](#Checkout-multiple-repos-nested)
- [Fetch only a single file](#fetch-only-a-single-file) - [Checkout multiple repos (private)](#Checkout-multiple-repos-private)
- [Fetch all history for all tags and branches](#fetch-all-history-for-all-tags-and-branches) - [Checkout pull request HEAD commit instead of merge commit](#Checkout-pull-request-HEAD-commit-instead-of-merge-commit)
- [Checkout a different branch](#checkout-a-different-branch) - [Checkout pull request on closed event](#Checkout-pull-request-on-closed-event)
- [Checkout HEAD^](#checkout-head) - [Push a commit using the built-in token](#Push-a-commit-using-the-built-in-token)
- [Checkout multiple repos (side by side)](#checkout-multiple-repos-side-by-side)
- [Checkout multiple repos (nested)](#checkout-multiple-repos-nested)
- [Checkout multiple repos (private)](#checkout-multiple-repos-private)
- [Checkout pull request HEAD commit instead of merge commit](#checkout-pull-request-head-commit-instead-of-merge-commit)
- [Checkout pull request on closed event](#checkout-pull-request-on-closed-event)
- [Push a commit using the built-in token](#push-a-commit-using-the-built-in-token)
- [Push a commit to a PR using the built-in token](#push-a-commit-to-a-pr-using-the-built-in-token)
- [Recommended permissions](#recommended-permissions)
- [License](#license)
## Fetch only the root files ## Fetch only the root files
```yaml ```yaml
- uses: actions/checkout@v5 - uses: actions/checkout@v4
with: with:
sparse-checkout: . sparse-checkout: .
``` ```
@ -186,7 +155,7 @@ Please refer to the [release page](https://github.com/actions/checkout/releases/
## Fetch only the root files and `.github` and `src` folder ## Fetch only the root files and `.github` and `src` folder
```yaml ```yaml
- uses: actions/checkout@v5 - uses: actions/checkout@v4
with: with:
sparse-checkout: | sparse-checkout: |
.github .github
@ -196,7 +165,7 @@ Please refer to the [release page](https://github.com/actions/checkout/releases/
## Fetch only a single file ## Fetch only a single file
```yaml ```yaml
- uses: actions/checkout@v5 - uses: actions/checkout@v4
with: with:
sparse-checkout: | sparse-checkout: |
README.md README.md
@ -206,7 +175,7 @@ Please refer to the [release page](https://github.com/actions/checkout/releases/
## Fetch all history for all tags and branches ## Fetch all history for all tags and branches
```yaml ```yaml
- uses: actions/checkout@v5 - uses: actions/checkout@v4
with: with:
fetch-depth: 0 fetch-depth: 0
``` ```
@ -214,7 +183,7 @@ Please refer to the [release page](https://github.com/actions/checkout/releases/
## Checkout a different branch ## Checkout a different branch
```yaml ```yaml
- uses: actions/checkout@v5 - uses: actions/checkout@v4
with: with:
ref: my-branch ref: my-branch
``` ```
@ -222,7 +191,7 @@ Please refer to the [release page](https://github.com/actions/checkout/releases/
## Checkout HEAD^ ## Checkout HEAD^
```yaml ```yaml
- uses: actions/checkout@v5 - uses: actions/checkout@v4
with: with:
fetch-depth: 2 fetch-depth: 2
- run: git checkout HEAD^ - run: git checkout HEAD^
@ -232,42 +201,42 @@ Please refer to the [release page](https://github.com/actions/checkout/releases/
```yaml ```yaml
- name: Checkout - name: Checkout
uses: actions/checkout@v5 uses: actions/checkout@v4
with: with:
path: main path: main
- name: Checkout tools repo - name: Checkout tools repo
uses: actions/checkout@v5 uses: actions/checkout@v4
with: with:
repository: my-org/my-tools repository: my-org/my-tools
path: my-tools path: my-tools
``` ```
> - If your secondary repository is private or internal you will need to add the option noted in [Checkout multiple repos (private)](#Checkout-multiple-repos-private) > - If your secondary repository is private you will need to add the option noted in [Checkout multiple repos (private)](#Checkout-multiple-repos-private)
## Checkout multiple repos (nested) ## Checkout multiple repos (nested)
```yaml ```yaml
- name: Checkout - name: Checkout
uses: actions/checkout@v5 uses: actions/checkout@v4
- name: Checkout tools repo - name: Checkout tools repo
uses: actions/checkout@v5 uses: actions/checkout@v4
with: with:
repository: my-org/my-tools repository: my-org/my-tools
path: my-tools path: my-tools
``` ```
> - If your secondary repository is private or internal you will need to add the option noted in [Checkout multiple repos (private)](#Checkout-multiple-repos-private) > - If your secondary repository is private you will need to add the option noted in [Checkout multiple repos (private)](#Checkout-multiple-repos-private)
## Checkout multiple repos (private) ## Checkout multiple repos (private)
```yaml ```yaml
- name: Checkout - name: Checkout
uses: actions/checkout@v5 uses: actions/checkout@v4
with: with:
path: main path: main
- name: Checkout private tools - name: Checkout private tools
uses: actions/checkout@v5 uses: actions/checkout@v4
with: with:
repository: my-org/my-private-tools repository: my-org/my-private-tools
token: ${{ secrets.GH_PAT }} # `GH_PAT` is a secret that contains your PAT token: ${{ secrets.GH_PAT }} # `GH_PAT` is a secret that contains your PAT
@ -280,7 +249,7 @@ Please refer to the [release page](https://github.com/actions/checkout/releases/
## Checkout pull request HEAD commit instead of merge commit ## Checkout pull request HEAD commit instead of merge commit
```yaml ```yaml
- uses: actions/checkout@v5 - uses: actions/checkout@v4
with: with:
ref: ${{ github.event.pull_request.head.sha }} ref: ${{ github.event.pull_request.head.sha }}
``` ```
@ -296,7 +265,7 @@ jobs:
build: build:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v5 - uses: actions/checkout@v4
``` ```
## Push a commit using the built-in token ## Push a commit using the built-in token
@ -307,51 +276,15 @@ jobs:
build: build:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v5 - uses: actions/checkout@v4
- run: | - run: |
date > generated.txt date > generated.txt
# Note: the following account information will not work on GHES git config user.name github-actions
git config user.name "github-actions[bot]" git config user.email github-actions@github.com
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
git add . git add .
git commit -m "generated" git commit -m "generated"
git push git push
``` ```
*NOTE:* The user email is `{user.id}+{user.login}@users.noreply.github.com`. See users API: https://api.github.com/users/github-actions%5Bbot%5D
## Push a commit to a PR using the built-in token
In a pull request trigger, `ref` is required as GitHub Actions checks out in detached HEAD mode, meaning it doesn't check out your branch by default.
```yaml
on: pull_request
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- run: |
date > generated.txt
# Note: the following account information will not work on GHES
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
git add .
git commit -m "generated"
git push
```
*NOTE:* The user email is `{user.id}+{user.login}@users.noreply.github.com`. See users API: https://api.github.com/users/github-actions%5Bbot%5D
# Recommended permissions
When using the `checkout` action in your GitHub Actions workflow, it is recommended to set the following `GITHUB_TOKEN` permissions to ensure proper functionality, unless alternative auth is provided via the `token` or `ssh-key` inputs:
```yaml
permissions:
contents: read
```
# License # License

View File

@ -67,26 +67,6 @@ describe('ref-helper tests', () => {
expect(checkoutInfo.startPoint).toBeFalsy() expect(checkoutInfo.startPoint).toBeFalsy()
}) })
it('getCheckoutInfo refs/', async () => {
const checkoutInfo = await refHelper.getCheckoutInfo(
git,
'refs/gh/queue/main/pr-123',
commit
)
expect(checkoutInfo.ref).toBe(commit)
expect(checkoutInfo.startPoint).toBeFalsy()
})
it('getCheckoutInfo refs/ without commit', async () => {
const checkoutInfo = await refHelper.getCheckoutInfo(
git,
'refs/non-standard-ref',
''
)
expect(checkoutInfo.ref).toBe('refs/non-standard-ref')
expect(checkoutInfo.startPoint).toBeFalsy()
})
it('getCheckoutInfo unqualified branch only', async () => { it('getCheckoutInfo unqualified branch only', async () => {
git.branchExists = jest.fn(async (remote: boolean, pattern: string) => { git.branchExists = jest.fn(async (remote: boolean, pattern: string) => {
return true return true

View File

@ -1,92 +0,0 @@
import * as urlHelper from '../src/url-helper'
describe('getServerUrl tests', () => {
it('basics', async () => {
// Note that URL::toString will append a trailing / when passed just a domain name ...
expect(urlHelper.getServerUrl().toString()).toBe('https://github.com/')
expect(urlHelper.getServerUrl(' ').toString()).toBe('https://github.com/')
expect(urlHelper.getServerUrl(' ').toString()).toBe('https://github.com/')
expect(urlHelper.getServerUrl('http://contoso.com').toString()).toBe(
'http://contoso.com/'
)
expect(urlHelper.getServerUrl('https://contoso.com').toString()).toBe(
'https://contoso.com/'
)
expect(urlHelper.getServerUrl('https://contoso.com/').toString()).toBe(
'https://contoso.com/'
)
// ... but can't make that same assumption when passed an URL that includes some deeper path.
expect(urlHelper.getServerUrl('https://contoso.com/a/b').toString()).toBe(
'https://contoso.com/a/b'
)
})
})
describe('isGhes tests', () => {
const pristineEnv = process.env
beforeEach(() => {
jest.resetModules()
process.env = {...pristineEnv}
})
afterAll(() => {
process.env = pristineEnv
})
it('basics', async () => {
delete process.env['GITHUB_SERVER_URL']
expect(urlHelper.isGhes()).toBeFalsy()
expect(urlHelper.isGhes('https://github.com')).toBeFalsy()
expect(urlHelper.isGhes('https://contoso.ghe.com')).toBeFalsy()
expect(urlHelper.isGhes('https://test.github.localhost')).toBeFalsy()
expect(urlHelper.isGhes('https://src.onpremise.fabrikam.com')).toBeTruthy()
})
it('returns false when the GITHUB_SERVER_URL environment variable is not defined', async () => {
delete process.env['GITHUB_SERVER_URL']
expect(urlHelper.isGhes()).toBeFalsy()
})
it('returns false when the GITHUB_SERVER_URL environment variable is set to github.com', async () => {
process.env['GITHUB_SERVER_URL'] = 'https://github.com'
expect(urlHelper.isGhes()).toBeFalsy()
})
it('returns false when the GITHUB_SERVER_URL environment variable is set to a GitHub Enterprise Cloud-style URL', async () => {
process.env['GITHUB_SERVER_URL'] = 'https://contoso.ghe.com'
expect(urlHelper.isGhes()).toBeFalsy()
})
it('returns false when the GITHUB_SERVER_URL environment variable has a .localhost suffix', async () => {
process.env['GITHUB_SERVER_URL'] = 'https://mock-github.localhost'
expect(urlHelper.isGhes()).toBeFalsy()
})
it('returns true when the GITHUB_SERVER_URL environment variable is set to some other URL', async () => {
process.env['GITHUB_SERVER_URL'] = 'https://src.onpremise.fabrikam.com'
expect(urlHelper.isGhes()).toBeTruthy()
})
})
describe('getServerApiUrl tests', () => {
it('basics', async () => {
expect(urlHelper.getServerApiUrl()).toBe('https://api.github.com')
expect(urlHelper.getServerApiUrl('https://github.com')).toBe(
'https://api.github.com'
)
expect(urlHelper.getServerApiUrl('https://GitHub.com')).toBe(
'https://api.github.com'
)
expect(urlHelper.getServerApiUrl('https://contoso.ghe.com')).toBe(
'https://api.contoso.ghe.com'
)
expect(urlHelper.getServerApiUrl('https://fabrikam.GHE.COM')).toBe(
'https://api.fabrikam.ghe.com'
)
expect(
urlHelper.getServerApiUrl('https://src.onpremise.fabrikam.com')
).toBe('https://src.onpremise.fabrikam.com/api/v3')
})
})

View File

@ -98,12 +98,7 @@ inputs:
github-server-url: github-server-url:
description: The base URL for the GitHub instance that you are trying to clone from, will use environment defaults to fetch from the same instance that the workflow is running from unless specified. Example URLs are https://github.com or https://my-ghes-server.example.com description: The base URL for the GitHub instance that you are trying to clone from, will use environment defaults to fetch from the same instance that the workflow is running from unless specified. Example URLs are https://github.com or https://my-ghes-server.example.com
required: false required: false
outputs:
ref:
description: 'The branch, tag or SHA that was checked out'
commit:
description: 'The commit SHA that was checked out'
runs: runs:
using: node24 using: node20
main: dist/index.js main: dist/index.js
post: dist/index.js post: dist/index.js
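
For reference, the `ref` and `commit` entries in the hunk above are declared as ordinary action outputs, so any later step can read them through the `steps` context, exactly as the `test-output` job earlier in this compare does. A minimal sketch, assuming an illustrative step id `checkout` and the local `./` action reference (use whichever checkout release actually declares these outputs):

```yaml
steps:
  - name: Checkout
    id: checkout
    uses: ./   # assumes a checkout build that declares the ref/commit outputs
  - name: Show checkout outputs
    run: |
      echo "Ref: ${{ steps.checkout.outputs.ref }}"
      echo "Commit: ${{ steps.checkout.outputs.commit }}"
```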

dist/index.js vendored
View File

@ -30,9 +30,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result; return result;
}; };
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.directoryExistsSync = directoryExistsSync; exports.fileExistsSync = exports.existsSync = exports.directoryExistsSync = void 0;
exports.existsSync = existsSync;
exports.fileExistsSync = fileExistsSync;
const fs = __importStar(__nccwpck_require__(7147)); const fs = __importStar(__nccwpck_require__(7147));
function directoryExistsSync(path, required) { function directoryExistsSync(path, required) {
var _a; var _a;
@ -60,6 +58,7 @@ function directoryExistsSync(path, required) {
} }
throw new Error(`Directory '${path}' does not exist`); throw new Error(`Directory '${path}' does not exist`);
} }
exports.directoryExistsSync = directoryExistsSync;
function existsSync(path) { function existsSync(path) {
var _a; var _a;
if (!path) { if (!path) {
@ -76,6 +75,7 @@ function existsSync(path) {
} }
return true; return true;
} }
exports.existsSync = existsSync;
function fileExistsSync(path) { function fileExistsSync(path) {
var _a; var _a;
if (!path) { if (!path) {
@ -96,6 +96,7 @@ function fileExistsSync(path) {
} }
return false; return false;
} }
exports.fileExistsSync = fileExistsSync;
/***/ }), /***/ }),
@ -138,7 +139,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
}); });
}; };
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.createAuthHelper = createAuthHelper; exports.createAuthHelper = void 0;
const assert = __importStar(__nccwpck_require__(9491)); const assert = __importStar(__nccwpck_require__(9491));
const core = __importStar(__nccwpck_require__(2186)); const core = __importStar(__nccwpck_require__(2186));
const exec = __importStar(__nccwpck_require__(1514)); const exec = __importStar(__nccwpck_require__(1514));
@ -155,6 +156,7 @@ const SSH_COMMAND_KEY = 'core.sshCommand';
function createAuthHelper(git, settings) { function createAuthHelper(git, settings) {
return new GitAuthHelper(git, settings); return new GitAuthHelper(git, settings);
} }
exports.createAuthHelper = createAuthHelper;
class GitAuthHelper { class GitAuthHelper {
constructor(gitCommandManager, gitSourceSettings) { constructor(gitCommandManager, gitSourceSettings) {
this.insteadOfValues = []; this.insteadOfValues = [];
@ -473,8 +475,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
}); });
}; };
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.MinimumGitSparseCheckoutVersion = exports.MinimumGitVersion = void 0; exports.createCommandManager = exports.MinimumGitSparseCheckoutVersion = exports.MinimumGitVersion = void 0;
exports.createCommandManager = createCommandManager;
const core = __importStar(__nccwpck_require__(2186)); const core = __importStar(__nccwpck_require__(2186));
const exec = __importStar(__nccwpck_require__(1514)); const exec = __importStar(__nccwpck_require__(1514));
const fs = __importStar(__nccwpck_require__(7147)); const fs = __importStar(__nccwpck_require__(7147));
@ -495,6 +496,7 @@ function createCommandManager(workingDirectory, lfs, doSparseCheckout) {
return yield GitCommandManager.createCommandManager(workingDirectory, lfs, doSparseCheckout); return yield GitCommandManager.createCommandManager(workingDirectory, lfs, doSparseCheckout);
}); });
} }
exports.createCommandManager = createCommandManager;
class GitCommandManager { class GitCommandManager {
// Private constructor; use createCommandManager() // Private constructor; use createCommandManager()
constructor() { constructor() {
@ -1018,7 +1020,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
}); });
}; };
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.prepareExistingDirectory = prepareExistingDirectory; exports.prepareExistingDirectory = void 0;
const assert = __importStar(__nccwpck_require__(9491)); const assert = __importStar(__nccwpck_require__(9491));
const core = __importStar(__nccwpck_require__(2186)); const core = __importStar(__nccwpck_require__(2186));
const fs = __importStar(__nccwpck_require__(7147)); const fs = __importStar(__nccwpck_require__(7147));
@ -1122,6 +1124,7 @@ function prepareExistingDirectory(git, repositoryPath, repositoryUrl, clean, ref
} }
}); });
} }
exports.prepareExistingDirectory = prepareExistingDirectory;
/***/ }), /***/ }),
@ -1164,8 +1167,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
}); });
}; };
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getSource = getSource; exports.cleanup = exports.getSource = void 0;
exports.cleanup = cleanup;
const core = __importStar(__nccwpck_require__(2186)); const core = __importStar(__nccwpck_require__(2186));
const fsHelper = __importStar(__nccwpck_require__(7219)); const fsHelper = __importStar(__nccwpck_require__(7219));
const gitAuthHelper = __importStar(__nccwpck_require__(2565)); const gitAuthHelper = __importStar(__nccwpck_require__(2565));
@ -1353,8 +1355,7 @@ function getSource(settings) {
// Get commit information // Get commit information
const commitInfo = yield git.log1(); const commitInfo = yield git.log1();
// Log commit sha // Log commit sha
const commitSHA = yield git.log1('--format=%H'); yield git.log1("--format='%H'");
core.setOutput('commit', commitSHA.trim());
// Check for incorrect pull request merge commit // Check for incorrect pull request merge commit
yield refHelper.checkCommitInfo(settings.authToken, commitInfo, settings.repositoryOwner, settings.repositoryName, settings.ref, settings.commit, settings.githubServerUrl); yield refHelper.checkCommitInfo(settings.authToken, commitInfo, settings.repositoryOwner, settings.repositoryName, settings.ref, settings.commit, settings.githubServerUrl);
} }
@ -1371,6 +1372,7 @@ function getSource(settings) {
} }
}); });
} }
exports.getSource = getSource;
function cleanup(repositoryPath) { function cleanup(repositoryPath) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
// Repo exists? // Repo exists?
@ -1406,6 +1408,7 @@ function cleanup(repositoryPath) {
} }
}); });
} }
exports.cleanup = cleanup;
function getGitCommandManager(settings) { function getGitCommandManager(settings) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
core.info(`Working directory is '${settings.repositoryPath}'`); core.info(`Working directory is '${settings.repositoryPath}'`);
@ -1544,8 +1547,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
}); });
}; };
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.downloadRepository = downloadRepository; exports.getDefaultBranch = exports.downloadRepository = void 0;
exports.getDefaultBranch = getDefaultBranch;
const assert = __importStar(__nccwpck_require__(9491)); const assert = __importStar(__nccwpck_require__(9491));
const core = __importStar(__nccwpck_require__(2186)); const core = __importStar(__nccwpck_require__(2186));
const fs = __importStar(__nccwpck_require__(7147)); const fs = __importStar(__nccwpck_require__(7147));
@ -1572,9 +1574,7 @@ function downloadRepository(authToken, owner, repo, ref, commit, repositoryPath,
// Write archive to disk // Write archive to disk
core.info('Writing archive to disk'); core.info('Writing archive to disk');
const uniqueId = (0, uuid_1.v4)(); const uniqueId = (0, uuid_1.v4)();
const archivePath = IS_WINDOWS const archivePath = path.join(repositoryPath, `${uniqueId}.tar.gz`);
? path.join(repositoryPath, `${uniqueId}.zip`)
: path.join(repositoryPath, `${uniqueId}.tar.gz`);
yield fs.promises.writeFile(archivePath, archiveData); yield fs.promises.writeFile(archivePath, archiveData);
archiveData = Buffer.from(''); // Free memory archiveData = Buffer.from(''); // Free memory
// Extract archive // Extract archive
@ -1609,6 +1609,7 @@ function downloadRepository(authToken, owner, repo, ref, commit, repositoryPath,
yield io.rmRF(extractPath); yield io.rmRF(extractPath);
}); });
} }
exports.downloadRepository = downloadRepository;
/** /**
* Looks up the default branch name * Looks up the default branch name
*/ */
@ -1647,6 +1648,7 @@ function getDefaultBranch(authToken, owner, repo, baseUrl) {
})); }));
}); });
} }
exports.getDefaultBranch = getDefaultBranch;
function downloadArchive(authToken, owner, repo, ref, commit, baseUrl) { function downloadArchive(authToken, owner, repo, ref, commit, baseUrl) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
const octokit = github.getOctokit(authToken, { const octokit = github.getOctokit(authToken, {
@ -1705,7 +1707,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
}); });
}; };
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getInputs = getInputs; exports.getInputs = void 0;
const core = __importStar(__nccwpck_require__(2186)); const core = __importStar(__nccwpck_require__(2186));
const fsHelper = __importStar(__nccwpck_require__(7219)); const fsHelper = __importStar(__nccwpck_require__(7219));
const github = __importStar(__nccwpck_require__(5438)); const github = __importStar(__nccwpck_require__(5438));
@ -1834,6 +1836,7 @@ function getInputs() {
return result; return result;
}); });
} }
exports.getInputs = getInputs;
/***/ }), /***/ }),
@ -1892,7 +1895,6 @@ function run() {
coreCommand.issueCommand('add-matcher', {}, path.join(__dirname, 'problem-matcher.json')); coreCommand.issueCommand('add-matcher', {}, path.join(__dirname, 'problem-matcher.json'));
// Get sources // Get sources
yield gitSourceProvider.getSource(sourceSettings); yield gitSourceProvider.getSource(sourceSettings);
core.setOutput('ref', sourceSettings.ref);
} }
finally { finally {
// Unregister problem matcher // Unregister problem matcher
@ -1965,12 +1967,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
}); });
}; };
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.tagsRefSpec = void 0; exports.checkCommitInfo = exports.testRef = exports.getRefSpec = exports.getRefSpecForAllHistory = exports.getCheckoutInfo = exports.tagsRefSpec = void 0;
exports.getCheckoutInfo = getCheckoutInfo;
exports.getRefSpecForAllHistory = getRefSpecForAllHistory;
exports.getRefSpec = getRefSpec;
exports.testRef = testRef;
exports.checkCommitInfo = checkCommitInfo;
const core = __importStar(__nccwpck_require__(2186)); const core = __importStar(__nccwpck_require__(2186));
const github = __importStar(__nccwpck_require__(5438)); const github = __importStar(__nccwpck_require__(5438));
const url_helper_1 = __nccwpck_require__(9437); const url_helper_1 = __nccwpck_require__(9437);
@ -2001,12 +1998,8 @@ function getCheckoutInfo(git, ref, commit) {
result.ref = `refs/remotes/pull/${branch}`; result.ref = `refs/remotes/pull/${branch}`;
} }
// refs/tags/ // refs/tags/
else if (upperRef.startsWith('REFS/TAGS/')) {
result.ref = ref;
}
// refs/
else if (upperRef.startsWith('REFS/')) { else if (upperRef.startsWith('REFS/')) {
result.ref = commit ? commit : ref; result.ref = ref;
} }
// Unqualified ref, check for a matching branch or tag // Unqualified ref, check for a matching branch or tag
else { else {
@ -2024,6 +2017,7 @@ function getCheckoutInfo(git, ref, commit) {
return result; return result;
}); });
} }
exports.getCheckoutInfo = getCheckoutInfo;
function getRefSpecForAllHistory(ref, commit) { function getRefSpecForAllHistory(ref, commit) {
const result = ['+refs/heads/*:refs/remotes/origin/*', exports.tagsRefSpec]; const result = ['+refs/heads/*:refs/remotes/origin/*', exports.tagsRefSpec];
if (ref && ref.toUpperCase().startsWith('REFS/PULL/')) { if (ref && ref.toUpperCase().startsWith('REFS/PULL/')) {
@ -2032,6 +2026,7 @@ function getRefSpecForAllHistory(ref, commit) {
} }
return result; return result;
} }
exports.getRefSpecForAllHistory = getRefSpecForAllHistory;
function getRefSpec(ref, commit) { function getRefSpec(ref, commit) {
if (!ref && !commit) { if (!ref && !commit) {
throw new Error('Args ref and commit cannot both be empty'); throw new Error('Args ref and commit cannot both be empty');
@ -2080,6 +2075,7 @@ function getRefSpec(ref, commit) {
return [`+${ref}:${ref}`]; return [`+${ref}:${ref}`];
} }
} }
exports.getRefSpec = getRefSpec;
/** /**
* Tests whether the initial fetch created the ref at the expected commit * Tests whether the initial fetch created the ref at the expected commit
*/ */
@ -2123,6 +2119,7 @@ function testRef(git, ref, commit) {
} }
}); });
} }
exports.testRef = testRef;
function checkCommitInfo(token, commitInfo, repositoryOwner, repositoryName, ref, commit, baseUrl) { function checkCommitInfo(token, commitInfo, repositoryOwner, repositoryName, ref, commit, baseUrl) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
var _a; var _a;
@ -2188,6 +2185,7 @@ function checkCommitInfo(token, commitInfo, repositoryOwner, repositoryName, ref
} }
}); });
} }
exports.checkCommitInfo = checkCommitInfo;
function fromPayload(path) { function fromPayload(path) {
return select(github.context.payload, path); return select(github.context.payload, path);
} }
@ -2212,12 +2210,13 @@ function select(obj, path) {
"use strict"; "use strict";
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.escape = escape; exports.escape = void 0;
function escape(value) { function escape(value) {
return value.replace(/[^a-zA-Z0-9_]/g, x => { return value.replace(/[^a-zA-Z0-9_]/g, x => {
return `\\${x}`; return `\\${x}`;
}); });
} }
exports.escape = escape;
/***/ }), /***/ }),
@ -2260,8 +2259,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
}); });
}; };
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.RetryHelper = void 0; exports.execute = exports.RetryHelper = void 0;
exports.execute = execute;
const core = __importStar(__nccwpck_require__(2186)); const core = __importStar(__nccwpck_require__(2186));
const defaultMaxAttempts = 3; const defaultMaxAttempts = 3;
const defaultMinSeconds = 10; const defaultMinSeconds = 10;
@ -2313,6 +2311,7 @@ function execute(action) {
return yield retryHelper.execute(action); return yield retryHelper.execute(action);
}); });
} }
exports.execute = execute;
/***/ }), /***/ }),
@ -2346,11 +2345,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result; return result;
}; };
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.SshKnownHostsPath = exports.SshKeyPath = exports.PostSetSafeDirectory = exports.RepositoryPath = exports.IsPost = void 0; exports.setSafeDirectory = exports.setSshKnownHostsPath = exports.setSshKeyPath = exports.setRepositoryPath = exports.SshKnownHostsPath = exports.SshKeyPath = exports.PostSetSafeDirectory = exports.RepositoryPath = exports.IsPost = void 0;
exports.setRepositoryPath = setRepositoryPath;
exports.setSshKeyPath = setSshKeyPath;
exports.setSshKnownHostsPath = setSshKnownHostsPath;
exports.setSafeDirectory = setSafeDirectory;
const core = __importStar(__nccwpck_require__(2186)); const core = __importStar(__nccwpck_require__(2186));
/** /**
* Indicates whether the POST action is running * Indicates whether the POST action is running
@ -2378,24 +2373,28 @@ exports.SshKnownHostsPath = core.getState('sshKnownHostsPath');
function setRepositoryPath(repositoryPath) { function setRepositoryPath(repositoryPath) {
core.saveState('repositoryPath', repositoryPath); core.saveState('repositoryPath', repositoryPath);
} }
exports.setRepositoryPath = setRepositoryPath;
/** /**
* Save the SSH key path so the POST action can retrieve the value. * Save the SSH key path so the POST action can retrieve the value.
*/ */
function setSshKeyPath(sshKeyPath) { function setSshKeyPath(sshKeyPath) {
core.saveState('sshKeyPath', sshKeyPath); core.saveState('sshKeyPath', sshKeyPath);
} }
exports.setSshKeyPath = setSshKeyPath;
/** /**
* Save the SSH known hosts path so the POST action can retrieve the value. * Save the SSH known hosts path so the POST action can retrieve the value.
*/ */
function setSshKnownHostsPath(sshKnownHostsPath) { function setSshKnownHostsPath(sshKnownHostsPath) {
core.saveState('sshKnownHostsPath', sshKnownHostsPath); core.saveState('sshKnownHostsPath', sshKnownHostsPath);
} }
exports.setSshKnownHostsPath = setSshKnownHostsPath;
/** /**
* Save the set-safe-directory input so the POST action can retrieve the value. * Save the set-safe-directory input so the POST action can retrieve the value.
*/ */
function setSafeDirectory() { function setSafeDirectory() {
core.saveState('setSafeDirectory', 'true'); core.saveState('setSafeDirectory', 'true');
} }
exports.setSafeDirectory = setSafeDirectory;
// Publish a variable so that when the POST action runs, it can determine it should run the cleanup logic. // Publish a variable so that when the POST action runs, it can determine it should run the cleanup logic.
// This is necessary since we don't have a separate entry point. // This is necessary since we don't have a separate entry point.
if (!exports.IsPost) { if (!exports.IsPost) {
@ -2434,10 +2433,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result; return result;
}; };
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getFetchUrl = getFetchUrl; exports.isGhes = exports.getServerApiUrl = exports.getServerUrl = exports.getFetchUrl = void 0;
exports.getServerUrl = getServerUrl;
exports.getServerApiUrl = getServerApiUrl;
exports.isGhes = isGhes;
const assert = __importStar(__nccwpck_require__(9491)); const assert = __importStar(__nccwpck_require__(9491));
const url_1 = __nccwpck_require__(7310); const url_1 = __nccwpck_require__(7310);
function getFetchUrl(settings) { function getFetchUrl(settings) {
@ -2453,52 +2449,28 @@ function getFetchUrl(settings) {
// "origin" is SCHEME://HOSTNAME[:PORT] // "origin" is SCHEME://HOSTNAME[:PORT]
return `${serviceUrl.origin}/${encodedOwner}/${encodedName}`; return `${serviceUrl.origin}/${encodedOwner}/${encodedName}`;
} }
exports.getFetchUrl = getFetchUrl;
function getServerUrl(url) { function getServerUrl(url) {
let resolvedUrl = process.env['GITHUB_SERVER_URL'] || 'https://github.com'; let urlValue = url && url.trim().length > 0
if (hasContent(url, WhitespaceMode.Trim)) { ? url
resolvedUrl = url; : process.env['GITHUB_SERVER_URL'] || 'https://github.com';
} return new url_1.URL(urlValue);
return new url_1.URL(resolvedUrl);
} }
exports.getServerUrl = getServerUrl;
function getServerApiUrl(url) { function getServerApiUrl(url) {
if (hasContent(url, WhitespaceMode.Trim)) { let apiUrl = 'https://api.github.com';
let serverUrl = getServerUrl(url);
if (isGhes(url)) { if (isGhes(url)) {
serverUrl.pathname = 'api/v3'; const serverUrl = getServerUrl(url);
apiUrl = new url_1.URL(`${serverUrl.origin}/api/v3`).toString();
} }
else { return apiUrl;
serverUrl.hostname = 'api.' + serverUrl.hostname;
}
return pruneSuffix(serverUrl.toString(), '/');
}
return process.env['GITHUB_API_URL'] || 'https://api.github.com';
} }
exports.getServerApiUrl = getServerApiUrl;
function isGhes(url) { function isGhes(url) {
const ghUrl = new url_1.URL(url || process.env['GITHUB_SERVER_URL'] || 'https://github.com'); const ghUrl = getServerUrl(url);
const hostname = ghUrl.hostname.trimEnd().toUpperCase(); return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
const isGitHubHost = hostname === 'GITHUB.COM';
const isGitHubEnterpriseCloudHost = hostname.endsWith('.GHE.COM');
const isLocalHost = hostname.endsWith('.LOCALHOST');
return !isGitHubHost && !isGitHubEnterpriseCloudHost && !isLocalHost;
}
function pruneSuffix(text, suffix) {
if (hasContent(suffix, WhitespaceMode.Preserve) && (text === null || text === void 0 ? void 0 : text.endsWith(suffix))) {
return text.substring(0, text.length - suffix.length);
}
return text;
}
var WhitespaceMode;
(function (WhitespaceMode) {
WhitespaceMode[WhitespaceMode["Trim"] = 0] = "Trim";
WhitespaceMode[WhitespaceMode["Preserve"] = 1] = "Preserve";
})(WhitespaceMode || (WhitespaceMode = {}));
function hasContent(text, whitespaceMode) {
let refinedText = text !== null && text !== void 0 ? text : '';
if (whitespaceMode == WhitespaceMode.Trim) {
refinedText = refinedText.trim();
}
return refinedText.length > 0;
} }
exports.isGhes = isGhes;
/***/ }), /***/ }),
@ -2541,7 +2513,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
}); });
}; };
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getOrganizationId = getOrganizationId; exports.getOrganizationId = void 0;
const core = __importStar(__nccwpck_require__(2186)); const core = __importStar(__nccwpck_require__(2186));
const fs = __importStar(__nccwpck_require__(7147)); const fs = __importStar(__nccwpck_require__(7147));
/** /**
@ -2570,6 +2542,7 @@ function getOrganizationId() {
} }
}); });
} }
exports.getOrganizationId = getOrganizationId;
/***/ }), /***/ }),
@ -7802,7 +7775,7 @@ module.exports = __toCommonJS(dist_src_exports);
var import_universal_user_agent = __nccwpck_require__(5030); var import_universal_user_agent = __nccwpck_require__(5030);
// pkg/dist-src/version.js // pkg/dist-src/version.js
var VERSION = "9.0.6"; var VERSION = "9.0.5";
// pkg/dist-src/defaults.js // pkg/dist-src/defaults.js
var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`;
@ -7907,9 +7880,9 @@ function addQueryParameters(url, parameters) {
} }
// pkg/dist-src/util/extract-url-variable-names.js // pkg/dist-src/util/extract-url-variable-names.js
var urlVariableRegex = /\{[^{}}]+\}/g; var urlVariableRegex = /\{[^}]+\}/g;
function removeNonChars(variableName) { function removeNonChars(variableName) {
return variableName.replace(/(?:^\W+)|(?:(?<!\W)\W+$)/g, "").split(/,/); return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
} }
function extractUrlVariableNames(url) { function extractUrlVariableNames(url) {
const matches = url.match(urlVariableRegex); const matches = url.match(urlVariableRegex);
@ -8095,7 +8068,7 @@ function parse(options) {
} }
if (url.endsWith("/graphql")) { if (url.endsWith("/graphql")) {
if (options.mediaType.previews?.length) { if (options.mediaType.previews?.length) {
const previewsFromAcceptHeader = headers.accept.match(/(?<![\w-])[\w-]+(?=-preview)/g) || []; const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => {
const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
return `application/vnd.github.${preview}-preview${format}`; return `application/vnd.github.${preview}-preview${format}`;
@ -8344,7 +8317,7 @@ __export(dist_src_exports, {
module.exports = __toCommonJS(dist_src_exports); module.exports = __toCommonJS(dist_src_exports);
// pkg/dist-src/version.js // pkg/dist-src/version.js
var VERSION = "9.2.2"; var VERSION = "9.2.1";
// pkg/dist-src/normalize-paginated-list-response.js // pkg/dist-src/normalize-paginated-list-response.js
function normalizePaginatedListResponse(response) { function normalizePaginatedListResponse(response) {
@ -8392,7 +8365,7 @@ function iterator(octokit, route, parameters) {
const response = await requestMethod({ method, url, headers }); const response = await requestMethod({ method, url, headers });
const normalizedResponse = normalizePaginatedListResponse(response); const normalizedResponse = normalizePaginatedListResponse(response);
url = ((normalizedResponse.headers.link || "").match( url = ((normalizedResponse.headers.link || "").match(
/<([^<>]+)>;\s*rel="next"/ /<([^>]+)>;\s*rel="next"/
) || [])[1]; ) || [])[1];
return { value: normalizedResponse }; return { value: normalizedResponse };
} catch (error) { } catch (error) {
@ -10944,7 +10917,7 @@ var RequestError = class extends Error {
if (options.request.headers.authorization) { if (options.request.headers.authorization) {
requestCopy.headers = Object.assign({}, options.request.headers, { requestCopy.headers = Object.assign({}, options.request.headers, {
authorization: options.request.headers.authorization.replace( authorization: options.request.headers.authorization.replace(
/(?<! ) .*$/, / .*$/,
" [REDACTED]" " [REDACTED]"
) )
}); });
@ -11012,7 +10985,7 @@ var import_endpoint = __nccwpck_require__(9440);
var import_universal_user_agent = __nccwpck_require__(5030); var import_universal_user_agent = __nccwpck_require__(5030);
// pkg/dist-src/version.js // pkg/dist-src/version.js
var VERSION = "8.4.1"; var VERSION = "8.4.0";
// pkg/dist-src/is-plain-object.js // pkg/dist-src/is-plain-object.js
function isPlainObject(value) { function isPlainObject(value) {
@ -11071,7 +11044,7 @@ function fetchWrapper(requestOptions) {
headers[keyAndValue[0]] = keyAndValue[1]; headers[keyAndValue[0]] = keyAndValue[1];
} }
if ("deprecation" in headers) { if ("deprecation" in headers) {
const matches = headers.link && headers.link.match(/<([^<>]+)>; rel="deprecation"/); const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/);
const deprecationLink = matches && matches.pop(); const deprecationLink = matches && matches.pop();
log.warn( log.warn(
`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`
@ -18725,7 +18698,7 @@ module.exports = {
const { parseSetCookie } = __nccwpck_require__(4408) const { parseSetCookie } = __nccwpck_require__(4408)
const { stringify } = __nccwpck_require__(3121) const { stringify, getHeadersList } = __nccwpck_require__(3121)
const { webidl } = __nccwpck_require__(1744) const { webidl } = __nccwpck_require__(1744)
const { Headers } = __nccwpck_require__(554) const { Headers } = __nccwpck_require__(554)
@ -18801,13 +18774,14 @@ function getSetCookies (headers) {
webidl.brandCheck(headers, Headers, { strict: false }) webidl.brandCheck(headers, Headers, { strict: false })
const cookies = headers.getSetCookie() const cookies = getHeadersList(headers).cookies
if (!cookies) { if (!cookies) {
return [] return []
} }
return cookies.map((pair) => parseSetCookie(pair)) // In older versions of undici, cookies is a list of name:value.
return cookies.map((pair) => parseSetCookie(Array.isArray(pair) ? pair[1] : pair))
} }
/** /**
@ -19235,15 +19209,14 @@ module.exports = {
/***/ }), /***/ }),
/***/ 3121: /***/ 3121:
/***/ ((module) => { /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict"; "use strict";
/** const assert = __nccwpck_require__(9491)
* @param {string} value const { kHeadersList } = __nccwpck_require__(2785)
* @returns {boolean}
*/
function isCTLExcludingHtab (value) { function isCTLExcludingHtab (value) {
if (value.length === 0) { if (value.length === 0) {
return false return false
@ -19504,13 +19477,31 @@ function stringify (cookie) {
return out.join('; ') return out.join('; ')
} }
let kHeadersListNode
function getHeadersList (headers) {
if (headers[kHeadersList]) {
return headers[kHeadersList]
}
if (!kHeadersListNode) {
kHeadersListNode = Object.getOwnPropertySymbols(headers).find(
(symbol) => symbol.description === 'headers list'
)
assert(kHeadersListNode, 'Headers cannot be parsed')
}
const headersList = headers[kHeadersListNode]
assert(headersList)
return headersList
}
module.exports = { module.exports = {
isCTLExcludingHtab, isCTLExcludingHtab,
validateCookieName, stringify,
validateCookiePath, getHeadersList
validateCookieValue,
toIMFDate,
stringify
} }
@ -21439,14 +21430,6 @@ const { isUint8Array, isArrayBuffer } = __nccwpck_require__(9830)
const { File: UndiciFile } = __nccwpck_require__(8511) const { File: UndiciFile } = __nccwpck_require__(8511)
const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685)
let random
try {
const crypto = __nccwpck_require__(6005)
random = (max) => crypto.randomInt(0, max)
} catch {
random = (max) => Math.floor(Math.random(max))
}
let ReadableStream = globalThis.ReadableStream let ReadableStream = globalThis.ReadableStream
/** @type {globalThis['File']} */ /** @type {globalThis['File']} */
@ -21532,7 +21515,7 @@ function extractBody (object, keepalive = false) {
// Set source to a copy of the bytes held by object. // Set source to a copy of the bytes held by object.
source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength)) source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength))
} else if (util.isFormDataLike(object)) { } else if (util.isFormDataLike(object)) {
const boundary = `----formdata-undici-0${`${random(1e11)}`.padStart(11, '0')}` const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}`
const prefix = `--${boundary}\r\nContent-Disposition: form-data` const prefix = `--${boundary}\r\nContent-Disposition: form-data`
/*! formdata-polyfill. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */ /*! formdata-polyfill. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
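A short sketch of the two boundary-generation strategies above, padded to 11 digits as in the bundle; note that the Math.floor(Math.random(max)) fallback ignores its argument (Math.random takes none), so it always yields 0:

import { randomInt } from 'node:crypto'

// Sketch: build a multipart/form-data boundary from an 11-digit random number.
const viaCrypto = `----formdata-undici-0${`${randomInt(0, 1e11)}`.padStart(11, '0')}`
const viaMathRandom = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}`
console.log(viaCrypto)
console.log(viaMathRandom)
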
@ -23514,7 +23497,6 @@ const {
isValidHeaderName, isValidHeaderName,
isValidHeaderValue isValidHeaderValue
} = __nccwpck_require__(2538) } = __nccwpck_require__(2538)
const util = __nccwpck_require__(3837)
const { webidl } = __nccwpck_require__(1744) const { webidl } = __nccwpck_require__(1744)
const assert = __nccwpck_require__(9491) const assert = __nccwpck_require__(9491)
@ -24068,9 +24050,6 @@ Object.defineProperties(Headers.prototype, {
[Symbol.toStringTag]: { [Symbol.toStringTag]: {
value: 'Headers', value: 'Headers',
configurable: true configurable: true
},
[util.inspect.custom]: {
enumerable: false
} }
}) })
@ -33247,20 +33226,6 @@ class Pool extends PoolBase {
? { ...options.interceptors } ? { ...options.interceptors }
: undefined : undefined
this[kFactory] = factory this[kFactory] = factory
this.on('connectionError', (origin, targets, error) => {
// If a connection error occurs, we remove the client from the pool,
// and emit a connectionError event. They will not be re-used.
// Fixes https://github.com/nodejs/undici/issues/3895
for (const target of targets) {
// Do not use kRemoveClient here, as it will close the client,
// but the client cannot be closed in this state.
const idx = this[kClients].indexOf(target)
if (idx !== -1) {
this[kClients].splice(idx, 1)
}
}
})
} }
[kGetDispatcher] () { [kGetDispatcher] () {
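The connectionError handler in this hunk only forgets failed clients; it deliberately avoids closing them. A rough, self-contained sketch of the same bookkeeping with simplified stand-in types (not undici's real symbol-keyed internals):

import { EventEmitter } from 'node:events'

// Sketch: drop failed clients from a pool's list on 'connectionError'
// without closing them, mirroring the handler above.
type Client = { id: number }

class TinyPool extends EventEmitter {
  private clients: Client[]
  constructor(clients: Client[]) {
    super()
    this.clients = clients
    this.on('connectionError', (_origin: string, targets: Client[]) => {
      for (const target of targets) {
        const idx = this.clients.indexOf(target)
        if (idx !== -1) this.clients.splice(idx, 1)
      }
    })
  }
  size(): number {
    return this.clients.length
  }
}

const a: Client = { id: 1 }
const b: Client = { id: 2 }
const pool = new TinyPool([a, b])
pool.emit('connectionError', 'https://example.test', [a], new Error('boom'))
console.log(pool.size()) // 1
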
@ -36416,14 +36381,6 @@ module.exports = require("net");
/***/ }), /***/ }),
/***/ 6005:
/***/ ((module) => {
"use strict";
module.exports = require("node:crypto");
/***/ }),
/***/ 5673: /***/ 5673:
/***/ ((module) => { /***/ ((module) => {

858 package-lock.json generated

File diff suppressed because it is too large

View File

@ -1,6 +1,6 @@
{ {
"name": "checkout", "name": "checkout",
"version": "5.0.0", "version": "4.1.4",
"description": "checkout action", "description": "checkout action",
"main": "lib/main.js", "main": "lib/main.js",
"scripts": { "scripts": {
@ -37,19 +37,19 @@
}, },
"devDependencies": { "devDependencies": {
"@types/jest": "^29.5.12", "@types/jest": "^29.5.12",
"@types/node": "^24.1.0", "@types/node": "^20.12.7",
"@types/uuid": "^9.0.8", "@types/uuid": "^9.0.8",
"@typescript-eslint/eslint-plugin": "^7.9.0", "@typescript-eslint/eslint-plugin": "^7.7.1",
"@typescript-eslint/parser": "^7.9.0", "@typescript-eslint/parser": "^7.7.1",
"@vercel/ncc": "^0.38.1", "@vercel/ncc": "^0.38.1",
"eslint": "^8.57.0", "eslint": "^8.57.0",
"eslint-plugin-github": "^4.10.2", "eslint-plugin-github": "^4.10.2",
"eslint-plugin-jest": "^28.8.2", "eslint-plugin-jest": "^28.2.0",
"jest": "^29.7.0", "jest": "^29.7.0",
"jest-circus": "^29.7.0", "jest-circus": "^29.7.0",
"js-yaml": "^4.1.0", "js-yaml": "^4.1.0",
"prettier": "^3.3.3", "prettier": "^3.2.5",
"ts-jest": "^29.2.5", "ts-jest": "^29.1.2",
"typescript": "^5.5.4" "typescript": "^5.4.5"
} }
} }

View File

@ -261,8 +261,7 @@ export async function getSource(settings: IGitSourceSettings): Promise<void> {
const commitInfo = await git.log1() const commitInfo = await git.log1()
// Log commit sha // Log commit sha
const commitSHA = await git.log1('--format=%H') await git.log1("--format='%H'")
core.setOutput('commit', commitSHA.trim())
// Check for incorrect pull request merge commit // Check for incorrect pull request merge commit
await refHelper.checkCommitInfo( await refHelper.checkCommitInfo(
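One side of this hunk records the resolved commit SHA as a step output; a minimal sketch of the same idea with @actions/core, where commitSHA stands in for the result of git log1('--format=%H'):

import * as core from '@actions/core'

// Sketch: expose the checked-out commit SHA as a step output.
// The value below is hypothetical.
const commitSHA = '0123456789abcdef0123456789abcdef01234567\n'
core.setOutput('commit', commitSHA.trim())
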

View File

@ -35,9 +35,7 @@ export async function downloadRepository(
// Write archive to disk // Write archive to disk
core.info('Writing archive to disk') core.info('Writing archive to disk')
const uniqueId = uuid() const uniqueId = uuid()
const archivePath = IS_WINDOWS const archivePath = path.join(repositoryPath, `${uniqueId}.tar.gz`)
? path.join(repositoryPath, `${uniqueId}.zip`)
: path.join(repositoryPath, `${uniqueId}.tar.gz`)
await fs.promises.writeFile(archivePath, archiveData) await fs.promises.writeFile(archivePath, archiveData)
archiveData = Buffer.from('') // Free memory archiveData = Buffer.from('') // Free memory
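A short sketch of the platform-dependent archive path in this hunk, assuming IS_WINDOWS is derived from process.platform; repositoryPath is a placeholder:

import * as path from 'path'
import { v4 as uuid } from 'uuid'

// Sketch: pick a .zip archive name on Windows and .tar.gz elsewhere.
const IS_WINDOWS = process.platform === 'win32'
const repositoryPath = '/tmp/checkout-demo'
const uniqueId = uuid()
const archivePath = IS_WINDOWS
  ? path.join(repositoryPath, `${uniqueId}.zip`)
  : path.join(repositoryPath, `${uniqueId}.tar.gz`)
console.log(archivePath)
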

View File

@ -19,7 +19,6 @@ async function run(): Promise<void> {
// Get sources // Get sources
await gitSourceProvider.getSource(sourceSettings) await gitSourceProvider.getSource(sourceSettings)
core.setOutput('ref', sourceSettings.ref)
} finally { } finally {
// Unregister problem matcher // Unregister problem matcher
coreCommand.issueCommand('remove-matcher', {owner: 'checkout-git'}, '') coreCommand.issueCommand('remove-matcher', {owner: 'checkout-git'}, '')
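In the variant that sets it, the ref output is written after the sources are fetched, and the problem matcher is always unregistered in the finally block, even when getSource throws. A compact sketch using the same imports this file already relies on; the settings object is a placeholder:

import * as core from '@actions/core'
import * as coreCommand from '@actions/core/lib/command'

// Sketch: set the ref output, then always remove the problem matcher.
async function runSketch(sourceSettings: { ref: string }): Promise<void> {
  try {
    // ... register matcher and fetch sources ...
    core.setOutput('ref', sourceSettings.ref)
  } finally {
    coreCommand.issueCommand('remove-matcher', { owner: 'checkout-git' }, '')
  }
}
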

View File

@ -120,7 +120,7 @@ function updateUsage(
} }
updateUsage( updateUsage(
'actions/checkout@v5', 'actions/checkout@v4',
path.join(__dirname, '..', '..', 'action.yml'), path.join(__dirname, '..', '..', 'action.yml'),
path.join(__dirname, '..', '..', 'README.md') path.join(__dirname, '..', '..', 'README.md')
) )

View File

@ -42,12 +42,8 @@ export async function getCheckoutInfo(
result.ref = `refs/remotes/pull/${branch}` result.ref = `refs/remotes/pull/${branch}`
} }
// refs/tags/ // refs/tags/
else if (upperRef.startsWith('REFS/TAGS/')) {
result.ref = ref
}
// refs/
else if (upperRef.startsWith('REFS/')) { else if (upperRef.startsWith('REFS/')) {
result.ref = commit ? commit : ref result.ref = ref
} }
// Unqualified ref, check for a matching branch or tag // Unqualified ref, check for a matching branch or tag
else { else {
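A condensed sketch of the qualified-ref handling shown above, in the variant that treats tags and other fully-qualified refs differently (simplified; getCheckoutInfo also handles refs/heads/ and refs/pull/ earlier in the function):

// Sketch: resolve what to check out for a qualified ref.
function resolveRef(ref: string, commit?: string): string {
  const upperRef = ref.toUpperCase()
  if (upperRef.startsWith('REFS/TAGS/')) {
    return ref // tags: check out the tag ref itself
  }
  if (upperRef.startsWith('REFS/')) {
    return commit ? commit : ref // other refs: prefer the exact commit when supplied
  }
  return ref // unqualified refs are matched against branches or tags elsewhere
}

console.log(resolveRef('refs/tags/v1.2.3', 'abc123'))   // refs/tags/v1.2.3
console.log(resolveRef('refs/pull/42/merge', 'abc123')) // abc123
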

View File

@ -21,61 +21,26 @@ export function getFetchUrl(settings: IGitSourceSettings): string {
} }
export function getServerUrl(url?: string): URL { export function getServerUrl(url?: string): URL {
let resolvedUrl = process.env['GITHUB_SERVER_URL'] || 'https://github.com' let urlValue =
if (hasContent(url, WhitespaceMode.Trim)) { url && url.trim().length > 0
resolvedUrl = url! ? url
} : process.env['GITHUB_SERVER_URL'] || 'https://github.com'
return new URL(urlValue)
return new URL(resolvedUrl)
} }
export function getServerApiUrl(url?: string): string { export function getServerApiUrl(url?: string): string {
if (hasContent(url, WhitespaceMode.Trim)) { let apiUrl = 'https://api.github.com'
let serverUrl = getServerUrl(url)
if (isGhes(url)) { if (isGhes(url)) {
serverUrl.pathname = 'api/v3' const serverUrl = getServerUrl(url)
} else { apiUrl = new URL(`${serverUrl.origin}/api/v3`).toString()
serverUrl.hostname = 'api.' + serverUrl.hostname
} }
return pruneSuffix(serverUrl.toString(), '/') return apiUrl
}
return process.env['GITHUB_API_URL'] || 'https://api.github.com'
} }
export function isGhes(url?: string): boolean { export function isGhes(url?: string): boolean {
const ghUrl = new URL( const ghUrl = getServerUrl(url)
url || process.env['GITHUB_SERVER_URL'] || 'https://github.com'
)
const hostname = ghUrl.hostname.trimEnd().toUpperCase() return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'
const isGitHubHost = hostname === 'GITHUB.COM'
const isGitHubEnterpriseCloudHost = hostname.endsWith('.GHE.COM')
const isLocalHost = hostname.endsWith('.LOCALHOST')
return !isGitHubHost && !isGitHubEnterpriseCloudHost && !isLocalHost
}
function pruneSuffix(text: string, suffix: string) {
if (hasContent(suffix, WhitespaceMode.Preserve) && text?.endsWith(suffix)) {
return text.substring(0, text.length - suffix.length)
}
return text
}
enum WhitespaceMode {
Trim,
Preserve
}
function hasContent(
text: string | undefined,
whitespaceMode: WhitespaceMode
): boolean {
let refinedText = text ?? ''
if (whitespaceMode == WhitespaceMode.Trim) {
refinedText = refinedText.trim()
}
return refinedText.length > 0
} }
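To make the behavioural difference concrete, a rough re-implementation of the GHE.COM-aware variant of these helpers, for illustration only (the real functions also fall back to the GITHUB_SERVER_URL and GITHUB_API_URL environment variables):

// Sketch: github.com, *.ghe.com and *.localhost hosts are not treated as GHES;
// GHES gets <server>/api/v3, cloud-style hosts get an api. prefix.
function isGhesLike(url: string): boolean {
  const hostname = new URL(url).hostname.toUpperCase()
  return !(
    hostname === 'GITHUB.COM' ||
    hostname.endsWith('.GHE.COM') ||
    hostname.endsWith('.LOCALHOST')
  )
}

function apiUrlFor(url: string): string {
  const serverUrl = new URL(url)
  if (isGhesLike(url)) {
    serverUrl.pathname = 'api/v3'
  } else {
    serverUrl.hostname = 'api.' + serverUrl.hostname
  }
  return serverUrl.toString().replace(/\/$/, '') // prune a trailing slash
}

console.log(isGhesLike('https://github.com'))          // false
console.log(isGhesLike('https://octocat.ghe.com'))     // false
console.log(isGhesLike('https://ghes.example.com'))    // true
console.log(apiUrlFor('https://github.com'))           // https://api.github.com
console.log(apiUrlFor('https://ghes.example.com'))     // https://ghes.example.com/api/v3
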