Compare commits


No commits in common. "main" and "v42" have entirely different histories.
main...v42

37 changed files with 8481 additions and 22978 deletions

View file

@@ -208,80 +208,6 @@
"test",
"bug"
]
},
{
"login": "levenleven",
"name": "Aleksey Levenstein",
"avatar_url": "https://avatars.githubusercontent.com/u/6463364?v=4",
"profile": "https://github.com/levenleven",
"contributions": [
"doc"
]
},
{
"login": "dan-hill2802",
"name": "Daniel Hill",
"avatar_url": "https://avatars.githubusercontent.com/u/5046322?v=4",
"profile": "https://github.com/dan-hill2802",
"contributions": [
"doc"
]
},
{
"login": "KeisukeYamashita",
"name": "KeisukeYamashita",
"avatar_url": "https://avatars.githubusercontent.com/u/23056537?v=4",
"profile": "https://keisukeyamashita.com",
"contributions": [
"doc"
]
},
{
"login": "codesculpture",
"name": "Aravind",
"avatar_url": "https://avatars.githubusercontent.com/u/63452117?v=4",
"profile": "https://github.com/codesculpture",
"contributions": [
"code",
"bug"
]
},
{
"login": "Whadup",
"name": "Lukas Pfahler",
"avatar_url": "https://avatars.githubusercontent.com/u/2308119?v=4",
"profile": "https://lukaspfahler.de",
"contributions": [
"code"
]
},
{
"login": "RajendraP",
"name": "Rajendra Pandey",
"avatar_url": "https://avatars.githubusercontent.com/u/8928165?v=4",
"profile": "https://github.com/RajendraP",
"contributions": [
"doc"
]
},
{
"login": "undefined-moe",
"name": "undefined",
"avatar_url": "https://avatars.githubusercontent.com/u/29992205?v=4",
"profile": "https://undefined.moe/",
"contributions": [
"doc"
]
},
{
"login": "Jellyfrog",
"name": "Jellyfrog",
"avatar_url": "https://avatars.githubusercontent.com/u/759887?v=4",
"profile": "https://github.com/Jellyfrog",
"contributions": [
"code",
"doc"
]
}
],
"contributorsPerLine": 7,

View file

@@ -24,17 +24,22 @@ permissions:
jobs:
codacy-security-scan:
+# Cancel other workflows that are running for the same branch
+# https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency
+concurrency:
+group: ${{ github.workflow }}-${{ github.ref }}
+cancel-in-progress: true
name: Codacy Security Scan
runs-on: ubuntu-latest
steps:
# Checkout the repository to the GitHub Actions runner
- name: Checkout code
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
# Execute Codacy Analysis CLI and generate a SARIF output with the security issues identified during the analysis
- name: Run Codacy Analysis CLI
continue-on-error: true
-uses: codacy/codacy-analysis-cli-action@97bf5df3c09e75f5bcd72695998f96ebd701846e # v4.4.5
+uses: codacy/codacy-analysis-cli-action@v4.3.0
with:
# Check https://github.com/codacy/codacy-analysis-cli#project-token to get your project token from your Codacy repository
# You can also omit the token and run the tools that support default configurations
@@ -51,6 +56,6 @@ jobs:
# Upload the SARIF file generated in the previous step
- name: Upload SARIF results file
continue-on-error: true
-uses: github/codeql-action/upload-sarif@39edc492dbe16b1465b0cafca41432d857bdb31a # v3.29.1
+uses: github/codeql-action/upload-sarif@v3
with:
sarif_file: results.sarif

View file

@@ -43,11 +43,11 @@ jobs:
steps:
- name: Checkout repository
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
-uses: github/codeql-action/init@39edc492dbe16b1465b0cafca41432d857bdb31a # v3.29.1
+uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -61,7 +61,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
-uses: github/codeql-action/autobuild@39edc492dbe16b1465b0cafca41432d857bdb31a # v3.29.1
+uses: github/codeql-action/autobuild@v3
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -74,6 +74,6 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
-uses: github/codeql-action/analyze@39edc492dbe16b1465b0cafca41432d857bdb31a # v3.29.1
+uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

View file

@@ -19,7 +19,7 @@ jobs:
NUMBER: ${{ github.event.issue.number }}
- name: Checkout
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
submodules: true
fetch-depth: 0
@@ -104,7 +104,7 @@ jobs:
NUMBER: ${{ github.event.issue.number }}
- name: Checkout
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
submodules: true
fetch-depth: 0

View file

@@ -18,7 +18,7 @@ jobs:
steps:
- name: Checkout
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
submodules: true
fetch-depth: 0

View file

@@ -17,14 +17,15 @@ jobs:
matrix: ${{ steps.changed-files.outputs.all_changed_files }}
steps:
- name: Checkout
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Get changed files
id: changed-files
uses: ./
with:
-matrix: true
+json: true
+escape_json: false
- name: List all changed files
run: echo '${{ steps.changed-files.outputs.all_changed_files }}'
@@ -39,7 +40,7 @@ jobs:
fail-fast: false
steps:
- name: Checkout
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
- name: Test
run: |
echo ${{ matrix.files }}
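The hunk above is the matrix example workflow: on main, the single `matrix: true` input replaces v42's `json: true` plus `escape_json: false` pair. A minimal sketch of the consuming pattern, assuming the action is referenced by tag (the `tj-actions/changed-files@v45` reference and job names here are illustrative, not taken from this diff):

```yaml
jobs:
  changed-files:
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.changed-files.outputs.all_changed_files }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Get changed files
        id: changed-files
        uses: tj-actions/changed-files@v45
        with:
          matrix: true  # shorthand for json: true + escape_json: false
  per-file:
    needs: changed-files
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        files: ${{ fromJSON(needs.changed-files.outputs.matrix) }}
    steps:
      - name: Test
        run: echo "${{ matrix.files }}"
```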

View file

@@ -19,7 +19,7 @@ jobs:
all_changed_files: ${{ steps.changed-files.outputs.all_changed_files }}
steps:
- name: Checkout
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Get changed files
@@ -45,7 +45,7 @@ jobs:
all_changed_files: ${{ steps.changed-files.outputs.all_changed_files }}
steps:
- name: Checkout
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Get changed files

View file

@@ -13,22 +13,21 @@ jobs:
update-version:
runs-on: ubuntu-latest
steps:
-- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Run release-tagger
-uses: tj-actions/release-tagger@1a9264b0fd99a1ef92c4fd2f077f292900cc79b6 # v4.0.0
+uses: tj-actions/release-tagger@v4
- name: Sync release version.
-uses: tj-actions/sync-release-version@2a7ef0deb39b3ecce887ee99d2261c6cef989d84 # v13.16
+uses: tj-actions/sync-release-version@v13
id: sync-release-version
with:
pattern: '${{ github.repository }}@'
only_major: true
-use_tag_commit_hash: true
paths: |
README.md
- name: Sync release package version.
-uses: tj-actions/sync-release-version@2a7ef0deb39b3ecce887ee99d2261c6cef989d84 # v13.16
+uses: tj-actions/sync-release-version@v13
id: sync-release-package-version
with:
pattern: '"version": "'
@@ -36,15 +35,14 @@ jobs:
paths: |
package.json
- name: Run git-cliff
-uses: tj-actions/git-cliff@75599f745633e29f99bd9e14a30865b7d2fcbe84 # v1.5.0
+uses: tj-actions/git-cliff@v1
- name: Create Pull Request
-uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
+uses: peter-evans/create-pull-request@v5.0.2
with:
base: "main"
labels: "merge when passing"
-sign-commits: true
title: "Upgraded to ${{ steps.sync-release-version.outputs.new_version }}"
branch: "upgrade-to-${{ steps.sync-release-version.outputs.new_version }}"
commit-message: "Upgraded from ${{ steps.sync-release-version.outputs.old_version }} -> ${{ steps.sync-release-version.outputs.new_version }}"
body: "View [CHANGES](https://github.com/${{ github.repository }}/compare/${{ steps.sync-release-version.outputs.old_version }}...${{ steps.sync-release-version.outputs.new_version }})"
-token: ${{ secrets.GITHUB_TOKEN }}
+token: ${{ secrets.PAT_TOKEN }}

View file

@@ -2,6 +2,7 @@ name: CI
permissions:
contents: read
+pull-requests: write
on:
push:
@@ -34,18 +35,15 @@ jobs:
runs-on: ubuntu-latest
outputs:
files_changed: ${{ steps.changed_files.outputs.files_changed }}
-permissions:
-contents: write
-pull-requests: read
steps:
-- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+- uses: actions/checkout@v4
with:
persist-credentials: false
fetch-depth: 0
repository: ${{ github.event.pull_request.head.repo.full_name }}
- name: Use Node.js 20.x
-uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+uses: actions/setup-node@v4.0.1
with:
cache: 'yarn'
node-version: '20.x'
@@ -60,41 +58,49 @@ jobs:
yarn install
- name: Run eslint on changed files
-uses: tj-actions/eslint-changed-files@0cfcd35949a961d13e964a7c2a4971c0e89b68fc # v25.3.1
+uses: tj-actions/eslint-changed-files@v23
if: github.event_name == 'pull_request'
with:
+token: ${{ secrets.PAT_TOKEN }}
config_path: ".eslintrc.json"
ignore_path: ".eslintignore"
- name: Run build and test
run: |
yarn all
-env:
-GITHUB_EVENT_PULL_REQUEST_HEAD_REPO_FORK: ${{ github.event.pull_request.head.repo.fork }}
- name: Verify Changed files
-uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
+uses: tj-actions/verify-changed-files@v17
-if: github.actor != 'dependabot[bot]'
-id: verify_changed_files
+id: changed_files
with:
files: |
src
dist
-- name: Verify outstanding changes
+- name: Commit files
-if: steps.verify_changed_files.outputs.files_changed == 'true' && github.event_name == 'pull_request'
+if: steps.changed_files.outputs.files_changed == 'true' && github.event_name == 'pull_request'
run: |
-echo "Uncommitted build/lint changes detected. Please run 'yarn all' locally and push the changes for review."
-exit 1
+git config --local user.email "action@github.com"
+git config --local user.name "GitHub Action"
+git add src dist
+git commit -m "Added missing changes and modified dist assets."
+- name: Push changes
+if: steps.changed_files.outputs.files_changed == 'true' && github.event_name == 'pull_request'
+continue-on-error: true
+uses: ad-m/github-push-action@master
+with:
+github_token: ${{ secrets.PAT_TOKEN }}
+branch: ${{ github.head_ref }}
- name: Upload build assets
-uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+uses: actions/upload-artifact@v3
with:
name: build-assets
path: dist
- name: Run codacy-coverage-reporter
-uses: codacy/codacy-coverage-reporter-action@89d6c85cfafaec52c72b6c5e8b2878d33104c699 # v1.3.0
+uses: codacy/codacy-coverage-reporter-action@v1
continue-on-error: true
with:
project-token: ${{ secrets.CODACY_PROJECT_TOKEN }}
@@ -105,11 +111,9 @@ jobs:
runs-on: ubuntu-latest
needs: build
if: needs.build.outputs.files_changed != 'true'
-permissions:
-contents: read
steps:
- name: Checkout into dir1
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
repository: ${{ github.event.pull_request.head.repo.full_name }}
submodules: true
@@ -117,7 +121,7 @@ jobs:
path: dir1
- name: Download build assets
-uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+uses: actions/download-artifact@v3
with:
name: build-assets
path: dir1/dist
@@ -143,13 +147,13 @@ jobs:
bash
- name: Download build assets
-uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+uses: actions/download-artifact@v3
with:
name: build-assets
path: dir2/dist
- name: Checkout into dir2
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
repository: ${{ github.event.pull_request.head.repo.full_name }}
submodules: true
@@ -181,17 +185,16 @@ jobs:
needs: build
runs-on: ubuntu-latest
if: github.event_name == 'push' && needs.build.outputs.files_changed != 'true'
-permissions:
-contents: read
steps:
- name: Checkout branch
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
repository: ${{ github.event.pull_request.head.repo.full_name }}
fetch-depth: 0
- name: Download build assets
-uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+uses: actions/download-artifact@v3
with:
name: build-assets
@@ -240,18 +243,16 @@ jobs:
runs-on: ubuntu-latest
needs: build
if: needs.build.outputs.files_changed != 'true'
-permissions:
-contents: read
steps:
- name: Checkout branch
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
repository: ${{ github.event.pull_request.head.repo.full_name }}
fetch-depth: 0
- name: Download build assets
-uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+uses: actions/download-artifact@v3
with:
name: build-assets
@@ -284,18 +285,16 @@ jobs:
github.event_name == 'push' ||
github.event_name == 'pull_request'
) && needs.build.outputs.files_changed != 'true'
-permissions:
-contents: read
steps:
- name: Checkout branch
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
repository: ${{ github.event.pull_request.head.repo.full_name }}
fetch-depth: 0
- name: Download build assets
-uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+uses: actions/download-artifact@v3
with:
name: build-assets
@@ -316,8 +315,6 @@ jobs:
runs-on: ubuntu-latest
needs: build
if: needs.build.outputs.files_changed != 'true'
-permissions:
-contents: read
strategy:
fail-fast: false
@@ -328,13 +325,13 @@ jobs:
steps:
- name: Checkout branch
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
repository: ${{ github.event.pull_request.head.repo.full_name }}
fetch-depth: ${{ matrix.fetch-depth }}
- name: Download build assets
-uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+uses: actions/download-artifact@v3
with:
name: build-assets
@@ -357,17 +354,15 @@ jobs:
runs-on: ubuntu-latest
needs: build
if: github.event_name != 'push' && needs.build.outputs.files_changed != 'true'
-permissions:
-contents: read
steps:
- name: Checkout branch
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Download build assets
-uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+uses: actions/download-artifact@v3
with:
name: build-assets
@@ -386,8 +381,6 @@ jobs:
runs-on: ubuntu-latest
needs: build
if: github.event_name != 'push' && needs.build.outputs.files_changed != 'true'
-permissions:
-contents: read
strategy:
fail-fast: false
max-parallel: 4
@@ -396,14 +389,14 @@ jobs:
steps:
- name: Checkout branch
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
fetch-depth: ${{ matrix.fetch-depth }}
persist-credentials: false
repository: ${{ github.event.pull_request.head.repo.full_name }}
- name: Download build assets
-uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+uses: actions/download-artifact@v3
with:
name: build-assets
@@ -422,14 +415,13 @@ jobs:
runs-on: ubuntu-latest
needs: build
if: needs.build.outputs.files_changed != 'true'
-permissions:
-contents: read
steps:
- name: Checkout branch
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
- name: Download build assets
-uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+uses: actions/download-artifact@v3
with:
name: build-assets
@@ -477,14 +469,13 @@ jobs:
runs-on: ubuntu-latest
needs: build
if: needs.build.outputs.files_changed != 'true'
-permissions:
-contents: read
steps:
- name: Checkout branch
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
- name: Download build assets
-uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+uses: actions/download-artifact@v3
with:
name: build-assets
@@ -536,7 +527,7 @@ jobs:
pull-requests: read
steps:
- name: Checkout into dir1
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
repository: ${{ github.event.pull_request.head.repo.full_name }}
submodules: true
@@ -544,7 +535,7 @@ jobs:
path: dir1
- name: Download build assets
-uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+uses: actions/download-artifact@v3
with:
name: build-assets
path: dir1/dist
@@ -559,83 +550,11 @@ jobs:
shell:
bash
test-dir-names-nested-folder:
name: Test changed-files with dir-names in a nested folder
runs-on: ubuntu-latest
needs: build
if: needs.build.outputs.files_changed != 'true'
permissions:
contents: read
steps:
- name: Checkout branch
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
repository: ${{ github.event.pull_request.head.repo.full_name }}
submodules: true
fetch-depth: 0
- name: Download build assets
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: build-assets
path: dist
- name: Get changed files in the .github folder
id: changed-files
uses: ./
with:
path: .github
json: true
escape_json: false
dir_names: true
dir_names_exclude_current_dir: true
- name: Show output
run: |
echo "${{ toJSON(steps.changed-files.outputs) }}"
shell:
bash
test-non-existing-repository:
name: Test changed-files with non existing repository
runs-on: ubuntu-latest
needs: build
if: github.event_name == 'push' && needs.build.outputs.files_changed != 'true'
permissions:
contents: read
steps:
- name: Checkout into dir1
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
repository: ${{ github.event.pull_request.head.repo.full_name }}
submodules: true
fetch-depth: 0
path: dir1
- name: Download build assets
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: build-assets
path: dir1/dist
- name: Run changed-files with non existing repository
id: changed-files
continue-on-error: true
uses: ./dir1
- name: Verify failed
if: steps.changed-files.outcome != 'failure'
run: |
echo "Expected: (failure) got ${{ steps.changed-files.outcome }}"
exit 1
test-submodules:
name: Test changed-files with submodule
runs-on: ubuntu-latest
needs: build
if: needs.build.outputs.files_changed != 'true'
-permissions:
-contents: read
strategy:
fail-fast: false
max-parallel: 4
@@ -644,14 +563,14 @@ jobs:
steps:
- name: Checkout branch
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha || github.sha }}
submodules: recursive
fetch-depth: ${{ matrix.fetch-depth }}
- name: Download build assets
-uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+uses: actions/download-artifact@v3
with:
name: build-assets
@@ -674,34 +593,11 @@ jobs:
echo "${{ toJSON(steps.changed-files.outputs) }}"
shell:
bash
- name: Run changed-files excluding submodule
id: changed-files-exclude-submodule
uses: ./
with:
base_sha: "85bd869"
sha: "adde7bb"
fetch_depth: 60000
exclude_submodules: true
- name: Verify no added files
if: steps.changed-files-exclude-submodule.outputs.added_files != ''
run: |
echo "Expected: ('') got ${{ steps.changed-files-exclude-submodule.outputs.added_files }}"
exit 1
- name: Show output
run: |
echo "${{ toJSON(steps.changed-files-exclude-submodule.outputs) }}"
shell:
bash
test-yaml:
name: Test changed-files with yaml
runs-on: ubuntu-latest
needs: build
if: needs.build.outputs.files_changed != 'true'
-permissions:
-contents: read
strategy:
fail-fast: false
max-parallel: 4
@@ -710,14 +606,14 @@ jobs:
steps:
- name: Checkout branch
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha || github.sha }}
submodules: recursive
fetch-depth: ${{ matrix.fetch-depth }}
- name: Download build assets
-uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+uses: actions/download-artifact@v3
with:
name: build-assets
@@ -749,24 +645,6 @@ jobs:
shell:
bash
- name: Run changed-files with files_yaml, files_ignore_yaml
id: changed-files-ignore
uses: ./
with:
files_yaml: |
test:
- test/**.txt
- test/**.md
files_ignore_yaml: |
test:
- test/test.txt
- name: Show output
run: |
echo "${{ toJSON(steps.changed-files-ignore.outputs) }}"
shell:
bash
- name: Run changed-files with files_yaml, json and write_output_files
id: changed-files-json-write-output-files
uses: ./
@@ -795,8 +673,6 @@ jobs:
runs-on: ubuntu-latest
needs: build
if: needs.build.outputs.files_changed != 'true'
-permissions:
-contents: read
strategy:
fail-fast: false
max-parallel: 4
@@ -805,14 +681,14 @@ jobs:
steps:
- name: Checkout branch
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha || github.sha }}
submodules: recursive
fetch-depth: ${{ matrix.fetch-depth }}
- name: Download build assets
-uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+uses: actions/download-artifact@v3
with:
name: build-assets
@@ -987,11 +863,9 @@ jobs:
runs-on: ubuntu-latest
needs: build
if: needs.build.outputs.files_changed != 'true'
-permissions:
-contents: read
steps:
- name: Checkout branch
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
ref: a52f8621d26d5d9f54b80f74bda2d9eedff94693
repository: ${{ github.event.pull_request.head.repo.full_name }}
@@ -1019,19 +893,14 @@ jobs:
bash
test-dir-names-deleted-files-include-only-deleted-dirs-directory:
name: Test dir names deleted files include only deleted dirs
-runs-on: ${{ matrix.platform }}
+runs-on: ubuntu-latest
needs: build
-strategy:
-matrix:
-platform: [ubuntu-latest, macos-latest, windows-latest]
if: needs.build.outputs.files_changed != 'true'
-permissions:
-contents: read
steps:
- name: Checkout branch
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
-ref: ${{ github.event.pull_request.head.sha }}
+ref: cd1e384723e4d1a184568182ac2b27c53ebf017f
repository: ${{ github.event.pull_request.head.repo.full_name }}
submodules: true
fetch-depth: 2
@@ -1043,7 +912,6 @@ jobs:
sha: cd1e384723e4d1a184568182ac2b27c53ebf017f
dir_names: true
dir_names_deleted_files_include_only_deleted_dirs: true
-fetch_depth: 60000
- name: Show output
run: |
echo '${{ toJSON(steps.changed-files-dir-names-deleted-files-include-only-deleted-dirs-directory.outputs) }}'
@@ -1063,43 +931,12 @@ jobs:
exit 1
shell:
bash
- name: Run changed-files with dir_names and dir_names_deleted_files_include_only_deleted_dirs with the test directory deleted returns posix path separator
id: changed-files-dir-names-deleted-files-include-only-deleted-dirs-directory-posix-path-separator
uses: ./
with:
base_sha: a52f8621d26d5d9f54b80f74bda2d9eedff94693
sha: cd1e384723e4d1a184568182ac2b27c53ebf017f
dir_names: true
dir_names_deleted_files_include_only_deleted_dirs: true
use_posix_path_separator: true
fetch_depth: 60000
- name: Show output
run: |
echo '${{ toJSON(steps.changed-files-dir-names-deleted-files-include-only-deleted-dirs-directory-posix-path-separator.outputs) }}'
shell:
bash
- name: Check deleted_files output on non windows platform
if: steps.changed-files-dir-names-deleted-files-include-only-deleted-dirs-directory-posix-path-separator.outputs.deleted_files != 'test/test3' && runner.os != 'Windows'
run: |
echo "Invalid output: Expected (test/test3) got (${{ steps.changed-files-dir-names-deleted-files-include-only-deleted-dirs-directory-posix-path-separator.outputs.deleted_files }})"
exit 1
shell:
bash
- name: Check deleted_files output on windows platform
if: "!contains(steps.changed-files-dir-names-deleted-files-include-only-deleted-dirs-directory-posix-path-separator.outputs.deleted_files, 'test/test3') && runner.os == 'Windows'"
run: |
echo "Invalid output: Expected (test/test3) got (${{ steps.changed-files-dir-names-deleted-files-include-only-deleted-dirs-directory-posix-path-separator.outputs.deleted_files }})"
exit 1
shell:
bash
test-since-last-remote-commit:
name: Test changed-files since last remote commit
runs-on: ubuntu-latest
needs: build
if: needs.build.outputs.files_changed != 'true'
-permissions:
-contents: read
strategy:
fail-fast: false
max-parallel: 4
@@ -1108,7 +945,7 @@ jobs:
steps:
- name: Checkout branch
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
@@ -1116,7 +953,7 @@ jobs:
fetch-depth: ${{ matrix.fetch-depth }}
- name: Download build assets
-uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+uses: actions/download-artifact@v3
with:
name: build-assets
@@ -1127,10 +964,10 @@ jobs:
with:
since_last_remote_commit: true
-- name: Verify succeeded pull_request(closed)
+- name: Verify failed
-if: steps.changed-files-since-last-remote-commit.outcome != 'success' && matrix.fetch-depth == 1 && github.event.action == 'closed' && github.event_name == 'pull_request'
+if: steps.changed-files-since-last-remote-commit.outcome != 'failure' && matrix.fetch-depth == 1 && github.event.action == 'closed' && github.event_name == 'pull_request'
run: |
-echo "Expected: (success) got ${{ steps.changed-files-since-last-remote-commit.outcome }}"
+echo "Expected: (failure) got ${{ steps.changed-files-since-last-remote-commit.outcome }}"
exit 1
- name: Verify succeeded
@@ -1150,8 +987,6 @@ jobs:
runs-on: ${{ matrix.platform }}
needs: build
if: needs.build.outputs.files_changed != 'true'
-permissions:
-contents: read
strategy:
fail-fast: false
max-parallel: 4
@@ -1161,13 +996,13 @@ jobs:
steps:
- name: Checkout
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+uses: actions/checkout@v4
with:
submodules: true
repository: ${{ github.event.pull_request.head.repo.full_name }}
fetch-depth: ${{ matrix.fetch-depth }}
- name: Download build assets
-uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+uses: actions/download-artifact@v3
with:
name: build-assets
- name: Dump GitHub context
@@ -1182,7 +1017,7 @@ jobs:
echo '${{ toJSON(steps.changed-files.outputs) }}'
shell:
bash
-- name: Run changed-files with dir name pattern
+- name: Run changed-files with dir name
id: changed-files-dir-name
uses: ./
with:
@@ -2152,9 +1987,9 @@ jobs:
bash
- name: Get branch name
id: branch-name
-uses: tj-actions/branch-names@dde14ac574a8b9b1cedc59a1cf312788af43d8d8 # v8.2.1
+uses: tj-actions/branch-names@v8
if: github.event_name == 'pull_request' && matrix.fetch-depth == 0
-- uses: nrwl/nx-set-shas@dbe0650947e5f2c81f59190a38512cf49126fe6b # v4.3.0
+- uses: nrwl/nx-set-shas@v4
id: last_successful_commit
if: github.event_name == 'pull_request' && github.event.action != 'closed' && matrix.fetch-depth == 0
with: with:

View file

@@ -1,7 +1,7 @@
name: Format README.md
permissions:
-contents: write
+contents: read
pull-requests: write
on:
@@ -13,22 +13,21 @@ jobs:
sync-assets:
runs-on: ubuntu-latest
steps:
-- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Run auto-doc
-uses: tj-actions/auto-doc@b10ceedffd794ec29a8fa8700529f40c1b64a951 # v3.6.0
+uses: tj-actions/auto-doc@v3
with:
use_code_blocks: true
use_major_version: true
-use_tag_commit_hash: true
- name: Run remark
-uses: tj-actions/remark@10fc40701928cbafcc4a2d241679579d218144ff # v3
+uses: tj-actions/remark@v3
- name: Verify Changed files
-uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
+uses: tj-actions/verify-changed-files@v17
id: verify_changed_files
with:
files: |
@@ -42,7 +41,7 @@ jobs:
- name: Create Pull Request
if: failure()
-uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
+uses: peter-evans/create-pull-request@v5
with:
base: "main"
labels: "merge when passing"
@@ -50,5 +49,4 @@ jobs:
branch: "chore/update-readme"
commit-message: "Updated README.md"
body: "Updated README.md"
-sign-commits: true
-token: ${{ secrets.GITHUB_TOKEN }}
+token: ${{ secrets.PAT_TOKEN }}

View file

@@ -1,7 +1,7 @@
name: Workflow Run Example
on:
workflow_run:
-workflows: [Matrix Example]
+workflows: [Matrix Test]
types: [completed]
permissions:
@@ -12,30 +12,9 @@ jobs:
runs-on: ubuntu-latest
if: ${{ github.event.workflow_run.conclusion == 'success' }}
steps:
-- name: Checkout code
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-- name: Get changed files
-id: changed-files
-uses: ./
-- name: Echo list of changed files on success
-run: |
-echo "Changed files on success:"
-echo "${{ steps.changed-files.outputs.all_changed_files }}"
+- run: echo 'The triggering workflow passed'
on-failure:
runs-on: ubuntu-latest
if: ${{ github.event.workflow_run.conclusion == 'failure' }}
steps:
-- name: Checkout code
-uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-- name: Get changed files
-id: changed-files
-uses: ./
-- name: Echo list of changed files on failure
-run: |
-echo "Changed files on failure:"
-echo "${{ steps.changed-files.outputs.all_changed_files }}"
+- run: echo 'The triggering workflow failed'

HISTORY.md (1566 changed lines): file diff suppressed because it is too large.

README.md (467 changed lines): file diff suppressed because it is too large.

View file

@@ -127,7 +127,7 @@ inputs:
required: false
default: "false"
json:
-description: "Output list of changed files in a JSON formatted string which can be used for matrix jobs. Example: https://github.com/tj-actions/changed-files/blob/main/.github/workflows/matrix-example.yml"
+description: "Output list of changed files in a JSON formatted string which can be used for matrix jobs. Example: https://github.com/tj-actions/changed-files/blob/main/.github/workflows/matrix-test.yml"
required: false
default: "false"
escape_json:
@@ -141,11 +141,11 @@ inputs:
fetch_depth:
description: "Depth of additional branch history fetched. NOTE: This can be adjusted to resolve errors with insufficient history."
required: false
-default: "25"
+default: "50"
skip_initial_fetch:
description: |
-Skip initially fetching additional history to improve performance for shallow repositories.
-NOTE: This could lead to errors with missing history. It's intended to be used when you've fetched all necessary history to perform the diff.
+Skip the initial fetch to improve performance for shallow repositories.
+NOTE: This could lead to errors with missing history and the intended use is limited to when you've fetched the history necessary to perform the diff.
required: false
default: "false"
fetch_additional_submodule_history:
@@ -219,39 +219,12 @@ inputs:
description: "Apply the negation patterns first. NOTE: This affects how changed files are matched."
required: false
default: "false"
matrix:
description: "Output changed files in a format that can be used for matrix jobs. Alias for setting inputs `json` to `true` and `escape_json` to `false`."
required: false
default: "false"
exclude_submodules:
description: "Exclude changes to submodules."
required: false
default: "false"
fetch_missing_history_max_retries:
description: "Maximum number of retries to fetch missing history."
required: false
default: "20"
use_posix_path_separator:
description: "Use POSIX path separator `/` for output file paths on Windows."
required: false
default: "false"
tags_pattern:
description: "Tags pattern to include."
required: false
default: "*"
tags_ignore_pattern:
description: "Tags pattern to ignore."
required: false
default: ""
outputs:
added_files:
description: "Returns only files that are Added (A)."
added_files_count:
description: "Returns the number of `added_files`"
-any_added:
-description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs have been added."
copied_files:
description: "Returns only files that are Copied (C)."
copied_files_count:
@@ -285,45 +258,45 @@ outputs:
unknown_files_count:
description: "Returns the number of `unknown_files`"
all_changed_and_modified_files:
-description: "Returns all changed and modified files i.e. a combination of (ACMRDTUX)"
+description: "Returns all changed and modified files i.e. *a combination of (ACMRDTUX)*"
all_changed_and_modified_files_count:
description: "Returns the number of `all_changed_and_modified_files`"
all_changed_files:
-description: "Returns all changed files i.e. a combination of all added, copied, modified and renamed files (ACMR)"
+description: "Returns all changed files i.e. *a combination of all added, copied, modified and renamed files (ACMR)*"
all_changed_files_count:
description: "Returns the number of `all_changed_files`"
any_changed:
-description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs have changed. This defaults to `true` when no patterns are specified. i.e. *includes a combination of all added, copied, modified and renamed files (ACMR)*."
+description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs has changed. i.e. *includes a combination of all added, copied, modified and renamed files (ACMR)*."
only_changed:
-description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs have changed. i.e. *includes a combination of all added, copied, modified and renamed files (ACMR)*."
+description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs has changed. i.e. *includes a combination of all added, copied, modified and renamed files (ACMR)*."
other_changed_files:
-description: "Returns all other changed files not listed in the files input i.e. includes a combination of all added, copied, modified and renamed files (ACMR)."
+description: "Returns all other changed files not listed in the files input i.e. *includes a combination of all added, copied, modified and renamed files (ACMR)*."
other_changed_files_count:
description: "Returns the number of `other_changed_files`"
all_modified_files:
-description: "Returns all changed files i.e. a combination of all added, copied, modified, renamed and deleted files (ACMRD)."
+description: "Returns all changed files i.e. *a combination of all added, copied, modified, renamed and deleted files (ACMRD)*."
all_modified_files_count:
description: "Returns the number of `all_modified_files`"
any_modified:
-description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs have been modified. This defaults to `true` when no patterns are specified. i.e. *includes a combination of all added, copied, modified, renamed, and deleted files (ACMRD)*."
+description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs has been modified. i.e. *includes a combination of all added, copied, modified, renamed, and deleted files (ACMRD)*."
only_modified:
-description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs have been modified. (ACMRD)."
+description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs has been modified. (ACMRD)."
other_modified_files:
-description: "Returns all other modified files not listed in the files input i.e. a combination of all added, copied, modified, and deleted files (ACMRD)"
+description: "Returns all other modified files not listed in the files input i.e. *a combination of all added, copied, modified, and deleted files (ACMRD)*"
other_modified_files_count:
description: "Returns the number of `other_modified_files`"
any_deleted:
-description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs have been deleted. This defaults to `true` when no patterns are specified. (D)"
+description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs has been deleted. (D)"
only_deleted:
-description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs have been deleted. (D)"
+description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs has been deleted. (D)"
other_deleted_files:
-description: "Returns all other deleted files not listed in the files input i.e. a combination of all deleted files (D)"
+description: "Returns all other deleted files not listed in the files input i.e. *a combination of all deleted files (D)*"
other_deleted_files_count:
description: "Returns the number of `other_deleted_files`"
modified_keys:
-description: "Returns all modified YAML keys when the `files_yaml` input is used. i.e. key that contains any path that has either been added, copied, modified, and deleted (ACMRD)"
+description: "Returns all modified YAML keys when the `files_yaml` input is used. i.e. *key that contains any path that has either been added, copied, modified, and deleted (ACMRD)*"
changed_keys:
-description: "Returns all changed YAML keys when the `files_yaml` input is used. i.e. key that contains any path that has either been added, copied, modified, and renamed (ACMR)"
+description: "Returns all changed YAML keys when the `files_yaml` input is used. i.e. *key that contains any path that has either been added, copied, modified, and renamed (ACMR)*"
runs:
using: 'node20'
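The outputs above pair with the `files*` inputs in the usual step-level pattern. A minimal usage sketch, assuming the action is referenced by tag and with illustrative file patterns (neither is taken from this diff):

```yaml
- name: Get changed files
  id: changed-files
  uses: tj-actions/changed-files@v45  # pin to a tag or commit SHA as appropriate
  with:
    files: |
      src/**
      **/*.md

- name: Run a step only when matching files changed
  if: steps.changed-files.outputs.any_changed == 'true'
  run: |
    for file in ${{ steps.changed-files.outputs.all_changed_files }}; do
      echo "$file was changed"
    done
```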

dist/index.js (generated, vendored; 17527 changed lines): file diff suppressed because one or more lines are too long.

dist/index.js.map (generated, vendored; 2 changed lines): file diff suppressed because one or more lines are too long.

dist/licenses.txt (generated, vendored; 5264 changed lines): file diff suppressed because it is too large.

dist/sourcemap-register.js (generated, vendored; 2 changed lines): file diff suppressed because one or more lines are too long.

View file

@@ -1,5 +1,6 @@
const path = require('path')
+process.env.TESTING = "1"
process.env.GITHUB_WORKSPACE = path.join(
path.resolve(__dirname, '..'), '.'
)

View file

@@ -1,6 +1,6 @@
{
"name": "@tj-actions/changed-files",
-"version": "45.0.8",
+"version": "42.0.0",
"description": "Github action to retrieve all (added, copied, modified, deleted, renamed, type changed, unmerged, unknown) files and directories.",
"main": "lib/main.js",
"publishConfig": {
@@ -14,7 +14,6 @@
"lint:fix": "eslint --fix src/*.ts src/**/*.ts",
"package": "ncc build lib/main.js --source-map --license licenses.txt",
"test": "jest --coverage",
-"update-snapshot": "jest -u",
"all": "yarn build && yarn format && yarn lint && yarn package && yarn test"
},
"repository": {
@@ -36,27 +35,26 @@
"@actions/core": "^1.10.0",
"@actions/exec": "^1.1.1",
"@actions/github": "^6.0.0",
-"@octokit/rest": "^22.0.0",
-"@stdlib/utils-convert-path": "^0.2.1",
+"@octokit/rest": "^20.0.1",
"lodash": "^4.17.21",
"micromatch": "^4.0.5",
"yaml": "^2.3.1"
},
"devDependencies": {
-"@types/jest": "^30.0.0",
+"@types/jest": "^29.5.2",
"@types/lodash": "^4.14.195",
"@types/micromatch": "^4.0.2",
-"@types/node": "^24.0.1",
+"@types/node": "^20.3.2",
-"@types/uuid": "^10.0.0",
+"@types/uuid": "^9.0.2",
-"@typescript-eslint/eslint-plugin": "^7.0.0",
+"@typescript-eslint/eslint-plugin": "^6.0.0",
-"@typescript-eslint/parser": "^7.0.0",
+"@typescript-eslint/parser": "^6.0.0",
"@vercel/ncc": "^0.38.0",
"eslint": "^8.43.0",
-"eslint-config-prettier": "^10.0.0",
-"eslint-plugin-github": "^5.0.0",
-"eslint-plugin-jest": "^29.0.1",
+"eslint-plugin-github": "^4.8.0",
+"eslint-plugin-jest": "^27.2.2",
"eslint-plugin-prettier": "^5.0.0-alpha.2",
-"jest": "^30.0.3",
+"eslint-config-prettier": "^9.0.0",
+"jest": "^29.5.0",
"prettier": "^3.0.0",
"ts-jest": "^29.1.0",
"typescript": "^5.1.3"

renovate.json (new file, 58 lines)

@@ -0,0 +1,58 @@
{
"extends": [
"config:base"
],
"enabled": true,
"prHourlyLimit": 10,
"prConcurrentLimit": 5,
"rebaseWhen": "behind-base-branch",
"addLabels": [
"dependencies",
"merge when passing"
],
"assignees": [
"jackton1"
],
"assignAutomerge": true,
"dependencyDashboard": true,
"dependencyDashboardAutoclose": true,
"lockFileMaintenance": {
"enabled": true,
"automerge": true
},
"nvm": {
"enabled": false
},
"packageRules": [
{
"matchUpdateTypes": [
"minor",
"patch",
"pin",
"digest"
],
"automerge": true,
"rebaseWhen": "behind-base-branch",
"addLabels": [
"merge when passing"
]
},
{
"description": "docker images",
"matchLanguages": [
"docker"
],
"matchUpdateTypes": [
"minor",
"patch",
"pin",
"digest"
],
"rebaseWhen": "behind-base-branch",
"addLabels": [
"merge when passing"
],
"automerge": true
}
]
}

View file

@@ -1,373 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`getInputs should correctly parse boolean inputs 1`] = `
{
"apiUrl": "",
"baseSha": "",
"diffRelative": "false",
"dirNames": "false",
"dirNamesDeletedFilesIncludeOnlyDeletedDirs": "false",
"dirNamesExcludeCurrentDir": "false",
"dirNamesIncludeFiles": "",
"dirNamesIncludeFilesSeparator": "",
"escapeJson": false,
"excludeSubmodules": "false",
"failOnInitialDiffError": "false",
"failOnSubmoduleDiffError": "false",
"fetchAdditionalSubmoduleHistory": "false",
"fetchMissingHistoryMaxRetries": 20,
"files": "",
"filesFromSourceFile": "",
"filesFromSourceFileSeparator": "",
"filesIgnore": "",
"filesIgnoreFromSourceFile": "",
"filesIgnoreFromSourceFileSeparator": "",
"filesIgnoreSeparator": "",
"filesIgnoreYaml": "",
"filesIgnoreYamlFromSourceFile": "",
"filesIgnoreYamlFromSourceFileSeparator": "",
"filesSeparator": "",
"filesYaml": "",
"filesYamlFromSourceFile": "",
"filesYamlFromSourceFileSeparator": "",
"includeAllOldNewRenamedFiles": "false",
"json": true,
"negationPatternsFirst": "false",
"oldNewFilesSeparator": " ",
"oldNewSeparator": ",",
"outputDir": "",
"outputRenamedFilesAsDeletedAndAdded": "false",
"path": ".",
"quotepath": "false",
"recoverDeletedFiles": "false",
"recoverDeletedFilesToDestination": "",
"recoverFiles": "",
"recoverFilesIgnore": "",
"recoverFilesIgnoreSeparator": "
",
"recoverFilesSeparator": "
",
"safeOutput": "false",
"separator": "",
"sha": "",
"since": "",
"sinceLastRemoteCommit": "false",
"skipInitialFetch": "true",
"tagsIgnorePattern": "",
"tagsPattern": "*",
"token": "",
"until": "",
"usePosixPathSeparator": "false",
"useRestApi": "false",
"writeOutputFiles": "false",
}
`;
exports[`getInputs should correctly parse numeric inputs 1`] = `
{
"apiUrl": "",
"baseSha": "",
"diffRelative": true,
"dirNames": false,
"dirNamesDeletedFilesIncludeOnlyDeletedDirs": false,
"dirNamesExcludeCurrentDir": false,
"dirNamesIncludeFiles": "",
"dirNamesIncludeFilesSeparator": "",
"dirNamesMaxDepth": 2,
"escapeJson": false,
"excludeSubmodules": false,
"failOnInitialDiffError": false,
"failOnSubmoduleDiffError": false,
"fetchAdditionalSubmoduleHistory": false,
"fetchDepth": 5,
"files": "",
"filesFromSourceFile": "",
"filesFromSourceFileSeparator": "",
"filesIgnore": "",
"filesIgnoreFromSourceFile": "",
"filesIgnoreFromSourceFileSeparator": "",
"filesIgnoreSeparator": "",
"filesIgnoreYaml": "",
"filesIgnoreYamlFromSourceFile": "",
"filesIgnoreYamlFromSourceFileSeparator": "",
"filesSeparator": "",
"filesYaml": "",
"filesYamlFromSourceFile": "",
"filesYamlFromSourceFileSeparator": "",
"includeAllOldNewRenamedFiles": false,
"json": false,
"negationPatternsFirst": false,
"oldNewFilesSeparator": "",
"oldNewSeparator": "",
"outputDir": "",
"outputRenamedFilesAsDeletedAndAdded": false,
"path": "",
"quotepath": true,
"recoverDeletedFiles": false,
"recoverDeletedFilesToDestination": "",
"recoverFiles": "",
"recoverFilesIgnore": "",
"recoverFilesIgnoreSeparator": "",
"recoverFilesSeparator": "",
"safeOutput": false,
"separator": "",
"sha": "",
"since": "",
"sinceLastRemoteCommit": false,
"skipInitialFetch": false,
"tagsIgnorePattern": "",
"tagsPattern": "",
"token": "",
"until": "",
"usePosixPathSeparator": false,
"useRestApi": false,
"writeOutputFiles": false,
}
`;
exports[`getInputs should correctly parse string inputs 1`] = `
{
"apiUrl": "https://api.github.com",
"baseSha": "",
"diffRelative": true,
"dirNames": false,
"dirNamesDeletedFilesIncludeOnlyDeletedDirs": false,
"dirNamesExcludeCurrentDir": false,
"dirNamesIncludeFiles": "",
"dirNamesIncludeFilesSeparator": "",
"escapeJson": false,
"excludeSubmodules": false,
"failOnInitialDiffError": false,
"failOnSubmoduleDiffError": false,
"fetchAdditionalSubmoduleHistory": false,
"files": "",
"filesFromSourceFile": "",
"filesFromSourceFileSeparator": "",
"filesIgnore": "",
"filesIgnoreFromSourceFile": "",
"filesIgnoreFromSourceFileSeparator": "",
"filesIgnoreSeparator": "",
"filesIgnoreYaml": "",
"filesIgnoreYamlFromSourceFile": "",
"filesIgnoreYamlFromSourceFileSeparator": "",
"filesSeparator": "",
"filesYaml": "",
"filesYamlFromSourceFile": "",
"filesYamlFromSourceFileSeparator": "",
"includeAllOldNewRenamedFiles": false,
"json": false,
"negationPatternsFirst": false,
"oldNewFilesSeparator": "",
"oldNewSeparator": "",
"outputDir": "",
"outputRenamedFilesAsDeletedAndAdded": false,
"path": "",
"quotepath": true,
"recoverDeletedFiles": false,
"recoverDeletedFilesToDestination": "",
"recoverFiles": "",
"recoverFilesIgnore": "",
"recoverFilesIgnoreSeparator": "",
"recoverFilesSeparator": "",
"safeOutput": false,
"separator": "",
"sha": "",
"since": "",
"sinceLastRemoteCommit": false,
"skipInitialFetch": false,
"tagsIgnorePattern": "",
"tagsPattern": "",
"token": "token",
"until": "",
"usePosixPathSeparator": false,
"useRestApi": false,
"writeOutputFiles": false,
}
`;
exports[`getInputs should handle invalid numeric inputs correctly 1`] = `
{
"apiUrl": "",
"baseSha": "",
"diffRelative": true,
"dirNames": false,
"dirNamesDeletedFilesIncludeOnlyDeletedDirs": false,
"dirNamesExcludeCurrentDir": false,
"dirNamesIncludeFiles": "",
"dirNamesIncludeFilesSeparator": "",
"dirNamesMaxDepth": 2,
"escapeJson": false,
"excludeSubmodules": false,
"failOnInitialDiffError": false,
"failOnSubmoduleDiffError": false,
"fetchAdditionalSubmoduleHistory": false,
"fetchDepth": NaN,
"files": "",
"filesFromSourceFile": "",
"filesFromSourceFileSeparator": "",
"filesIgnore": "",
"filesIgnoreFromSourceFile": "",
"filesIgnoreFromSourceFileSeparator": "",
"filesIgnoreSeparator": "",
"filesIgnoreYaml": "",
"filesIgnoreYamlFromSourceFile": "",
"filesIgnoreYamlFromSourceFileSeparator": "",
"filesSeparator": "",
"filesYaml": "",
"filesYamlFromSourceFile": "",
"filesYamlFromSourceFileSeparator": "",
"includeAllOldNewRenamedFiles": false,
"json": false,
"negationPatternsFirst": false,
"oldNewFilesSeparator": "",
"oldNewSeparator": "",
"outputDir": "",
"outputRenamedFilesAsDeletedAndAdded": false,
"path": "",
"quotepath": true,
"recoverDeletedFiles": false,
"recoverDeletedFilesToDestination": "",
"recoverFiles": "",
"recoverFilesIgnore": "",
"recoverFilesIgnoreSeparator": "",
"recoverFilesSeparator": "",
"safeOutput": false,
"separator": "",
"sha": "",
"since": "",
"sinceLastRemoteCommit": false,
"skipInitialFetch": false,
"tagsIgnorePattern": "",
"tagsPattern": "",
"token": "",
"until": "",
"usePosixPathSeparator": false,
"useRestApi": false,
"writeOutputFiles": false,
}
`;
exports[`getInputs should handle negative numeric inputs correctly 1`] = `
{
"apiUrl": "",
"baseSha": "",
"diffRelative": true,
"dirNames": false,
"dirNamesDeletedFilesIncludeOnlyDeletedDirs": false,
"dirNamesExcludeCurrentDir": false,
"dirNamesIncludeFiles": "",
"dirNamesIncludeFilesSeparator": "",
"dirNamesMaxDepth": -2,
"escapeJson": false,
"excludeSubmodules": false,
"failOnInitialDiffError": false,
"failOnSubmoduleDiffError": false,
"fetchAdditionalSubmoduleHistory": false,
"fetchDepth": 2,
"files": "",
"filesFromSourceFile": "",
"filesFromSourceFileSeparator": "",
"filesIgnore": "",
"filesIgnoreFromSourceFile": "",
"filesIgnoreFromSourceFileSeparator": "",
"filesIgnoreSeparator": "",
"filesIgnoreYaml": "",
"filesIgnoreYamlFromSourceFile": "",
"filesIgnoreYamlFromSourceFileSeparator": "",
"filesSeparator": "",
"filesYaml": "",
"filesYamlFromSourceFile": "",
"filesYamlFromSourceFileSeparator": "",
"includeAllOldNewRenamedFiles": false,
"json": false,
"negationPatternsFirst": false,
"oldNewFilesSeparator": "",
"oldNewSeparator": "",
"outputDir": "",
"outputRenamedFilesAsDeletedAndAdded": false,
"path": "",
"quotepath": true,
"recoverDeletedFiles": false,
"recoverDeletedFilesToDestination": "",
"recoverFiles": "",
"recoverFilesIgnore": "",
"recoverFilesIgnoreSeparator": "",
"recoverFilesSeparator": "",
"safeOutput": false,
"separator": "",
"sha": "",
"since": "",
"sinceLastRemoteCommit": false,
"skipInitialFetch": false,
"tagsIgnorePattern": "",
"tagsPattern": "",
"token": "",
"until": "",
"usePosixPathSeparator": false,
"useRestApi": false,
"writeOutputFiles": false,
}
`;
exports[`getInputs should return default values when no inputs are provided 1`] = `
{
"apiUrl": "",
"baseSha": "",
"diffRelative": true,
"dirNames": false,
"dirNamesDeletedFilesIncludeOnlyDeletedDirs": false,
"dirNamesExcludeCurrentDir": false,
"dirNamesIncludeFiles": "",
"dirNamesIncludeFilesSeparator": "",
"escapeJson": false,
"excludeSubmodules": false,
"failOnInitialDiffError": false,
"failOnSubmoduleDiffError": false,
"fetchAdditionalSubmoduleHistory": false,
"fetchMissingHistoryMaxRetries": 20,
"files": "",
"filesFromSourceFile": "",
"filesFromSourceFileSeparator": "",
"filesIgnore": "",
"filesIgnoreFromSourceFile": "",
"filesIgnoreFromSourceFileSeparator": "",
"filesIgnoreSeparator": "",
"filesIgnoreYaml": "",
"filesIgnoreYamlFromSourceFile": "",
"filesIgnoreYamlFromSourceFileSeparator": "",
"filesSeparator": "",
"filesYaml": "",
"filesYamlFromSourceFile": "",
"filesYamlFromSourceFileSeparator": "",
"includeAllOldNewRenamedFiles": false,
"json": false,
"negationPatternsFirst": false,
"oldNewFilesSeparator": " ",
"oldNewSeparator": ",",
"outputDir": "",
"outputRenamedFilesAsDeletedAndAdded": false,
"path": ".",
"quotepath": true,
"recoverDeletedFiles": false,
"recoverDeletedFilesToDestination": "",
"recoverFiles": "",
"recoverFilesIgnore": "",
"recoverFilesIgnoreSeparator": "
",
"recoverFilesSeparator": "
",
"safeOutput": false,
"separator": "",
"sha": "",
"since": "",
"sinceLastRemoteCommit": false,
"skipInitialFetch": false,
"tagsIgnorePattern": "",
"tagsPattern": "*",
"token": "",
"until": "",
"usePosixPathSeparator": false,
"useRestApi": false,
"writeOutputFiles": false,
}
`;


@ -1,153 +0,0 @@
import * as core from '@actions/core'
import {getInputs, Inputs} from '../inputs'
import {DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS} from '../constant'
jest.mock('@actions/core')
describe('getInputs', () => {
afterEach(() => {
jest.clearAllMocks()
})
test('should return default values when no inputs are provided', () => {
;(core.getInput as jest.Mock).mockImplementation(name => {
const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => {
return g[1].toUpperCase()
}) as keyof Inputs
return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] ||
'') as string
})
;(core.getBooleanInput as jest.Mock).mockImplementation(name => {
const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => {
return g[1].toUpperCase()
}) as keyof Inputs
return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] ||
false) as boolean
})
expect(getInputs()).toMatchSnapshot()
})
test('should correctly parse boolean inputs', () => {
;(core.getInput as jest.Mock).mockImplementation(name => {
const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => {
return g[1].toUpperCase()
}) as keyof Inputs
return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] ||
'') as string
})
;(core.getBooleanInput as jest.Mock).mockImplementation(name => {
switch (name) {
case 'matrix':
return 'true'
case 'skip_initial_fetch':
return 'true'
default:
return 'false'
}
})
expect(getInputs()).toMatchSnapshot()
})
test('should handle matrix alias correctly', () => {
;(core.getBooleanInput as jest.Mock).mockImplementation(name => {
return name === 'matrix' ? 'true' : 'false'
})
const inputs = getInputs()
expect(inputs).toHaveProperty('json', true)
expect(inputs).toHaveProperty('escapeJson', false)
})
test('should correctly parse string inputs', () => {
;(core.getInput as jest.Mock).mockImplementation(name => {
switch (name) {
case 'token':
return 'token'
case 'api_url':
return 'https://api.github.com'
default:
return ''
}
})
;(core.getBooleanInput as jest.Mock).mockImplementation(name => {
const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => {
return g[1].toUpperCase()
}) as keyof Inputs
return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] ||
false) as boolean
})
expect(getInputs()).toMatchSnapshot()
})
test('should correctly parse numeric inputs', () => {
;(core.getInput as jest.Mock).mockImplementation(name => {
switch (name) {
case 'fetch_depth':
return '5'
case 'dir_names_max_depth':
return '2'
default:
return ''
}
})
;(core.getBooleanInput as jest.Mock).mockImplementation(name => {
const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => {
return g[1].toUpperCase()
}) as keyof Inputs
return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] ||
false) as boolean
})
expect(getInputs()).toMatchSnapshot()
})
test('should handle invalid numeric inputs correctly', () => {
;(core.getInput as jest.Mock).mockImplementation(name => {
// TODO: Add validation for invalid numbers which should result in an error instead of NaN
switch (name) {
case 'fetch_depth':
return 'invalid'
case 'dir_names_max_depth':
return '2'
default:
return ''
}
})
;(core.getBooleanInput as jest.Mock).mockImplementation(name => {
const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => {
return g[1].toUpperCase()
}) as keyof Inputs
return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] ||
false) as boolean
})
expect(getInputs()).toMatchSnapshot()
})
test('should handle negative numeric inputs correctly', () => {
;(core.getInput as jest.Mock).mockImplementation(name => {
// TODO: Add validation for negative numbers which should result in an error
switch (name) {
case 'fetch_depth':
return '-5'
case 'dir_names_max_depth':
return '-2'
default:
return ''
}
})
;(core.getBooleanInput as jest.Mock).mockImplementation(name => {
const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => {
return g[1].toUpperCase()
}) as keyof Inputs
return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] ||
false) as boolean
})
expect(getInputs()).toMatchSnapshot()
})
})
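
The mocks above look up defaults by converting snake_case input names to the camelCase keys of DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS. A minimal sketch of that conversion (toCamelCase is a hypothetical helper name, not code from the repository):

```typescript
// Mirrors the regex used in the mocks: '_x' becomes 'X'.
const toCamelCase = (name: string): string =>
  name.replace(/_([a-z])/g, (_match, char: string) => char.toUpperCase())

// toCamelCase('dir_names_max_depth') === 'dirNamesMaxDepth'
// toCamelCase('skip_initial_fetch') === 'skipInitialFetch'
```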


@ -1,12 +1,10 @@
 import * as core from '@actions/core'
-import * as exec from '@actions/exec'
 import {ChangeTypeEnum} from '../changedFiles'
 import {Inputs} from '../inputs'
 import {
   getDirname,
   getDirnameMaxDepth,
   getFilteredChangedFiles,
-  getPreviousGitTag,
   normalizeSeparators,
   warnUnsupportedRESTAPIInputs
 } from '../utils'
@ -637,12 +635,7 @@ describe('utils test', () => {
       failOnInitialDiffError: false,
       failOnSubmoduleDiffError: false,
       negationPatternsFirst: false,
-      useRestApi: false,
-      excludeSubmodules: false,
-      fetchMissingHistoryMaxRetries: 20,
-      usePosixPathSeparator: false,
-      tagsPattern: '*',
-      tagsIgnorePattern: ''
+      useRestApi: false
     }

     const coreWarningSpy = jest.spyOn(core, 'warning')
@ -658,104 +651,4 @@ describe('utils test', () => {
     expect(coreWarningSpy).toHaveBeenCalledTimes(1)
   })
 })
describe('getPreviousGitTag', () => {
// Check if the environment variable GITHUB_REPOSITORY_OWNER is 'tj-actions'
const shouldSkip = !!process.env.GITHUB_EVENT_PULL_REQUEST_HEAD_REPO_FORK
// Function returns the second-latest tag and its SHA
it('should return the second latest tag and its SHA when multiple tags are present', async () => {
if (shouldSkip) {
return
}
const result = await getPreviousGitTag({
cwd: '.',
tagsPattern: '*',
tagsIgnorePattern: '',
currentBranch: 'v1.0.1'
})
expect(result).toEqual({
tag: 'v1.0.0',
sha: 'f0751de6af436d4e79016e2041cf6400e0833653'
})
})
// Tags are filtered by a specified pattern when 'tagsPattern' is provided
it('should filter tags by the specified pattern', async () => {
if (shouldSkip) {
return
}
const result = await getPreviousGitTag({
cwd: '.',
tagsPattern: 'v1.*',
tagsIgnorePattern: '',
currentBranch: 'v1.0.1'
})
expect(result).toEqual({
tag: 'v1.0.0',
sha: 'f0751de6af436d4e79016e2041cf6400e0833653'
})
})
// Tags are excluded by a specified ignore pattern when 'tagsIgnorePattern' is provided
it('should exclude tags by the specified ignore pattern', async () => {
if (shouldSkip) {
return
}
const result = await getPreviousGitTag({
cwd: '.',
tagsPattern: '*',
tagsIgnorePattern: 'v0.*.*',
currentBranch: 'v1.0.1'
})
expect(result).toEqual({
tag: 'v1.0.0',
sha: 'f0751de6af436d4e79016e2041cf6400e0833653'
})
})
// No tags are available in the repository
it('should return empty values when no tags are available in the repository', async () => {
jest.spyOn(exec, 'getExecOutput').mockResolvedValueOnce({
stdout: '',
stderr: '',
exitCode: 0
})
const result = await getPreviousGitTag({
cwd: '.',
tagsPattern: '*',
tagsIgnorePattern: '',
currentBranch: ''
})
expect(result).toEqual({tag: '', sha: ''})
})
// Only one tag is available, making it impossible to find a previous tag
it('should return empty values when only one tag is available', async () => {
jest.spyOn(exec, 'getExecOutput').mockResolvedValueOnce({
stdout:
'v1.0.1|f0751de6af436d4e79016e2041cf6400e0833653|2021-01-01T00:00:00Z',
stderr: '',
exitCode: 0
})
const result = await getPreviousGitTag({
cwd: '.',
tagsPattern: '*',
tagsIgnorePattern: '',
currentBranch: 'v1.0.1'
})
expect(result).toEqual({tag: '', sha: ''})
})
// Git commands fail and throw errors
it('should throw an error when git commands fail', async () => {
jest
.spyOn(exec, 'getExecOutput')
.mockRejectedValue(new Error('git command failed'))
await expect(
getPreviousGitTag({
cwd: '.',
tagsPattern: '*',
tagsIgnorePattern: '',
currentBranch: 'v1.0.1'
})
).rejects.toThrow('git command failed')
})
})
})
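
The mocked getExecOutput above returns tag data in the `%(refname:short)|%(objectname)|%(creatordate:iso)` format. A small sketch, with a hypothetical parseTagLine helper, of how one such line breaks down:

```typescript
// Parses one line of the mocked `git tag --format=...` output shown above.
const parseTagLine = (line: string): {tag: string; sha: string; date: Date} => {
  const [tag, sha, dateString] = line.split('|')
  return {tag, sha, date: new Date(dateString)}
}

// parseTagLine('v1.0.1|f0751de6af436d4e79016e2041cf6400e0833653|2021-01-01T00:00:00Z')
// => {tag: 'v1.0.1', sha: 'f0751de6af436d4e79016e2041cf6400e0833653', date: new Date('2021-01-01T00:00:00Z')}
```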


@ -2,7 +2,6 @@ import * as core from '@actions/core'
 import * as github from '@actions/github'
 import type {RestEndpointMethodTypes} from '@octokit/rest'
 import flatten from 'lodash/flatten'
-import convertPath from '@stdlib/utils-convert-path'
 import mm from 'micromatch'
 import * as path from 'path'
 import {setOutputsAndGetModifiedAndChangedFilesStatus} from './changedFilesOutput'
@ -89,17 +88,21 @@ export const processChangedFiles = async ({
       core.endGroup()
     }

-    await setArrayOutput({
-      key: 'modified_keys',
-      inputs,
-      value: modifiedKeys
-    })
+    if (modifiedKeys.length > 0) {
+      await setArrayOutput({
+        key: 'modified_keys',
+        inputs,
+        value: modifiedKeys
+      })
+    }

-    await setArrayOutput({
-      key: 'changed_keys',
-      inputs,
-      value: changedKeys
-    })
+    if (changedKeys.length > 0) {
+      await setArrayOutput({
+        key: 'changed_keys',
+        inputs,
+        value: changedKeys
+      })
+    }
   }

   if (filePatterns.length === 0 && Object.keys(yamlFilePatterns).length === 0) {
@ -118,13 +121,13 @@ export const getRenamedFiles = async ({
 export const getRenamedFiles = async ({
   inputs,
   workingDirectory,
-  diffSubmodule,
+  hasSubmodule,
   diffResult,
   submodulePaths
 }: {
   inputs: Inputs
   workingDirectory: string
-  diffSubmodule: boolean
+  hasSubmodule: boolean
   diffResult: DiffResult
   submodulePaths: string[]
 }): Promise<{paths: string; count: string}> => {
@ -136,7 +139,7 @@ export const getRenamedFiles = async ({
     oldNewSeparator: inputs.oldNewSeparator
   })

-  if (diffSubmodule) {
+  if (hasSubmodule) {
     for (const submodulePath of submodulePaths) {
       const submoduleShaResult = await gitSubmoduleDiffSHA({
         cwd: workingDirectory,
@ -214,7 +217,7 @@ export type ChangedFiles = {
 export const getAllDiffFiles = async ({
   workingDirectory,
-  diffSubmodule,
+  hasSubmodule,
   diffResult,
   submodulePaths,
   outputRenamedFilesAsDeletedAndAdded,
@ -223,7 +226,7 @@ export const getAllDiffFiles = async ({
   failOnSubmoduleDiffError
 }: {
   workingDirectory: string
-  diffSubmodule: boolean
+  hasSubmodule: boolean
   diffResult: DiffResult
   submodulePaths: string[]
   outputRenamedFilesAsDeletedAndAdded: boolean
@ -240,7 +243,7 @@ export const getAllDiffFiles = async ({
     failOnInitialDiffError
   })

-  if (diffSubmodule) {
+  if (hasSubmodule) {
     for (const submodulePath of submodulePaths) {
       const submoduleShaResult = await gitSubmoduleDiffSHA({
         cwd: workingDirectory,
@ -352,11 +355,7 @@ function* getChangeTypeFilesGenerator({
     filePaths,
     dirNamesIncludeFilePatterns
   })) {
-    if (isWindows() && inputs.usePosixPathSeparator) {
-      yield convertPath(filePath, 'mixed')
-    } else {
-      yield filePath
-    }
+    yield filePath
   }
 }
@ -403,11 +402,7 @@ function* getAllChangeTypeFilesGenerator({
     filePaths,
     dirNamesIncludeFilePatterns
   })) {
-    if (isWindows() && inputs.usePosixPathSeparator) {
-      yield convertPath(filePath, 'mixed')
-    } else {
-      yield filePath
-    }
+    yield filePath
   }
 }
@ -480,7 +475,7 @@ export const getChangedFilesFromGithubAPI = async ({
     if (changeType === ChangeTypeEnum.Renamed) {
       if (inputs.outputRenamedFilesAsDeletedAndAdded) {
-        changedFiles[ChangeTypeEnum.Deleted].push(item.previous_filename || '')
+        changedFiles[ChangeTypeEnum.Deleted].push(item.filename)
         changedFiles[ChangeTypeEnum.Added].push(item.filename)
       } else {
         changedFiles[ChangeTypeEnum.Renamed].push(item.filename)
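
On the left-hand side of the last hunk above, a renamed file is reported as a deletion of its previous path plus an addition of its new path when output_renamed_files_as_deleted_and_added is enabled. An illustrative sketch (splitRename is hypothetical; the fields mirror the REST API item used above):

```typescript
// Not the action's code: shows how one renamed item is split into two entries.
const splitRename = (item: {
  filename: string
  previous_filename?: string
}): {deleted: string; added: string} => ({
  deleted: item.previous_filename || '',
  added: item.filename
})

// splitRename({previous_filename: 'docs/old.md', filename: 'docs/new.md'})
// => {deleted: 'docs/old.md', added: 'docs/new.md'}
```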


@ -52,13 +52,6 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
     writeOutputFiles: inputs.writeOutputFiles,
     outputDir: inputs.outputDir
   })
-  await setOutput({
-    key: getOutputKey('any_added', outputPrefix),
-    value: addedFiles.paths.length > 0,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir,
-    json: inputs.json
-  })

   const copiedFiles = await getChangeTypeFiles({
     inputs,
@ -254,7 +247,7 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
   await setOutput({
     key: getOutputKey('any_changed', outputPrefix),
-    value: allChangedFiles.paths.length > 0,
+    value: allChangedFiles.paths.length > 0 && filePatterns.length > 0,
     writeOutputFiles: inputs.writeOutputFiles,
     outputDir: inputs.outputDir,
     json: inputs.json
@ -343,7 +336,7 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
   await setOutput({
     key: getOutputKey('any_modified', outputPrefix),
-    value: allModifiedFiles.paths.length > 0,
+    value: allModifiedFiles.paths.length > 0 && filePatterns.length > 0,
     writeOutputFiles: inputs.writeOutputFiles,
     outputDir: inputs.outputDir,
     json: inputs.json
@ -449,7 +442,7 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
   await setOutput({
     key: getOutputKey('any_deleted', outputPrefix),
-    value: deletedFiles.paths.length > 0,
+    value: deletedFiles.paths.length > 0 && filePatterns.length > 0,
     writeOutputFiles: inputs.writeOutputFiles,
     outputDir: inputs.outputDir,
     json: inputs.json
@ -503,7 +496,7 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
   })

   return {
-    anyModified: allModifiedFiles.paths.length > 0,
-    anyChanged: allChangedFiles.paths.length > 0
+    anyModified: allModifiedFiles.paths.length > 0 && filePatterns.length > 0,
+    anyChanged: allChangedFiles.paths.length > 0 && filePatterns.length > 0
   }
 }
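
The right-hand side of the hunks above gates the any_changed, any_modified, and any_deleted outputs on file patterns actually being supplied. A minimal sketch of that expression (anyChanged is a hypothetical helper):

```typescript
// Output is true only when patterns were given and something matching changed.
const anyChanged = (changedPaths: string[], filePatterns: string[]): boolean =>
  changedPaths.length > 0 && filePatterns.length > 0

// anyChanged(['src/app.ts'], ['src/**']) === true
// anyChanged(['src/app.ts'], [])         === false
```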


@ -86,27 +86,15 @@ export interface DiffResult {
initialCommit?: boolean initialCommit?: boolean
} }
interface SHAForNonPullRequestEvent { export const getSHAForNonPullRequestEvent = async (
inputs: Inputs inputs: Inputs,
env: Env env: Env,
workingDirectory: string workingDirectory: string,
isShallow: boolean isShallow: boolean,
diffSubmodule: boolean hasSubmodule: boolean,
gitFetchExtraArgs: string[] gitFetchExtraArgs: string[],
isTag: boolean isTag: boolean
remoteName: string ): Promise<DiffResult> => {
}
export const getSHAForNonPullRequestEvent = async ({
inputs,
env,
workingDirectory,
isShallow,
diffSubmodule,
gitFetchExtraArgs,
isTag,
remoteName
}: SHAForNonPullRequestEvent): Promise<DiffResult> => {
let targetBranch = env.GITHUB_REF_NAME let targetBranch = env.GITHUB_REF_NAME
let currentBranch = targetBranch let currentBranch = targetBranch
let initialCommit = false let initialCommit = false
@ -134,8 +122,8 @@ export const getSHAForNonPullRequestEvent = async ({
'-u', '-u',
'--progress', '--progress',
`--deepen=${inputs.fetchDepth}`, `--deepen=${inputs.fetchDepth}`,
remoteName, 'origin',
`+refs/heads/${sourceBranch}:refs/remotes/${remoteName}/${sourceBranch}` `+refs/heads/${sourceBranch}:refs/remotes/origin/${sourceBranch}`
] ]
}) })
} else { } else {
@ -146,13 +134,13 @@ export const getSHAForNonPullRequestEvent = async ({
'-u', '-u',
'--progress', '--progress',
`--deepen=${inputs.fetchDepth}`, `--deepen=${inputs.fetchDepth}`,
remoteName, 'origin',
`+refs/heads/${targetBranch}:refs/remotes/${remoteName}/${targetBranch}` `+refs/heads/${targetBranch}:refs/remotes/origin/${targetBranch}`
] ]
}) })
} }
if (diffSubmodule) { if (hasSubmodule) {
await gitFetchSubmodules({ await gitFetchSubmodules({
cwd: workingDirectory, cwd: workingDirectory,
args: [ args: [
@ -164,7 +152,7 @@ export const getSHAForNonPullRequestEvent = async ({
}) })
} }
} else { } else {
if (diffSubmodule && inputs.fetchAdditionalSubmoduleHistory) { if (hasSubmodule && inputs.fetchAdditionalSubmoduleHistory) {
await gitFetchSubmodules({ await gitFetchSubmodules({
cwd: workingDirectory, cwd: workingDirectory,
args: [ args: [
@ -240,12 +228,7 @@ export const getSHAForNonPullRequestEvent = async ({
} }
} else if (isTag) { } else if (isTag) {
core.debug('Getting previous SHA for tag...') core.debug('Getting previous SHA for tag...')
const {sha, tag} = await getPreviousGitTag({ const {sha, tag} = await getPreviousGitTag({cwd: workingDirectory})
cwd: workingDirectory,
tagsPattern: inputs.tagsPattern,
tagsIgnorePattern: inputs.tagsIgnorePattern,
currentBranch
})
previousSha = sha previousSha = sha
targetBranch = tag targetBranch = tag
} else { } else {
@ -324,23 +307,14 @@ export const getSHAForNonPullRequestEvent = async ({
} }
} }
interface SHAForPullRequestEvent { export const getSHAForPullRequestEvent = async (
inputs: Inputs inputs: Inputs,
workingDirectory: string env: Env,
isShallow: boolean workingDirectory: string,
diffSubmodule: boolean isShallow: boolean,
hasSubmodule: boolean,
gitFetchExtraArgs: string[] gitFetchExtraArgs: string[]
remoteName: string ): Promise<DiffResult> => {
}
export const getSHAForPullRequestEvent = async ({
inputs,
workingDirectory,
isShallow,
diffSubmodule,
gitFetchExtraArgs,
remoteName
}: SHAForPullRequestEvent): Promise<DiffResult> => {
let targetBranch = github.context.payload.pull_request?.base?.ref let targetBranch = github.context.payload.pull_request?.base?.ref
const currentBranch = github.context.payload.pull_request?.head?.ref const currentBranch = github.context.payload.pull_request?.head?.ref
if (inputs.sinceLastRemoteCommit) { if (inputs.sinceLastRemoteCommit) {
@ -356,7 +330,7 @@ export const getSHAForPullRequestEvent = async ({
...gitFetchExtraArgs, ...gitFetchExtraArgs,
'-u', '-u',
'--progress', '--progress',
remoteName, 'origin',
`pull/${github.context.payload.pull_request?.number}/head:${currentBranch}` `pull/${github.context.payload.pull_request?.number}/head:${currentBranch}`
] ]
}) })
@ -369,8 +343,8 @@ export const getSHAForPullRequestEvent = async ({
'-u', '-u',
'--progress', '--progress',
`--deepen=${inputs.fetchDepth}`, `--deepen=${inputs.fetchDepth}`,
remoteName, 'origin',
`+refs/heads/${currentBranch}*:refs/remotes/${remoteName}/${currentBranch}*` `+refs/heads/${currentBranch}*:refs/remotes/origin/${currentBranch}*`
] ]
}) })
} }
@ -380,32 +354,35 @@ export const getSHAForPullRequestEvent = async ({
'Failed to fetch pull request branch. Please ensure "persist-credentials" is set to "true" when checking out the repository. See: https://github.com/actions/checkout#usage' 'Failed to fetch pull request branch. Please ensure "persist-credentials" is set to "true" when checking out the repository. See: https://github.com/actions/checkout#usage'
) )
} }
core.debug('Fetching target branch...')
await gitFetch({
cwd: workingDirectory,
args: [
...gitFetchExtraArgs,
'-u',
'--progress',
`--deepen=${inputs.fetchDepth}`,
remoteName,
`+refs/heads/${github.context.payload.pull_request?.base?.ref}:refs/remotes/${remoteName}/${github.context.payload.pull_request?.base?.ref}`
]
})
if (diffSubmodule) { if (!inputs.sinceLastRemoteCommit) {
await gitFetchSubmodules({ core.debug('Fetching target branch...')
await gitFetch({
cwd: workingDirectory, cwd: workingDirectory,
args: [ args: [
...gitFetchExtraArgs, ...gitFetchExtraArgs,
'-u', '-u',
'--progress', '--progress',
`--deepen=${inputs.fetchDepth}` `--deepen=${inputs.fetchDepth}`,
'origin',
`+refs/heads/${targetBranch}:refs/remotes/origin/${targetBranch}`
] ]
}) })
if (hasSubmodule) {
await gitFetchSubmodules({
cwd: workingDirectory,
args: [
...gitFetchExtraArgs,
'-u',
'--progress',
`--deepen=${inputs.fetchDepth}`
]
})
}
} }
} else { } else {
if (diffSubmodule && inputs.fetchAdditionalSubmoduleHistory) { if (hasSubmodule && inputs.fetchAdditionalSubmoduleHistory) {
await gitFetchSubmodules({ await gitFetchSubmodules({
cwd: workingDirectory, cwd: workingDirectory,
args: [ args: [
@ -450,7 +427,10 @@ export const getSHAForPullRequestEvent = async ({
} }
} }
if (!github.context.payload.pull_request?.base?.ref) { if (
!github.context.payload.pull_request?.base?.ref ||
github.context.payload.head?.repo?.fork === 'true'
) {
diff = '..' diff = '..'
} }
@ -507,14 +487,17 @@ export const getSHAForPullRequestEvent = async ({
} }
} }
} else { } else {
previousSha = github.context.payload.pull_request?.base?.sha if (github.context.payload.action === 'closed') {
previousSha = github.context.payload.pull_request?.base?.sha
if (!previousSha) { } else {
previousSha = await getRemoteBranchHeadSha({ previousSha = await getRemoteBranchHeadSha({
cwd: workingDirectory, cwd: workingDirectory,
remoteName,
branch: targetBranch branch: targetBranch
}) })
if (!previousSha) {
previousSha = github.context.payload.pull_request?.base?.sha
}
} }
if (isShallow) { if (isShallow) {
@ -530,11 +513,7 @@ export const getSHAForPullRequestEvent = async ({
'Merge base is not in the local history, fetching remote target branch...' 'Merge base is not in the local history, fetching remote target branch...'
) )
for ( for (let i = 1; i <= 10; i++) {
let i = 1;
i <= (inputs.fetchMissingHistoryMaxRetries || 10);
i++
) {
await gitFetch({ await gitFetch({
cwd: workingDirectory, cwd: workingDirectory,
args: [ args: [
@ -542,8 +521,8 @@ export const getSHAForPullRequestEvent = async ({
'-u', '-u',
'--progress', '--progress',
`--deepen=${inputs.fetchDepth}`, `--deepen=${inputs.fetchDepth}`,
remoteName, 'origin',
`+refs/heads/${targetBranch}:refs/remotes/${remoteName}/${targetBranch}` `+refs/heads/${targetBranch}:refs/remotes/origin/${targetBranch}`
] ]
}) })
@ -561,9 +540,7 @@ export const getSHAForPullRequestEvent = async ({
core.info( core.info(
'Merge base is not in the local history, fetching remote target branch again...' 'Merge base is not in the local history, fetching remote target branch again...'
) )
core.info( core.info(`Attempt ${i}/10`)
`Attempt ${i}/${inputs.fetchMissingHistoryMaxRetries || 10}`
)
} }
} }
} }
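
Both sides of the diff above build the same fetch refspec; the left-hand side takes the remote name as a parameter while the right-hand side hard-codes 'origin'. A small sketch (branchRefspec is a hypothetical helper, not code from the repository):

```typescript
// Builds the refspec passed to `git fetch` in the hunks above.
const branchRefspec = (remoteName: string, branch: string): string =>
  `+refs/heads/${branch}:refs/remotes/${remoteName}/${branch}`

// branchRefspec('origin', 'main')
// => '+refs/heads/main:refs/remotes/origin/main'
```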


@ -20,10 +20,5 @@ export const DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS: Partial<Inputs> = {
   oldNewFilesSeparator: ' ',
   skipInitialFetch: false,
   fetchAdditionalSubmoduleHistory: false,
-  dirNamesDeletedFilesIncludeOnlyDeletedDirs: false,
-  excludeSubmodules: false,
-  fetchMissingHistoryMaxRetries: 20,
-  usePosixPathSeparator: false,
-  tagsPattern: '*',
-  tagsIgnorePattern: ''
+  dirNamesDeletedFilesIncludeOnlyDeletedDirs: false
 }


@ -1,10 +1,13 @@
+import * as core from '@actions/core'
+
 export type Env = {
   GITHUB_REF_NAME: string
   GITHUB_REF: string
   GITHUB_WORKSPACE: string
 }

 export const getEnv = async (): Promise<Env> => {
+  core.debug(`Env: ${JSON.stringify(process.env, null, 2)}`)
   return {
     GITHUB_REF_NAME: process.env.GITHUB_REF_NAME || '',
     GITHUB_REF: process.env.GITHUB_REF || '',


@ -54,11 +54,6 @@ export type Inputs = {
failOnSubmoduleDiffError: boolean failOnSubmoduleDiffError: boolean
negationPatternsFirst: boolean negationPatternsFirst: boolean
useRestApi: boolean useRestApi: boolean
excludeSubmodules: boolean
fetchMissingHistoryMaxRetries?: number
usePosixPathSeparator: boolean
tagsPattern: string
tagsIgnorePattern?: string
} }
export const getInputs = (): Inputs => { export const getInputs = (): Inputs => {
@ -159,15 +154,8 @@ export const getInputs = (): Inputs => {
trimWhitespace: false trimWhitespace: false
} }
) )
let json = core.getBooleanInput('json', {required: false}) const json = core.getBooleanInput('json', {required: false})
let escapeJson = core.getBooleanInput('escape_json', {required: false}) const escapeJson = core.getBooleanInput('escape_json', {required: false})
const matrix = core.getBooleanInput('matrix', {required: false})
if (matrix) {
json = true
escapeJson = false
}
const safeOutput = core.getBooleanInput('safe_output', {required: false}) const safeOutput = core.getBooleanInput('safe_output', {required: false})
const fetchDepth = core.getInput('fetch_depth', {required: false}) const fetchDepth = core.getInput('fetch_depth', {required: false})
const sinceLastRemoteCommit = core.getBooleanInput( const sinceLastRemoteCommit = core.getBooleanInput(
@ -245,31 +233,6 @@ export const getInputs = (): Inputs => {
required: false required: false
}) })
const excludeSubmodules = core.getBooleanInput('exclude_submodules', {
required: false
})
const fetchMissingHistoryMaxRetries = core.getInput(
'fetch_missing_history_max_retries',
{required: false}
)
const usePosixPathSeparator = core.getBooleanInput(
'use_posix_path_separator',
{
required: false
}
)
const tagsPattern = core.getInput('tags_pattern', {
required: false,
trimWhitespace: false
})
const tagsIgnorePattern = core.getInput('tags_ignore_pattern', {
required: false,
trimWhitespace: false
})
const inputs: Inputs = { const inputs: Inputs = {
files, files,
filesSeparator, filesSeparator,
@ -309,10 +272,6 @@ export const getInputs = (): Inputs => {
skipInitialFetch, skipInitialFetch,
fetchAdditionalSubmoduleHistory, fetchAdditionalSubmoduleHistory,
dirNamesDeletedFilesIncludeOnlyDeletedDirs, dirNamesDeletedFilesIncludeOnlyDeletedDirs,
excludeSubmodules,
usePosixPathSeparator,
tagsPattern,
tagsIgnorePattern,
// End Not Supported via REST API // End Not Supported via REST API
dirNames, dirNames,
dirNamesExcludeCurrentDir, dirNamesExcludeCurrentDir,
@ -331,7 +290,6 @@ export const getInputs = (): Inputs => {
} }
if (fetchDepth) { if (fetchDepth) {
// Fallback to at least 2 if the fetch_depth is less than 2
inputs.fetchDepth = Math.max(parseInt(fetchDepth, 10), 2) inputs.fetchDepth = Math.max(parseInt(fetchDepth, 10), 2)
} }
@ -339,13 +297,5 @@ export const getInputs = (): Inputs => {
inputs.dirNamesMaxDepth = parseInt(dirNamesMaxDepth, 10) inputs.dirNamesMaxDepth = parseInt(dirNamesMaxDepth, 10)
} }
if (fetchMissingHistoryMaxRetries) {
// Fallback to at least 1 if the fetch_missing_history_max_retries is less than 1
inputs.fetchMissingHistoryMaxRetries = Math.max(
parseInt(fetchMissingHistoryMaxRetries, 10),
1
)
}
return inputs return inputs
} }
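
The parse-and-clamp expression kept on both sides above explains the earlier snapshots: an invalid fetch_depth becomes NaN, while a negative one is clamped to 2. A minimal reproduction (parseFetchDepth is a hypothetical name for illustration):

```typescript
// Mirrors the expression used in getInputs above.
const parseFetchDepth = (raw: string): number => Math.max(parseInt(raw, 10), 2)

// parseFetchDepth('5')  === 5
// parseFetchDepth('-5') === 2                        (clamped to at least 2)
// Number.isNaN(parseFetchDepth('invalid')) === true  (Math.max(NaN, 2) is NaN)
```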


@ -64,26 +64,14 @@ const getChangedFilesFromLocalGitHistory = async ({
} }
const isShallow = await isRepoShallow({cwd: workingDirectory}) const isShallow = await isRepoShallow({cwd: workingDirectory})
let diffSubmodule = false const hasSubmodule = await submoduleExists({cwd: workingDirectory})
let gitFetchExtraArgs = ['--no-tags', '--prune'] let gitFetchExtraArgs = ['--no-tags', '--prune', '--recurse-submodules']
if (inputs.excludeSubmodules) {
core.info('Excluding submodules from the diff')
} else {
diffSubmodule = await submoduleExists({cwd: workingDirectory})
}
if (diffSubmodule) {
gitFetchExtraArgs.push('--recurse-submodules')
}
const isTag = env.GITHUB_REF?.startsWith('refs/tags/') const isTag = env.GITHUB_REF?.startsWith('refs/tags/')
const remoteName = 'origin'
const outputRenamedFilesAsDeletedAndAdded = const outputRenamedFilesAsDeletedAndAdded =
inputs.outputRenamedFilesAsDeletedAndAdded inputs.outputRenamedFilesAsDeletedAndAdded
let submodulePaths: string[] = [] let submodulePaths: string[] = []
if (diffSubmodule) { if (hasSubmodule) {
submodulePaths = await getSubmodulePath({cwd: workingDirectory}) submodulePaths = await getSubmodulePath({cwd: workingDirectory})
} }
@ -95,30 +83,29 @@ const getChangedFilesFromLocalGitHistory = async ({
if (!github.context.payload.pull_request?.base?.ref) { if (!github.context.payload.pull_request?.base?.ref) {
core.info(`Running on a ${github.context.eventName || 'push'} event...`) core.info(`Running on a ${github.context.eventName || 'push'} event...`)
diffResult = await getSHAForNonPullRequestEvent({ diffResult = await getSHAForNonPullRequestEvent(
inputs, inputs,
env, env,
workingDirectory, workingDirectory,
isShallow, isShallow,
diffSubmodule, hasSubmodule,
gitFetchExtraArgs, gitFetchExtraArgs,
isTag, isTag
remoteName )
})
} else { } else {
core.info( core.info(
`Running on a ${github.context.eventName || 'pull_request'} (${ `Running on a ${github.context.eventName || 'pull_request'} (${
github.context.payload.action github.context.payload.action
}) event...` }) event...`
) )
diffResult = await getSHAForPullRequestEvent({ diffResult = await getSHAForPullRequestEvent(
inputs, inputs,
env,
workingDirectory, workingDirectory,
isShallow, isShallow,
diffSubmodule, hasSubmodule,
gitFetchExtraArgs, gitFetchExtraArgs
remoteName )
})
} }
if (diffResult.initialCommit) { if (diffResult.initialCommit) {
@ -133,7 +120,7 @@ const getChangedFilesFromLocalGitHistory = async ({
const allDiffFiles = await getAllDiffFiles({ const allDiffFiles = await getAllDiffFiles({
workingDirectory, workingDirectory,
diffSubmodule, hasSubmodule,
diffResult, diffResult,
submodulePaths, submodulePaths,
outputRenamedFilesAsDeletedAndAdded, outputRenamedFilesAsDeletedAndAdded,
@ -159,7 +146,7 @@ const getChangedFilesFromLocalGitHistory = async ({
deletedFiles: allDiffFiles[ChangeTypeEnum.Deleted], deletedFiles: allDiffFiles[ChangeTypeEnum.Deleted],
recoverPatterns, recoverPatterns,
diffResult, diffResult,
diffSubmodule, hasSubmodule,
submodulePaths submodulePaths
}) })
} }
@ -177,7 +164,7 @@ const getChangedFilesFromLocalGitHistory = async ({
const allOldNewRenamedFiles = await getRenamedFiles({ const allOldNewRenamedFiles = await getRenamedFiles({
inputs, inputs,
workingDirectory, workingDirectory,
diffSubmodule, hasSubmodule,
diffResult, diffResult,
submodulePaths submodulePaths
}) })
@ -234,6 +221,8 @@ export async function run(): Promise<void> {
const inputs = getInputs() const inputs = getInputs()
core.debug(`Inputs: ${JSON.stringify(inputs, null, 2)}`) core.debug(`Inputs: ${JSON.stringify(inputs, null, 2)}`)
core.debug(`Github Context: ${JSON.stringify(github.context, null, 2)}`)
const workingDirectory = path.resolve( const workingDirectory = path.resolve(
env.GITHUB_WORKSPACE || process.cwd(), env.GITHUB_WORKSPACE || process.cwd(),
inputs.useRestApi ? '.' : inputs.path inputs.useRestApi ? '.' : inputs.path
@ -275,8 +264,9 @@ export async function run(): Promise<void> {
}) })
} else { } else {
if (!hasGitDirectory) { if (!hasGitDirectory) {
core.info(`Running on a ${github.context.eventName} event...`)
throw new Error( throw new Error(
`Unable to locate the git repository in the given path: ${workingDirectory}.\n Please run actions/checkout before this action (Make sure the 'path' input is correct).\n If you intend to use Github's REST API note that only pull_request* events are supported. Current event is "${github.context.eventName}".` `Can't find local .git in ${workingDirectory} directory. Please run actions/checkout before this action (Make sure the 'path' input is correct). If you intend to use Github's REST API note that only pull_request* events are supported.`
) )
} }
@ -291,8 +281,11 @@ export async function run(): Promise<void> {
} }
} }
// eslint-disable-next-line github/no-then /* istanbul ignore if */
run().catch(e => { if (!process.env.TESTING) {
core.setFailed(e.message || e) // eslint-disable-next-line github/no-then
process.exit(1) run().catch(e => {
}) core.setFailed(e.message || e)
process.exit(1)
})
}
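
A small illustration of the working-directory resolution shown above; the workspace path and the `path` input value here are hypothetical:

```typescript
import * as path from 'path'

const GITHUB_WORKSPACE = '/home/runner/work/repo/repo' // hypothetical
const useRestApi = false
const inputPath = 'packages/app' // hypothetical `path` input

// Same expression as in run(): resolve the input path against the workspace.
const workingDirectory = path.resolve(
  GITHUB_WORKSPACE || process.cwd(),
  useRestApi ? '.' : inputPath
)
// => '/home/runner/work/repo/repo/packages/app'
```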


@ -32,13 +32,11 @@ export const normalizeSeparators = (p: string): string => {
// Remove redundant slashes // Remove redundant slashes
const isUnc = /^\\\\+[^\\]/.test(p) // e.g. \\hello const isUnc = /^\\\\+[^\\]/.test(p) // e.g. \\hello
p = (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\') // preserve leading \\ for UNC return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\') // preserve leading \\ for UNC
} else {
// Remove redundant slashes on Linux/macOS
p = p.replace(/\/\/+/g, '/')
} }
return p // Remove redundant slashes
return p.replace(/\/\/+/g, '/')
} }
/** /**
@ -687,16 +685,14 @@ export const isInsideWorkTree = async ({
export const getRemoteBranchHeadSha = async ({ export const getRemoteBranchHeadSha = async ({
cwd, cwd,
branch, branch
remoteName
}: { }: {
cwd: string cwd: string
branch: string branch: string
remoteName: string
}): Promise<string> => { }): Promise<string> => {
const {stdout} = await exec.getExecOutput( const {stdout} = await exec.getExecOutput(
'git', 'git',
['rev-parse', `${remoteName}/${branch}`], ['rev-parse', `origin/${branch}`],
{ {
cwd, cwd,
silent: !core.isDebug() silent: !core.isDebug()
@ -831,80 +827,41 @@ export const cleanShaInput = async ({
return stdout.trim() return stdout.trim()
} }
export const getPreviousGitTag = async ({ export const getPreviousGitTag = async ({
cwd, cwd
tagsPattern,
currentBranch,
tagsIgnorePattern
}: { }: {
cwd: string cwd: string
tagsPattern: string
currentBranch: string
tagsIgnorePattern?: string
}): Promise<{tag: string; sha: string}> => { }): Promise<{tag: string; sha: string}> => {
const ignorePatterns: string[] = []
let currentShaDate: Date | null = null
const {stdout} = await exec.getExecOutput( const {stdout} = await exec.getExecOutput(
'git', 'git',
[ ['tag', '--sort=-creatordate'],
'tag',
'--sort=-creatordate',
'--format=%(refname:short)|%(objectname)|%(creatordate:iso)'
],
{ {
cwd, cwd,
silent: !core.isDebug() silent: !core.isDebug()
} }
) )
if (tagsIgnorePattern) {
ignorePatterns.push(tagsIgnorePattern)
}
if (currentBranch) {
ignorePatterns.push(currentBranch)
try {
const {stdout: currentShaDateOutput} = await exec.getExecOutput(
'git',
['show', '-s', '--format=%ai', currentBranch],
{
cwd,
silent: !core.isDebug()
}
)
currentShaDate = new Date(currentShaDateOutput.trim())
} catch (error) {
// Handle the case where the current branch doesn't exist
// This might happen in detached head state
core.warning(`Failed to get date for current branch ${currentBranch}`)
}
}
const previousTag: {tag: string; sha: string} = {tag: '', sha: ''}
const tags = stdout.trim().split('\n') const tags = stdout.trim().split('\n')
for (const tagData of tags) {
const [tag, sha, dateString] = tagData.split('|')
if (!mm.isMatch(tag, tagsPattern) || mm.isMatch(tag, ignorePatterns)) {
continue
}
const date = new Date(dateString)
if (currentShaDate && date >= currentShaDate) {
continue
}
// Found a suitable tag, no need to continue
previousTag.tag = tag
previousTag.sha = sha
break
}
if (!previousTag.tag) { if (tags.length < 2) {
core.warning('No previous tag found') core.warning('No previous tag found')
return {tag: '', sha: ''}
} }
return previousTag const previousTag = tags[1]
const {stdout: stdout2} = await exec.getExecOutput(
'git',
['rev-parse', previousTag],
{
cwd,
silent: !core.isDebug()
}
)
const sha = stdout2.trim()
return {tag: previousTag, sha}
} }
export const canDiffCommits = async ({ export const canDiffCommits = async ({
@ -945,7 +902,7 @@ export const canDiffCommits = async ({
} else { } else {
const {exitCode, stderr} = await exec.getExecOutput( const {exitCode, stderr} = await exec.getExecOutput(
'git', 'git',
['diff', '--no-patch', sha1, sha2], ['diff', '--quiet', sha1, sha2],
{ {
cwd, cwd,
ignoreReturnCode: true, ignoreReturnCode: true,
@ -1258,11 +1215,16 @@ export const getYamlFilePatterns = async ({
const newFilePatterns = await getYamlFilePatternsFromContents({filePath}) const newFilePatterns = await getYamlFilePatternsFromContents({filePath})
for (const key in newFilePatterns) { for (const key in newFilePatterns) {
if (key in filePatterns) { if (key in filePatterns) {
filePatterns[key] = [...filePatterns[key], ...newFilePatterns[key]] core.warning(
} else { `files_yaml_from_source_file: Duplicated key ${key} detected in ${filePath}, the ${filePatterns[key]} will be overwritten by ${newFilePatterns[key]}.`
filePatterns[key] = newFilePatterns[key] )
} }
} }
filePatterns = {
...filePatterns,
...newFilePatterns
}
} }
} }
@ -1274,12 +1236,9 @@ export const getYamlFilePatterns = async ({
for (const key in newIgnoreFilePatterns) { for (const key in newIgnoreFilePatterns) {
if (key in filePatterns) { if (key in filePatterns) {
filePatterns[key] = [ core.warning(
...filePatterns[key], `files_ignore_yaml: Duplicated key ${key} detected, the ${filePatterns[key]} will be overwritten by ${newIgnoreFilePatterns[key]}.`
...newIgnoreFilePatterns[key] )
]
} else {
filePatterns[key] = newIgnoreFilePatterns[key]
} }
} }
} }
@ -1303,14 +1262,16 @@ export const getYamlFilePatterns = async ({
for (const key in newIgnoreFilePatterns) { for (const key in newIgnoreFilePatterns) {
if (key in filePatterns) { if (key in filePatterns) {
filePatterns[key] = [ core.warning(
...filePatterns[key], `files_ignore_yaml_from_source_file: Duplicated key ${key} detected in ${filePath}, the ${filePatterns[key]} will be overwritten by ${newIgnoreFilePatterns[key]}.`
...newIgnoreFilePatterns[key] )
]
} else {
filePatterns[key] = newIgnoreFilePatterns[key]
} }
} }
filePatterns = {
...filePatterns,
...newIgnoreFilePatterns
}
} }
} }
@ -1450,7 +1411,7 @@ export const recoverDeletedFiles = async ({
deletedFiles, deletedFiles,
recoverPatterns, recoverPatterns,
diffResult, diffResult,
diffSubmodule, hasSubmodule,
submodulePaths submodulePaths
}: { }: {
inputs: Inputs inputs: Inputs
@ -1458,7 +1419,7 @@ export const recoverDeletedFiles = async ({
deletedFiles: string[] deletedFiles: string[]
recoverPatterns: string[] recoverPatterns: string[]
diffResult: DiffResult diffResult: DiffResult
diffSubmodule: boolean hasSubmodule: boolean
submodulePaths: string[] submodulePaths: string[]
}): Promise<void> => { }): Promise<void> => {
let recoverableDeletedFiles = deletedFiles let recoverableDeletedFiles = deletedFiles
@ -1488,7 +1449,7 @@ export const recoverDeletedFiles = async ({
const submodulePath = submodulePaths.find(p => deletedFile.startsWith(p)) const submodulePath = submodulePaths.find(p => deletedFile.startsWith(p))
if (diffSubmodule && submodulePath) { if (hasSubmodule && submodulePath) {
const submoduleShaResult = await gitSubmoduleDiffSHA({ const submoduleShaResult = await gitSubmoduleDiffSHA({
cwd: workingDirectory, cwd: workingDirectory,
parentSha1: diffResult.previousSha, parentSha1: diffResult.previousSha,
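
In getYamlFilePatterns above, the left-hand side merges pattern arrays when a duplicate key is found, while the right-hand side overwrites the key and emits a warning. A sketch of the merge strategy (mergePatterns is a hypothetical helper):

```typescript
// Merges incoming YAML pattern groups into the existing ones, key by key.
const mergePatterns = (
  existing: Record<string, string[]>,
  incoming: Record<string, string[]>
): Record<string, string[]> => {
  const result: Record<string, string[]> = {...existing}
  for (const key of Object.keys(incoming)) {
    result[key] =
      key in result ? [...result[key], ...incoming[key]] : incoming[key]
  }
  return result
}

// mergePatterns({src: ['src/**']}, {src: ['lib/**'], docs: ['docs/**']})
// => {src: ['src/**', 'lib/**'], docs: ['docs/**']}
```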

@ -1 +1 @@
-Subproject commit c6bd3b33a9020f54e389066cc88e510015fefd13
+Subproject commit 5dfac2e9a7dc53ca33bc4682355193672c97437c


@ -1 +0,0 @@
This is a test markdown file


@ -1 +1 @@
-This is a test file...
+This is a test file.


@ -1,6 +1,6 @@
 {
   "compilerOptions": {
-    "target": "ES2018", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
+    "target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
     "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
     "outDir": "./lib", /* Redirect output structure to the directory. */
     "rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */

yarn.lock: 4906 changed lines (file diff suppressed because it is too large)