Mirror of https://github.com/tj-actions/changed-files (synced 2024-12-15 23:27:40 +00:00)

commit 413fd78918 (parent 7bbc71bb94)
feat: update action to nodejs (#1159)

Co-authored-by: Tonye Jack <jtonye@ymail.com>
Co-authored-by: GitHub Action <action@github.com>

32 changed files with 6256 additions and 1012 deletions
.eslintignore (new file; 4 lines)
@@ -0,0 +1,4 @@
dist/
lib/
node_modules/
jest.config.js
.eslintrc.json (new file; 81 lines)
@@ -0,0 +1,81 @@
{
  "plugins": [
    "jest",
    "@typescript-eslint"
  ],
  "extends": [
    "plugin:github/recommended"
  ],
  "parser": "@typescript-eslint/parser",
  "parserOptions": {
    "ecmaVersion": 9,
    "sourceType": "module",
    "project": "./tsconfig.json"
  },
  "rules": {
    "i18n-text/no-en": "off",
    "eslint-comments/no-use": "off",
    "import/no-namespace": "off",
    "no-unused-vars": "off",
    "@typescript-eslint/no-unused-vars": "error",
    "@typescript-eslint/explicit-member-accessibility": [
      "error",
      {
        "accessibility": "no-public"
      }
    ],
    "@typescript-eslint/no-require-imports": "error",
    "@typescript-eslint/array-type": "error",
    "@typescript-eslint/await-thenable": "error",
    "@typescript-eslint/ban-ts-comment": "off",
    "camelcase": "off",
    "@typescript-eslint/consistent-type-assertions": "error",
    "@typescript-eslint/explicit-function-return-type": [
      "error",
      {
        "allowExpressions": true
      }
    ],
    "@typescript-eslint/func-call-spacing": [
      "error",
      "never"
    ],
    "@typescript-eslint/no-array-constructor": "error",
    "@typescript-eslint/no-empty-interface": "error",
    "@typescript-eslint/no-explicit-any": "error",
    "@typescript-eslint/no-extraneous-class": "error",
    "@typescript-eslint/no-for-in-array": "error",
    "@typescript-eslint/no-inferrable-types": "error",
    "@typescript-eslint/no-misused-new": "error",
    "@typescript-eslint/no-namespace": "error",
    "@typescript-eslint/no-non-null-assertion": "warn",
    "@typescript-eslint/no-unnecessary-qualifier": "error",
    "@typescript-eslint/no-unnecessary-type-assertion": "error",
    "@typescript-eslint/no-useless-constructor": "error",
    "@typescript-eslint/no-var-requires": "error",
    "@typescript-eslint/prefer-for-of": "warn",
    "@typescript-eslint/prefer-function-type": "warn",
    "@typescript-eslint/prefer-includes": "error",
    "@typescript-eslint/prefer-string-starts-ends-with": "error",
    "@typescript-eslint/promise-function-async": "error",
    "@typescript-eslint/require-array-sort-compare": "error",
    "@typescript-eslint/restrict-plus-operands": "error",
    "semi": "off",
    "filenames/match-regex": [
      "error",
      "^[a-zA-Z0-9\\-.]+$",
      true
    ],
    "@typescript-eslint/semi": [
      "error",
      "never"
    ],
    "@typescript-eslint/type-annotation-spacing": "error",
    "@typescript-eslint/unbound-method": "error"
  },
  "env": {
    "node": true,
    "es6": true,
    "jest/globals": true
  }
}
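Taken together, .eslintignore and .eslintrc.json wire `@typescript-eslint` and the `github` plugin preset into the new TypeScript codebase, with semicolons disallowed and explicit return types required. A minimal sketch of invoking the linter locally against this config (assuming the plugins above are installed as devDependencies; the project's actual lint script name is not shown in this diff):

```
# Lint the TypeScript sources using the committed config and ignore file
npx eslint --config .eslintrc.json --ignore-path .eslintignore --ext .ts src/
```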
.gitattributes (vendored; new file; 1 line)
@@ -0,0 +1 @@
dist/** -diff linguist-generated=true
.github/workflows/codacy-analysis.yml (vendored; 5 changed lines)
@@ -19,6 +19,11 @@ on:

jobs:
  codacy-security-scan:
    # Cancel other workflows that are running for the same branch
    # https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency
    concurrency:
      group: ${{ github.workflow }}-${{ github.ref }}
      cancel-in-progress: true
    name: Codacy Security Scan
    runs-on: ubuntu-latest
    steps:
.github/workflows/codeql.yml (vendored; new file; 74 lines)
@@ -0,0 +1,74 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ "main" ]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [ "main" ]
  schedule:
    - cron: '44 20 * * 0'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'javascript' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support

    steps:
    - name: Checkout repository
      uses: actions/checkout@v3

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v2
      with:
        languages: ${{ matrix.language }}
        # If you wish to specify custom queries, you can do so here or in a config file.
        # By default, queries listed here will override any specified in a config file.
        # Prefix the list here with "+" to use these queries and those in the config file.

        # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
        # queries: security-extended,security-and-quality


    # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
    # If this step fails, then you should remove it and run the build manually (see below)
    - name: Autobuild
      uses: github/codeql-action/autobuild@v2

    # ℹ️ Command-line programs to run using the OS shell.
    # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun

    # If the Autobuild fails above, remove it and uncomment the following three lines.
    # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.

    # - run: |
    #     echo "Run, Build Application using script"
    #     ./location_of_script_within_repo/buildscript.sh

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v2
      with:
        category: "/language:${{matrix.language}}"
.github/workflows/matrix-test.yml (vendored; 1 changed line)
@@ -22,6 +22,7 @@ jobs:
        uses: ./
        with:
          json: true
          quotepath: false
      - name: List all changed files
        run: echo '${{ steps.changed-files.outputs.all_changed_files }}'
      - id: set-matrix
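With `json: true`, `all_changed_files` is emitted as a JSON array rather than a delimited string, which is what makes it usable for matrix jobs. A small illustration of consuming that shape in a shell step (file names are hypothetical):

```
ALL_CHANGED_FILES='["src/index.ts","README.md"]'  # shape produced when json: true
echo "$ALL_CHANGED_FILES" | jq -r '.[]' | while read -r file; do
  echo "changed: $file"
done
```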
.github/workflows/test.yml (vendored; 131 changed lines)
@@ -24,9 +24,81 @@ jobs:
      - name: shellcheck
        uses: reviewdog/action-shellcheck@v1.17

  build:
    runs-on: ubuntu-latest
    if: github.event_name != 'push'
    steps:
      - uses: actions/checkout@v3
        with:
          persist-credentials: false
          fetch-depth: 0

      - name: Use Node.js 16.x
        uses: actions/setup-node@v3.6.0
        with:
          cache: 'yarn'
          node-version: '16.x'

      - name: Create coverage directory and clover.xml
        run: |
          mkdir -p coverage
          touch coverage/clover.xml

      - name: Install dependencies
        run: |
          yarn install

      - name: Run eslint on changed files
        uses: tj-actions/eslint-changed-files@v18
        with:
          token: ${{ secrets.PAT_TOKEN }}
          config_path: ".eslintrc.json"
          ignore_path: ".eslintignore"

      - name: Run build and test
        run: |
          yarn all

      - name: Verify Changed files
        uses: tj-actions/verify-changed-files@v14
        id: changed_files
        with:
          files: |
            src
            dist

      - name: Commit files
        if: steps.changed_files.outputs.files_changed == 'true'
        run: |
          git config --local user.email "action@github.com"
          git config --local user.name "GitHub Action"
          git add src dist
          git commit -m "Added missing changes and modified dist assets."

      - name: Push changes
        if: steps.changed_files.outputs.files_changed == 'true'
        uses: ad-m/github-push-action@master
        with:
          github_token: ${{ secrets.PAT_TOKEN }}
          branch: ${{ github.head_ref }}

      - name: Upload build assets
        uses: actions/upload-artifact@v3
        with:
          name: build-assets
          path: dist

      - name: Run codacy-coverage-reporter
        uses: codacy/codacy-coverage-reporter-action@v1
        continue-on-error: true
        with:
          project-token: ${{ secrets.CODACY_PROJECT_TOKEN }}
          coverage-reports: coverage/lcov.info

  test-multiple-repositories:
    name: Test with multiple repositories
    runs-on: ubuntu-latest
    needs: build
    steps:
      - name: Checkout into dir1
        uses: actions/checkout@v3

@@ -34,16 +106,24 @@ jobs:
          submodules: true
          fetch-depth: 0
          path: dir1

      - name: Download build assets
        uses: actions/download-artifact@v3
        with:
          name: build-assets

      - name: Run changed-files with defaults on the dir1
        id: changed-files-dir1
        uses: ./dir1
        with:
          path: dir1

      - name: Show output
        run: |
          echo '${{ toJSON(steps.changed-files-dir1.outputs) }}'
        shell:
          bash

      - name: List all modified files
        run: |
          for file in ${{ steps.changed-files-dir1.outputs.modified_files }}; do

@@ -51,22 +131,26 @@ jobs:
          done
        shell:
          bash

      - name: Checkout into dir2
        uses: actions/checkout@v3
        with:
          submodules: true
          fetch-depth: 0
          path: dir2

      - name: Run changed-files with defaults on the dir2
        id: changed-files-dir2
        uses: ./dir2
        with:
          path: dir2

      - name: Show output
        run: |
          echo '${{ toJSON(steps.changed-files-dir2.outputs) }}'
        shell:
          bash

      - name: List all modified files
        run: |
          for file in ${{ steps.changed-files-dir2.outputs.modified_files }}; do

@@ -77,6 +161,7 @@ jobs:

  test-using-since-and-until:
    name: Test changed-files using since and until
    needs: build
    runs-on: ubuntu-latest
    if: github.event_name == 'push'

@@ -86,6 +171,11 @@ jobs:
        with:
          fetch-depth: 0

      - name: Download build assets
        uses: actions/download-artifact@v3
        with:
          name: build-assets

      - name: Run changed-files since 2022-08-19
        id: changed-files-since
        uses: ./

@@ -129,6 +219,7 @@ jobs:
  test-similar-base-and-commit-sha:
    name: Test changed-files similar base and commit sha
    runs-on: ubuntu-latest
    needs: build

    steps:
      - name: Checkout to branch

@@ -136,6 +227,11 @@ jobs:
        with:
          fetch-depth: 0

      - name: Download build assets
        uses: actions/download-artifact@v3
        with:
          name: build-assets

      - name: Run changed-files with similar base and commit sha
        id: changed-files
        continue-on-error: true

@@ -159,6 +255,7 @@ jobs:
  test-unset-github-output-env:
    name: Test unset GITHUB_OUTPUT env
    runs-on: ubuntu-latest
    needs: build

    steps:
      - name: Checkout to branch

@@ -166,6 +263,11 @@ jobs:
        with:
          fetch-depth: 0

      - name: Download build assets
        uses: actions/download-artifact@v3
        with:
          name: build-assets

      - name: Run changed-files with unset GITHUB_OUTPUT env
        id: changed-files
        continue-on-error: true

@@ -182,6 +284,7 @@ jobs:
  test-limited-commit-history:
    name: Test changed-files with limited commit history
    runs-on: ubuntu-latest
    needs: build
    strategy:
      fail-fast: false
      max-parallel: 4

@@ -195,6 +298,11 @@ jobs:
        with:
          fetch-depth: ${{ matrix.fetch-depth }}

      - name: Download build assets
        uses: actions/download-artifact@v3
        with:
          name: build-assets

      - name: Run changed-files
        id: changed-files
        uses: ./

@@ -210,11 +318,17 @@ jobs:
  test-non-existent-base-sha:
    name: Test changed-files non existent base sha
    runs-on: ubuntu-latest
    needs: build

    steps:
      - name: Checkout to branch
        uses: actions/checkout@v3

      - name: Download build assets
        uses: actions/download-artifact@v3
        with:
          name: build-assets

      - name: Run changed-files with non existent base sha
        id: changed-files
        uses: ./

@@ -257,11 +371,17 @@ jobs:
  test-non-existent-sha:
    name: Test changed-files non existent sha
    runs-on: ubuntu-latest
    needs: build

    steps:
      - name: Checkout to branch
        uses: actions/checkout@v3

      - name: Download build assets
        uses: actions/download-artifact@v3
        with:
          name: build-assets

      - name: Run changed-files with non existent sha
        id: changed-files
        uses: ./

@@ -304,6 +424,7 @@ jobs:
  test-submodules:
    name: Test changed-files with submodule
    runs-on: ubuntu-latest
    needs: build
    strategy:
      fail-fast: false
      max-parallel: 4

@@ -318,6 +439,11 @@ jobs:
          submodules: recursive
          fetch-depth: ${{ matrix.fetch-depth }}

      - name: Download build assets
        uses: actions/download-artifact@v3
        with:
          name: build-assets

      - name: Run changed-files with submodule
        id: changed-files
        uses: ./

@@ -341,6 +467,7 @@ jobs:
  test:
    name: Test changed-files
    runs-on: ${{ matrix.platform }}
    needs: build
    strategy:
      fail-fast: false
      max-parallel: 4

@@ -354,6 +481,10 @@ jobs:
        with:
          submodules: true
          fetch-depth: ${{ matrix.fetch-depth }}
      - name: Download build assets
        uses: actions/download-artifact@v3
        with:
          name: build-assets
      - name: Dump GitHub context
        env:
          GITHUB_CONTEXT: ${{ toJson(github) }}
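The build job's commit-back loop hinges on detecting whether the regenerated `src`/`dist` differ from what is checked in. A rough shell equivalent of that gate (a simplified sketch, not the `tj-actions/verify-changed-files` implementation):

```
# Flag the step output when tracked files under src/ or dist/ changed
if [ -n "$(git status --porcelain -- src dist)" ]; then
  echo "files_changed=true" >> "$GITHUB_OUTPUT"
fi
```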
.gitignore (vendored; 110 changed lines)
@@ -1,5 +1,107 @@
.idea/
.envrc
tag.sh
untag.sh
# Dependency directory
node_modules

# Rest pulled from https://github.com/github/gitignore/blob/master/Node.gitignore
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*

# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage
*.lcov

# nyc test coverage
.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
jspm_packages/

# TypeScript v1 declaration files
typings/

# TypeScript cache
*.tsbuildinfo

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variables file
./.env
.env/../.env
./.env.local
./.env/../.env.local
.env
.env.test

# parcel-bundler cache (https://parceljs.org/)
.cache

# next.js build output
.next

# nuxt.js build output
.nuxt

# vuepress build output
.vuepress/dist

# Serverless directories
.serverless/

# FuseBox cache
.fusebox/

# DynamoDB Local files
.dynamodb/

# OS metadata
.DS_Store
Thumbs.db

# Ignore built ts files
__tests__/runner/*
lib/**/*

# IDEA
.idea/
.nvmrc (new file; 1 line)
@@ -0,0 +1 @@
16
.prettierignore (new file; 3 lines)
@@ -0,0 +1,3 @@
dist/
lib/
node_modules/
.prettierrc.json (new file; 10 lines)
@@ -0,0 +1,10 @@
{
  "printWidth": 80,
  "tabWidth": 2,
  "useTabs": false,
  "semi": false,
  "singleQuote": true,
  "trailingComma": "none",
  "bracketSpacing": false,
  "arrowParens": "avoid"
}
README.md (19 changed lines)
@@ -335,14 +335,12 @@ See [outputs](#outputs) for a list of all available outputs.
      with:
        write_output_files: true

-   - name: Verify the contents of the .github/outputs/added_files.txt file
-     run: |
-       cat .github/outputs/added_files.txt
+   - name: Verify the contents of the .github/outputs/added_files.txt file
+     run: |
+       cat .github/outputs/added_files.txt
    ...
```

See [action.yml](action.yml#L264) for a list of all available keys.

</details>

<details>

@@ -357,14 +355,12 @@ See [action.yml](action.yml#L264) for a list of all available keys.
      json: true
      write_output_files: true

-   - name: Verify the contents of the .github/outputs/added_files.json file
-     run: |
-       cat .github/outputs/added_files.json
+   - name: Verify the contents of the .github/outputs/added_files.json file
+     run: |
+       cat .github/outputs/added_files.json
    ...
```

See [action.yml](action.yml#L264) for a list of all available keys.

</details>

<details>

@@ -745,8 +741,7 @@ And many more...
## Known Limitation

> **Warning**:
>
> * Using characters like `\n`, `%`, `.` and `\r` as separators would be [URL encoded](https://www.w3schools.com/tags/ref_urlencode.asp)
>
> * Spaces in file names can introduce bugs when using bash loops. See: [#216](https://github.com/tj-actions/changed-files/issues/216)
>   However, this action will handle spaces in file names, with a recommendation of using a separator to prevent hidden issues.
>
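The warning above is about word-splitting: an unquoted `for` loop breaks on spaces inside file names. A minimal sketch of the recommended separator approach (a comma separator is assumed here purely for illustration):

```
# Split on the configured separator instead of whitespace
IFS=',' read -ra files <<< "$ALL_CHANGED_FILES"
for file in "${files[@]}"; do
  echo "$file was changed"
done
```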
action.yml (164 changed lines)
@@ -23,6 +23,10 @@ inputs:
    description: "Source file(s) used to populate the `files` input."
    required: false
    default: ""
  files_from_source_file_separator:
    description: 'Separator used to split the `files-from-source-file` input'
    default: "\n"
    required: false
  files:
    description: "File and directory patterns to detect changes using only these list of file(s) (Defaults to the entire repo) **NOTE:** Multiline file/directory patterns should not include quotes."
    required: false

@@ -43,6 +47,10 @@ inputs:
    description: "Source file(s) used to populate the `files_ignore` input"
    required: false
    default: ""
  files_ignore_from_source_file_separator:
    description: 'Separator used to split the `files_ignore_from_source_file` input'
    default: "\n"
    required: false
  sha:
    description: "Specify a different commit SHA used for comparing changes"
    required: false

@@ -68,6 +76,7 @@ inputs:
  diff_relative:
    description: "Exclude changes outside the current directory and show path names relative to it. **NOTE:** This requires you to specify the top level directory via the `path` input."
    required: false
    default: "true"
  dir_names:
    default: "false"
    description: "Output unique changed directories instead of filenames. **NOTE:** This returns `.` for changed files located in the root of the project."

@@ -83,10 +92,10 @@ inputs:
    description: "Output list of changed files in a JSON formatted string which can be used for matrix jobs."
    required: false
    default: "false"
- json_raw_format:
-   description: "Output list of changed files in [jq](https://devdocs.io/jq/) raw output format which means that the output will not be surrounded by quotes and special characters will not be escaped."
+ escape_json:
+   description: "Escape JSON output."
    required: false
-   default: "false"
+   default: "true"
  fetch_depth:
    description: "Depth of additional branch history fetched. **NOTE**: This can be adjusted to resolve errors with insufficient history."
    required: false

@@ -103,187 +112,54 @@ inputs:
    description: "Directory to store output files."
    required: false
    default: ".github/outputs"
  match_directories:
    description: "Indicates whether to include match directories"
    default: "true"
    required: false

outputs:
  added_files:
    description: "Returns only files that are Added (A)."
-   value: ${{ steps.changed-files.outputs.added_files }}
  copied_files:
    description: "Returns only files that are Copied (C)."
-   value: ${{ steps.changed-files.outputs.copied_files }}
  deleted_files:
    description: "Returns only files that are Deleted (D)."
-   value: ${{ steps.changed-files.outputs.deleted_files }}
  modified_files:
    description: "Returns only files that are Modified (M)."
-   value: ${{ steps.changed-files.outputs.modified_files }}
  renamed_files:
    description: "Returns only files that are Renamed (R)."
-   value: ${{ steps.changed-files.outputs.renamed_files }}
  all_old_new_renamed_files:
    description: "Returns only files that are Renamed and list their old and new names. **NOTE:** This requires setting `include_all_old_new_renamed_files` to `true` (R)"
-   value: ${{ steps.changed-files.outputs.all_old_new_renamed_files }}
  type_changed_files:
    description: "Returns only files that have their file type changed (T)."
-   value: ${{ steps.changed-files.outputs.type_changed_files }}
  unmerged_files:
    description: "Returns only files that are Unmerged (U)."
-   value: ${{ steps.changed-files.outputs.unmerged_files }}
  unknown_files:
    description: "Returns only files that are Unknown (X)."
-   value: ${{ steps.changed-files.outputs.unknown_files }}
  all_changed_and_modified_files:
    description: "Returns all changed and modified files i.e. *a combination of (ACMRDTUX)*"
-   value: ${{ steps.changed-files.outputs.all_changed_and_modified_files }}
  all_changed_files:
    description: "Returns all changed files i.e. *a combination of all added, copied, modified and renamed files (ACMR)*"
-   value: ${{ steps.changed-files.outputs.all_changed_files }}
  any_changed:
-   description: "Returns `true` when any of the filenames provided using the `files` input has changed. If no `files` have been specified,an empty string `''` is returned. i.e. *using a combination of all added, copied, modified and renamed files (ACMR)*."
-   value: ${{ steps.changed-files.outputs.any_changed }}
+   description: "Returns `true` when any of the filenames provided using the `files` input has changed. i.e. *using a combination of all added, copied, modified and renamed files (ACMR)*."
  only_changed:
-   description: "Returns `true` when only files provided using the `files` input has changed. If no `files` have been specified,an empty string `''` is returned. i.e. *using a combination of all added, copied, modified and renamed files (ACMR)*."
-   value: ${{ steps.changed-files.outputs.only_changed }}
+   description: "Returns `true` when only files provided using the `files` input has changed. i.e. *using a combination of all added, copied, modified and renamed files (ACMR)*."
  other_changed_files:
    description: "Returns all other changed files not listed in the files input i.e. *using a combination of all added, copied, modified and renamed files (ACMR)*."
-   value: ${{ steps.changed-files.outputs.other_changed_files }}
  all_modified_files:
    description: "Returns all changed files i.e. *a combination of all added, copied, modified, renamed and deleted files (ACMRD)*."
-   value: ${{ steps.changed-files.outputs.all_modified_files }}
  any_modified:
-   description: "Returns `true` when any of the filenames provided using the `files` input has been modified. If no `files` have been specified,an empty string `''` is returned. i.e. *using a combination of all added, copied, modified, renamed, and deleted files (ACMRD)*."
-   value: ${{ steps.changed-files.outputs.any_modified }}
+   description: "Returns `true` when any of the filenames provided using the `files` input has been modified. i.e. *using a combination of all added, copied, modified, renamed, and deleted files (ACMRD)*."
  only_modified:
-   description: "Returns `true` when only files provided using the `files` input has been modified. If no `files` have been specified,an empty string `''` is returned.(ACMRD)."
-   value: ${{ steps.changed-files.outputs.only_modified }}
+   description: "Returns `true` when only files provided using the `files` input has been modified. (ACMRD)."
  other_modified_files:
    description: "Returns all other modified files not listed in the files input i.e. *a combination of all added, copied, modified, and deleted files (ACMRD)*"
-   value: ${{ steps.changed-files.outputs.other_modified_files }}
  any_deleted:
-   description: "Returns `true` when any of the filenames provided using the `files` input has been deleted. If no `files` have been specified,an empty string `''` is returned. (D)"
-   value: ${{ steps.changed-files.outputs.any_deleted }}
+   description: "Returns `true` when any of the filenames provided using the `files` input has been deleted. (D)"
  only_deleted:
-   description: "Returns `true` when only files provided using the `files` input has been deleted. If no `files` have been specified,an empty string `''` is returned. (D)"
-   value: ${{ steps.changed-files.outputs.only_deleted }}
+   description: "Returns `true` when only files provided using the `files` input has been deleted. (D)"
  other_deleted_files:
    description: "Returns all other deleted files not listed in the files input i.e. *a combination of all deleted files (D)*"
-   value: ${{ steps.changed-files.outputs.other_deleted_files }}

runs:
- using: "composite"
- steps:
-   - run: |
-       # "Calculating the previous and current SHA..."
-       bash $GITHUB_ACTION_PATH/diff-sha.sh
-     id: changed-files-diff-sha
-     shell: bash
-     env:
-       GITHUB_SERVER_URL: ${{ github.server_url }}
-       GITHUB_REPOSITORY: ${{ github.repository }}
-       GITHUB_REF: ${{ github.ref }}
-       GITHUB_SHA: ${{ github.sha }}
-       GITHUB_WORKSPACE: ${{ github.workspace }}
-       GITHUB_EVENT_BASE_REF: ${{ github.event.base_ref }}
-       GITHUB_EVENT_HEAD_REPO_FORK: ${{ github.event.pull_request.head.repo.fork }}
-       GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }}
-       GITHUB_EVENT_PULL_REQUEST_BASE_REF: ${{ github.event.pull_request.base.ref }}
-       GITHUB_EVENT_PULL_REQUEST_HEAD_REF: ${{ github.event.pull_request.head.ref }}
-       GITHUB_EVENT_PULL_REQUEST_BASE_SHA: ${{ github.event.pull_request.base.sha }}
-       GITHUB_EVENT_PULL_REQUEST_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
-       GITHUB_EVENT_PULL_REQUEST_COMMITS: ${{ github.event.pull_request.commits }}
-       GITHUB_EVENT_BEFORE: ${{ github.event.before }}
-       GITHUB_EVENT_FORCED: ${{ github.event.forced }}
-       GITHUB_REFNAME: ${{ github.ref_name }}
-       # INPUT_<VARIABLE_NAME> is not available in Composite run steps
-       # https://docs.github.com/en/actions/creating-actions/metadata-syntax-for-github-actions#example-specifying-inputs
-       INPUT_SHA: ${{ inputs.sha }}
-       INPUT_BASE_SHA: ${{ inputs.base_sha }}
-       INPUT_SINCE: ${{ inputs.since }}
-       INPUT_UNTIL: ${{ inputs.until }}
-       INPUT_PATH: ${{ inputs.path }}
-       INPUT_FETCH_DEPTH: ${{ inputs.fetch_depth }}
-       INPUT_SINCE_LAST_REMOTE_COMMIT: ${{ inputs.since_last_remote_commit }}
-   - name: Glob match
-     uses: tj-actions/glob@v17.2.6
-     id: glob
-     with:
-       files: ${{ inputs.files }}
-       files-separator: ${{ inputs.files_separator }}
-       excluded-files: ${{ inputs.files_ignore }}
-       excluded-files-separator: ${{ inputs.files_ignore_separator }}
-       files-from-source-file: ${{ inputs.files_from_source_file }}
-       excluded-files-from-source-file: ${{ inputs.files_ignore_from_source_file}}
-       escape-paths: true
-       working-directory: ${{ inputs.path }}
-       base-sha: ${{ steps.changed-files-diff-sha.outputs.previous_sha }}
-       sha: ${{ steps.changed-files-diff-sha.outputs.current_sha }}
-       diff: ${{ steps.changed-files-diff-sha.outputs.diff }}
-       match-directories: ${{ inputs.match_directories }}
-       include-deleted-files: true
-       separator: "|"
-   - run: |
-       bash $GITHUB_ACTION_PATH/get-changed-paths.sh
-     id: changed-files
-     shell: bash
-     env:
-       GITHUB_WORKSPACE: ${{ github.workspace }}
-       GITHUB_EVENT_PULL_REQUEST_BASE_REF: ${{ github.event.pull_request.base.ref }}
-       GITHUB_EVENT_PULL_REQUEST_HEAD_REPO_FORK: ${{ github.event.pull_request.head.repo.fork }}
-       # INPUT_<VARIABLE_NAME> is not available in Composite run steps
-       # https://docs.github.com/en/actions/creating-actions/metadata-syntax-for-github-actions#example-specifying-inputs
-       INPUT_FILES_PATTERN_FILE: ${{ steps.glob.outputs.paths-output-file }}
-       INPUT_SEPARATOR: ${{ inputs.separator }}
-       INPUT_PATH: ${{ inputs.path }}
-       INPUT_PREVIOUS_SHA: ${{ steps.changed-files-diff-sha.outputs.previous_sha }}
-       INPUT_CURRENT_SHA: ${{ steps.changed-files-diff-sha.outputs.current_sha }}
-       INPUT_TARGET_BRANCH: ${{ steps.changed-files-diff-sha.outputs.target_branch }}
-       INPUT_CURRENT_BRANCH: ${{ steps.changed-files-diff-sha.outputs.current_branch }}
-       INPUT_DIFF: ${{ steps.changed-files-diff-sha.outputs.diff }}
-       INPUT_QUOTEPATH: ${{ inputs.quotepath }}
-       INPUT_INCLUDE_ALL_OLD_NEW_RENAMED_FILES: ${{ inputs.include_all_old_new_renamed_files }}
-       INPUT_OLD_NEW_SEPARATOR: ${{ inputs.old_new_separator }}
-       INPUT_OLD_NEW_FILES_SEPARATOR: ${{ inputs.old_new_files_separator }}
-       INPUT_DIFF_RELATIVE: ${{ inputs.diff_relative }}
-       INPUT_DIR_NAMES: ${{ inputs.dir_names }}
-       INPUT_DIR_NAMES_MAX_DEPTH: ${{ inputs.dir_names_max_depth }}
-       INPUT_DIR_NAMES_EXCLUDE_ROOT: ${{ inputs.dir_names_exclude_root }}
-       INPUT_JSON: ${{ inputs.json }}
-       INPUT_HAS_CUSTOM_PATTERNS: ${{ steps.glob.outputs.has-custom-patterns }}
-       INPUT_JSON_RAW_FORMAT: ${{ inputs.json_raw_format }}
-   - name: Generate output files
-     uses: tj-actions/json2file@v1.7.2
-     if: inputs.write_output_files == 'true'
-     with:
-       outputs: ${{ toJSON(steps.changed-files.outputs) }}
-       directory: ${{ inputs.output_dir }}
-       skip_missing_keys: true
-       keys: |
-         added_files
-         copied_files
-         deleted_files
-         modified_files
-         renamed_files
-         all_old_new_renamed_files
-         type_changed_files
-         unmerged_files
-         unknown_files
-         all_changed_and_modified_files
-         all_changed_files
-         any_changed
-         only_changed
-         other_changed_files
-         all_modified_files
-         any_modified
-         only_modified
-         other_modified_files
-         any_deleted
-         only_deleted
-         other_deleted_files
-       extension: ${{ steps.changed-files.outputs.outputs_extension }}
+ using: 'node16'
+ main: 'dist/index.js'

branding:
  icon: file-text
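The removed composite `runs:` block explains the long `env:` maps above: `INPUT_<VARIABLE_NAME>` variables are not populated automatically for composite run steps, so every input and github-context value had to be forwarded by hand before `diff-sha.sh` and `get-changed-paths.sh` could read them. A minimal sketch of the receiving side (variable names taken from the diff above):

```
# Inside diff-sha.sh: inputs exist only because action.yml exported them via env:
INPUT_FETCH_DEPTH="${INPUT_FETCH_DEPTH:-}"
if [[ -n "$INPUT_FETCH_DEPTH" ]]; then
  echo "Will deepen shallow fetches by $INPUT_FETCH_DEPTH commits"
fi
```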
diff-sha.sh (deleted file; 323 lines removed)
@@ -1,323 +0,0 @@
#!/usr/bin/env bash

set -euo pipefail

INITIAL_COMMIT="false"
GITHUB_OUTPUT=${GITHUB_OUTPUT:-""}
EXTRA_ARGS="--no-tags --prune --recurse-submodules"
PREVIOUS_SHA=""
CURRENT_SHA=""
DIFF="..."
IS_TAG="false"
SOURCE_BRANCH=""

if [[ "$GITHUB_REF" == "refs/tags/"* ]]; then
  IS_TAG="true"
  EXTRA_ARGS="--prune --no-recurse-submodules"
  SOURCE_BRANCH=${GITHUB_EVENT_BASE_REF#refs/heads/}
fi

if [[ -z $GITHUB_EVENT_PULL_REQUEST_BASE_REF || "$GITHUB_EVENT_HEAD_REPO_FORK" == "true" ]]; then
  DIFF=".."
fi

echo "::group::changed-files-diff-sha"

if [[ -n $INPUT_PATH ]]; then
  REPO_DIR="$GITHUB_WORKSPACE/$INPUT_PATH"

  echo "::debug::Resolving repository path: $REPO_DIR"
  if [[ ! -d "$REPO_DIR" ]]; then
    echo "::error::Invalid repository path: $REPO_DIR"
    exit 1
  fi
  cd "$REPO_DIR"
fi

function __version() {
  echo "$@" | awk -F. '{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }';
}
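`__version` flattens a dotted version string into a fixed-width integer so a plain numeric `-lt` comparison orders versions correctly. For example, given the awk format above:

```
__version "2.18.0"   # prints 2018000000
__version "2.17.1"   # prints 2017001000 (less than 2018000000, so 2.17.1 is rejected as too old)
```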
echo "Verifying git version..."

GIT_VERSION=$(git --version | awk '{print $3}') && exit_status=$? || exit_status=$?

if [[ $exit_status -ne 0 ]]; then
  echo "::error::git not installed"
  exit 1
fi

if [[ $(__version "$GIT_VERSION") -lt $(__version "2.18.0") ]]; then
  echo "::error::Invalid git version. Please upgrade ($GIT_VERSION) to >= (2.18.0)"
  exit 1
else
  echo "Valid git version found: ($GIT_VERSION)"
fi

IS_SHALLOW=$(git rev-parse --is-shallow-repository) && exit_status=$? || exit_status=$?

if [[ $exit_status -ne 0 ]]; then
  echo "::error::Unable to determine if the repository is shallow"
  exit 1
fi

if [[ -z $GITHUB_EVENT_PULL_REQUEST_BASE_REF ]]; then
  echo "Running on a push event..."
  TARGET_BRANCH=$GITHUB_REFNAME
  CURRENT_BRANCH=$TARGET_BRANCH

  if [[ "$IS_SHALLOW" == "true" ]]; then
    echo "Fetching remote refs..."
    if [[ "$IS_TAG" == "false" ]]; then
      # shellcheck disable=SC2086
      git fetch $EXTRA_ARGS -u --progress --deepen="$INPUT_FETCH_DEPTH" origin +refs/heads/"$CURRENT_BRANCH":refs/remotes/origin/"$CURRENT_BRANCH" 1>/dev/null || true
    elif [[ "$SOURCE_BRANCH" != "" ]]; then
      # shellcheck disable=SC2086
      git fetch $EXTRA_ARGS -u --progress --deepen="$INPUT_FETCH_DEPTH" origin +refs/heads/"$SOURCE_BRANCH":refs/remotes/origin/"$SOURCE_BRANCH" 1>/dev/null || true
    fi

    if git submodule status &>/dev/null; then
      # shellcheck disable=SC2086
      git submodule foreach git fetch $EXTRA_ARGS -u --progress --deepen="$INPUT_FETCH_DEPTH" || true
    fi
  fi

  echo "::debug::Getting HEAD SHA..."
  if [[ -n "$INPUT_UNTIL" ]]; then
    echo "::debug::Getting HEAD SHA for '$INPUT_UNTIL'..."
    CURRENT_SHA=$(git log -1 --format="%H" --date=local --until="$INPUT_UNTIL") && exit_status=$? || exit_status=$?

    if [[ $exit_status -ne 0 ]]; then
      echo "::error::Invalid until date: $INPUT_UNTIL"
      exit 1
    fi
  else
    if [[ -z $INPUT_SHA ]]; then
      CURRENT_SHA=$(git rev-list -n 1 HEAD) && exit_status=$? || exit_status=$?
    else
      CURRENT_SHA=$INPUT_SHA; exit_status=$?
    fi
  fi

  echo "::debug::Verifying the current commit SHA: $CURRENT_SHA"
  git rev-parse --quiet --verify "$CURRENT_SHA^{commit}" 1>/dev/null 2>&1 && exit_status=$? || exit_status=$?

  if [[ $exit_status -ne 0 ]]; then
    echo "::error::Unable to locate the current sha: $CURRENT_SHA"
    echo "::error::Please verify that current sha is valid, and increase the fetch_depth to a number higher than $INPUT_FETCH_DEPTH."
    exit 1
  else
    echo "::debug::Current SHA: $CURRENT_SHA"
  fi

  if [[ -z $INPUT_BASE_SHA ]]; then
    if [[ -n "$INPUT_SINCE" ]]; then
      echo "::debug::Getting base SHA for '$INPUT_SINCE'..."
      PREVIOUS_SHA=$(git log --format="%H" --date=local --since="$INPUT_SINCE" | tail -1) && exit_status=$? || exit_status=$?

      if [[ -z "$PREVIOUS_SHA" ]]; then
        echo "::error::Unable to locate a previous commit for the specified date: $INPUT_SINCE"
        exit 1
      fi
    elif [[ "$IS_TAG" == "true" ]]; then
      PREVIOUS_SHA=$(git rev-parse "$(git tag --sort=-v:refname | head -n 2 | tail -n 1)") && exit_status=$? || exit_status=$?

      if [[ -z "$PREVIOUS_SHA" ]]; then
        echo "::error::Unable to locate a previous commit for the specified tag: $GITHUB_REF"
        exit 1
      fi
    else
      if [[ "$INPUT_SINCE_LAST_REMOTE_COMMIT" == "true" ]]; then
        PREVIOUS_SHA=""

        if [[ "$GITHUB_EVENT_FORCED" == "false" || -z "$GITHUB_EVENT_FORCED" ]]; then
          PREVIOUS_SHA=$GITHUB_EVENT_BEFORE && exit_status=$? || exit_status=$?
        else
          PREVIOUS_SHA=$(git rev-list -n 1 "HEAD^") && exit_status=$? || exit_status=$?
        fi
      else
        PREVIOUS_SHA=$(git rev-list -n 1 "HEAD^") && exit_status=$? || exit_status=$?
      fi

      if [[ -z "$PREVIOUS_SHA" || "$PREVIOUS_SHA" == "0000000000000000000000000000000000000000" ]]; then
        PREVIOUS_SHA=$(git rev-list -n 1 "HEAD^") && exit_status=$? || exit_status=$?
      fi

      if [[ "$PREVIOUS_SHA" == "$CURRENT_SHA" ]]; then
        if ! git rev-parse "$PREVIOUS_SHA^1" &>/dev/null; then
          INITIAL_COMMIT="true"
          PREVIOUS_SHA=$(git rev-parse "$CURRENT_SHA")
          echo "::warning::Initial commit detected no previous commit found."
        else
          PREVIOUS_SHA=$(git rev-parse "$PREVIOUS_SHA^1")
        fi
      else
        if [[ -z "$PREVIOUS_SHA" ]]; then
          echo "::error::Unable to locate a previous commit."
          exit 1
        fi
      fi
    fi
  else
    PREVIOUS_SHA=$INPUT_BASE_SHA

    if [[ "$IS_TAG" == "true" ]]; then
      TARGET_BRANCH=$(git describe --tags "$PREVIOUS_SHA")
    fi
  fi

  echo "::debug::Target branch $TARGET_BRANCH..."
  echo "::debug::Current branch $CURRENT_BRANCH..."

  echo "::debug::Verifying the previous commit SHA: $PREVIOUS_SHA"
  git rev-parse --quiet --verify "$PREVIOUS_SHA^{commit}" 1>/dev/null 2>&1 && exit_status=$? || exit_status=$?

  if [[ $exit_status -ne 0 ]]; then
    echo "::error::Unable to locate the previous sha: $PREVIOUS_SHA"
    echo "::error::Please verify that the previous sha commit is valid, and increase the fetch_depth to a number higher than $INPUT_FETCH_DEPTH."
    exit 1
  fi
else
  echo "Running on a pull request event..."
  TARGET_BRANCH=$GITHUB_EVENT_PULL_REQUEST_BASE_REF
  CURRENT_BRANCH=$GITHUB_EVENT_PULL_REQUEST_HEAD_REF

  if [[ "$INPUT_SINCE_LAST_REMOTE_COMMIT" == "true" ]]; then
    TARGET_BRANCH=$CURRENT_BRANCH
  fi

  if [[ "$IS_SHALLOW" == "true" ]]; then
    echo "Fetching remote refs..."

    # shellcheck disable=SC2086
    if git fetch $EXTRA_ARGS -u --progress origin pull/"$GITHUB_EVENT_PULL_REQUEST_NUMBER"/head:"$CURRENT_BRANCH" 1>/dev/null; then
      echo "First fetch succeeded"
    else
      echo "First fetch failed, falling back to second fetch"
      # shellcheck disable=SC2086
      git fetch $EXTRA_ARGS -u --progress --deepen="$INPUT_FETCH_DEPTH" origin +refs/heads/"$CURRENT_BRANCH"*:refs/remotes/origin/"$CURRENT_BRANCH"* 1>/dev/null || true
    fi

    if [[ "$INPUT_SINCE_LAST_REMOTE_COMMIT" != "true" ]]; then
      echo "::debug::Fetching remote target branch..."
      # shellcheck disable=SC2086
      git fetch $EXTRA_ARGS -u --progress --deepen="$INPUT_FETCH_DEPTH" origin +refs/heads/"$TARGET_BRANCH":refs/remotes/origin/"$TARGET_BRANCH" 1>/dev/null
      git branch --track "$TARGET_BRANCH" origin/"$TARGET_BRANCH" 1>/dev/null || true
    fi

    if git submodule status &>/dev/null; then
      # shellcheck disable=SC2086
      git submodule foreach git fetch $EXTRA_ARGS -u --progress --deepen="$INPUT_FETCH_DEPTH" || true
    fi
  fi

  echo "::debug::Getting HEAD SHA..."
  if [[ -n "$INPUT_UNTIL" ]]; then
    echo "::debug::Getting HEAD SHA for '$INPUT_UNTIL'..."
    CURRENT_SHA=$(git log -1 --format="%H" --date=local --until="$INPUT_UNTIL") && exit_status=$? || exit_status=$?

    if [[ $exit_status -ne 0 ]]; then
      echo "::error::Invalid until date: $INPUT_UNTIL"
      exit 1
    fi
  else
    if [[ -z $INPUT_SHA ]]; then
      CURRENT_SHA=$(git rev-list -n 1 HEAD) && exit_status=$? || exit_status=$?
    else
      CURRENT_SHA=$INPUT_SHA; exit_status=$?
    fi
  fi

  echo "::debug::Verifying the current commit SHA: $CURRENT_SHA"
  git rev-parse --quiet --verify "$CURRENT_SHA^{commit}" 1>/dev/null 2>&1 && exit_status=$? || exit_status=$?

  if [[ $exit_status -ne 0 ]]; then
    echo "::error::Unable to locate the current sha: $CURRENT_SHA"
    echo "::error::Please verify that current sha is valid, and increase the fetch_depth to a number higher than $INPUT_FETCH_DEPTH."
    exit 1
  else
    echo "::debug::Current SHA: $CURRENT_SHA"
  fi

  if [[ -z $INPUT_BASE_SHA ]]; then
    if [[ "$INPUT_SINCE_LAST_REMOTE_COMMIT" == "true" ]]; then
      PREVIOUS_SHA=$GITHUB_EVENT_BEFORE

      if ! git rev-parse --quiet --verify "$PREVIOUS_SHA^{commit}" 1>/dev/null 2>&1; then
        PREVIOUS_SHA=$GITHUB_EVENT_PULL_REQUEST_BASE_SHA
      fi
    else
      PREVIOUS_SHA=$(git rev-parse origin/"$TARGET_BRANCH") && exit_status=$? || exit_status=$?

      if [[ "$IS_SHALLOW" == "true" ]]; then
        # check if the merge base is in the local history
        if ! git diff --name-only --ignore-submodules=all "$PREVIOUS_SHA$DIFF$CURRENT_SHA" 1>/dev/null 2>&1; then
          echo "::debug::Merge base is not in the local history, fetching remote target branch..."
          # Fetch more of the target branch history until the merge base is found
          for i in {1..10}; do
            # shellcheck disable=SC2086
            git fetch $EXTRA_ARGS -u --progress --deepen="$INPUT_FETCH_DEPTH" origin +refs/heads/"$TARGET_BRANCH":refs/remotes/origin/"$TARGET_BRANCH" 1>/dev/null
            if git diff --name-only --ignore-submodules=all "$PREVIOUS_SHA$DIFF$CURRENT_SHA" 1>/dev/null 2>&1; then
              break
            fi
            echo "::debug::Merge base is not in the local history, fetching remote target branch again..."
            echo "::debug::Attempt $i/10"
          done
        fi
      fi
    fi

    if [[ -z "$PREVIOUS_SHA" || "$PREVIOUS_SHA" == "$CURRENT_SHA" ]]; then
      PREVIOUS_SHA=$GITHUB_EVENT_PULL_REQUEST_BASE_SHA && exit_status=$? || exit_status=$?
    fi

    echo "::debug::Previous SHA: $PREVIOUS_SHA"
  else
    PREVIOUS_SHA=$INPUT_BASE_SHA && exit_status=$? || exit_status=$?
  fi

  if ! git diff --name-only --ignore-submodules=all "$PREVIOUS_SHA$DIFF$CURRENT_SHA" 1>/dev/null 2>&1; then
    DIFF=".."
  fi

  echo "::debug::Target branch: $TARGET_BRANCH"
  echo "::debug::Current branch: $CURRENT_BRANCH"

  echo "::debug::Verifying the previous commit SHA: $PREVIOUS_SHA"
  git rev-parse --quiet --verify "$PREVIOUS_SHA^{commit}" 1>/dev/null 2>&1 && exit_status=$? || exit_status=$?

  if [[ $exit_status -ne 0 ]]; then
    echo "::error::Unable to locate the previous sha: $PREVIOUS_SHA"
    echo "::error::Please verify that the previous sha is valid, and increase the fetch_depth to a number higher than $INPUT_FETCH_DEPTH."
    exit 1
  fi

  if ! git diff --name-only --ignore-submodules=all "$PREVIOUS_SHA$DIFF$CURRENT_SHA" 1>/dev/null 2>&1; then
    echo "::error::Unable to determine a difference between $PREVIOUS_SHA$DIFF$CURRENT_SHA"
    exit 1
  fi
fi

if [[ "$PREVIOUS_SHA" == "$CURRENT_SHA" && "$INITIAL_COMMIT" == "false" ]]; then
  echo "::error::Similar commit hashes detected: previous sha: $PREVIOUS_SHA is equivalent to the current sha: $CURRENT_SHA."
  echo "::error::Please verify that both commits are valid, and increase the fetch_depth to a number higher than $INPUT_FETCH_DEPTH."
  exit 1
fi

if [[ -z "$GITHUB_OUTPUT" ]]; then
  echo "::set-output name=target_branch::$TARGET_BRANCH"
  echo "::set-output name=current_branch::$CURRENT_BRANCH"
  echo "::set-output name=previous_sha::$PREVIOUS_SHA"
  echo "::set-output name=current_sha::$CURRENT_SHA"
  echo "::set-output name=diff::$DIFF"
else
  cat <<EOF >> "$GITHUB_OUTPUT"
target_branch=$TARGET_BRANCH
current_branch=$CURRENT_BRANCH
previous_sha=$PREVIOUS_SHA
current_sha=$CURRENT_SHA
diff=$DIFF
EOF
fi

echo "::endgroup::"
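Note the output logic at the end of the deleted script: the deprecated `::set-output` workflow command is only used as a fallback when `$GITHUB_OUTPUT` is unset; on current runners outputs are appended to the `$GITHUB_OUTPUT` file instead. The file-based form looks like this (the value is illustrative):

```
echo "previous_sha=7bbc71bb94" >> "$GITHUB_OUTPUT"
```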
dist/index.js (generated, vendored; new file; binary file not shown)
dist/index.js.map (generated, vendored; new file; binary file not shown)
dist/licenses.txt (generated, vendored; new file; binary file not shown)
dist/sourcemap-register.js (generated, vendored; new file; binary file not shown)
get-changed-paths.sh (deleted file; 528 lines removed)
@@ -1,528 +0,0 @@
#!/usr/bin/env bash

set -euo pipefail

INPUT_SEPARATOR="${INPUT_SEPARATOR//'%'/'%25'}"
INPUT_SEPARATOR="${INPUT_SEPARATOR//'.'/'%2E'}"
INPUT_SEPARATOR="${INPUT_SEPARATOR//$'\n'/'%0A'}"
INPUT_SEPARATOR="${INPUT_SEPARATOR//$'\r'/'%0D'}"

GITHUB_OUTPUT=${GITHUB_OUTPUT:-""}
DIFF=$INPUT_DIFF

OUTPUTS_EXTENSION="txt"

if [[ "$INPUT_JSON" == "true" ]]; then
  OUTPUTS_EXTENSION="json"
fi

if [[ $INPUT_QUOTEPATH == "false" ]]; then
  git config --global core.quotepath off
else
  git config --global core.quotepath on
fi

if [[ -n $INPUT_DIFF_RELATIVE ]]; then
  git config --global diff.relative "$INPUT_DIFF_RELATIVE"
fi

function get_dirname_max_depth() {
  while IFS='' read -r line; do
    local dir="$line"
    local dirs=()
    IFS='/' read -ra dirs <<<"$dir"

    local max_depth=${#dirs[@]}
    local input_dir_names_max_depth="${INPUT_DIR_NAMES_MAX_DEPTH:-$max_depth}"

    if [[ -n "$input_dir_names_max_depth" && "$input_dir_names_max_depth" -lt "$max_depth" ]]; then
      max_depth="$input_dir_names_max_depth"
    fi

    local output="${dirs[0]}"
    local depth="1"

    while [ "$depth" -lt "$max_depth" ]; do
      output="$output/${dirs[${depth}]}"
      depth=$((depth + 1))
    done

    if [[ "$INPUT_DIR_NAMES_EXCLUDE_ROOT" == "true" && "$output" == "." ]]; then
      continue
    fi

    echo "$output"
  done < <(uniq)
}

function json_output() {
  local jq_args="-sR"
  if [[ "$INPUT_JSON_RAW_FORMAT" == "true" ]]; then
    jq_args="$jq_args -r"
  fi

  # shellcheck disable=SC2086
  jq $jq_args 'split("\n") | map(select(. != "")) | @json' | sed -r 's/^"|"$//g' | tr -s /
}
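The jq stage of `json_output` is easiest to see on a toy input (file names are illustrative):

```
printf 'a.txt\nb.txt\n' | jq -sR 'split("\n") | map(select(. != "")) | @json'
# prints "[\"a.txt\",\"b.txt\"]"; the trailing sed strips the outer quotes,
# and the -r raw mode (INPUT_JSON_RAW_FORMAT) yields ["a.txt","b.txt"] unescaped
```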
function get_diff() {
  local base="$1"
  local sha="$2"
  local filter="$3"

  while IFS='' read -r sub; do
    sub_commit_pre="$(git diff "$base" "$sha" -- "$sub" | { grep '^[-]Subproject commit' || true; } | awk '{print $3}')" && exit_status=$? || exit_status=$?
    if [[ $exit_status -ne 0 ]]; then
      echo "::warning::Failed to get previous commit for submodule ($sub) between: $base $sha. Please ensure that submodules are initialized and up to date. See: https://github.com/actions/checkout#usage" >&2
    fi

    sub_commit_cur="$(git diff "$base" "$sha" -- "$sub" | { grep '^[+]Subproject commit' || true; } | awk '{print $3}')" && exit_status=$? || exit_status=$?
    if [[ $exit_status -ne 0 ]]; then
      echo "::warning::Failed to get current commit for submodule ($sub) between: $base $sha. Please ensure that submodules are initialized and up to date. See: https://github.com/actions/checkout#usage" >&2
    fi

    if [ -n "$sub_commit_cur" ]; then
      (
        cd "$sub" && (
          # the strange magic number is a hardcoded "empty tree" commit sha
          git diff --diff-filter="$filter" --name-only --ignore-submodules=all "${sub_commit_pre:-4b825dc642cb6eb9a060e54bf8d69288fbee4904}" "${sub_commit_cur}" | awk -v r="$sub" '{ print "" r "/" $0}' 2>/dev/null
        )
      ) || {
        echo "::warning::Failed to get changed files for submodule ($sub) between: ${sub_commit_pre:-4b825dc642cb6eb9a060e54bf8d69288fbee4904} ${sub_commit_cur}. Please ensure that submodules are initialized and up to date. See: https://github.com/actions/checkout#usage" >&2
      }
    fi
  done < <(git submodule status --recursive | grep -v "^-" | awk '{print $2}')

  if [[ "$filter" == "D" ]]; then
    while read -r sub; do
      echo "$sub"
    done < <(git submodule status --recursive | grep -e "^-" | awk '{print $2}')
  fi

  git diff --diff-filter="$filter" --name-only --ignore-submodules=all "$base$DIFF$sha" && exit_status=$? || exit_status=$?

  if [[ $exit_status -ne 0 ]]; then
    echo "::error::Failed to get changed files between: $base$DIFF$sha" >&2
    return 1
  fi
}
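The fallback base used for submodules is git's well-known empty-tree object id, which turns the diff into "everything since the beginning" when no previous submodule commit exists. It is reproducible on any machine:

```
git hash-object -t tree /dev/null
# prints 4b825dc642cb6eb9a060e54bf8d69288fbee4904
```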
function get_renames() {
  local base="$1"
  local sha="$2"

  while IFS='' read -r sub; do
    sub_commit_pre="$(git diff "$base" "$sha" -- "$sub" | { grep '^[-]Subproject commit' || true; } | awk '{print $3}')" && exit_status=$? || exit_status=$?
    if [[ $exit_status -ne 0 ]]; then
      echo "::warning::Failed to get previous commit for submodule ($sub) between: $base $sha. Please ensure that submodules are initialized and up to date. See: https://github.com/actions/checkout#usage" >&2
    fi

    sub_commit_cur="$(git diff "$base" "$sha" -- "$sub" | { grep '^[+]Subproject commit' || true; } | awk '{print $3}')" && exit_status=$? || exit_status=$?
    if [[ $exit_status -ne 0 ]]; then
      echo "::warning::Failed to get current commit for submodule ($sub) between: $base $sha. Please ensure that submodules are initialized and up to date. See: https://github.com/actions/checkout#usage" >&2
    fi

    if [ -n "$sub_commit_cur" ]; then
      (
        cd "$sub" && (
          # the strange magic number is a hardcoded "empty tree" commit sha
          get_renames "${sub_commit_pre:-4b825dc642cb6eb9a060e54bf8d69288fbee4904}" "${sub_commit_cur}" | awk -v r="$sub" '{ print "" r "/" $0}'
        )
      ) || {
        echo "::warning::Failed to get renamed files for submodule ($sub) between: ${sub_commit_pre:-4b825dc642cb6eb9a060e54bf8d69288fbee4904} ${sub_commit_cur}. Please ensure that submodules are initialized and up to date. See: https://github.com/actions/checkout#usage" >&2
      }
    fi
  done < <(git submodule | awk '{print $2}')

  git diff --name-status --ignore-submodules=all --diff-filter=R "$base$DIFF$sha" | { grep -E "^R" || true; } | awk -F '\t' -v d="$INPUT_OLD_NEW_SEPARATOR" '{print $2d$3}' && exit_status=$? || exit_status=$?

  if [[ $exit_status -ne 0 ]]; then
    echo "::error::Failed to get renamed files between: $base → $sha" >&2
    return 1
  fi
}

echo "::group::changed-files"

if [[ -n $INPUT_PATH ]]; then
  REPO_DIR="$GITHUB_WORKSPACE/$INPUT_PATH"

  echo "Resolving repository path: $REPO_DIR"
  if [[ ! -d "$REPO_DIR" ]]; then
    echo "::error::Invalid repository path: $REPO_DIR"
    exit 1
  fi
  cd "$REPO_DIR"
fi

echo "Retrieving changes between $INPUT_PREVIOUS_SHA ($INPUT_TARGET_BRANCH) → $INPUT_CURRENT_SHA ($INPUT_CURRENT_BRANCH)"

if [[ "$INPUT_HAS_CUSTOM_PATTERNS" == "false" || -z "$INPUT_FILES_PATTERN_FILE" ]]; then
  if [[ "$INPUT_JSON" == "false" ]]; then
    if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
      ADDED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" A | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      COPIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" C | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      DELETED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" D | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" M | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      RENAMED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" R | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      TYPE_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" T | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      UNMERGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" U | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      UNKNOWN=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" X | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      ALL_CHANGED_AND_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "*ACDMRTUX" | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      ALL_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMR" | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      ALL_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMRD" | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
    else
      ADDED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" A | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      COPIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" C | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      DELETED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" D | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" M | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      RENAMED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" R | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      TYPE_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" T | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      UNMERGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" U | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      UNKNOWN=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" X | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      ALL_CHANGED_AND_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "*ACDMRTUX" | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      ALL_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMR" | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      ALL_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMRD" | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
    fi
    if [[ $INPUT_INCLUDE_ALL_OLD_NEW_RENAMED_FILES == "true" ]]; then
      if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
        ALL_OLD_NEW_RENAMED=$(get_renames "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_OLD_NEW_FILES_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      else
        ALL_OLD_NEW_RENAMED=$(get_renames "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" | awk -v d="$INPUT_OLD_NEW_FILES_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
      fi
    fi
  else
    if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
      ADDED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" A | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
      COPIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" C | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
      DELETED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" D | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
      MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" M | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
      RENAMED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" R | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
      TYPE_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" T | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
      UNMERGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" U | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
      UNKNOWN=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" X | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
      ALL_CHANGED_AND_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "*ACDMRTUX" | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
      ALL_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMR" | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
      ALL_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMRD" | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
    else
      ADDED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" A | json_output)
      COPIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" C | json_output)
      DELETED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" D | json_output)
|
||||
MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" M | json_output)
|
||||
RENAMED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" R | json_output)
|
||||
TYPE_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" T | json_output)
|
||||
UNMERGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" U | json_output)
|
||||
UNKNOWN=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" X | json_output)
|
||||
ALL_CHANGED_AND_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "*ACDMRTUX" | json_output)
|
||||
ALL_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMR" | json_output)
|
||||
ALL_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMRD" | json_output)
|
||||
fi
|
||||
if [[ $INPUT_INCLUDE_ALL_OLD_NEW_RENAMED_FILES == "true" ]]; then
|
||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
||||
ALL_OLD_NEW_RENAMED=$(get_renames "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
||||
else
|
||||
ALL_OLD_NEW_RENAMED=$(get_renames "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" | json_output)
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
else
|
||||
ADDED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" A | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
COPIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" C | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
DELETED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" D | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" M | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
RENAMED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" R | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
TYPE_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" T | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
UNMERGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" U | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
UNKNOWN=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" X | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
ALL_CHANGED_AND_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "*ACDMRTUX" | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
ALL_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMR" | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
ALL_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMRD" | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
if [[ $INPUT_INCLUDE_ALL_OLD_NEW_RENAMED_FILES == "true" ]]; then
|
||||
ALL_OLD_NEW_RENAMED=$(get_renames "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" | { grep -w -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="$INPUT_OLD_NEW_FILES_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
fi
|
||||
|
||||
ALL_OTHER_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMR" | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
|
||||
if [[ -n "${ALL_CHANGED}" ]]; then
|
||||
echo "::debug::Matching changed files: ${ALL_CHANGED}"
|
||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
||||
echo "::set-output name=any_changed::true"
|
||||
else
|
||||
echo "any_changed=true" >>"$GITHUB_OUTPUT"
|
||||
fi
|
||||
else
|
||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
||||
echo "::set-output name=any_changed::false"
|
||||
else
|
||||
echo "any_changed=false" >>"$GITHUB_OUTPUT"
|
||||
fi
|
||||
fi
|
||||
|
||||
OTHER_CHANGED=""
|
||||
|
||||
if [[ -n $ALL_OTHER_CHANGED ]]; then
|
||||
if [[ -n "$ALL_CHANGED" ]]; then
|
||||
OTHER_CHANGED=$(echo "${ALL_OTHER_CHANGED}|${ALL_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | sort | uniq -u | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
else
|
||||
OTHER_CHANGED=$ALL_OTHER_CHANGED
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$INPUT_JSON" == "false" ]]; then
|
||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
||||
OTHER_CHANGED=$(echo "${OTHER_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
else
|
||||
OTHER_CHANGED=$(echo "${OTHER_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
fi
|
||||
else
|
||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
||||
OTHER_CHANGED=$(echo "${OTHER_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
||||
else
|
||||
OTHER_CHANGED=$(echo "${OTHER_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -n "${OTHER_CHANGED}" && "${OTHER_CHANGED}" != "[]" ]]; then
|
||||
echo "::debug::Non Matching changed files: ${OTHER_CHANGED}"
|
||||
|
||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
||||
echo "::set-output name=only_changed::false"
|
||||
echo "::set-output name=other_changed_files::$OTHER_CHANGED"
|
||||
else
|
||||
echo "only_changed=false" >>"$GITHUB_OUTPUT"
|
||||
echo "other_changed_files=$OTHER_CHANGED" >>"$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
elif [[ -n "${ALL_CHANGED}" ]]; then
|
||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
||||
echo "::set-output name=only_changed::true"
|
||||
else
|
||||
echo "only_changed=true" >>"$GITHUB_OUTPUT"
|
||||
fi
|
||||
fi
|
||||
|
||||
ALL_OTHER_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMRD" | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
|
||||
if [[ -n "${ALL_MODIFIED}" ]]; then
|
||||
echo "::debug::Matching modified files: ${ALL_MODIFIED}"
|
||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
||||
echo "::set-output name=any_modified::true"
|
||||
else
|
||||
echo "any_modified=true" >>"$GITHUB_OUTPUT"
|
||||
fi
|
||||
else
|
||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
||||
echo "::set-output name=any_modified::false"
|
||||
else
|
||||
echo "any_modified=false" >>"$GITHUB_OUTPUT"
|
||||
fi
|
||||
fi
|
||||
|
||||
OTHER_MODIFIED=""
|
||||
|
||||
if [[ -n $ALL_OTHER_MODIFIED ]]; then
|
||||
if [[ -n "$ALL_MODIFIED" ]]; then
|
||||
OTHER_MODIFIED=$(echo "${ALL_OTHER_MODIFIED}|${ALL_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | sort | uniq -u | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
else
|
||||
OTHER_MODIFIED=$ALL_OTHER_MODIFIED
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$INPUT_JSON" == "false" ]]; then
|
||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
||||
OTHER_MODIFIED=$(echo "${OTHER_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
else
|
||||
OTHER_MODIFIED=$(echo "${OTHER_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
fi
|
||||
else
|
||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
||||
OTHER_MODIFIED=$(echo "${OTHER_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
||||
else
|
||||
OTHER_MODIFIED=$(echo "${OTHER_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -n "${OTHER_MODIFIED}" && "$OTHER_MODIFIED" != "[]" ]]; then
|
||||
echo "::debug::Non Matching modified files: ${OTHER_MODIFIED}"
|
||||
|
||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
||||
echo "::set-output name=only_modified::false"
|
||||
echo "::set-output name=other_modified_files::$OTHER_MODIFIED"
|
||||
else
|
||||
echo "only_modified=false" >>"$GITHUB_OUTPUT"
|
||||
echo "other_modified_files=$OTHER_MODIFIED" >>"$GITHUB_OUTPUT"
|
||||
fi
|
||||
elif [[ -n "${ALL_MODIFIED}" ]]; then
|
||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
||||
echo "::set-output name=only_modified::true"
|
||||
else
|
||||
echo "only_modified=true" >>"$GITHUB_OUTPUT"
|
||||
fi
|
||||
fi
|
||||
|
||||
ALL_OTHER_DELETED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" D | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
|
||||
if [[ -n "${DELETED}" ]]; then
|
||||
echo "::debug::Matching deleted files: ${DELETED}"
|
||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
||||
echo "::set-output name=any_deleted::true"
|
||||
else
|
||||
echo "any_deleted=true" >>"$GITHUB_OUTPUT"
|
||||
fi
|
||||
else
|
||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
||||
echo "::set-output name=any_deleted::false"
|
||||
else
|
||||
echo "any_deleted=false" >>"$GITHUB_OUTPUT"
|
||||
fi
|
||||
fi
|
||||
|
||||
OTHER_DELETED=""
|
||||
|
||||
if [[ -n $ALL_OTHER_DELETED ]]; then
|
||||
if [[ -n "$DELETED" ]]; then
|
||||
OTHER_DELETED=$(echo "${ALL_OTHER_DELETED}|${DELETED}" | awk '{gsub(/\|/,"\n"); print $0;}' | sort | uniq -u | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
else
|
||||
OTHER_DELETED=$ALL_OTHER_DELETED
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$INPUT_JSON" == "false" ]]; then
|
||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
||||
OTHER_DELETED=$(echo "${OTHER_DELETED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
else
|
||||
OTHER_DELETED=$(echo "${OTHER_DELETED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
fi
|
||||
else
|
||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
||||
OTHER_DELETED=$(echo "${OTHER_DELETED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
||||
else
|
||||
OTHER_DELETED=$(echo "${OTHER_DELETED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -n "${OTHER_DELETED}" && "${OTHER_DELETED}" != "[]" ]]; then
|
||||
echo "::debug::Non Matching deleted files: ${OTHER_DELETED}"
|
||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
||||
echo "::set-output name=only_deleted::false"
|
||||
echo "::set-output name=other_deleted_files::$OTHER_DELETED"
|
||||
else
|
||||
echo "only_deleted=false" >>"$GITHUB_OUTPUT"
|
||||
echo "other_deleted_files=$OTHER_DELETED" >>"$GITHUB_OUTPUT"
|
||||
fi
|
||||
elif [[ -n "${DELETED}" ]]; then
|
||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
||||
echo "::set-output name=only_deleted::true"
|
||||
else
|
||||
echo "only_deleted=true" >>"$GITHUB_OUTPUT"
|
||||
fi
|
||||
fi
|
||||
if [[ "$INPUT_JSON" == "false" ]]; then
|
||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
||||
ADDED=$(echo "${ADDED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
COPIED=$(echo "${COPIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
DELETED=$(echo "${DELETED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
MODIFIED=$(echo "${MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
RENAMED=$(echo "${RENAMED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
TYPE_CHANGED=$(echo "${TYPE_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
UNMERGED=$(echo "${UNMERGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
UNKNOWN=$(echo "${UNKNOWN}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
ALL_CHANGED_AND_MODIFIED=$(echo "${ALL_CHANGED_AND_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
ALL_CHANGED=$(echo "${ALL_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
ALL_MODIFIED=$(echo "${ALL_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
else
|
||||
ADDED=$(echo "${ADDED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
COPIED=$(echo "${COPIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
DELETED=$(echo "${DELETED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
MODIFIED=$(echo "${MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
RENAMED=$(echo "${RENAMED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
TYPE_CHANGED=$(echo "${TYPE_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
UNMERGED=$(echo "${UNMERGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
UNKNOWN=$(echo "${UNKNOWN}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
ALL_CHANGED_AND_MODIFIED=$(echo "${ALL_CHANGED_AND_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
ALL_CHANGED=$(echo "${ALL_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
ALL_MODIFIED=$(echo "${ALL_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
||||
fi
|
||||
else
|
||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
||||
ADDED=$(echo "${ADDED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
||||
COPIED=$(echo "${COPIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
||||
DELETED=$(echo "${DELETED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
||||
MODIFIED=$(echo "${MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
||||
RENAMED=$(echo "${RENAMED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
||||
TYPE_CHANGED=$(echo "${TYPE_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
||||
UNMERGED=$(echo "${UNMERGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
||||
UNKNOWN=$(echo "${UNKNOWN}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
||||
ALL_CHANGED_AND_MODIFIED=$(echo "${ALL_CHANGED_AND_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
||||
ALL_CHANGED=$(echo "${ALL_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
||||
ALL_MODIFIED=$(echo "${ALL_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
||||
else
|
||||
ADDED=$(echo "${ADDED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
||||
COPIED=$(echo "${COPIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
||||
DELETED=$(echo "${DELETED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
||||
MODIFIED=$(echo "${MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
||||
RENAMED=$(echo "${RENAMED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
||||
TYPE_CHANGED=$(echo "${TYPE_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
||||
UNMERGED=$(echo "${UNMERGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
||||
UNKNOWN=$(echo "${UNKNOWN}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
||||
ALL_CHANGED_AND_MODIFIED=$(echo "${ALL_CHANGED_AND_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
||||
ALL_CHANGED=$(echo "${ALL_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
||||
ALL_MODIFIED=$(echo "${ALL_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "::debug::Added files: $ADDED"
|
||||
echo "::debug::Copied files: $COPIED"
|
||||
echo "::debug::Deleted files: $DELETED"
|
||||
echo "::debug::Modified files: $MODIFIED"
|
||||
echo "::debug::Renamed files: $RENAMED"
|
||||
echo "::debug::Type Changed files: $TYPE_CHANGED"
|
||||
echo "::debug::Unmerged files: $UNMERGED"
|
||||
echo "::debug::Unknown files: $UNKNOWN"
|
||||
echo "::debug::All changed and modified files: $ALL_CHANGED_AND_MODIFIED"
|
||||
echo "::debug::All changed files: $ALL_CHANGED"
|
||||
echo "::debug::All modified files: $ALL_MODIFIED"
|
||||
if [[ $INPUT_INCLUDE_ALL_OLD_NEW_RENAMED_FILES == "true" ]]; then
|
||||
echo "::debug::All old & new renamed files: $ALL_OLD_NEW_RENAMED"
|
||||
fi
|
||||
|
||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
||||
echo "::set-output name=added_files::$ADDED"
|
||||
echo "::set-output name=copied_files::$COPIED"
|
||||
echo "::set-output name=deleted_files::$DELETED"
|
||||
echo "::set-output name=modified_files::$MODIFIED"
|
||||
echo "::set-output name=renamed_files::$RENAMED"
|
||||
echo "::set-output name=type_changed_files::$TYPE_CHANGED"
|
||||
echo "::set-output name=unmerged_files::$UNMERGED"
|
||||
echo "::set-output name=unknown_files::$UNKNOWN"
|
||||
echo "::set-output name=all_changed_and_modified_files::$ALL_CHANGED_AND_MODIFIED"
|
||||
echo "::set-output name=all_changed_files::$ALL_CHANGED"
|
||||
echo "::set-output name=all_modified_files::$ALL_MODIFIED"
|
||||
echo "::set-output name=outputs_extension::$OUTPUTS_EXTENSION"
|
||||
else
|
||||
cat <<EOF >>"$GITHUB_OUTPUT"
|
||||
added_files=$ADDED
|
||||
copied_files=$COPIED
|
||||
deleted_files=$DELETED
|
||||
modified_files=$MODIFIED
|
||||
renamed_files=$RENAMED
|
||||
type_changed_files=$TYPE_CHANGED
|
||||
unmerged_files=$UNMERGED
|
||||
unknown_files=$UNKNOWN
|
||||
all_changed_and_modified_files=$ALL_CHANGED_AND_MODIFIED
|
||||
all_changed_files=$ALL_CHANGED
|
||||
all_modified_files=$ALL_MODIFIED
|
||||
outputs_extension=$OUTPUTS_EXTENSION
|
||||
EOF
|
||||
fi
|
||||
|
||||
if [[ $INPUT_INCLUDE_ALL_OLD_NEW_RENAMED_FILES == "true" ]]; then
|
||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
||||
echo "::set-output name=all_old_new_renamed_files::$ALL_OLD_NEW_RENAMED"
|
||||
else
|
||||
echo "all_old_new_renamed_files=$ALL_OLD_NEW_RENAMED" >>"$GITHUB_OUTPUT"
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "::endgroup::"
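The awk program '{s=(NR==1?s:s d)$0}END{print s}' that recurs throughout the script above is simply a line joiner: it concatenates stdin lines with the separator d. The TypeScript rewrite below replaces every one of these pipelines with Array.prototype.join; a minimal sketch of the equivalence (the files and separator values here are illustrative, not taken from the diff):

// The awk idiom `awk -v d="$SEP" '{s=(NR==1?s:s d)$0}END{print s}'`
// joins input lines with SEP; in TypeScript this collapses to join().
const files: string[] = ['src/a.ts', 'src/b.ts', 'README.md'] // illustrative values
const separator = ' '

const joined = files.join(separator) // => 'src/a.ts src/b.ts README.md'
console.log(joined)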
13
jest.config.js
Normal file
@@ -0,0 +1,13 @@
module.exports = {
  clearMocks: true,
  moduleFileExtensions: ['js', 'ts'],
  testMatch: ['**/*.test.ts'],
  transform: {
    '^.+\\.ts$': 'ts-jest'
  },
  verbose: true,
  testTimeout: 10000,
  setupFiles: [
    "<rootDir>/jest/setupEnv.cjs"
  ]
}
9
jest/setupEnv.cjs
Normal file
@@ -0,0 +1,9 @@
const path = require('path')

process.env.TESTING = "1"
process.env.GITHUB_WORKSPACE = path.join(
  path.resolve(__dirname, '..'), '.'
)
process.env.GITHUB_ACTION_PATH = path.join(
  path.resolve(__dirname, '..'), '.'
)
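Jest runs setupFiles before each test file, so the variables set above are visible to every test. A minimal sketch of a test relying on them (hypothetical, not part of the commit):

// Hypothetical test that depends on jest/setupEnv.cjs above.
describe('setup env', () => {
  it('exposes the flags set in setupEnv.cjs', () => {
    expect(process.env.TESTING).toBe('1')
    expect(process.env.GITHUB_WORKSPACE).toBeDefined()
  })
})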
56
package.json
Normal file
@@ -0,0 +1,56 @@
{
  "name": "@tj-actions/glob",
  "version": "17.2.5",
  "description": "Glob pattern matching github action",
  "main": "lib/main.js",
  "publishConfig": {
    "registry": "https://npm.pkg.github.com"
  },
  "scripts": {
    "build": "tsc",
    "format": "prettier --write **/*.ts",
    "format-check": "prettier --check **/*.ts",
    "lint": "eslint src/**/*.ts",
    "lint:fix": "eslint --fix src/**/*.ts",
    "package": "ncc build lib/main.js --source-map --license licenses.txt",
    "test": "jest --coverage",
    "all": "yarn build && yarn format && yarn lint && yarn package && yarn test"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/tj-actions/glob.git"
  },
  "keywords": [
    "actions",
    "glob",
    "github-actions"
  ],
  "author": "Tonye Jack",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/tj-actions/glob/issues"
  },
  "homepage": "https://github.com/tj-actions/glob#readme",
  "dependencies": {
    "@actions/core": "1.10.0",
    "@actions/exec": "1.1.1",
    "micromatch": "^4.0.5"
  },
  "devDependencies": {
    "@types/jest": "29.5.1",
    "@types/micromatch": "^4.0.2",
    "@types/node": "20.2.1",
    "@types/uuid": "9.0.1",
    "@typescript-eslint/eslint-plugin": "5.59.6",
    "@typescript-eslint/parser": "5.59.6",
    "@vercel/ncc": "0.36.1",
    "eslint": "8.41.0",
    "eslint-plugin-github": "4.7.0",
    "eslint-plugin-jest": "27.2.1",
    "eslint-plugin-prettier": "^4.2.1",
    "jest": "29.5.0",
    "prettier": "2.8.8",
    "ts-jest": "29.1.0",
    "typescript": "5.0.4"
  }
}
5
src/__tests__/main.test.ts
Normal file
@@ -0,0 +1,5 @@
describe('main test', () => {
  it('adds two numbers', async () => {
    expect(1 + 1).toEqual(2)
  })
})
143
src/changedFiles.ts
Normal file
@@ -0,0 +1,143 @@
import * as path from 'path'

import {DiffResult} from './commitSha'
import {Inputs} from './inputs'
import {
  getDirnameMaxDepth,
  gitDiff,
  gitRenamedFiles,
  gitSubmoduleDiffSHA,
  jsonOutput
} from './utils'

export const getRenamedFiles = async ({
  inputs,
  workingDirectory,
  hasSubmodule,
  diffResult,
  submodulePaths
}: {
  inputs: Inputs
  workingDirectory: string
  hasSubmodule: boolean
  diffResult: DiffResult
  submodulePaths: string[]
}): Promise<string> => {
  const renamedFiles = await gitRenamedFiles({
    cwd: workingDirectory,
    sha1: diffResult.previousSha,
    sha2: diffResult.currentSha,
    diff: diffResult.diff,
    oldNewSeparator: inputs.oldNewSeparator
  })

  if (hasSubmodule) {
    for (const submodulePath of submodulePaths) {
      const submoduleShaResult = await gitSubmoduleDiffSHA({
        cwd: workingDirectory,
        parentSha1: diffResult.previousSha,
        parentSha2: diffResult.currentSha,
        submodulePath,
        diff: diffResult.diff
      })

      const submoduleWorkingDirectory = path.join(
        workingDirectory,
        submodulePath
      )

      if (submoduleShaResult.currentSha && submoduleShaResult.previousSha) {
        const submoduleRenamedFiles = await gitRenamedFiles({
          cwd: submoduleWorkingDirectory,
          sha1: submoduleShaResult.previousSha,
          sha2: submoduleShaResult.currentSha,
          diff: diffResult.diff,
          oldNewSeparator: inputs.oldNewSeparator,
          isSubmodule: true,
          parentDir: submodulePath
        })
        renamedFiles.push(...submoduleRenamedFiles)
      }
    }
  }

  if (inputs.json) {
    return jsonOutput({value: renamedFiles, shouldEscape: inputs.escapeJson})
  }

  return renamedFiles.join(inputs.oldNewFilesSeparator)
}

export const getDiffFiles = async ({
  inputs,
  workingDirectory,
  hasSubmodule,
  diffResult,
  diffFilter,
  filePatterns = [],
  submodulePaths
}: {
  inputs: Inputs
  workingDirectory: string
  hasSubmodule: boolean
  diffResult: DiffResult
  diffFilter: string
  filePatterns?: string[]
  submodulePaths: string[]
}): Promise<string> => {
  let files = await gitDiff({
    cwd: workingDirectory,
    sha1: diffResult.previousSha,
    sha2: diffResult.currentSha,
    diff: diffResult.diff,
    diffFilter,
    filePatterns
  })

  if (hasSubmodule) {
    for (const submodulePath of submodulePaths) {
      const submoduleShaResult = await gitSubmoduleDiffSHA({
        cwd: workingDirectory,
        parentSha1: diffResult.previousSha,
        parentSha2: diffResult.currentSha,
        submodulePath,
        diff: diffResult.diff
      })

      const submoduleWorkingDirectory = path.join(
        workingDirectory,
        submodulePath
      )

      if (submoduleShaResult.currentSha && submoduleShaResult.previousSha) {
        const submoduleFiles = await gitDiff({
          cwd: submoduleWorkingDirectory,
          sha1: submoduleShaResult.previousSha,
          sha2: submoduleShaResult.currentSha,
          diff: diffResult.diff,
          diffFilter,
          isSubmodule: true,
          filePatterns,
          parentDir: submodulePath
        })
        files.push(...submoduleFiles)
      }
    }
  }

  if (inputs.dirNames) {
    files = files.map(file =>
      getDirnameMaxDepth({
        pathStr: file,
        dirNamesMaxDepth: inputs.dirNamesMaxDepth,
        excludeRoot: inputs.dirNamesExcludeRoot
      })
    )
  }

  if (inputs.json) {
    return jsonOutput({value: files, shouldEscape: inputs.escapeJson})
  }

  return files.join(inputs.separator)
}
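For orientation, a hedged sketch of how getDiffFiles is typically driven (the DiffResult literal is illustrative only; in the action it comes from the commitSha.ts helpers shown next):

import {getDiffFiles} from './changedFiles'
import {getInputs} from './inputs'

// Illustrative values; a real DiffResult comes from commitSha.ts below.
const diffResult = {
  previousSha: 'abc1234',
  currentSha: 'def5678',
  currentBranch: 'feature',
  targetBranch: 'main',
  diff: '...'
}

const addedFiles = await getDiffFiles({
  inputs: getInputs(),
  workingDirectory: process.cwd(),
  hasSubmodule: false,
  diffResult,
  diffFilter: 'A', // same git --diff-filter letters the shell script used
  submodulePaths: []
})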
473
src/commitSha.ts
Normal file
@@ -0,0 +1,473 @@
import * as core from '@actions/core'

import {Env} from './env'
import {Inputs} from './inputs'
import {
  canDiffCommits,
  getHeadSha,
  getParentHeadSha,
  getPreviousGitTag,
  gitFetch,
  gitFetchSubmodules,
  gitLog,
  gitLsRemote,
  verifyCommitSha
} from './utils'

const getCurrentSHA = async ({
  inputs,
  workingDirectory
}: {
  inputs: Inputs
  workingDirectory: string
}): Promise<string> => {
  let currentSha = inputs.sha
  core.debug('Getting current SHA...')

  if (inputs.until) {
    core.debug(`Getting base SHA for '${inputs.until}'...`)
    try {
      currentSha = await gitLog({
        cwd: workingDirectory,
        args: [
          '--format',
          '"%H"',
          '-n',
          '1',
          '--date',
          'local',
          '--until',
          inputs.until
        ]
      })
    } catch (error) {
      core.error(
        `Invalid until date: ${inputs.until}. ${(error as Error).message}`
      )
      throw error
    }
  } else {
    if (!currentSha) {
      currentSha = await getHeadSha({cwd: workingDirectory})
    }
  }

  await verifyCommitSha({sha: currentSha, cwd: workingDirectory})
  core.debug(`Current SHA: ${currentSha}`)

  return currentSha
}

export interface DiffResult {
  previousSha: string
  currentSha: string
  currentBranch: string
  targetBranch: string
  diff: string
}

export const getSHAForPushEvent = async (
  inputs: Inputs,
  env: Env,
  workingDirectory: string,
  isShallow: boolean,
  hasSubmodule: boolean,
  gitExtraArgs: string[],
  isTag: boolean
): Promise<DiffResult> => {
  let targetBranch = env.GITHUB_REFNAME
  const currentBranch = targetBranch
  let initialCommit = false

  if (isShallow) {
    core.info('Repository is shallow, fetching more history...')

    if (isTag) {
      const sourceBranch = env.GITHUB_EVENT_BASE_REF.replace('refs/heads/', '')
      await gitFetch({
        cwd: workingDirectory,
        args: [
          ...gitExtraArgs,
          '-u',
          '--progress',
          `--deepen=${inputs.fetchDepth}`,
          'origin',
          `+refs/tags/${sourceBranch}:refs/remotes/origin/${sourceBranch}`
        ]
      })
    } else {
      await gitFetch({
        cwd: workingDirectory,
        args: [
          ...gitExtraArgs,
          '-u',
          '--progress',
          `--deepen=${inputs.fetchDepth}`,
          'origin',
          `+refs/heads/${targetBranch}:refs/remotes/origin/${targetBranch}`
        ]
      })
    }

    if (hasSubmodule) {
      await gitFetchSubmodules({
        cwd: workingDirectory,
        args: [
          ...gitExtraArgs,
          '-u',
          '--progress',
          `--deepen=${inputs.fetchDepth}`
        ]
      })
    }
  }

  const currentSha = await getCurrentSHA({inputs, workingDirectory})
  let previousSha = inputs.baseSha
  const diff = '..'

  if (previousSha && currentSha && currentBranch && targetBranch) {
    if (previousSha === currentSha) {
      core.error(
        `Similar commit hashes detected: previous sha: ${previousSha} is equivalent to the current sha: ${currentSha}.`
      )
      core.error(
        `Please verify that both commits are valid, and increase the fetch_depth to a number higher than ${inputs.fetchDepth}.`
      )
      throw new Error('Similar commit hashes detected.')
    }

    await verifyCommitSha({sha: previousSha, cwd: workingDirectory})
    core.info(`Previous SHA: ${previousSha}`)

    return {
      previousSha,
      currentSha,
      currentBranch,
      targetBranch,
      diff
    }
  }

  if (!previousSha) {
    core.debug('Getting previous SHA...')
    if (inputs.since) {
      core.debug(`Getting base SHA for '${inputs.since}'...`)
      try {
        previousSha = await gitLog({
          cwd: workingDirectory,
          args: [
            '--format',
            '"%H"',
            '-n',
            '1',
            '--date',
            'local',
            '--since',
            inputs.since
          ]
        })
      } catch (error) {
        core.error(
          `Invalid since date: ${inputs.since}. ${(error as Error).message}`
        )
        throw error
      }
    } else if (isTag) {
      core.debug('Getting previous SHA for tag...')
      const {sha, tag} = await getPreviousGitTag({cwd: workingDirectory})
      previousSha = sha
      targetBranch = tag
    } else {
      if (inputs.sinceLastRemoteCommit) {
        core.debug('Getting previous SHA for last remote commit...')

        if (env.GITHUB_EVENT_FORCED === 'false' || !env.GITHUB_EVENT_FORCED) {
          previousSha = env.GITHUB_EVENT_BEFORE
        } else {
          previousSha = await getParentHeadSha({cwd: workingDirectory})
        }
      } else {
        core.debug('Getting previous SHA for last commit...')
        previousSha = await getParentHeadSha({cwd: workingDirectory})
      }

      if (
        !previousSha ||
        previousSha === '0000000000000000000000000000000000000000'
      ) {
        previousSha = await getParentHeadSha({cwd: workingDirectory})
      }

      if (previousSha === currentSha) {
        if (!(await getParentHeadSha({cwd: workingDirectory}))) {
          core.warning('Initial commit detected: no previous commit found.')
          initialCommit = true
          previousSha = currentSha
        } else {
          previousSha = await getParentHeadSha({cwd: workingDirectory})
        }
      } else {
        if (!previousSha) {
          core.error('Unable to locate a previous commit.')
          throw new Error('Unable to locate a previous commit.')
        }
      }
    }
  }

  await verifyCommitSha({sha: previousSha, cwd: workingDirectory})
  core.debug(`Previous SHA: ${previousSha}`)

  core.debug(`Target branch: ${targetBranch}`)
  core.debug(`Current branch: ${currentBranch}`)

  if (!initialCommit && previousSha === currentSha) {
    core.error(
      `Similar commit hashes detected: previous sha: ${previousSha} is equivalent to the current sha: ${currentSha}.`
    )
    core.error(
      `Please verify that both commits are valid, and increase the fetch_depth to a number higher than ${inputs.fetchDepth}.`
    )
    throw new Error('Similar commit hashes detected.')
  }

  return {
    previousSha,
    currentSha,
    currentBranch,
    targetBranch,
    diff
  }
}

export const getSHAForPullRequestEvent = async (
  inputs: Inputs,
  env: Env,
  workingDirectory: string,
  isShallow: boolean,
  hasSubmodule: boolean,
  gitExtraArgs: string[]
): Promise<DiffResult> => {
  let targetBranch = env.GITHUB_EVENT_PULL_REQUEST_BASE_REF
  const currentBranch = env.GITHUB_EVENT_PULL_REQUEST_HEAD_REF
  if (inputs.sinceLastRemoteCommit) {
    targetBranch = currentBranch
  }

  if (isShallow) {
    core.info('Repository is shallow, fetching more history...')

    const prFetchExitCode = await gitFetch({
      cwd: workingDirectory,
      args: [
        ...gitExtraArgs,
        '-u',
        '--progress',
        'origin',
        `pull/${env.GITHUB_EVENT_PULL_REQUEST_NUMBER}/head:${currentBranch}`
      ]
    })

    if (prFetchExitCode !== 0) {
      await gitFetch({
        cwd: workingDirectory,
        args: [
          ...gitExtraArgs,
          '-u',
          '--progress',
          `--deepen=${inputs.fetchDepth}`,
          'origin',
          `+refs/heads/${currentBranch}*:refs/remotes/origin/${currentBranch}*`
        ]
      })
    }

    if (!inputs.sinceLastRemoteCommit) {
      core.debug('Fetching target branch...')
      await gitFetch({
        cwd: workingDirectory,
        args: [
          ...gitExtraArgs,
          '-u',
          '--progress',
          `--deepen=${inputs.fetchDepth}`,
          'origin',
          `+refs/heads/${targetBranch}:refs/remotes/origin/${targetBranch}`
        ]
      })

      if (hasSubmodule) {
        await gitFetchSubmodules({
          cwd: workingDirectory,
          args: [
            ...gitExtraArgs,
            '-u',
            '--progress',
            `--deepen=${inputs.fetchDepth}`
          ]
        })
      }
    }
  }

  const currentSha = await getCurrentSHA({inputs, workingDirectory})
  let previousSha = inputs.baseSha
  let diff = '...'

  if (previousSha && currentSha && currentBranch && targetBranch) {
    if (previousSha === currentSha) {
      core.error(
        `Similar commit hashes detected: previous sha: ${previousSha} is equivalent to the current sha: ${currentSha}.`
      )
      core.error(
        `Please verify that both commits are valid, and increase the fetch_depth to a number higher than ${inputs.fetchDepth}.`
      )
      throw new Error('Similar commit hashes detected.')
    }

    await verifyCommitSha({sha: currentSha, cwd: workingDirectory})
    core.info(`Previous SHA: ${previousSha}`)

    return {
      previousSha,
      currentSha,
      currentBranch,
      targetBranch,
      diff
    }
  }

  if (
    !env.GITHUB_EVENT_PULL_REQUEST_BASE_REF ||
    env.GITHUB_EVENT_HEAD_REPO_FORK === 'true'
  ) {
    diff = '..'
  }

  if (!previousSha) {
    if (inputs.sinceLastRemoteCommit) {
      previousSha = env.GITHUB_EVENT_BEFORE

      if (!previousSha) {
        previousSha = await gitLsRemote({
          cwd: workingDirectory,
          args: [currentBranch]
        })
      }

      if (
        (await verifyCommitSha({
          sha: previousSha,
          cwd: workingDirectory,
          showAsErrorMessage: false
        })) !== 0
      ) {
        previousSha = env.GITHUB_EVENT_PULL_REQUEST_BASE_SHA
      }
    } else {
      previousSha = env.GITHUB_EVENT_PULL_REQUEST_BASE_SHA

      if (!previousSha) {
        previousSha = await gitLsRemote({
          cwd: workingDirectory,
          args: [targetBranch]
        })
      }

      if (isShallow) {
        if (
          await canDiffCommits({
            cwd: workingDirectory,
            sha1: previousSha,
            sha2: currentSha,
            diff
          })
        ) {
          core.debug(
            'Merge base is not in the local history, fetching remote target branch...'
          )

          for (let i = 1; i <= 10; i++) {
            await gitFetch({
              cwd: workingDirectory,
              args: [
                ...gitExtraArgs,
                '-u',
                '--progress',
                `--deepen=${inputs.fetchDepth}`,
                'origin',
                `+refs/heads/${targetBranch}:refs/remotes/origin/${targetBranch}`
              ]
            })

            if (
              await canDiffCommits({
                cwd: workingDirectory,
                sha1: previousSha,
                sha2: currentSha,
                diff
              })
            ) {
              break
            }

            core.debug(
              'Merge base is not in the local history, fetching remote target branch again...'
            )
            core.debug(`Attempt ${i}/10`)
          }
        }
      }
    }

    if (!previousSha || previousSha === currentSha) {
      previousSha = env.GITHUB_EVENT_PULL_REQUEST_BASE_SHA
    }
  }

  if (
    !(await canDiffCommits({
      cwd: workingDirectory,
      sha1: previousSha,
      sha2: currentSha,
      diff
    }))
  ) {
    diff = '..'
  }

  await verifyCommitSha({sha: previousSha, cwd: workingDirectory})
  core.debug(`Previous SHA: ${previousSha}`)

  if (
    !(await canDiffCommits({
      cwd: workingDirectory,
      sha1: previousSha,
      sha2: currentSha,
      diff
    }))
  ) {
    throw new Error(
      `Unable to determine a difference between ${previousSha}${diff}${currentSha}`
    )
  }

  if (previousSha === currentSha) {
    core.error(
      `Similar commit hashes detected: previous sha: ${previousSha} is equivalent to the current sha: ${currentSha}.`
    )
    core.error(
      `Please verify that both commits are valid, and increase the fetch_depth to a number higher than ${inputs.fetchDepth}.`
    )
    throw new Error('Similar commit hashes detected.')
  }

  return {
    previousSha,
    currentSha,
    currentBranch,
    targetBranch,
    diff
  }
}
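The diff field above is the git range operator placed between the two SHAs: '..' asks git to diff the two commits directly, while '...' diffs against their merge base; the pull request path starts with '...' and falls back to '..' for forks or when no merge base is reachable. A small sketch of how the range string is composed (the helper and SHA values are illustrative, not from the diff):

// `${previousSha}${diff}${currentSha}` is the range handed to `git diff`,
// matching the error message interpolation in the code above.
const range = (previousSha: string, currentSha: string, diff: string): string =>
  `${previousSha}${diff}${currentSha}`

console.log(range('abc1234', 'def5678', '...')) // abc1234...def5678 (merge-base diff)
console.log(range('abc1234', 'def5678', '..')) // abc1234..def5678 (direct diff)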
60
src/env.ts
Normal file
@@ -0,0 +1,60 @@
import {promises as fs} from 'fs'
import * as core from '@actions/core'

export type Env = {
  GITHUB_EVENT_PULL_REQUEST_HEAD_REF: string
  GITHUB_EVENT_PULL_REQUEST_BASE_REF: string
  GITHUB_EVENT_BEFORE: string
  GITHUB_REFNAME: string
  GITHUB_REF: string
  GITHUB_EVENT_BASE_REF: string
  GITHUB_EVENT_HEAD_REPO_FORK: string
  GITHUB_WORKSPACE: string
  GITHUB_EVENT_FORCED: string
  GITHUB_EVENT_PULL_REQUEST_NUMBER: string
  GITHUB_EVENT_PULL_REQUEST_BASE_SHA: string
}

type GithubEvent = {
  forced?: string
  pull_request?: {
    head: {
      ref: string
    }
    base: {
      ref: string
      sha: string
    }
    number: string
  }
  before?: string
  base_ref?: string
  head_repo?: {
    fork: string
  }
}

export const getEnv = async (): Promise<Env> => {
  const eventPath = process.env.GITHUB_EVENT_PATH
  let eventJson: GithubEvent = {}

  if (eventPath) {
    eventJson = JSON.parse(await fs.readFile(eventPath, {encoding: 'utf8'}))
  }

  core.debug(`Event: ${JSON.stringify(eventJson, null, 2)}`)

  return {
    GITHUB_EVENT_PULL_REQUEST_HEAD_REF: eventJson.pull_request?.head?.ref || '',
    GITHUB_EVENT_PULL_REQUEST_BASE_REF: eventJson.pull_request?.base?.ref || '',
    GITHUB_EVENT_BEFORE: eventJson.before || '',
    GITHUB_EVENT_BASE_REF: eventJson.base_ref || '',
    GITHUB_EVENT_HEAD_REPO_FORK: eventJson.head_repo?.fork || '',
    GITHUB_EVENT_PULL_REQUEST_NUMBER: eventJson.pull_request?.number || '',
    GITHUB_EVENT_PULL_REQUEST_BASE_SHA: eventJson.pull_request?.base?.sha || '',
    GITHUB_EVENT_FORCED: eventJson.forced || '',
    GITHUB_REFNAME: process.env.GITHUB_REFNAME || '',
    GITHUB_REF: process.env.GITHUB_REF || '',
    GITHUB_WORKSPACE: process.env.GITHUB_WORKSPACE || ''
  }
}
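getEnv flattens the webhook payload into plain strings using optional chaining with an empty-string fallback. A standalone sketch of that pattern (the payload fragment is hypothetical; the real one is read from GITHUB_EVENT_PATH):

// Hypothetical payload fragment standing in for the parsed event file.
const eventJson: {pull_request?: {base: {ref: string; sha: string}}} = {
  pull_request: {base: {ref: 'main', sha: 'abc1234'}}
}

// Same optional-chaining-with-fallback pattern used in getEnv() above.
const baseRef = eventJson.pull_request?.base?.ref || ''
const baseSha = eventJson.pull_request?.base?.sha || ''
console.log(baseRef, baseSha) // main abc1234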
146
src/inputs.ts
Normal file
@@ -0,0 +1,146 @@
import * as core from '@actions/core'

export type Inputs = {
  files: string
  filesSeparator: string
  filesFromSourceFile: string
  filesFromSourceFileSeparator: string
  filesIgnore: string
  filesIgnoreSeparator: string
  filesIgnoreFromSourceFile: string
  filesIgnoreFromSourceFileSeparator: string
  separator: string
  includeAllOldNewRenamedFiles: boolean
  oldNewSeparator: string
  oldNewFilesSeparator: string
  sha: string
  baseSha: string
  since: string
  until: string
  path: string
  quotePath: boolean
  diffRelative: boolean
  dirNames: boolean
  dirNamesMaxDepth?: number
  dirNamesExcludeRoot: boolean
  json: boolean
  escapeJson: boolean
  fetchDepth?: number
  sinceLastRemoteCommit: boolean
  writeOutputFiles: boolean
  outputDir: string
}

export const getInputs = (): Inputs => {
  const files = core.getInput('files', {required: false})
  const filesSeparator = core.getInput('files_separator', {
    required: false,
    trimWhitespace: false
  })
  const filesIgnore = core.getInput('files_ignore', {required: false})
  const filesIgnoreSeparator = core.getInput('files_ignore_separator', {
    required: false,
    trimWhitespace: false
  })
  const filesFromSourceFile = core.getInput('files_from_source_file', {
    required: false
  })
  const filesFromSourceFileSeparator = core.getInput(
    'files_from_source_file_separator',
    {
      required: false,
      trimWhitespace: false
    }
  )
  const filesIgnoreFromSourceFile = core.getInput(
    'files_ignore_from_source_file',
    {required: false}
  )
  const filesIgnoreFromSourceFileSeparator = core.getInput(
    'files_ignore_from_source_file_separator',
    {
      required: false,
      trimWhitespace: false
    }
  )
  const separator = core.getInput('separator', {
    required: true,
    trimWhitespace: false
  })
  const includeAllOldNewRenamedFiles = core.getBooleanInput(
    'include_all_old_new_renamed_files',
    {required: false}
  )
  const oldNewSeparator = core.getInput('old_new_separator', {
    required: true,
    trimWhitespace: false
  })
  const oldNewFilesSeparator = core.getInput('old_new_files_separator', {
    required: true,
    trimWhitespace: false
  })
  const sha = core.getInput('sha', {required: false})
  const baseSha = core.getInput('base_sha', {required: false})
  const since = core.getInput('since', {required: false})
  const until = core.getInput('until', {required: false})
  const path = core.getInput('path', {required: false})
  const quotePath = core.getBooleanInput('quotepath', {required: false})
  const diffRelative = core.getBooleanInput('diff_relative', {required: false})
  const dirNames = core.getBooleanInput('dir_names', {required: false})
  const dirNamesMaxDepth = core.getInput('dir_names_max_depth', {
    required: false
  })
  const dirNamesExcludeRoot = core.getBooleanInput('dir_names_exclude_root', {
    required: false
  })
  const json = core.getBooleanInput('json', {required: false})
  const escapeJson = core.getBooleanInput('escape_json', {required: false})
  const fetchDepth = core.getInput('fetch_depth', {required: false})
  const sinceLastRemoteCommit = core.getBooleanInput(
    'since_last_remote_commit',
    {required: false}
  )
  const writeOutputFiles = core.getBooleanInput('write_output_files', {
    required: false
  })
  const outputDir = core.getInput('output_dir', {required: false})

  const inputs: Inputs = {
    files,
    filesSeparator,
    filesFromSourceFile,
    filesFromSourceFileSeparator,
    filesIgnore,
    filesIgnoreSeparator,
    filesIgnoreFromSourceFile,
    filesIgnoreFromSourceFileSeparator,
    separator,
    includeAllOldNewRenamedFiles,
    oldNewSeparator,
    oldNewFilesSeparator,
    sha,
    baseSha,
    since,
    until,
    path,
    quotePath,
    diffRelative,
    dirNames,
    dirNamesExcludeRoot,
    json,
    escapeJson,
    sinceLastRemoteCommit,
    writeOutputFiles,
    outputDir
  }

  if (fetchDepth) {
    inputs.fetchDepth = parseInt(fetchDepth, 10)
  }

  if (dirNamesMaxDepth) {
    inputs.dirNamesMaxDepth = parseInt(dirNamesMaxDepth, 10)
  }

  return inputs
}
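Action inputs always arrive as strings, which is why fetchDepth and dirNamesMaxDepth are optional on Inputs and only set after a truthiness guard and parseInt. A standalone sketch of that guard (the input value is illustrative):

// What core.getInput('fetch_depth') might return: always a string, possibly empty.
const fetchDepthInput = '50' // illustrative value

let fetchDepth: number | undefined
if (fetchDepthInput) {
  fetchDepth = parseInt(fetchDepthInput, 10)
}
console.log(fetchDepth) // 50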
411
src/main.ts
Normal file
@@ -0,0 +1,411 @@
import * as core from '@actions/core'
|
||||
import path from 'path'
|
||||
import {getDiffFiles, getRenamedFiles} from './changedFiles'
|
||||
import {
|
||||
getSHAForPullRequestEvent,
|
||||
getSHAForPushEvent,
|
||||
DiffResult
|
||||
} from './commitSha'
|
||||
import {getEnv} from './env'
|
||||
import {getInputs} from './inputs'
|
||||
import {
|
||||
getFilePatterns,
|
||||
getSubmodulePath,
|
||||
isRepoShallow,
|
||||
setOutput,
|
||||
submoduleExists,
|
||||
updateGitGlobalConfig,
|
||||
verifyMinimumGitVersion
|
||||
} from './utils'
|
||||
|
||||
export async function run(): Promise<void> {
|
||||
core.startGroup('changed-files')
|
||||
|
||||
const env = await getEnv()
|
||||
core.debug(`Env: ${JSON.stringify(env, null, 2)}`)
|
||||
const inputs = getInputs()
|
||||
core.debug(`Inputs: ${JSON.stringify(inputs, null, 2)}`)
|
||||
|
||||
await verifyMinimumGitVersion()
|
||||
|
||||
let quotePathValue = 'on'
|
||||
|
||||
if (!inputs.quotePath) {
|
||||
quotePathValue = 'off'
|
||||
}
|
||||
|
||||
await updateGitGlobalConfig({
|
||||
name: 'core.quotepath',
|
||||
value: quotePathValue
|
||||
})
|
||||
|
||||
if (inputs.diffRelative) {
|
||||
await updateGitGlobalConfig({
|
||||
name: 'diff.relative',
|
||||
value: 'true'
|
||||
})
|
||||
}
|
||||
|
||||
const workingDirectory = path.resolve(
|
||||
env.GITHUB_WORKSPACE || process.cwd(),
|
||||
inputs.path
|
||||
)
|
||||
const isShallow = await isRepoShallow({cwd: workingDirectory})
|
||||
const hasSubmodule = await submoduleExists({cwd: workingDirectory})
|
||||
let gitExtraArgs = ['--no-tags', '--prune', '--recurse-submodules']
|
||||
const isTag = env.GITHUB_REF?.startsWith('refs/tags/')
|
||||
const submodulePaths = await getSubmodulePath({
|
||||
cwd: workingDirectory
|
||||
})
|
||||
|
||||
if (isTag) {
|
||||
gitExtraArgs = ['--prune', '--no-recurse-submodules']
|
||||
}
|
||||
|
||||
let diffResult: DiffResult
|
||||
|
||||
if (!env.GITHUB_EVENT_PULL_REQUEST_BASE_REF) {
|
||||
core.info('Running on a push event...')
|
||||
diffResult = await getSHAForPushEvent(
|
||||
inputs,
|
||||
env,
|
||||
workingDirectory,
|
||||
isShallow,
|
||||
hasSubmodule,
|
||||
gitExtraArgs,
|
||||
isTag
|
||||
)
|
||||
} else {
|
||||
core.info('Running on a pull request event...')
|
||||
diffResult = await getSHAForPullRequestEvent(
|
||||
inputs,
|
||||
env,
|
||||
workingDirectory,
|
||||
isShallow,
|
||||
hasSubmodule,
|
||||
gitExtraArgs
|
||||
)
|
||||
}
|
||||
|
||||
core.info(
|
||||
`Retrieving changes between ${diffResult.previousSha} (${diffResult.targetBranch}) → ${diffResult.currentSha} (${diffResult.currentBranch})`
|
||||
)
|
||||
|
||||
const filePatterns = await getFilePatterns({
|
||||
inputs
|
||||
})
|
||||
|
||||
const addedFiles = await getDiffFiles({
|
||||
inputs,
|
||||
workingDirectory,
|
||||
hasSubmodule,
|
||||
diffResult,
|
||||
diffFilter: 'A',
|
||||
filePatterns,
|
||||
submodulePaths
|
||||
})
|
||||
core.debug(`Added files: ${addedFiles}`)
|
||||
await setOutput({
|
||||
key: 'added_files',
|
||||
value: addedFiles,
|
||||
inputs
|
||||
})
|
||||
|
||||
  const copiedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    diffResult,
    diffFilter: 'C',
    filePatterns,
    submodulePaths
  })
  core.debug(`Copied files: ${copiedFiles}`)
  await setOutput({
    key: 'copied_files',
    value: copiedFiles,
    inputs
  })

  const modifiedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    diffResult,
    diffFilter: 'M',
    filePatterns,
    submodulePaths
  })
  core.debug(`Modified files: ${modifiedFiles}`)
  await setOutput({
    key: 'modified_files',
    value: modifiedFiles,
    inputs
  })

  const renamedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    diffResult,
    diffFilter: 'R',
    filePatterns,
    submodulePaths
  })
  core.debug(`Renamed files: ${renamedFiles}`)
  await setOutput({
    key: 'renamed_files',
    value: renamedFiles,
    inputs
  })

  const typeChangedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    diffResult,
    diffFilter: 'T',
    filePatterns,
    submodulePaths
  })
  core.debug(`Type changed files: ${typeChangedFiles}`)
  await setOutput({
    key: 'type_changed_files',
    value: typeChangedFiles,
    inputs
  })

  const unmergedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    diffResult,
    diffFilter: 'U',
    filePatterns,
    submodulePaths
  })
  core.debug(`Unmerged files: ${unmergedFiles}`)
  await setOutput({
    key: 'unmerged_files',
    value: unmergedFiles,
    inputs
  })

  const unknownFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    diffResult,
    diffFilter: 'X',
    filePatterns,
    submodulePaths
  })
  core.debug(`Unknown files: ${unknownFiles}`)
  await setOutput({
    key: 'unknown_files',
    value: unknownFiles,
    inputs
  })

  const allChangedAndModifiedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    diffResult,
    diffFilter: 'ACDMRTUX',
    filePatterns,
    submodulePaths
  })
  core.debug(`All changed and modified files: ${allChangedAndModifiedFiles}`)
  await setOutput({
    key: 'all_changed_and_modified_files',
    value: allChangedAndModifiedFiles,
    inputs
  })

  const allChangedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    diffResult,
    diffFilter: 'ACMR',
    filePatterns,
    submodulePaths
  })
  core.debug(`All changed files: ${allChangedFiles}`)
  await setOutput({
    key: 'all_changed_files',
    value: allChangedFiles,
    inputs
  })

  await setOutput({
    key: 'any_changed',
    value: allChangedFiles.length > 0 && filePatterns.length > 0,
    inputs
  })

  const allOtherChangedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    diffResult,
    diffFilter: 'ACMR',
    submodulePaths
  })
  core.debug(`All other changed files: ${allOtherChangedFiles}`)

  const otherChangedFiles = allOtherChangedFiles
    .split(inputs.filesSeparator)
    .filter(
      filePath =>
        !allChangedFiles.split(inputs.filesSeparator).includes(filePath)
    )

  const onlyChanged =
    otherChangedFiles.length === 0 && allChangedFiles.length > 0

  await setOutput({
    key: 'only_changed',
    value: onlyChanged,
    inputs
  })

  await setOutput({
    key: 'other_changed_files',
    value: otherChangedFiles.join(inputs.filesSeparator),
    inputs
  })
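  // Illustrative sketch (not part of the original source): with a `files`
  // input of "src/**" and a push that only touched src/main.ts,
  // all_changed_files would be "src/main.ts", other_changed_files would be
  // empty, and only_changed would therefore be output as "true".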

  const allModifiedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    diffResult,
    diffFilter: 'ACMRD',
    filePatterns,
    submodulePaths
  })
  core.debug(`All modified files: ${allModifiedFiles}`)
  await setOutput({
    key: 'all_modified_files',
    value: allModifiedFiles,
    inputs
  })

  await setOutput({
    key: 'any_modified',
    value: allModifiedFiles.length > 0 && filePatterns.length > 0,
    inputs
  })

  const allOtherModifiedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    diffResult,
    diffFilter: 'ACMRD',
    submodulePaths
  })

  const otherModifiedFiles = allOtherModifiedFiles
    .split(inputs.filesSeparator)
    .filter(
      filePath =>
        !allModifiedFiles.split(inputs.filesSeparator).includes(filePath)
    )

  const onlyModified =
    otherModifiedFiles.length === 0 && allModifiedFiles.length > 0

  await setOutput({
    key: 'only_modified',
    value: onlyModified,
    inputs
  })

  await setOutput({
    key: 'other_modified_files',
    value: otherModifiedFiles.join(inputs.filesSeparator),
    inputs
  })

  const deletedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    diffResult,
    diffFilter: 'D',
    filePatterns,
    submodulePaths
  })
  core.debug(`Deleted files: ${deletedFiles}`)
  await setOutput({
    key: 'deleted_files',
    value: deletedFiles,
    inputs
  })

  await setOutput({
    key: 'any_deleted',
    value: deletedFiles.length > 0 && filePatterns.length > 0,
    inputs
  })

  const allOtherDeletedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    diffResult,
    diffFilter: 'D',
    submodulePaths
  })

  const otherDeletedFiles = allOtherDeletedFiles
    .split(inputs.filesSeparator)
    .filter(
      filePath => !deletedFiles.split(inputs.filesSeparator).includes(filePath)
    )

  const onlyDeleted = otherDeletedFiles.length === 0 && deletedFiles.length > 0

  await setOutput({
    key: 'only_deleted',
    value: onlyDeleted,
    inputs
  })

  await setOutput({
    key: 'other_deleted_files',
    value: otherDeletedFiles.join(inputs.filesSeparator),
    inputs
  })

  if (inputs.includeAllOldNewRenamedFiles) {
    const allOldNewRenamedFiles = await getRenamedFiles({
      inputs,
      workingDirectory,
      hasSubmodule,
      diffResult,
      submodulePaths
    })
    core.debug(`All old new renamed files: ${allOldNewRenamedFiles}`)
    await setOutput({
      key: 'all_old_new_renamed_files',
      value: allOldNewRenamedFiles,
      inputs
    })
  }
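  // Illustrative sketch (assumed separator default, not part of the original
  // source): with an old_new_separator of ",", each entry in the renamed
  // output pairs the old path with its new path, e.g.
  //   "src/old-name.ts,src/new-name.ts"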
  core.info('All Done!')

  core.endGroup()
}

/* istanbul ignore if */
if (!process.env.TESTING) {
  // eslint-disable-next-line github/no-then
  run().catch(e => {
    core.setFailed(e.message || e)
  })
}
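// Usage sketch for tests (an assumption, not part of the original source):
// setting TESTING in the environment stops the module from auto-running on
// import, so a test can drive run() directly, assuming run is exported:
//
//   process.env.TESTING = '1'
//   const {run} = await import('./main')
//   await run()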
769
src/utils.ts
Normal file
@@ -0,0 +1,769 @@
/*global AsyncIterableIterator*/
import * as core from '@actions/core'
import * as exec from '@actions/exec'
import {createReadStream, promises as fs} from 'fs'
import mm from 'micromatch'
import * as path from 'path'
import {createInterface} from 'readline'

import {Inputs} from './inputs'

const IS_WINDOWS = process.platform === 'win32'
const MINIMUM_GIT_VERSION = '2.18.0'
/**
 * Normalize file path separators to '\' on Windows and '/' on Linux/macOS,
 * removing redundant separators along the way
 * @param p file path
 * @returns file path with normalized separators
 */
const normalizeSeparators = (p: string): string => {
  // Windows
  if (IS_WINDOWS) {
    // Convert slashes on Windows
    p = p.replace(/\//g, '\\')

    // Remove redundant slashes
    const isUnc = /^\\\\+[^\\]/.test(p) // e.g. \\hello
    return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\') // preserve leading \\ for UNC
  }

  // Remove redundant slashes
  return p.replace(/\/\/+/g, '/')
}
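// Illustrative behaviour sketch (not part of the original source):
//   normalizeSeparators('a//b///c') // => 'a/b/c' on Linux/macOS
//   normalizeSeparators('a/b/c')    // => 'a\b\c' on Windows
//   normalizeSeparators('\\server\share') keeps its leading '\\' on Windows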
/**
 * Trims unnecessary trailing slash from file path
 * @param p file path
 * @returns file path without unnecessary trailing slash
 */
const safeTrimTrailingSeparator = (p: string): string => {
  // Empty path
  if (!p) {
    return ''
  }

  // Normalize separators
  p = normalizeSeparators(p)

  // No trailing slash
  if (!p.endsWith(path.sep)) {
    return p
  }

  // Check '/' on Linux/macOS and '\' on Windows
  if (p === path.sep) {
    return p
  }

  // On Windows, avoid trimming the drive root, e.g. C:\ or \\hello
  if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) {
    return p
  }

  // Trim trailing slash
  return p.substring(0, p.length - 1)
}
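// Illustrative behaviour sketch (not part of the original source):
//   safeTrimTrailingSeparator('foo/bar/') // => 'foo/bar'
//   safeTrimTrailingSeparator('/')        // => '/' (root is preserved)
//   safeTrimTrailingSeparator('C:\')      // => 'C:\' (Windows drive root)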
const dirname = (p: string): string => {
  // Normalize slashes and trim unnecessary trailing slash
  p = safeTrimTrailingSeparator(p)

  // Windows UNC root, e.g. \\hello or \\hello\world
  if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) {
    return p
  }

  // Get dirname
  let result = path.dirname(p)

  // Trim trailing slash for Windows UNC root, e.g. \\hello\world\
  if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) {
    result = safeTrimTrailingSeparator(result)
  }

  return result
}
const versionToNumber = (version: string): number => {
  const [major, minor, patch] = version.split('.').map(Number)
  return major * 1000000 + minor * 1000 + patch
}
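// Worked example (not part of the original source): '2.18.0' maps to
//   2 * 1000000 + 18 * 1000 + 0 = 2018000
// so versions compare correctly as plain numbers, assuming each component
// stays below 1000.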
export const verifyMinimumGitVersion = async (): Promise<void> => {
  const {exitCode, stdout, stderr} = await exec.getExecOutput(
    'git',
    ['--version'],
    {silent: false}
  )

  if (exitCode !== 0) {
    throw new Error(stderr || 'An unexpected error occurred')
  }

  // `git --version` prints e.g. "git version 2.40.0"; keep only the number
  const gitVersion = stdout.trim().replace(/^git version /, '')

  if (versionToNumber(gitVersion) < versionToNumber(MINIMUM_GIT_VERSION)) {
    throw new Error(
      `Minimum required git version is ${MINIMUM_GIT_VERSION}, your version is ${gitVersion}`
    )
  }
}
const exists = async (filePath: string): Promise<boolean> => {
  try {
    await fs.access(filePath)
    return true
  } catch {
    return false
  }
}
async function* lineOfFileGenerator({
  filePath,
  excludedFiles
}: {
  filePath: string
  excludedFiles: boolean
}): AsyncIterableIterator<string> {
  const fileStream = createReadStream(filePath)
  /* istanbul ignore next */
  fileStream.on('error', error => {
    throw error
  })
  const rl = createInterface({
    input: fileStream,
    crlfDelay: Infinity
  })
  for await (const line of rl) {
    if (!line.startsWith('#') && line !== '') {
      if (excludedFiles) {
        if (line.startsWith('!')) {
          yield line
        } else {
          yield `!${line}`
        }
      } else {
        yield line
      }
    }
  }
}
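// Illustrative sketch (not part of the original source): for a source file
// containing the lines "src/**", "!dist/**", "# comment" and an empty line,
// reading with excludedFiles=true yields "!src/**" and "!dist/**"; comments
// and blank lines are skipped, and an existing leading "!" is kept as-is.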
const getFilesFromSourceFile = async ({
  filePaths,
  excludedFiles = false
}: {
  filePaths: string[]
  excludedFiles?: boolean
}): Promise<string[]> => {
  const lines = []
  for (const filePath of filePaths) {
    for await (const line of lineOfFileGenerator({filePath, excludedFiles})) {
      lines.push(line)
    }
  }
  return lines
}
export const updateGitGlobalConfig = async ({
  name,
  value
}: {
  name: string
  value: string
}): Promise<void> => {
  const {exitCode, stderr} = await exec.getExecOutput(
    'git',
    ['config', '--global', name, value],
    {
      ignoreReturnCode: true,
      silent: false
    }
  )

  /* istanbul ignore if */
  if (exitCode !== 0 || stderr) {
    core.warning(stderr || `Couldn't update git global config ${name}`)
  }
}
export const isRepoShallow = async ({cwd}: {cwd: string}): Promise<boolean> => {
  const {stdout} = await exec.getExecOutput(
    'git',
    ['rev-parse', '--is-shallow-repository'],
    {
      cwd,
      silent: false
    }
  )

  return stdout.trim() === 'true'
}
export const submoduleExists = async ({
  cwd
}: {
  cwd: string
}): Promise<boolean> => {
  const {stdout} = await exec.getExecOutput('git', ['submodule', 'status'], {
    cwd,
    silent: false
  })

  return stdout.trim() !== ''
}
export const gitFetch = async ({
  args,
  cwd
}: {
  args: string[]
  cwd: string
}): Promise<number> => {
  const {exitCode} = await exec.getExecOutput('git', ['fetch', '-q', ...args], {
    cwd,
    ignoreReturnCode: true,
    silent: false
  })

  return exitCode
}
export const gitFetchSubmodules = async ({
  args,
  cwd
}: {
  args: string[]
  cwd: string
}): Promise<void> => {
  const {exitCode, stderr} = await exec.getExecOutput(
    'git',
    ['submodule', 'foreach', 'git', 'fetch', '-q', ...args],
    {
      cwd,
      ignoreReturnCode: true,
      silent: false
    }
  )

  /* istanbul ignore if */
  if (exitCode !== 0) {
    core.warning(stderr || "Couldn't fetch submodules")
  }
}
const normalizePath = (p: string): string => {
  return p.replace(/\\/g, '/')
}
export const getSubmodulePath = async ({
  cwd
}: {
  cwd: string
}): Promise<string[]> => {
  // git submodule status | awk '{print $2}'

  const {exitCode, stdout, stderr} = await exec.getExecOutput(
    'git',
    ['submodule', 'status'],
    {
      cwd,
      ignoreReturnCode: true,
      silent: false
    }
  )

  if (exitCode !== 0) {
    core.warning(stderr || "Couldn't get submodule names")
    return []
  }

  return stdout
    .trim()
    .split('\n')
    // Trim each line so the leading status character doesn't shift the
    // path out of the second space-separated field
    .map(line => normalizePath(line.trim().split(' ')[1]))
}
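// Illustrative sketch (not part of the original source): `git submodule status`
// prints lines such as
//   " 4b825dc642cb6eb9a060e54bf8d69288fbee4904 path/to/submodule (v1.0.0)"
// so trimming each line and taking the second space-separated field recovers
// the submodule path, mirroring `awk '{print $2}'`.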
export const gitSubmoduleDiffSHA = async ({
  cwd,
  parentSha1,
  parentSha2,
  submodulePath,
  diff
}: {
  cwd: string
  parentSha1: string
  parentSha2: string
  submodulePath: string
  diff: string
}): Promise<{previousSha?: string; currentSha?: string}> => {
  const {stdout} = await exec.getExecOutput(
    'git',
    ['diff', parentSha1, parentSha2, '--', submodulePath],
    {
      cwd,
      silent: false
    }
  )

  const subprojectCommitPreRegex =
    /^(?<preCommit>-)Subproject commit (?<commitHash>.+)$/m
  const subprojectCommitCurRegex =
    /^(?<curCommit>\+)Subproject commit (?<commitHash>.+)$/m

  const previousSha =
    subprojectCommitPreRegex.exec(stdout)?.groups?.commitHash ||
    '4b825dc642cb6eb9a060e54bf8d69288fbee4904'
  const currentSha = subprojectCommitCurRegex.exec(stdout)?.groups?.commitHash

  if (currentSha) {
    return {previousSha, currentSha}
  }

  core.debug(
    `No submodule commit found for ${submodulePath} between ${parentSha1}${diff}${parentSha2}`
  )
  return {}
}
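// Note (not part of the original source): 4b825dc642cb6eb9a060e54bf8d69288fbee4904
// is git's well-known empty-tree object id, used above as the fallback
// "previous" sha when the submodule did not exist before the diff range.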
export const gitRenamedFiles = async ({
  cwd,
  sha1,
  sha2,
  diff,
  oldNewSeparator,
  isSubmodule = false,
  parentDir = ''
}: {
  cwd: string
  sha1: string
  sha2: string
  diff: string
  oldNewSeparator: string
  isSubmodule?: boolean
  parentDir?: string
}): Promise<string[]> => {
  const {exitCode, stderr, stdout} = await exec.getExecOutput(
    'git',
    [
      'diff',
      '--name-status',
      '--ignore-submodules=all',
      '--diff-filter=R',
      `${sha1}${diff}${sha2}`
    ],
    {
      cwd,
      ignoreReturnCode: true,
      silent: false
    }
  )

  if (exitCode !== 0) {
    if (isSubmodule) {
      core.warning(
        stderr ||
          `Failed to get renamed files for submodule between: ${sha1}${diff}${sha2}`
      )
      core.warning(
        'Please ensure that submodules are initialized and up to date. See: https://github.com/actions/checkout#usage'
      )
    } else {
      core.error(
        stderr || `Failed to get renamed files between: ${sha1}${diff}${sha2}`
      )
      throw new Error('Unable to get renamed files')
    }

    return []
  }

  return stdout
    .trim()
    .split('\n')
    .filter(Boolean)
    .map(line => {
      core.debug(`Renamed file: ${line}`)
      const [, oldPath, newPath] = line.split('\t')
      if (isSubmodule) {
        return `${normalizePath(
          path.join(parentDir, oldPath)
        )}${oldNewSeparator}${normalizePath(path.join(parentDir, newPath))}`
      }
      return `${normalizePath(oldPath)}${oldNewSeparator}${normalizePath(
        newPath
      )}`
    })
}
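// Illustrative sketch (not part of the original source): with --diff-filter=R,
// `git diff --name-status` emits tab-separated lines such as
//   R100	old/path.ts	new/path.ts
// so splitting on '\t' yields the old and new paths, which are then joined
// with the configured old_new_separator.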
export const gitDiff = async ({
  cwd,
  sha1,
  sha2,
  diff,
  diffFilter,
  filePatterns = [],
  isSubmodule = false,
  parentDir = ''
}: {
  cwd: string
  sha1: string
  sha2: string
  diffFilter: string
  diff: string
  filePatterns?: string[]
  isSubmodule?: boolean
  parentDir?: string
}): Promise<string[]> => {
  const {exitCode, stdout, stderr} = await exec.getExecOutput(
    'git',
    [
      'diff',
      '--name-only',
      '--ignore-submodules=all',
      `--diff-filter=${diffFilter}`,
      `${sha1}${diff}${sha2}`
    ],
    {
      cwd,
      ignoreReturnCode: true,
      silent: false
    }
  )

  if (exitCode !== 0) {
    if (isSubmodule) {
      core.warning(
        stderr ||
          `Failed to get changed files for submodule between: ${sha1}${diff}${sha2}`
      )
      core.warning(
        'Please ensure that submodules are initialized and up to date. See: https://github.com/actions/checkout#usage'
      )
    } else {
      core.warning(
        stderr || `Failed to get changed files between: ${sha1}${diff}${sha2}`
      )
    }

    return []
  }

  const files = stdout
    .split('\n')
    .filter(Boolean)
    .map(p => {
      if (isSubmodule) {
        return normalizePath(path.join(parentDir, p))
      }
      return normalizePath(p)
    })

  if (filePatterns.length === 0) {
    return files
  }

  return mm(files, filePatterns, {
    dot: true,
    windows: IS_WINDOWS,
    noext: true
  })
}
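// Illustrative micromatch sketch (not part of the original source):
//   mm(['src/a.ts', 'docs/b.md'], ['src/**'], {dot: true, noext: true})
//   // => ['src/a.ts']
// `dot: true` lets patterns match dotfiles, and `noext: true` disables
// extglob syntax such as +(a|b).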
export const gitLog = async ({
  args,
  cwd
}: {
  args: string[]
  cwd: string
}): Promise<string> => {
  const {stdout} = await exec.getExecOutput('git', ['log', ...args], {
    cwd,
    silent: false
  })

  return stdout.trim()
}
export const getHeadSha = async ({cwd}: {cwd: string}): Promise<string> => {
  const {stdout} = await exec.getExecOutput('git', ['rev-parse', 'HEAD'], {
    cwd,
    silent: false
  })

  return stdout.trim()
}
export const gitLsRemote = async ({
  cwd,
  args
}: {
  cwd: string
  args: string[]
}): Promise<string> => {
  const {stdout} = await exec.getExecOutput(
    'git',
    ['ls-remote', 'origin', ...args],
    {
      cwd,
      silent: false
    }
  )
  const output = stdout.trim().split('\t')

  // String.split always yields at least one element, so also check that the
  // first field is non-empty
  if (output.length === 0 || !output[0]) {
    throw new Error('No output returned from git ls-remote')
  }

  return output[0]
}
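// Illustrative sketch (not part of the original source): `git ls-remote origin <ref>`
// prints "<sha>\t<ref>" per match, so the first tab-separated field of the
// output is the remote sha for the requested ref.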
export const getParentHeadSha = async ({
  cwd
}: {
  cwd: string
}): Promise<string> => {
  const {stdout} = await exec.getExecOutput('git', ['rev-parse', 'HEAD^'], {
    cwd,
    silent: false
  })

  return stdout.trim()
}
export const verifyCommitSha = async ({
  sha,
  cwd,
  showAsErrorMessage = true
}: {
  sha: string
  cwd: string
  showAsErrorMessage?: boolean
}): Promise<number> => {
  const {exitCode, stderr} = await exec.getExecOutput(
    'git',
    ['rev-parse', '--verify', `${sha}^{commit}`],
    {
      cwd,
      ignoreReturnCode: true,
      silent: false
    }
  )

  if (exitCode !== 0) {
    if (showAsErrorMessage) {
      core.error(`Unable to locate the commit sha: ${sha}`)
      core.error(
        "Please verify that the commit sha is correct, and increase the 'fetch_depth' input if needed"
      )
      core.debug(stderr)
    } else {
      core.warning(`Unable to locate the commit sha: ${sha}`)
      core.debug(stderr)
    }
  }

  return exitCode
}
export const getPreviousGitTag = async ({
  cwd
}: {
  cwd: string
}): Promise<{tag: string; sha: string}> => {
  const {stdout} = await exec.getExecOutput(
    'git',
    ['tag', '--sort=-version:refname'],
    {
      cwd,
      silent: false
    }
  )

  const tags = stdout.trim().split('\n')

  if (tags.length < 2) {
    core.warning('No previous tag found')
    return {tag: '', sha: ''}
  }

  const previousTag = tags[1]

  const {stdout: stdout2} = await exec.getExecOutput(
    'git',
    ['rev-parse', previousTag],
    {
      cwd,
      silent: false
    }
  )

  const sha = stdout2.trim()

  return {tag: previousTag, sha}
}
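// Illustrative sketch (not part of the original source): with tags v2.1.0,
// v2.0.0 and v1.9.0, `--sort=-version:refname` lists them newest first, so
// tags[1] (v2.0.0) is the tag immediately preceding the latest release.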
export const canDiffCommits = async ({
  cwd,
  sha1,
  sha2,
  diff
}: {
  cwd: string
  sha1: string
  sha2: string
  diff: string
}): Promise<boolean> => {
  const {exitCode, stderr} = await exec.getExecOutput(
    'git',
    ['diff', '--name-only', '--ignore-submodules=all', `${sha1}${diff}${sha2}`],
    {
      cwd,
      ignoreReturnCode: true,
      silent: false
    }
  )

  if (exitCode !== 0) {
    core.warning(
      stderr || `Unable to find the merge base between ${sha1} and ${sha2}`
    )
    return false
  }

  return true
}
export const getDirnameMaxDepth = ({
  pathStr,
  dirNamesMaxDepth,
  excludeRoot
}: {
  pathStr: string
  dirNamesMaxDepth?: number
  excludeRoot?: boolean
}): string => {
  const pathArr = dirname(pathStr).split(path.sep)
  const maxDepth = Math.min(dirNamesMaxDepth || pathArr.length, pathArr.length)
  let output = pathArr[0]

  for (let i = 1; i < maxDepth; i++) {
    output = path.join(output, pathArr[i])
  }

  if (excludeRoot && output === '.') {
    return ''
  }

  return normalizePath(output)
}
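// Worked example (not part of the original source):
//   getDirnameMaxDepth({pathStr: 'a/b/c/d.txt', dirNamesMaxDepth: 2})
//   // => 'a/b' (dirname is 'a/b/c', capped at the first 2 segments)
//   getDirnameMaxDepth({pathStr: 'file.txt', excludeRoot: true}) // => ''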
export const jsonOutput = ({
  value,
  shouldEscape
}: {
  value: string | string[]
  shouldEscape: boolean
}): string => {
  const result = JSON.stringify(value)

  return shouldEscape ? result.replace(/"/g, '\\"') : result
}
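// Worked example (not part of the original source):
//   jsonOutput({value: ['a.ts', 'b.ts'], shouldEscape: false}) // => ["a.ts","b.ts"]
//   jsonOutput({value: ['a.ts'], shouldEscape: true})          // => [\"a.ts\"]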
export const getFilePatterns = async ({
  inputs
}: {
  inputs: Inputs
}): Promise<string[]> => {
  let filePatterns = inputs.files
    .split(inputs.filesSeparator)
    .filter(p => p !== '')
    .join('\n')

  if (inputs.filesFromSourceFile !== '') {
    const inputFilesFromSourceFile = inputs.filesFromSourceFile
      .split(inputs.filesFromSourceFileSeparator)
      .filter(p => p !== '')

    core.debug(`files from source file: ${inputFilesFromSourceFile}`)

    const filesFromSourceFiles = (
      await getFilesFromSourceFile({filePaths: inputFilesFromSourceFile})
    ).join('\n')

    core.debug(`files from source files patterns: ${filesFromSourceFiles}`)

    filePatterns = filePatterns.concat('\n', filesFromSourceFiles)
  }

  if (inputs.filesIgnore) {
    const filesIgnorePatterns = inputs.filesIgnore
      .split(inputs.filesIgnoreSeparator)
      .filter(p => p !== '')
      .map(p => {
        if (!p.startsWith('!')) {
          p = `!${p}`
        }
        return p
      })
      .join('\n')

    core.debug(`files ignore patterns: ${filesIgnorePatterns}`)

    filePatterns = filePatterns.concat('\n', filesIgnorePatterns)
  }

  if (inputs.filesIgnoreFromSourceFile) {
    const inputFilesIgnoreFromSourceFile = inputs.filesIgnoreFromSourceFile
      .split(inputs.filesIgnoreFromSourceFileSeparator)
      .filter(p => p !== '')

    core.debug(
      `files ignore from source file: ${inputFilesIgnoreFromSourceFile}`
    )

    const filesIgnoreFromSourceFiles = (
      await getFilesFromSourceFile({
        filePaths: inputFilesIgnoreFromSourceFile,
        excludedFiles: true
      })
    ).join('\n')

    core.debug(
      `files ignore from source files patterns: ${filesIgnoreFromSourceFiles}`
    )

    filePatterns = filePatterns.concat('\n', filesIgnoreFromSourceFiles)
  }

  if (IS_WINDOWS) {
    filePatterns = filePatterns.replace(/\r\n/g, '\n')
    filePatterns = filePatterns.replace(/\r/g, '\n')
  }

  core.debug(`file patterns: ${filePatterns}`)

  return filePatterns.trim().split('\n').filter(Boolean)
}
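// Illustrative sketch (not part of the original source): with files "src/**"
// and files_ignore "src/vendor/**", the combined pattern list becomes
// ['src/**', '!src/vendor/**'], which micromatch reads as "match src/**
// except anything under src/vendor/".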
export const setOutput = async ({
  key,
  value,
  inputs
}: {
  key: string
  value: string | boolean
  inputs: Inputs
}): Promise<void> => {
  const cleanedValue = value.toString().trim()
  core.setOutput(key, cleanedValue)

  if (inputs.writeOutputFiles) {
    const outputDir = inputs.outputDir || '.github/outputs'
    const extension = inputs.json ? 'json' : 'txt'
    const outputFilePath = path.join(outputDir, `${key}.${extension}`)

    if (!(await exists(outputDir))) {
      await fs.mkdir(outputDir, {recursive: true})
    }
    await fs.writeFile(outputFilePath, cleanedValue.replace(/\\"/g, '"'))
  }
}
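// Illustrative sketch (not part of the original source): with
// write_output_files enabled and json disabled,
//   setOutput({key: 'all_changed_files', value: 'a.ts b.ts', inputs})
// sets the step output and also writes ".github/outputs/all_changed_files.txt"
// containing "a.ts b.ts".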
@@ -1 +1 @@
This is a test file with non ascii character filename.
This is a test file with non ascii character in the filename.
12
tsconfig.json
Normal file
@@ -0,0 +1,12 @@
{
  "compilerOptions": {
    "target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
    "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
    "outDir": "./lib", /* Redirect output structure to the directory. */
    "rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
    "strict": true, /* Enable all strict type-checking options. */
    "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
    "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
  },
  "exclude": ["node_modules", "jest/setEnvVars.cjs"]
}