Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
50 changes: 44 additions & 6 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,13 @@ For more information, see [CHANGELOG](https://github.com/dorny/paths-filter/blob
# Default: none
list-files: ''

# Enables writing the lists of matching files to a corresponding file.
# If set to true, the action will create one file per filter containing its list of matching files.
# The file will be written in the format specified by the `list-files` option and named
# after the filter. The path to the file will be relative to the working directory and
# exported as an output variable named `<filter-name>_files_path`.
write-to-files: ''

# Relative path under $GITHUB_WORKSPACE where the repository was checked out.
working-directory: ''

Expand All @@ -154,14 +161,14 @@ For more information, see [CHANGELOG](https://github.com/dorny/paths-filter/blob
# Default: ${{ github.token }}
token: ''

# Optional parameter to override the default behavior of file matching algorithm.
# Optional parameter to override the default behavior of file matching algorithm.
# By default files that match at least one pattern defined by the filters will be included.
# This parameter allows you to override the "at least one pattern" behavior to make it so that
# all of the patterns have to match or otherwise the file is excluded.
# An example scenario where this is useful is if you would like to match all
# .ts files in a sub-directory but not .md files.
# The filters below will match markdown files despite the exclusion syntax UNLESS
# you specify 'every' as the predicate-quantifier parameter. When you do that,
# all of the patterns have to match or otherwise the file is excluded.
# An example scenario where this is useful is if you would like to match all
# .ts files in a sub-directory but not .md files.
# The filters below will match markdown files despite the exclusion syntax UNLESS
# you specify 'every' as the predicate-quantifier parameter. When you do that,
# it will only match the .ts files in the subdirectory as expected.
#
# backend:
Expand Down Expand Up @@ -505,6 +512,37 @@ jobs:

</details>

<details>
<summary>Handle large change sets (2000+ files)</summary>

```yaml
- uses: dorny/paths-filter@v3
id: changed
with:
# Enable writing the files matching each filter to the disk in addition to the output '<filter_name>_files'.
# The path for each filter's file is output in the format '<filter_name>_files_path'.
write-to-files: true
list-files: json
filters: |
content:
- 'content/**'


- name: List changed directories relative to the base directory
shell: bash
env:
BASE_DIR: ${{ inputs.base-directory }}
CHANGED_CONTENT_FILES_PATH: ${{ steps.changed.outputs.content_files_path }}
run: |
CHANGED_CONTENT_DIRECTORIES=$(cat "${CHANGED_CONTENT_FILES_PATH}" | xargs -n1 realpath -m --relative-to=${BASE_DIR} | cut -f1 -d / | sort -u)
for d in $CHANGED_CONTENT_DIRECTORIES
do
echo "Content directory change detected: ${d}"
done
```

</details>

### Custom processing of changed files

<details>
Expand Down
7 changes: 7 additions & 0 deletions action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,13 @@ inputs:
Backslash escapes every potentially unsafe character.
required: false
default: none
write-to-files:
description: |
Enables writing the lists of matching files to a corresponding file in addition to the output '<filter-name>_files'.
If set to true, the action will create one file per filter containing its list of matching files.
The file will be written in the format specified by the `list-files` option and named
after the filter. The path to the file will be output as a variable named `<filter-name>_files_path`.
required: false
initial-fetch-depth:
description: |
How many commits are initially fetched from base branch.
Expand Down
26 changes: 20 additions & 6 deletions dist/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -552,15 +552,20 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const fs = __importStar(__nccwpck_require__(7147));
const core = __importStar(__nccwpck_require__(2186));
const github = __importStar(__nccwpck_require__(5438));
const path_1 = __importDefault(__nccwpck_require__(1017));
const filter_1 = __nccwpck_require__(3707);
const file_1 = __nccwpck_require__(4014);
const git = __importStar(__nccwpck_require__(3374));
const shell_escape_1 = __nccwpck_require__(4613);
const csv_escape_1 = __nccwpck_require__(7402);
const fs_1 = __nccwpck_require__(7147);
async function run() {
try {
const workingDirectory = core.getInput('working-directory', { required: false });
Expand All @@ -573,6 +578,7 @@ async function run() {
const filtersInput = core.getInput('filters', { required: true });
const filtersYaml = isPathInput(filtersInput) ? getConfigFileContent(filtersInput) : filtersInput;
const listFiles = core.getInput('list-files', { required: false }).toLowerCase() || 'none';
const writeToFiles = core.getInput('write-to-files', { required: false }).toLowerCase() === 'true';
const initialFetchDepth = parseInt(core.getInput('initial-fetch-depth', { required: false })) || 10;
const predicateQuantifier = core.getInput('predicate-quantifier', { required: false }) || filter_1.PredicateQuantifier.SOME;
if (!isExportFormat(listFiles)) {
Expand All @@ -589,7 +595,7 @@ async function run() {
const files = await getChangedFiles(token, base, ref, initialFetchDepth);
core.info(`Detected ${files.length} changed files`);
const results = filter.match(files);
exportResults(results, listFiles);
exportResults(results, listFiles, writeToFiles);
}
catch (error) {
core.setFailed(getErrorMessage(error));
Expand Down Expand Up @@ -742,13 +748,14 @@ async function getChangedFilesFromApi(token, pullRequest) {
core.endGroup();
}
}
function exportResults(results, format) {
function exportResults(results, format, writeToFiles) {
const tempDir = (0, fs_1.mkdtempSync)(path_1.default.join(process.cwd(), 'paths-filter-'));
core.info('Results:');
const changes = [];
for (const [key, files] of Object.entries(results)) {
const value = files.length > 0;
core.startGroup(`Filter ${key} = ${value}`);
if (files.length > 0) {
const match = files.length > 0;
core.startGroup(`Filter ${key} = ${match}`);
if (match) {
changes.push(key);
core.info('Matching files:');
for (const file of files) {
Expand All @@ -758,11 +765,18 @@ function exportResults(results, format) {
else {
core.info('Matching files: none');
}
core.setOutput(key, value);
core.setOutput(key, match);
core.setOutput(`${key}_count`, files.length);
if (format !== 'none') {
const filesValue = serializeExport(files, format);
core.setOutput(`${key}_files`, filesValue);
if (writeToFiles) {
const ext = format === 'json' ? 'json' : 'txt';
const filePath = path_1.default.join(tempDir, `${key}-files.${ext}`);
fs.writeFileSync(filePath, filesValue);
core.info(`Matching files list for filter '${key}' written to '${filePath}'`);
core.setOutput(`${key}_files_path`, filePath);
}
}
core.endGroup();
}
Expand Down
26 changes: 20 additions & 6 deletions src/main.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import * as fs from 'fs'
import * as core from '@actions/core'
import * as github from '@actions/github'
import path from 'path'
import {GetResponseDataTypeFromEndpointMethod} from '@octokit/types'
import {PushEvent, PullRequestEvent} from '@octokit/webhooks-types'

Expand All @@ -16,6 +17,7 @@ import {File, ChangeStatus} from './file'
import * as git from './git'
import {backslashEscape, shellEscape} from './list-format/shell-escape'
import {csvEscape} from './list-format/csv-escape'
import {mkdtempSync} from 'fs'

type ExportFormat = 'none' | 'csv' | 'json' | 'shell' | 'escape'

Expand All @@ -32,6 +34,7 @@ async function run(): Promise<void> {
const filtersInput = core.getInput('filters', {required: true})
const filtersYaml = isPathInput(filtersInput) ? getConfigFileContent(filtersInput) : filtersInput
const listFiles = core.getInput('list-files', {required: false}).toLowerCase() || 'none'
const writeToFiles = core.getInput('write-to-files', {required: false}).toLowerCase() === 'true'
const initialFetchDepth = parseInt(core.getInput('initial-fetch-depth', {required: false})) || 10
const predicateQuantifier = core.getInput('predicate-quantifier', {required: false}) || PredicateQuantifier.SOME

Expand All @@ -52,7 +55,7 @@ async function run(): Promise<void> {
const files = await getChangedFiles(token, base, ref, initialFetchDepth)
core.info(`Detected ${files.length} changed files`)
const results = filter.match(files)
exportResults(results, listFiles)
exportResults(results, listFiles, writeToFiles)
} catch (error) {
core.setFailed(getErrorMessage(error))
}
Expand Down Expand Up @@ -228,13 +231,15 @@ async function getChangedFilesFromApi(token: string, pullRequest: PullRequestEve
}
}

function exportResults(results: FilterResults, format: ExportFormat): void {
function exportResults(results: FilterResults, format: ExportFormat, writeToFiles: boolean): void {
const tempDir = mkdtempSync(path.join(process.cwd(), 'paths-filter-'))

core.info('Results:')
const changes = []
for (const [key, files] of Object.entries(results)) {
const value = files.length > 0
core.startGroup(`Filter ${key} = ${value}`)
if (files.length > 0) {
const match = files.length > 0
core.startGroup(`Filter ${key} = ${match}`)
if (match) {
changes.push(key)
core.info('Matching files:')
for (const file of files) {
Expand All @@ -244,12 +249,21 @@ function exportResults(results: FilterResults, format: ExportFormat): void {
core.info('Matching files: none')
}

core.setOutput(key, value)
core.setOutput(key, match)
core.setOutput(`${key}_count`, files.length)
if (format !== 'none') {
const filesValue = serializeExport(files, format)
core.setOutput(`${key}_files`, filesValue)

if (writeToFiles) {
const ext = format === 'json' ? 'json' : 'txt'
const filePath = path.join(tempDir, `${key}-files.${ext}`)
fs.writeFileSync(filePath, filesValue)
core.info(`Matching files list for filter '${key}' written to '${filePath}'`)
core.setOutput(`${key}_files_path`, filePath)
}
}

core.endGroup()
}

Expand Down