Mirror of https://github.com/tj-actions/changed-files
Synced 2025-02-21 15:28:08 +00:00

Commit 9d7201ded6 (parent: f93ff33629)
Added missing changes and modified dist assets.

2 changed files with 157 additions and 79 deletions
dist/index.js (generated, vendored): 234 lines changed

@@ -62471,6 +62471,7 @@ function composeDoc(options, directives, { offset, start, value, end }, onError)
     const opts = Object.assign({ _directives: directives }, options);
     const doc = new Document.Document(undefined, opts);
     const ctx = {
+        atKey: false,
         atRoot: true,
         directives: doc.directives,
         options: doc.options,
@@ -62515,6 +62516,7 @@ exports.composeDoc = composeDoc;


 var Alias = __nccwpck_require__(4065);
+var identity = __nccwpck_require__(1127);
 var composeCollection = __nccwpck_require__(7349);
 var composeScalar = __nccwpck_require__(5413);
 var resolveEnd = __nccwpck_require__(7788);
@@ -62522,6 +62524,7 @@ var utilEmptyScalarPosition = __nccwpck_require__(2599);

 const CN = { composeNode, composeEmptyNode };
 function composeNode(ctx, token, props, onError) {
+    const atKey = ctx.atKey;
     const { spaceBefore, comment, anchor, tag } = props;
     let node;
     let isSrcToken = true;
@@ -62557,6 +62560,14 @@ function composeNode(ctx, token, props, onError) {
     }
     if (anchor && node.anchor === '')
         onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
+    if (atKey &&
+        ctx.options.stringKeys &&
+        (!identity.isScalar(node) ||
+            typeof node.value !== 'string' ||
+            (node.tag && node.tag !== 'tag:yaml.org,2002:str'))) {
+        const msg = 'With stringKeys, all keys must be strings';
+        onError(tag ?? token, 'NON_STRING_KEY', msg);
+    }
     if (spaceBefore)
         node.spaceBefore = true;
     if (comment) {
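
The new guard rejects non-string keys when the `stringKeys` option is set. A minimal sketch of the observable behaviour, assuming the bundled code tracks the published `yaml` package (where `stringKeys` shipped in v2.6.0):

```js
const YAML = require('yaml');

// Plain keys are resolved with the string tag, so this parses cleanly:
YAML.parse('1: one', { stringKeys: true });        // { '1': 'one' }

// An explicitly tagged non-string key now fails with NON_STRING_KEY:
YAML.parse('!!int 1: one', { stringKeys: true });  // throws YAMLParseError
```
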
@@ -62629,11 +62640,16 @@ function composeScalar(ctx, token, tagToken, onError) {
     const tagName = tagToken
         ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))
         : null;
-    const tag = tagToken && tagName
-        ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError)
-        : token.type === 'scalar'
-            ? findScalarTagByTest(ctx, value, token, onError)
-            : ctx.schema[identity.SCALAR];
+    let tag;
+    if (ctx.options.stringKeys && ctx.atKey) {
+        tag = ctx.schema[identity.SCALAR];
+    }
+    else if (tagName)
+        tag = findScalarTagByName(ctx.schema, value, tagName, tagToken, onError);
+    else if (token.type === 'scalar')
+        tag = findScalarTagByTest(ctx, value, token, onError);
+    else
+        tag = ctx.schema[identity.SCALAR];
     let scalar;
     try {
         const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options);
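
The rewritten tag selection short-circuits for keys when `stringKeys` is on, forcing the plain string tag. The effect is easiest to see with `mapAsMap`, where key types survive into JavaScript; a hedged sketch:

```js
const YAML = require('yaml');

// Without the option, a plain `1` key resolves as a number:
YAML.parse('1: one', { mapAsMap: true });                    // Map(1) { 1 => 'one' }

// With stringKeys, key scalars skip tag resolution and stay strings:
YAML.parse('1: one', { mapAsMap: true, stringKeys: true });  // Map(1) { '1' => 'one' }
```
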
@@ -62681,8 +62697,9 @@ function findScalarTagByName(schema, value, tagName, tagToken, onError) {
     onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');
     return schema[identity.SCALAR];
 }
-function findScalarTagByTest({ directives, schema }, value, token, onError) {
-    const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[identity.SCALAR];
+function findScalarTagByTest({ atKey, directives, schema }, value, token, onError) {
+    const tag = schema.tags.find(tag => (tag.default === true || (atKey && tag.default === 'key')) &&
+        tag.test?.test(value)) || schema[identity.SCALAR];
     if (schema.compat) {
         const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ??
             schema[identity.SCALAR];
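
`findScalarTagByTest` now also accepts tags declaring `default: 'key'`, but only while composing a key. The merge tag introduced below (module 452) is the consumer: `<<` resolves via the merge tag in key position only. A hedged sketch of the difference:

```js
const YAML = require('yaml');

// `<<` in value position stays an ordinary string:
YAML.parse('a: <<', { merge: true });                            // { a: '<<' }

// In key position the same scalar resolves via the merge tag:
YAML.parse('b: &b { x: 1 }\nc: { <<: *b }', { merge: true }).c;  // { x: 1 }
```
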
@@ -62988,12 +63005,14 @@ function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, ta
         onError(offset, 'BAD_INDENT', startColMsg);
     }
     // key value
+    ctx.atKey = true;
     const keyStart = keyProps.end;
     const keyNode = key
         ? composeNode(ctx, key, keyProps, onError)
         : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);
     if (ctx.schema.compat)
         utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError);
+    ctx.atKey = false;
     if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))
         onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
     // value properties
@@ -63276,6 +63295,8 @@ function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError, ta
     const seq = new NodeClass(ctx.schema);
     if (ctx.atRoot)
         ctx.atRoot = false;
+    if (ctx.atKey)
+        ctx.atKey = false;
     let offset = bs.offset;
     let commentEnd = null;
     for (const { start, value } of bs.items) {
@@ -63391,6 +63412,8 @@ function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onErr
     const atRoot = ctx.atRoot;
     if (atRoot)
         ctx.atRoot = false;
+    if (ctx.atKey)
+        ctx.atKey = false;
     let offset = fc.offset + fc.start.source.length;
     for (let i = 0; i < fc.items.length; ++i) {
         const collItem = fc.items[i];
@@ -63470,12 +63493,14 @@ function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onErr
         else {
             // item is a key+value pair
             // key value
+            ctx.atKey = true;
             const keyStart = props.end;
             const keyNode = key
                 ? composeNode(ctx, key, props, onError)
                 : composeEmptyNode(ctx, keyStart, start, null, props, onError);
             if (isBlock(key))
                 onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);
+            ctx.atKey = false;
             // value properties
             const valueProps = resolveProps.resolveProps(sep ?? [], {
                 flow: fcName,
@@ -64089,11 +64114,7 @@ function mapIncludes(ctx, items, search) {
         return false;
     const isEqual = typeof uniqueKeys === 'function'
         ? uniqueKeys
-        : (a, b) => a === b ||
-            (identity.isScalar(a) &&
-                identity.isScalar(b) &&
-                a.value === b.value &&
-                !(a.value === '<<' && ctx.schema.merge));
+        : (a, b) => a === b || (identity.isScalar(a) && identity.isScalar(b) && a.value === b.value);
     return items.some(pair => isEqual(pair.key, search));
 }

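
With merge keys handled by their own tag, `mapIncludes` no longer needs the `<<` carve-out and the generic `uniqueKeys` check is all that remains. Roughly, assuming published behaviour:

```js
const YAML = require('yaml');

YAML.parse('a: 1\na: 2');                         // throws: Map keys must be unique
YAML.parse('a: 1\na: 2', { uniqueKeys: false });  // { a: 2 }, the last pair wins
```
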
@@ -64145,6 +64166,7 @@ class Document {
             logLevel: 'warn',
             prettyErrors: true,
             strict: true,
+            stringKeys: false,
             uniqueKeys: true,
             version: '1.2'
         }, options);
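
`stringKeys` joins the per-document defaults, so it is off unless opted into. A small sketch, assuming the published `Document` API:

```js
const { Document } = require('yaml');

// The new option is part of the per-document defaults:
new Document({ a: 1 }).options.stringKeys;  // false
```
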
@@ -64368,7 +64390,7 @@ class Document {
                     this.directives.yaml.version = '1.1';
                 else
                     this.directives = new directives.Directives({ version: '1.1' });
-                opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' };
+                opt = { resolveKnownTags: false, schema: 'yaml-1.1' };
                 break;
             case '1.2':
             case 'next':
@@ -64376,7 +64398,7 @@ class Document {
                     this.directives.yaml.version = version;
                 else
                     this.directives = new directives.Directives({ version });
-                opt = { merge: false, resolveKnownTags: true, schema: 'core' };
+                opt = { resolveKnownTags: true, schema: 'core' };
                 break;
             case null:
                 if (this.directives)
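
In both branches `setSchema` stops toggling a standalone `merge` option; whether merge keys apply now travels with the schema's tag list instead. A sketch, assuming `doc.schema` is populated as in the published package (the `hasMerge` helper is hypothetical):

```js
const { Document } = require('yaml');

const hasMerge = d => d.schema.tags.some(t => t.tag === 'tag:yaml.org,2002:merge');

const doc = new Document({ a: 1 });
hasMerge(doc);          // false: the core (1.2) schema omits the merge tag
doc.setSchema('1.1');
hasMerge(doc);          // true: the yaml-1.1 schema ships it
```
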
@@ -65719,24 +65741,17 @@ exports.YAMLSeq = YAMLSeq;


 var log = __nccwpck_require__(7249);
+var merge = __nccwpck_require__(452);
 var stringify = __nccwpck_require__(2148);
 var identity = __nccwpck_require__(1127);
-var Scalar = __nccwpck_require__(3301);
 var toJS = __nccwpck_require__(6424);

-const MERGE_KEY = '<<';
 function addPairToJSMap(ctx, map, { key, value }) {
-    if (ctx?.doc.schema.merge && isMergeKey(key)) {
-        value = identity.isAlias(value) ? value.resolve(ctx.doc) : value;
-        if (identity.isSeq(value))
-            for (const it of value.items)
-                mergeToJSMap(ctx, map, it);
-        else if (Array.isArray(value))
-            for (const it of value)
-                mergeToJSMap(ctx, map, it);
-        else
-            mergeToJSMap(ctx, map, value);
-    }
+    if (identity.isNode(key) && key.addToJSMap)
+        key.addToJSMap(ctx, map, value);
+    // TODO: Should drop this special case for bare << handling
+    else if (merge.isMergeKey(ctx, key))
+        merge.addMergeToJSMap(ctx, map, value);
     else {
         const jsKey = toJS.toJS(key, '', ctx);
         if (map instanceof Map) {
@@ -65761,41 +65776,6 @@ function addPairToJSMap(ctx, map, { key, value }) {
     }
     return map;
 }
-const isMergeKey = (key) => key === MERGE_KEY ||
-    (identity.isScalar(key) &&
-        key.value === MERGE_KEY &&
-        (!key.type || key.type === Scalar.Scalar.PLAIN));
-// If the value associated with a merge key is a single mapping node, each of
-// its key/value pairs is inserted into the current mapping, unless the key
-// already exists in it. If the value associated with the merge key is a
-// sequence, then this sequence is expected to contain mapping nodes and each
-// of these nodes is merged in turn according to its order in the sequence.
-// Keys in mapping nodes earlier in the sequence override keys specified in
-// later mapping nodes. -- http://yaml.org/type/merge.html
-function mergeToJSMap(ctx, map, value) {
-    const source = ctx && identity.isAlias(value) ? value.resolve(ctx.doc) : value;
-    if (!identity.isMap(source))
-        throw new Error('Merge sources must be maps or map aliases');
-    const srcMap = source.toJSON(null, ctx, Map);
-    for (const [key, value] of srcMap) {
-        if (map instanceof Map) {
-            if (!map.has(key))
-                map.set(key, value);
-        }
-        else if (map instanceof Set) {
-            map.add(key);
-        }
-        else if (!Object.prototype.hasOwnProperty.call(map, key)) {
-            Object.defineProperty(map, key, {
-                value,
-                writable: true,
-                enumerable: true,
-                configurable: true
-            });
-        }
-    }
-    return map;
-}
 function stringifyKey(key, jsKey, ctx) {
     if (jsKey === null)
         return '';
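
The merge semantics described in the removed comment are unchanged, only relocated to module 452 below: per http://yaml.org/type/merge.html, keys from earlier mapping nodes in a merge sequence override later ones. For reference, a hedged sketch:

```js
const YAML = require('yaml');

const src = `
a: &a { x: 1 }
b: &b { x: 2, y: 2 }
merged:
  <<: [*a, *b]
  z: 3
`;
// Earlier sources still win over later ones:
YAML.parse(src, { merge: true }).merged;  // { x: 1, y: 2, z: 3 }
```
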
@@ -68210,6 +68190,7 @@ var composer = __nccwpck_require__(9984);
 var Document = __nccwpck_require__(3021);
 var errors = __nccwpck_require__(1464);
 var log = __nccwpck_require__(7249);
+var identity = __nccwpck_require__(1127);
 var lineCounter = __nccwpck_require__(6628);
 var parser = __nccwpck_require__(3456);

@@ -68301,6 +68282,8 @@ function stringify(value, replacer, options) {
         if (!keepUndefined)
             return undefined;
     }
+    if (identity.isDocument(value) && !_replacer)
+        return value.toString(options);
     return new Document.Document(value, _replacer, options).toString(options);
 }

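
The two added lines make `YAML.stringify` short-circuit for an already-composed Document when no replacer is given, delegating to the document's own `toString` so comments and formatting survive. A sketch, assuming published behaviour:

```js
const YAML = require('yaml');

const doc = YAML.parseDocument('a: 1 # answer\n');
YAML.stringify(doc);  // 'a: 1 # answer\n', same as doc.toString(options)
```
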
@@ -68332,10 +68315,9 @@ class Schema {
             : compat
                 ? tags.getTags(null, compat)
                 : null;
-        this.merge = !!merge;
         this.name = (typeof schema === 'string' && schema) || 'core';
         this.knownTags = resolveKnownTags ? tags.coreKnownTags : {};
-        this.tags = tags.getTags(customTags, this.name);
+        this.tags = tags.getTags(customTags, this.name, merge);
         this.toStringOptions = toStringDefaults ?? null;
         Object.defineProperty(this, identity.MAP, { value: map.map });
         Object.defineProperty(this, identity.SCALAR, { value: string.string });
@@ -68718,6 +68700,7 @@ var int = __nccwpck_require__(9874);
 var schema = __nccwpck_require__(896);
 var schema$1 = __nccwpck_require__(3559);
 var binary = __nccwpck_require__(6083);
+var merge = __nccwpck_require__(452);
 var omap = __nccwpck_require__(303);
 var pairs = __nccwpck_require__(8385);
 var schema$2 = __nccwpck_require__(8294);
@@ -68743,6 +68726,7 @@ const tagsByName = {
     intOct: int.intOct,
     intTime: timestamp.intTime,
     map: map.map,
+    merge: merge.merge,
     null: _null.nullTag,
     omap: omap.omap,
     pairs: pairs.pairs,
@@ -68752,13 +68736,20 @@ const tagsByName = {
 };
 const coreKnownTags = {
     'tag:yaml.org,2002:binary': binary.binary,
+    'tag:yaml.org,2002:merge': merge.merge,
     'tag:yaml.org,2002:omap': omap.omap,
     'tag:yaml.org,2002:pairs': pairs.pairs,
     'tag:yaml.org,2002:set': set.set,
     'tag:yaml.org,2002:timestamp': timestamp.timestamp
 };
-function getTags(customTags, schemaName) {
-    let tags = schemas.get(schemaName);
+function getTags(customTags, schemaName, addMergeTag) {
+    const schemaTags = schemas.get(schemaName);
+    if (schemaTags && !customTags) {
+        return addMergeTag && !schemaTags.includes(merge.merge)
+            ? schemaTags.concat(merge.merge)
+            : schemaTags.slice();
+    }
+    let tags = schemaTags;
     if (!tags) {
         if (Array.isArray(customTags))
             tags = [];
@@ -68777,17 +68768,21 @@ function getTags(customTags, schemaName) {
     else if (typeof customTags === 'function') {
         tags = customTags(tags.slice());
     }
-    return tags.map(tag => {
-        if (typeof tag !== 'string')
-            return tag;
-        const tagObj = tagsByName[tag];
-        if (tagObj)
-            return tagObj;
-        const keys = Object.keys(tagsByName)
-            .map(key => JSON.stringify(key))
-            .join(', ');
-        throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`);
-    });
+    if (addMergeTag)
+        tags = tags.concat(merge.merge);
+    return tags.reduce((tags, tag) => {
+        const tagObj = typeof tag === 'string' ? tagsByName[tag] : tag;
+        if (!tagObj) {
+            const tagName = JSON.stringify(tag);
+            const keys = Object.keys(tagsByName)
+                .map(key => JSON.stringify(key))
+                .join(', ');
+            throw new Error(`Unknown custom tag ${tagName}; use one of ${keys}`);
+        }
+        if (!tags.includes(tagObj))
+            tags.push(tagObj);
+        return tags;
+    }, []);
 }

 exports.coreKnownTags = coreKnownTags;
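
`getTags` gains an `addMergeTag` flag (threaded through from the Schema's `merge` option), dedupes via `reduce`, and still resolves string names through `tagsByName`, which now includes a `merge` entry. A hedged sketch of both paths:

```js
const YAML = require('yaml');

// merge: true on the default core schema now just appends the merge tag:
const doc = new YAML.Document({}, { merge: true });
doc.schema.tags.some(t => t.tag === 'tag:yaml.org,2002:merge');  // true

// String names still resolve through tagsByName:
YAML.parse('t: 2001-12-14', { customTags: ['timestamp'] });
// { t: 2001-12-14T00:00:00.000Z } (a Date)
```
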
@@ -69049,6 +69044,82 @@ exports.intHex = intHex;
 exports.intOct = intOct;


 /***/ }),

+/***/ 452:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+var identity = __nccwpck_require__(1127);
+var Scalar = __nccwpck_require__(3301);
+
+// If the value associated with a merge key is a single mapping node, each of
+// its key/value pairs is inserted into the current mapping, unless the key
+// already exists in it. If the value associated with the merge key is a
+// sequence, then this sequence is expected to contain mapping nodes and each
+// of these nodes is merged in turn according to its order in the sequence.
+// Keys in mapping nodes earlier in the sequence override keys specified in
+// later mapping nodes. -- http://yaml.org/type/merge.html
+const MERGE_KEY = '<<';
+const merge = {
+    identify: value => value === MERGE_KEY ||
+        (typeof value === 'symbol' && value.description === MERGE_KEY),
+    default: 'key',
+    tag: 'tag:yaml.org,2002:merge',
+    test: /^<<$/,
+    resolve: () => Object.assign(new Scalar.Scalar(Symbol(MERGE_KEY)), {
+        addToJSMap: addMergeToJSMap
+    }),
+    stringify: () => MERGE_KEY
+};
+const isMergeKey = (ctx, key) => (merge.identify(key) ||
+    (identity.isScalar(key) &&
+        (!key.type || key.type === Scalar.Scalar.PLAIN) &&
+        merge.identify(key.value))) &&
+    ctx?.doc.schema.tags.some(tag => tag.tag === merge.tag && tag.default);
+function addMergeToJSMap(ctx, map, value) {
+    value = ctx && identity.isAlias(value) ? value.resolve(ctx.doc) : value;
+    if (identity.isSeq(value))
+        for (const it of value.items)
+            mergeValue(ctx, map, it);
+    else if (Array.isArray(value))
+        for (const it of value)
+            mergeValue(ctx, map, it);
+    else
+        mergeValue(ctx, map, value);
+}
+function mergeValue(ctx, map, value) {
+    const source = ctx && identity.isAlias(value) ? value.resolve(ctx.doc) : value;
+    if (!identity.isMap(source))
+        throw new Error('Merge sources must be maps or map aliases');
+    const srcMap = source.toJSON(null, ctx, Map);
+    for (const [key, value] of srcMap) {
+        if (map instanceof Map) {
+            if (!map.has(key))
+                map.set(key, value);
+        }
+        else if (map instanceof Set) {
+            map.add(key);
+        }
+        else if (!Object.prototype.hasOwnProperty.call(map, key)) {
+            Object.defineProperty(map, key, {
+                value,
+                writable: true,
+                enumerable: true,
+                configurable: true
+            });
+        }
+    }
+    return map;
+}
+
+exports.addMergeToJSMap = addMergeToJSMap;
+exports.isMergeKey = isMergeKey;
+exports.merge = merge;
+
+
+/***/ }),
+
 /***/ 303:
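
The extracted module defines merge as a regular tag: `default: 'key'` plus `test: /^<<$/` make it match only in key position, and its `resolve` returns a Scalar wrapping `Symbol('<<')` whose `addToJSMap` hook performs the merge. The yaml-1.1 schema includes it by default; a hedged sketch:

```js
const YAML = require('yaml');

const src = 'base: &base { x: 1 }\nchild:\n  <<: *base\n  y: 2\n';
YAML.parse(src, { schema: 'yaml-1.1' }).child;  // { x: 1, y: 2 }
```
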
@@ -69240,6 +69311,7 @@ var binary = __nccwpck_require__(6083);
 var bool = __nccwpck_require__(8398);
 var float = __nccwpck_require__(5782);
 var int = __nccwpck_require__(873);
+var merge = __nccwpck_require__(452);
 var omap = __nccwpck_require__(303);
 var pairs = __nccwpck_require__(8385);
 var set = __nccwpck_require__(1528);
@@ -69260,6 +69332,7 @@ const schema = [
     float.floatExp,
     float.float,
     binary.binary,
+    merge.merge,
     omap.omap,
     pairs.pairs,
     set.set,
@@ -69711,7 +69784,12 @@ function getTagObject(tags, item) {
     let obj;
     if (identity.isScalar(item)) {
         obj = item.value;
-        const match = tags.filter(t => t.identify?.(obj));
+        let match = tags.filter(t => t.identify?.(obj));
+        if (match.length > 1) {
+            const testMatch = match.filter(t => t.test);
+            if (testMatch.length > 0)
+                match = testMatch;
+        }
         tagObj =
             match.find(t => t.format === item.format) ?? match.find(t => !t.format);
     }

dist/index.js.map (generated, vendored): 2 lines changed
File diff suppressed because one or more lines are too long