FIN INIT
This commit is contained in:
8
node_modules/yaml/dist/cli.d.ts
generated
vendored
Normal file
8
node_modules/yaml/dist/cli.d.ts
generated
vendored
Normal file
@ -0,0 +1,8 @@
|
||||
export declare const help = "yaml: A command-line YAML processor and inspector\n\nReads stdin and writes output to stdout and errors & warnings to stderr.\n\nUsage:\n yaml Process a YAML stream, outputting it as YAML\n yaml cst Parse the CST of a YAML stream\n yaml lex Parse the lexical tokens of a YAML stream\n yaml valid Validate a YAML stream, returning 0 on success\n\nOptions:\n --help, -h Show this message.\n --json, -j Output JSON.\n --indent 2 Output pretty-printed data, indented by the given number of spaces.\n --merge, -m Enable support for \"<<\" merge keys.\n\nAdditional options for bare \"yaml\" command:\n --doc, -d Output pretty-printed JS Document objects.\n --single, -1 Require the input to consist of a single YAML document.\n --strict, -s Stop on errors.\n --visit, -v Apply a visitor to each document (requires a path to import)\n --yaml 1.1 Set the YAML version. (default: 1.2)";
|
||||
export declare class UserError extends Error {
|
||||
static ARGS: number;
|
||||
static SINGLE: number;
|
||||
code: number;
|
||||
constructor(code: number, message: string);
|
||||
}
|
||||
export declare function cli(stdin: NodeJS.ReadableStream, done: (error?: Error) => void, argv?: string[]): Promise<void>;
|
201
node_modules/yaml/dist/cli.mjs
generated
vendored
Normal file
201
node_modules/yaml/dist/cli.mjs
generated
vendored
Normal file
@ -0,0 +1,201 @@
|
||||
import { resolve } from 'path';
|
||||
import { parseArgs } from 'util';
|
||||
import { prettyToken } from './parse/cst.js';
|
||||
import { Lexer } from './parse/lexer.js';
|
||||
import { Parser } from './parse/parser.js';
|
||||
import { Composer } from './compose/composer.js';
|
||||
import { LineCounter } from './parse/line-counter.js';
|
||||
import { prettifyError } from './errors.js';
|
||||
import { visit } from './visit.js';
|
||||
|
||||
const help = `\
|
||||
yaml: A command-line YAML processor and inspector
|
||||
|
||||
Reads stdin and writes output to stdout and errors & warnings to stderr.
|
||||
|
||||
Usage:
|
||||
yaml Process a YAML stream, outputting it as YAML
|
||||
yaml cst Parse the CST of a YAML stream
|
||||
yaml lex Parse the lexical tokens of a YAML stream
|
||||
yaml valid Validate a YAML stream, returning 0 on success
|
||||
|
||||
Options:
|
||||
--help, -h Show this message.
|
||||
--json, -j Output JSON.
|
||||
--indent 2 Output pretty-printed data, indented by the given number of spaces.
|
||||
--merge, -m Enable support for "<<" merge keys.
|
||||
|
||||
Additional options for bare "yaml" command:
|
||||
--doc, -d Output pretty-printed JS Document objects.
|
||||
--single, -1 Require the input to consist of a single YAML document.
|
||||
--strict, -s Stop on errors.
|
||||
--visit, -v Apply a visitor to each document (requires a path to import)
|
||||
--yaml 1.1 Set the YAML version. (default: 1.2)`;
|
||||
class UserError extends Error {
|
||||
constructor(code, message) {
|
||||
super(`Error: ${message}`);
|
||||
this.code = code;
|
||||
}
|
||||
}
|
||||
UserError.ARGS = 2;
|
||||
UserError.SINGLE = 3;
|
||||
async function cli(stdin, done, argv) {
|
||||
let args;
|
||||
try {
|
||||
args = parseArgs({
|
||||
args: argv,
|
||||
allowPositionals: true,
|
||||
options: {
|
||||
doc: { type: 'boolean', short: 'd' },
|
||||
help: { type: 'boolean', short: 'h' },
|
||||
indent: { type: 'string', short: 'i' },
|
||||
merge: { type: 'boolean', short: 'm' },
|
||||
json: { type: 'boolean', short: 'j' },
|
||||
single: { type: 'boolean', short: '1' },
|
||||
strict: { type: 'boolean', short: 's' },
|
||||
visit: { type: 'string', short: 'v' },
|
||||
yaml: { type: 'string', default: '1.2' }
|
||||
}
|
||||
});
|
||||
}
|
||||
catch (error) {
|
||||
return done(new UserError(UserError.ARGS, error.message));
|
||||
}
|
||||
const { positionals: [mode], values: opt } = args;
|
||||
let indent = Number(opt.indent);
|
||||
stdin.setEncoding('utf-8');
|
||||
// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
|
||||
switch (opt.help || mode) {
|
||||
/* istanbul ignore next */
|
||||
case true: // --help
|
||||
console.log(help);
|
||||
break;
|
||||
case 'lex': {
|
||||
const lexer = new Lexer();
|
||||
const data = [];
|
||||
const add = (tok) => {
|
||||
if (opt.json)
|
||||
data.push(tok);
|
||||
else
|
||||
console.log(prettyToken(tok));
|
||||
};
|
||||
stdin.on('data', (chunk) => {
|
||||
for (const tok of lexer.lex(chunk, true))
|
||||
add(tok);
|
||||
});
|
||||
stdin.on('end', () => {
|
||||
for (const tok of lexer.lex('', false))
|
||||
add(tok);
|
||||
if (opt.json)
|
||||
console.log(JSON.stringify(data, null, indent));
|
||||
done();
|
||||
});
|
||||
break;
|
||||
}
|
||||
case 'cst': {
|
||||
const parser = new Parser();
|
||||
const data = [];
|
||||
const add = (tok) => {
|
||||
if (opt.json)
|
||||
data.push(tok);
|
||||
else
|
||||
console.dir(tok, { depth: null });
|
||||
};
|
||||
stdin.on('data', (chunk) => {
|
||||
for (const tok of parser.parse(chunk, true))
|
||||
add(tok);
|
||||
});
|
||||
stdin.on('end', () => {
|
||||
for (const tok of parser.parse('', false))
|
||||
add(tok);
|
||||
if (opt.json)
|
||||
console.log(JSON.stringify(data, null, indent));
|
||||
done();
|
||||
});
|
||||
break;
|
||||
}
|
||||
case undefined:
|
||||
case 'valid': {
|
||||
const lineCounter = new LineCounter();
|
||||
const parser = new Parser(lineCounter.addNewLine);
|
||||
// @ts-expect-error Version is validated at runtime
|
||||
const composer = new Composer({ version: opt.yaml, merge: opt.merge });
|
||||
const visitor = opt.visit
|
||||
? (await import(resolve(opt.visit))).default
|
||||
: null;
|
||||
let source = '';
|
||||
let hasDoc = false;
|
||||
let reqDocEnd = false;
|
||||
const data = [];
|
||||
const add = (doc) => {
|
||||
if (hasDoc && opt.single) {
|
||||
return done(new UserError(UserError.SINGLE, 'Input stream contains multiple documents'));
|
||||
}
|
||||
for (const error of doc.errors) {
|
||||
prettifyError(source, lineCounter)(error);
|
||||
if (opt.strict || mode === 'valid')
|
||||
return done(error);
|
||||
console.error(error);
|
||||
}
|
||||
for (const warning of doc.warnings) {
|
||||
prettifyError(source, lineCounter)(warning);
|
||||
console.error(warning);
|
||||
}
|
||||
if (visitor)
|
||||
visit(doc, visitor);
|
||||
if (mode === 'valid')
|
||||
doc.toJS();
|
||||
else if (opt.json)
|
||||
data.push(doc);
|
||||
else if (opt.doc) {
|
||||
Object.defineProperties(doc, {
|
||||
options: { enumerable: false },
|
||||
schema: { enumerable: false }
|
||||
});
|
||||
console.dir(doc, { depth: null });
|
||||
}
|
||||
else {
|
||||
if (reqDocEnd)
|
||||
console.log('...');
|
||||
try {
|
||||
indent || (indent = 2);
|
||||
const str = doc.toString({ indent });
|
||||
console.log(str.endsWith('\n') ? str.slice(0, -1) : str);
|
||||
}
|
||||
catch (error) {
|
||||
done(error);
|
||||
}
|
||||
}
|
||||
hasDoc = true;
|
||||
reqDocEnd = !doc.directives?.docEnd;
|
||||
};
|
||||
stdin.on('data', (chunk) => {
|
||||
source += chunk;
|
||||
for (const tok of parser.parse(chunk, true)) {
|
||||
for (const doc of composer.next(tok))
|
||||
add(doc);
|
||||
}
|
||||
});
|
||||
stdin.on('end', () => {
|
||||
for (const tok of parser.parse('', false)) {
|
||||
for (const doc of composer.next(tok))
|
||||
add(doc);
|
||||
}
|
||||
for (const doc of composer.end(false))
|
||||
add(doc);
|
||||
if (opt.single && !hasDoc) {
|
||||
return done(new UserError(UserError.SINGLE, 'Input stream contained no documents'));
|
||||
}
|
||||
if (mode !== 'valid' && opt.json) {
|
||||
console.log(JSON.stringify(opt.single ? data[0] : data, null, indent));
|
||||
}
|
||||
done();
|
||||
});
|
||||
break;
|
||||
}
|
||||
default:
|
||||
done(new UserError(UserError.ARGS, `Unknown command: ${JSON.stringify(mode)}`));
|
||||
}
|
||||
}
|
||||
|
||||
export { UserError, cli, help };
|
11
node_modules/yaml/dist/compose/compose-collection.d.ts
generated
vendored
Normal file
11
node_modules/yaml/dist/compose/compose-collection.d.ts
generated
vendored
Normal file
@ -0,0 +1,11 @@
|
||||
import type { ParsedNode } from '../nodes/Node';
|
||||
import type { BlockMap, BlockSequence, FlowCollection, SourceToken } from '../parse/cst';
|
||||
import type { ComposeContext, ComposeNode } from './compose-node';
|
||||
import type { ComposeErrorHandler } from './composer';
|
||||
interface Props {
|
||||
anchor: SourceToken | null;
|
||||
tag: SourceToken | null;
|
||||
newlineAfterProp: SourceToken | null;
|
||||
}
|
||||
export declare function composeCollection(CN: ComposeNode, ctx: ComposeContext, token: BlockMap | BlockSequence | FlowCollection, props: Props, onError: ComposeErrorHandler): ParsedNode;
|
||||
export {};
|
90
node_modules/yaml/dist/compose/compose-collection.js
generated
vendored
Normal file
90
node_modules/yaml/dist/compose/compose-collection.js
generated
vendored
Normal file
@ -0,0 +1,90 @@
|
||||
'use strict';
|
||||
|
||||
var identity = require('../nodes/identity.js');
|
||||
var Scalar = require('../nodes/Scalar.js');
|
||||
var YAMLMap = require('../nodes/YAMLMap.js');
|
||||
var YAMLSeq = require('../nodes/YAMLSeq.js');
|
||||
var resolveBlockMap = require('./resolve-block-map.js');
|
||||
var resolveBlockSeq = require('./resolve-block-seq.js');
|
||||
var resolveFlowCollection = require('./resolve-flow-collection.js');
|
||||
|
||||
function resolveCollection(CN, ctx, token, onError, tagName, tag) {
|
||||
const coll = token.type === 'block-map'
|
||||
? resolveBlockMap.resolveBlockMap(CN, ctx, token, onError, tag)
|
||||
: token.type === 'block-seq'
|
||||
? resolveBlockSeq.resolveBlockSeq(CN, ctx, token, onError, tag)
|
||||
: resolveFlowCollection.resolveFlowCollection(CN, ctx, token, onError, tag);
|
||||
const Coll = coll.constructor;
|
||||
// If we got a tagName matching the class, or the tag name is '!',
|
||||
// then use the tagName from the node class used to create it.
|
||||
if (tagName === '!' || tagName === Coll.tagName) {
|
||||
coll.tag = Coll.tagName;
|
||||
return coll;
|
||||
}
|
||||
if (tagName)
|
||||
coll.tag = tagName;
|
||||
return coll;
|
||||
}
|
||||
function composeCollection(CN, ctx, token, props, onError) {
|
||||
const tagToken = props.tag;
|
||||
const tagName = !tagToken
|
||||
? null
|
||||
: ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));
|
||||
if (token.type === 'block-seq') {
|
||||
const { anchor, newlineAfterProp: nl } = props;
|
||||
const lastProp = anchor && tagToken
|
||||
? anchor.offset > tagToken.offset
|
||||
? anchor
|
||||
: tagToken
|
||||
: (anchor ?? tagToken);
|
||||
if (lastProp && (!nl || nl.offset < lastProp.offset)) {
|
||||
const message = 'Missing newline after block sequence props';
|
||||
onError(lastProp, 'MISSING_CHAR', message);
|
||||
}
|
||||
}
|
||||
const expType = token.type === 'block-map'
|
||||
? 'map'
|
||||
: token.type === 'block-seq'
|
||||
? 'seq'
|
||||
: token.start.source === '{'
|
||||
? 'map'
|
||||
: 'seq';
|
||||
// shortcut: check if it's a generic YAMLMap or YAMLSeq
|
||||
// before jumping into the custom tag logic.
|
||||
if (!tagToken ||
|
||||
!tagName ||
|
||||
tagName === '!' ||
|
||||
(tagName === YAMLMap.YAMLMap.tagName && expType === 'map') ||
|
||||
(tagName === YAMLSeq.YAMLSeq.tagName && expType === 'seq')) {
|
||||
return resolveCollection(CN, ctx, token, onError, tagName);
|
||||
}
|
||||
let tag = ctx.schema.tags.find(t => t.tag === tagName && t.collection === expType);
|
||||
if (!tag) {
|
||||
const kt = ctx.schema.knownTags[tagName];
|
||||
if (kt && kt.collection === expType) {
|
||||
ctx.schema.tags.push(Object.assign({}, kt, { default: false }));
|
||||
tag = kt;
|
||||
}
|
||||
else {
|
||||
if (kt) {
|
||||
onError(tagToken, 'BAD_COLLECTION_TYPE', `${kt.tag} used for ${expType} collection, but expects ${kt.collection ?? 'scalar'}`, true);
|
||||
}
|
||||
else {
|
||||
onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);
|
||||
}
|
||||
return resolveCollection(CN, ctx, token, onError, tagName);
|
||||
}
|
||||
}
|
||||
const coll = resolveCollection(CN, ctx, token, onError, tagName, tag);
|
||||
const res = tag.resolve?.(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options) ?? coll;
|
||||
const node = identity.isNode(res)
|
||||
? res
|
||||
: new Scalar.Scalar(res);
|
||||
node.range = coll.range;
|
||||
node.tag = tagName;
|
||||
if (tag?.format)
|
||||
node.format = tag.format;
|
||||
return node;
|
||||
}
|
||||
|
||||
exports.composeCollection = composeCollection;
|
7
node_modules/yaml/dist/compose/compose-doc.d.ts
generated
vendored
Normal file
7
node_modules/yaml/dist/compose/compose-doc.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
import type { Directives } from '../doc/directives';
|
||||
import { Document } from '../doc/Document';
|
||||
import type { ParsedNode } from '../nodes/Node';
|
||||
import type { DocumentOptions, ParseOptions, SchemaOptions } from '../options';
|
||||
import type * as CST from '../parse/cst';
|
||||
import type { ComposeErrorHandler } from './composer';
|
||||
export declare function composeDoc<Contents extends ParsedNode = ParsedNode, Strict extends boolean = true>(options: ParseOptions & DocumentOptions & SchemaOptions, directives: Directives, { offset, start, value, end }: CST.Document, onError: ComposeErrorHandler): Document.Parsed<Contents, Strict>;
|
45
node_modules/yaml/dist/compose/compose-doc.js
generated
vendored
Normal file
45
node_modules/yaml/dist/compose/compose-doc.js
generated
vendored
Normal file
@ -0,0 +1,45 @@
|
||||
'use strict';
|
||||
|
||||
var Document = require('../doc/Document.js');
|
||||
var composeNode = require('./compose-node.js');
|
||||
var resolveEnd = require('./resolve-end.js');
|
||||
var resolveProps = require('./resolve-props.js');
|
||||
|
||||
function composeDoc(options, directives, { offset, start, value, end }, onError) {
|
||||
const opts = Object.assign({ _directives: directives }, options);
|
||||
const doc = new Document.Document(undefined, opts);
|
||||
const ctx = {
|
||||
atKey: false,
|
||||
atRoot: true,
|
||||
directives: doc.directives,
|
||||
options: doc.options,
|
||||
schema: doc.schema
|
||||
};
|
||||
const props = resolveProps.resolveProps(start, {
|
||||
indicator: 'doc-start',
|
||||
next: value ?? end?.[0],
|
||||
offset,
|
||||
onError,
|
||||
parentIndent: 0,
|
||||
startOnNewline: true
|
||||
});
|
||||
if (props.found) {
|
||||
doc.directives.docStart = true;
|
||||
if (value &&
|
||||
(value.type === 'block-map' || value.type === 'block-seq') &&
|
||||
!props.hasNewline)
|
||||
onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker');
|
||||
}
|
||||
// @ts-expect-error If Contents is set, let's trust the user
|
||||
doc.contents = value
|
||||
? composeNode.composeNode(ctx, value, props, onError)
|
||||
: composeNode.composeEmptyNode(ctx, props.end, start, null, props, onError);
|
||||
const contentEnd = doc.contents.range[2];
|
||||
const re = resolveEnd.resolveEnd(end, contentEnd, false, onError);
|
||||
if (re.comment)
|
||||
doc.comment = re.comment;
|
||||
doc.range = [offset, contentEnd, re.offset];
|
||||
return doc;
|
||||
}
|
||||
|
||||
exports.composeDoc = composeDoc;
|
29
node_modules/yaml/dist/compose/compose-node.d.ts
generated
vendored
Normal file
29
node_modules/yaml/dist/compose/compose-node.d.ts
generated
vendored
Normal file
@ -0,0 +1,29 @@
|
||||
import type { Directives } from '../doc/directives';
|
||||
import type { ParsedNode } from '../nodes/Node';
|
||||
import type { ParseOptions } from '../options';
|
||||
import type { SourceToken, Token } from '../parse/cst';
|
||||
import type { Schema } from '../schema/Schema';
|
||||
import type { ComposeErrorHandler } from './composer';
|
||||
export interface ComposeContext {
|
||||
atKey: boolean;
|
||||
atRoot: boolean;
|
||||
directives: Directives;
|
||||
options: Readonly<Required<Omit<ParseOptions, 'lineCounter'>>>;
|
||||
schema: Readonly<Schema>;
|
||||
}
|
||||
interface Props {
|
||||
spaceBefore: boolean;
|
||||
comment: string;
|
||||
anchor: SourceToken | null;
|
||||
tag: SourceToken | null;
|
||||
newlineAfterProp: SourceToken | null;
|
||||
end: number;
|
||||
}
|
||||
declare const CN: {
|
||||
composeNode: typeof composeNode;
|
||||
composeEmptyNode: typeof composeEmptyNode;
|
||||
};
|
||||
export type ComposeNode = typeof CN;
|
||||
export declare function composeNode(ctx: ComposeContext, token: Token, props: Props, onError: ComposeErrorHandler): ParsedNode;
|
||||
export declare function composeEmptyNode(ctx: ComposeContext, offset: number, before: Token[] | undefined, pos: number | null, { spaceBefore, comment, anchor, tag, end }: Props, onError: ComposeErrorHandler): import('../index').Scalar.Parsed;
|
||||
export {};
|
105
node_modules/yaml/dist/compose/compose-node.js
generated
vendored
Normal file
105
node_modules/yaml/dist/compose/compose-node.js
generated
vendored
Normal file
@ -0,0 +1,105 @@
|
||||
'use strict';
|
||||
|
||||
var Alias = require('../nodes/Alias.js');
|
||||
var identity = require('../nodes/identity.js');
|
||||
var composeCollection = require('./compose-collection.js');
|
||||
var composeScalar = require('./compose-scalar.js');
|
||||
var resolveEnd = require('./resolve-end.js');
|
||||
var utilEmptyScalarPosition = require('./util-empty-scalar-position.js');
|
||||
|
||||
const CN = { composeNode, composeEmptyNode };
|
||||
function composeNode(ctx, token, props, onError) {
|
||||
const atKey = ctx.atKey;
|
||||
const { spaceBefore, comment, anchor, tag } = props;
|
||||
let node;
|
||||
let isSrcToken = true;
|
||||
switch (token.type) {
|
||||
case 'alias':
|
||||
node = composeAlias(ctx, token, onError);
|
||||
if (anchor || tag)
|
||||
onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties');
|
||||
break;
|
||||
case 'scalar':
|
||||
case 'single-quoted-scalar':
|
||||
case 'double-quoted-scalar':
|
||||
case 'block-scalar':
|
||||
node = composeScalar.composeScalar(ctx, token, tag, onError);
|
||||
if (anchor)
|
||||
node.anchor = anchor.source.substring(1);
|
||||
break;
|
||||
case 'block-map':
|
||||
case 'block-seq':
|
||||
case 'flow-collection':
|
||||
node = composeCollection.composeCollection(CN, ctx, token, props, onError);
|
||||
if (anchor)
|
||||
node.anchor = anchor.source.substring(1);
|
||||
break;
|
||||
default: {
|
||||
const message = token.type === 'error'
|
||||
? token.message
|
||||
: `Unsupported token (type: ${token.type})`;
|
||||
onError(token, 'UNEXPECTED_TOKEN', message);
|
||||
node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError);
|
||||
isSrcToken = false;
|
||||
}
|
||||
}
|
||||
if (anchor && node.anchor === '')
|
||||
onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
|
||||
if (atKey &&
|
||||
ctx.options.stringKeys &&
|
||||
(!identity.isScalar(node) ||
|
||||
typeof node.value !== 'string' ||
|
||||
(node.tag && node.tag !== 'tag:yaml.org,2002:str'))) {
|
||||
const msg = 'With stringKeys, all keys must be strings';
|
||||
onError(tag ?? token, 'NON_STRING_KEY', msg);
|
||||
}
|
||||
if (spaceBefore)
|
||||
node.spaceBefore = true;
|
||||
if (comment) {
|
||||
if (token.type === 'scalar' && token.source === '')
|
||||
node.comment = comment;
|
||||
else
|
||||
node.commentBefore = comment;
|
||||
}
|
||||
// @ts-expect-error Type checking misses meaning of isSrcToken
|
||||
if (ctx.options.keepSourceTokens && isSrcToken)
|
||||
node.srcToken = token;
|
||||
return node;
|
||||
}
|
||||
function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag, end }, onError) {
|
||||
const token = {
|
||||
type: 'scalar',
|
||||
offset: utilEmptyScalarPosition.emptyScalarPosition(offset, before, pos),
|
||||
indent: -1,
|
||||
source: ''
|
||||
};
|
||||
const node = composeScalar.composeScalar(ctx, token, tag, onError);
|
||||
if (anchor) {
|
||||
node.anchor = anchor.source.substring(1);
|
||||
if (node.anchor === '')
|
||||
onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
|
||||
}
|
||||
if (spaceBefore)
|
||||
node.spaceBefore = true;
|
||||
if (comment) {
|
||||
node.comment = comment;
|
||||
node.range[2] = end;
|
||||
}
|
||||
return node;
|
||||
}
|
||||
function composeAlias({ options }, { offset, source, end }, onError) {
|
||||
const alias = new Alias.Alias(source.substring(1));
|
||||
if (alias.source === '')
|
||||
onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string');
|
||||
if (alias.source.endsWith(':'))
|
||||
onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true);
|
||||
const valueEnd = offset + source.length;
|
||||
const re = resolveEnd.resolveEnd(end, valueEnd, options.strict, onError);
|
||||
alias.range = [offset, valueEnd, re.offset];
|
||||
if (re.comment)
|
||||
alias.comment = re.comment;
|
||||
return alias;
|
||||
}
|
||||
|
||||
exports.composeEmptyNode = composeEmptyNode;
|
||||
exports.composeNode = composeNode;
|
5
node_modules/yaml/dist/compose/compose-scalar.d.ts
generated
vendored
Normal file
5
node_modules/yaml/dist/compose/compose-scalar.d.ts
generated
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
import { Scalar } from '../nodes/Scalar';
|
||||
import type { BlockScalar, FlowScalar, SourceToken } from '../parse/cst';
|
||||
import type { ComposeContext } from './compose-node';
|
||||
import type { ComposeErrorHandler } from './composer';
|
||||
export declare function composeScalar(ctx: ComposeContext, token: FlowScalar | BlockScalar, tagToken: SourceToken | null, onError: ComposeErrorHandler): Scalar.Parsed;
|
88
node_modules/yaml/dist/compose/compose-scalar.js
generated
vendored
Normal file
88
node_modules/yaml/dist/compose/compose-scalar.js
generated
vendored
Normal file
@ -0,0 +1,88 @@
|
||||
'use strict';
|
||||
|
||||
var identity = require('../nodes/identity.js');
|
||||
var Scalar = require('../nodes/Scalar.js');
|
||||
var resolveBlockScalar = require('./resolve-block-scalar.js');
|
||||
var resolveFlowScalar = require('./resolve-flow-scalar.js');
|
||||
|
||||
function composeScalar(ctx, token, tagToken, onError) {
|
||||
const { value, type, comment, range } = token.type === 'block-scalar'
|
||||
? resolveBlockScalar.resolveBlockScalar(ctx, token, onError)
|
||||
: resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError);
|
||||
const tagName = tagToken
|
||||
? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))
|
||||
: null;
|
||||
let tag;
|
||||
if (ctx.options.stringKeys && ctx.atKey) {
|
||||
tag = ctx.schema[identity.SCALAR];
|
||||
}
|
||||
else if (tagName)
|
||||
tag = findScalarTagByName(ctx.schema, value, tagName, tagToken, onError);
|
||||
else if (token.type === 'scalar')
|
||||
tag = findScalarTagByTest(ctx, value, token, onError);
|
||||
else
|
||||
tag = ctx.schema[identity.SCALAR];
|
||||
let scalar;
|
||||
try {
|
||||
const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options);
|
||||
scalar = identity.isScalar(res) ? res : new Scalar.Scalar(res);
|
||||
}
|
||||
catch (error) {
|
||||
const msg = error instanceof Error ? error.message : String(error);
|
||||
onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg);
|
||||
scalar = new Scalar.Scalar(value);
|
||||
}
|
||||
scalar.range = range;
|
||||
scalar.source = value;
|
||||
if (type)
|
||||
scalar.type = type;
|
||||
if (tagName)
|
||||
scalar.tag = tagName;
|
||||
if (tag.format)
|
||||
scalar.format = tag.format;
|
||||
if (comment)
|
||||
scalar.comment = comment;
|
||||
return scalar;
|
||||
}
|
||||
function findScalarTagByName(schema, value, tagName, tagToken, onError) {
|
||||
if (tagName === '!')
|
||||
return schema[identity.SCALAR]; // non-specific tag
|
||||
const matchWithTest = [];
|
||||
for (const tag of schema.tags) {
|
||||
if (!tag.collection && tag.tag === tagName) {
|
||||
if (tag.default && tag.test)
|
||||
matchWithTest.push(tag);
|
||||
else
|
||||
return tag;
|
||||
}
|
||||
}
|
||||
for (const tag of matchWithTest)
|
||||
if (tag.test?.test(value))
|
||||
return tag;
|
||||
const kt = schema.knownTags[tagName];
|
||||
if (kt && !kt.collection) {
|
||||
// Ensure that the known tag is available for stringifying,
|
||||
// but does not get used by default.
|
||||
schema.tags.push(Object.assign({}, kt, { default: false, test: undefined }));
|
||||
return kt;
|
||||
}
|
||||
onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');
|
||||
return schema[identity.SCALAR];
|
||||
}
|
||||
function findScalarTagByTest({ atKey, directives, schema }, value, token, onError) {
|
||||
const tag = schema.tags.find(tag => (tag.default === true || (atKey && tag.default === 'key')) &&
|
||||
tag.test?.test(value)) || schema[identity.SCALAR];
|
||||
if (schema.compat) {
|
||||
const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ??
|
||||
schema[identity.SCALAR];
|
||||
if (tag.tag !== compat.tag) {
|
||||
const ts = directives.tagString(tag.tag);
|
||||
const cs = directives.tagString(compat.tag);
|
||||
const msg = `Value may be parsed as either ${ts} or ${cs}`;
|
||||
onError(token, 'TAG_RESOLVE_FAILED', msg, true);
|
||||
}
|
||||
}
|
||||
return tag;
|
||||
}
|
||||
|
||||
exports.composeScalar = composeScalar;
|
63
node_modules/yaml/dist/compose/composer.d.ts
generated
vendored
Normal file
63
node_modules/yaml/dist/compose/composer.d.ts
generated
vendored
Normal file
@ -0,0 +1,63 @@
|
||||
import { Directives } from '../doc/directives';
|
||||
import { Document } from '../doc/Document';
|
||||
import type { ErrorCode } from '../errors';
|
||||
import { YAMLParseError, YAMLWarning } from '../errors';
|
||||
import type { ParsedNode, Range } from '../nodes/Node';
|
||||
import type { DocumentOptions, ParseOptions, SchemaOptions } from '../options';
|
||||
import type { Token } from '../parse/cst';
|
||||
type ErrorSource = number | [number, number] | Range | {
|
||||
offset: number;
|
||||
source?: string;
|
||||
};
|
||||
export type ComposeErrorHandler = (source: ErrorSource, code: ErrorCode, message: string, warning?: boolean) => void;
|
||||
/**
|
||||
* Compose a stream of CST nodes into a stream of YAML Documents.
|
||||
*
|
||||
* ```ts
|
||||
* import { Composer, Parser } from 'yaml'
|
||||
*
|
||||
* const src: string = ...
|
||||
* const tokens = new Parser().parse(src)
|
||||
* const docs = new Composer().compose(tokens)
|
||||
* ```
|
||||
*/
|
||||
export declare class Composer<Contents extends ParsedNode = ParsedNode, Strict extends boolean = true> {
|
||||
private directives;
|
||||
private doc;
|
||||
private options;
|
||||
private atDirectives;
|
||||
private prelude;
|
||||
private errors;
|
||||
private warnings;
|
||||
constructor(options?: ParseOptions & DocumentOptions & SchemaOptions);
|
||||
private onError;
|
||||
private decorate;
|
||||
/**
|
||||
* Current stream status information.
|
||||
*
|
||||
* Mostly useful at the end of input for an empty stream.
|
||||
*/
|
||||
streamInfo(): {
|
||||
comment: string;
|
||||
directives: Directives;
|
||||
errors: YAMLParseError[];
|
||||
warnings: YAMLWarning[];
|
||||
};
|
||||
/**
|
||||
* Compose tokens into documents.
|
||||
*
|
||||
* @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
|
||||
* @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
|
||||
*/
|
||||
compose(tokens: Iterable<Token>, forceDoc?: boolean, endOffset?: number): Generator<Document.Parsed<Contents, Strict>, void, unknown>;
|
||||
/** Advance the composer by one CST token. */
|
||||
next(token: Token): Generator<Document.Parsed<Contents, Strict>, void, unknown>;
|
||||
/**
|
||||
* Call at end of input to yield any remaining document.
|
||||
*
|
||||
* @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
|
||||
* @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
|
||||
*/
|
||||
end(forceDoc?: boolean, endOffset?: number): Generator<Document.Parsed<Contents, Strict>, void, unknown>;
|
||||
}
|
||||
export {};
|
222
node_modules/yaml/dist/compose/composer.js
generated
vendored
Normal file
222
node_modules/yaml/dist/compose/composer.js
generated
vendored
Normal file
@ -0,0 +1,222 @@
|
||||
'use strict';
|
||||
|
||||
var node_process = require('process');
|
||||
var directives = require('../doc/directives.js');
|
||||
var Document = require('../doc/Document.js');
|
||||
var errors = require('../errors.js');
|
||||
var identity = require('../nodes/identity.js');
|
||||
var composeDoc = require('./compose-doc.js');
|
||||
var resolveEnd = require('./resolve-end.js');
|
||||
|
||||
function getErrorPos(src) {
|
||||
if (typeof src === 'number')
|
||||
return [src, src + 1];
|
||||
if (Array.isArray(src))
|
||||
return src.length === 2 ? src : [src[0], src[1]];
|
||||
const { offset, source } = src;
|
||||
return [offset, offset + (typeof source === 'string' ? source.length : 1)];
|
||||
}
|
||||
function parsePrelude(prelude) {
|
||||
let comment = '';
|
||||
let atComment = false;
|
||||
let afterEmptyLine = false;
|
||||
for (let i = 0; i < prelude.length; ++i) {
|
||||
const source = prelude[i];
|
||||
switch (source[0]) {
|
||||
case '#':
|
||||
comment +=
|
||||
(comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') +
|
||||
(source.substring(1) || ' ');
|
||||
atComment = true;
|
||||
afterEmptyLine = false;
|
||||
break;
|
||||
case '%':
|
||||
if (prelude[i + 1]?.[0] !== '#')
|
||||
i += 1;
|
||||
atComment = false;
|
||||
break;
|
||||
default:
|
||||
// This may be wrong after doc-end, but in that case it doesn't matter
|
||||
if (!atComment)
|
||||
afterEmptyLine = true;
|
||||
atComment = false;
|
||||
}
|
||||
}
|
||||
return { comment, afterEmptyLine };
|
||||
}
|
||||
/**
 * Compose a stream of CST nodes into a stream of YAML Documents.
 *
 * ```ts
 * import { Composer, Parser } from 'yaml'
 *
 * const src: string = ...
 * const tokens = new Parser().parse(src)
 * const docs = new Composer().compose(tokens)
 * ```
 */
class Composer {
    /**
     * @param options - Composition options; `options.version` selects the
     *   YAML version for the initial stream directives (defaults to '1.2').
     */
    constructor(options = {}) {
        // Document currently being assembled; yielded once complete.
        this.doc = null;
        // True while directive tokens have been seen but no doc-start yet.
        this.atDirectives = false;
        // Raw sources of comments/newlines/directives preceding the next doc.
        this.prelude = [];
        // Errors/warnings collected before they can be attached to a document.
        this.errors = [];
        this.warnings = [];
        // Shared error callback: routes to warnings or errors by flag.
        this.onError = (source, code, message, warning) => {
            const pos = getErrorPos(source);
            if (warning)
                this.warnings.push(new errors.YAMLWarning(pos, code, message));
            else
                this.errors.push(new errors.YAMLParseError(pos, code, message));
        };
        // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
        this.directives = new directives.Directives({ version: options.version || '1.2' });
        this.options = options;
    }
    /**
     * Attach the buffered prelude (comments, blank-line info) and any
     * collected errors/warnings to `doc`, then reset the buffers.
     *
     * @param afterDoc - True when the prelude follows the document
     *   (e.g. after a doc-end marker) rather than preceding it.
     */
    decorate(doc, afterDoc) {
        const { comment, afterEmptyLine } = parsePrelude(this.prelude);
        //console.log({ dc: doc.comment, prelude, comment })
        if (comment) {
            const dc = doc.contents;
            if (afterDoc) {
                doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment;
            }
            else if (afterEmptyLine || doc.directives.docStart || !dc) {
                doc.commentBefore = comment;
            }
            else if (identity.isCollection(dc) && !dc.flow && dc.items.length > 0) {
                // Attach the comment to the first item (or its key) of a
                // non-flow collection rather than to the document itself.
                let it = dc.items[0];
                if (identity.isPair(it))
                    it = it.key;
                const cb = it.commentBefore;
                it.commentBefore = cb ? `${comment}\n${cb}` : comment;
            }
            else {
                const cb = dc.commentBefore;
                dc.commentBefore = cb ? `${comment}\n${cb}` : comment;
            }
        }
        if (afterDoc) {
            // Appending: the doc may already own errors/warnings.
            Array.prototype.push.apply(doc.errors, this.errors);
            Array.prototype.push.apply(doc.warnings, this.warnings);
        }
        else {
            doc.errors = this.errors;
            doc.warnings = this.warnings;
        }
        this.prelude = [];
        this.errors = [];
        this.warnings = [];
    }
    /**
     * Current stream status information.
     *
     * Mostly useful at the end of input for an empty stream.
     */
    streamInfo() {
        return {
            comment: parsePrelude(this.prelude).comment,
            directives: this.directives,
            errors: this.errors,
            warnings: this.warnings
        };
    }
    /**
     * Compose tokens into documents.
     *
     * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
     * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
     */
    *compose(tokens, forceDoc = false, endOffset = -1) {
        for (const token of tokens)
            yield* this.next(token);
        yield* this.end(forceDoc, endOffset);
    }
    /** Advance the composer by one CST token. */
    *next(token) {
        // Debug aid: dump every token when the LOG_STREAM env var is set.
        if (node_process.env.LOG_STREAM)
            console.dir(token, { depth: null });
        switch (token.type) {
            case 'directive':
                this.directives.add(token.source, (offset, message, warning) => {
                    const pos = getErrorPos(token);
                    pos[0] += offset;
                    this.onError(pos, 'BAD_DIRECTIVE', message, warning);
                });
                this.prelude.push(token.source);
                this.atDirectives = true;
                break;
            case 'document': {
                const doc = composeDoc.composeDoc(this.options, this.directives, token, this.onError);
                if (this.atDirectives && !doc.directives.docStart)
                    this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line');
                this.decorate(doc, false);
                // Yield the previous document only now, so trailing comments
                // could still have been attached to it.
                if (this.doc)
                    yield this.doc;
                this.doc = doc;
                this.atDirectives = false;
                break;
            }
            case 'byte-order-mark':
            case 'space':
                break;
            case 'comment':
            case 'newline':
                this.prelude.push(token.source);
                break;
            case 'error': {
                const msg = token.source
                    ? `${token.message}: ${JSON.stringify(token.source)}`
                    : token.message;
                const error = new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg);
                if (this.atDirectives || !this.doc)
                    this.errors.push(error);
                else
                    this.doc.errors.push(error);
                break;
            }
            case 'doc-end': {
                if (!this.doc) {
                    const msg = 'Unexpected doc-end without preceding document';
                    this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg));
                    break;
                }
                this.doc.directives.docEnd = true;
                const end = resolveEnd.resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError);
                this.decorate(this.doc, true);
                if (end.comment) {
                    const dc = this.doc.comment;
                    this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment;
                }
                this.doc.range[2] = end.offset;
                break;
            }
            default:
                this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`));
        }
    }
    /**
     * Call at end of input to yield any remaining document.
     *
     * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
     * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
     */
    *end(forceDoc = false, endOffset = -1) {
        if (this.doc) {
            this.decorate(this.doc, true);
            yield this.doc;
            this.doc = null;
        }
        else if (forceDoc) {
            // Synthesize an empty document carrying the pending directives,
            // prelude comments, and collected errors/warnings.
            const opts = Object.assign({ _directives: this.directives }, this.options);
            const doc = new Document.Document(undefined, opts);
            if (this.atDirectives)
                this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line');
            doc.range = [0, endOffset, endOffset];
            this.decorate(doc, false);
            yield doc;
        }
    }
}

exports.Composer = Composer;
|
6
node_modules/yaml/dist/compose/resolve-block-map.d.ts
generated
vendored
Normal file
6
node_modules/yaml/dist/compose/resolve-block-map.d.ts
generated
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
import { YAMLMap } from '../nodes/YAMLMap';
|
||||
import type { BlockMap } from '../parse/cst';
|
||||
import type { CollectionTag } from '../schema/types';
|
||||
import type { ComposeContext, ComposeNode } from './compose-node';
|
||||
import type { ComposeErrorHandler } from './composer';
|
||||
export declare function resolveBlockMap({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, bm: BlockMap, onError: ComposeErrorHandler, tag?: CollectionTag): YAMLMap.Parsed;
|
117
node_modules/yaml/dist/compose/resolve-block-map.js
generated
vendored
Normal file
117
node_modules/yaml/dist/compose/resolve-block-map.js
generated
vendored
Normal file
@ -0,0 +1,117 @@
|
||||
'use strict';
|
||||
|
||||
var Pair = require('../nodes/Pair.js');
|
||||
var YAMLMap = require('../nodes/YAMLMap.js');
|
||||
var resolveProps = require('./resolve-props.js');
|
||||
var utilContainsNewline = require('./util-contains-newline.js');
|
||||
var utilFlowIndentCheck = require('./util-flow-indent-check.js');
|
||||
var utilMapIncludes = require('./util-map-includes.js');
|
||||
|
||||
const startColMsg = 'All mapping items must start at the same column';
/**
 * Compose a block mapping CST node into a YAMLMap (or a custom collection
 * class supplied by `tag.nodeClass`).
 *
 * Tracks the running source `offset` through keys and values, validates
 * indentation and implicit-key constraints, and reports problems through
 * `onError` rather than throwing.
 */
function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, tag) {
    const NodeClass = tag?.nodeClass ?? YAMLMap.YAMLMap;
    const map = new NodeClass(ctx.schema);
    if (ctx.atRoot)
        ctx.atRoot = false;
    let offset = bm.offset;
    // End offset of a trailing comment-only item, if any; used below to
    // detect content appearing after such a comment.
    let commentEnd = null;
    for (const collItem of bm.items) {
        const { start, key, sep, value } = collItem;
        // key properties
        const keyProps = resolveProps.resolveProps(start, {
            indicator: 'explicit-key-ind',
            next: key ?? sep?.[0],
            offset,
            onError,
            parentIndent: bm.indent,
            startOnNewline: true
        });
        // No explicit-key indicator found => this is an implicit key.
        const implicitKey = !keyProps.found;
        if (implicitKey) {
            if (key) {
                if (key.type === 'block-seq')
                    onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key');
                else if ('indent' in key && key.indent !== bm.indent)
                    onError(offset, 'BAD_INDENT', startColMsg);
            }
            if (!keyProps.anchor && !keyProps.tag && !sep) {
                // Comment-only item: fold its comment into the map and move on.
                commentEnd = keyProps.end;
                if (keyProps.comment) {
                    if (map.comment)
                        map.comment += '\n' + keyProps.comment;
                    else
                        map.comment = keyProps.comment;
                }
                continue;
            }
            if (keyProps.newlineAfterProp || utilContainsNewline.containsNewline(key)) {
                onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');
            }
        }
        else if (keyProps.found?.indent !== bm.indent) {
            onError(offset, 'BAD_INDENT', startColMsg);
        }
        // key value
        ctx.atKey = true;
        const keyStart = keyProps.end;
        const keyNode = key
            ? composeNode(ctx, key, keyProps, onError)
            : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);
        if (ctx.schema.compat)
            utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError);
        ctx.atKey = false;
        if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))
            onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
        // value properties
        const valueProps = resolveProps.resolveProps(sep ?? [], {
            indicator: 'map-value-ind',
            next: value,
            offset: keyNode.range[2],
            onError,
            parentIndent: bm.indent,
            startOnNewline: !key || key.type === 'block-scalar'
        });
        offset = valueProps.end;
        if (valueProps.found) {
            if (implicitKey) {
                if (value?.type === 'block-map' && !valueProps.hasNewline)
                    onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings');
                // YAML limits implicit keys to 1024 characters before the ":".
                if (ctx.options.strict &&
                    keyProps.start < valueProps.found.offset - 1024)
                    onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key');
            }
            // value value
            const valueNode = value
                ? composeNode(ctx, value, valueProps, onError)
                : composeEmptyNode(ctx, offset, sep, null, valueProps, onError);
            if (ctx.schema.compat)
                utilFlowIndentCheck.flowIndentCheck(bm.indent, value, onError);
            offset = valueNode.range[2];
            const pair = new Pair.Pair(keyNode, valueNode);
            if (ctx.options.keepSourceTokens)
                pair.srcToken = collItem;
            map.items.push(pair);
        }
        else {
            // key with no value
            if (implicitKey)
                onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values');
            if (valueProps.comment) {
                if (keyNode.comment)
                    keyNode.comment += '\n' + valueProps.comment;
                else
                    keyNode.comment = valueProps.comment;
            }
            const pair = new Pair.Pair(keyNode);
            if (ctx.options.keepSourceTokens)
                pair.srcToken = collItem;
            map.items.push(pair);
        }
    }
    if (commentEnd && commentEnd < offset)
        onError(commentEnd, 'IMPOSSIBLE', 'Map comment with trailing content');
    map.range = [bm.offset, offset, commentEnd ?? offset];
    return map;
}

exports.resolveBlockMap = resolveBlockMap;
|
11
node_modules/yaml/dist/compose/resolve-block-scalar.d.ts
generated
vendored
Normal file
11
node_modules/yaml/dist/compose/resolve-block-scalar.d.ts
generated
vendored
Normal file
@ -0,0 +1,11 @@
|
||||
import type { Range } from '../nodes/Node';
|
||||
import { Scalar } from '../nodes/Scalar';
|
||||
import type { BlockScalar } from '../parse/cst';
|
||||
import type { ComposeContext } from './compose-node';
|
||||
import type { ComposeErrorHandler } from './composer';
|
||||
export declare function resolveBlockScalar(ctx: ComposeContext, scalar: BlockScalar, onError: ComposeErrorHandler): {
|
||||
value: string;
|
||||
type: Scalar.BLOCK_FOLDED | Scalar.BLOCK_LITERAL | null;
|
||||
comment: string;
|
||||
range: Range;
|
||||
};
|
200
node_modules/yaml/dist/compose/resolve-block-scalar.js
generated
vendored
Normal file
200
node_modules/yaml/dist/compose/resolve-block-scalar.js
generated
vendored
Normal file
@ -0,0 +1,200 @@
|
||||
'use strict';
|
||||
|
||||
var Scalar = require('../nodes/Scalar.js');
|
||||
|
||||
/**
 * Resolve a block scalar CST node (`|` literal or `>` folded) into its
 * string value, applying the header's indentation and chomping indicators.
 *
 * Returns `{ value, type, comment, range }`; on a missing header the value
 * is empty with `type: null`. Errors are reported via `onError`.
 */
function resolveBlockScalar(ctx, scalar, onError) {
    const start = scalar.offset;
    const header = parseBlockScalarHeader(scalar, ctx.options.strict, onError);
    if (!header)
        return { value: '', type: null, comment: '', range: [start, start, start] };
    const type = header.mode === '>' ? Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL;
    const lines = scalar.source ? splitLines(scalar.source) : [];
    // determine the end of content & start of chomping
    let chompStart = lines.length;
    for (let i = lines.length - 1; i >= 0; --i) {
        const content = lines[i][1];
        if (content === '' || content === '\r')
            chompStart = i;
        else
            break;
    }
    // shortcut for empty contents
    if (chompStart === 0) {
        // Keep-chomping ('+') preserves trailing newlines even when empty.
        const value = header.chomp === '+' && lines.length > 0
            ? '\n'.repeat(Math.max(1, lines.length - 1))
            : '';
        let end = start + header.length;
        if (scalar.source)
            end += scalar.source.length;
        return { value, type, comment: header.comment, range: [start, end, end] };
    }
    // find the indentation level to trim from start
    let trimIndent = scalar.indent + header.indent;
    let offset = scalar.offset + header.length;
    let contentStart = 0;
    for (let i = 0; i < chompStart; ++i) {
        const [indent, content] = lines[i];
        if (content === '' || content === '\r') {
            // With no explicit indent indicator, a deeper-indented leading
            // empty line raises the trim level.
            if (header.indent === 0 && indent.length > trimIndent)
                trimIndent = indent.length;
        }
        else {
            if (indent.length < trimIndent) {
                const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';
                onError(offset + indent.length, 'MISSING_CHAR', message);
            }
            if (header.indent === 0)
                trimIndent = indent.length;
            contentStart = i;
            if (trimIndent === 0 && !ctx.atRoot) {
                const message = 'Block scalar values in collections must be indented';
                onError(offset, 'BAD_INDENT', message);
            }
            break;
        }
        offset += indent.length + content.length + 1;
    }
    // include trailing more-indented empty lines in content
    for (let i = lines.length - 1; i >= chompStart; --i) {
        if (lines[i][0].length > trimIndent)
            chompStart = i + 1;
    }
    let value = '';
    let sep = '';
    let prevMoreIndented = false;
    // leading whitespace is kept intact
    for (let i = 0; i < contentStart; ++i)
        value += lines[i][0].slice(trimIndent) + '\n';
    for (let i = contentStart; i < chompStart; ++i) {
        let [indent, content] = lines[i];
        offset += indent.length + content.length + 1;
        const crlf = content[content.length - 1] === '\r';
        if (crlf)
            content = content.slice(0, -1);
        /* istanbul ignore if already caught in lexer */
        if (content && indent.length < trimIndent) {
            const src = header.indent
                ? 'explicit indentation indicator'
                : 'first line';
            const message = `Block scalar lines must not be less indented than their ${src}`;
            onError(offset - content.length - (crlf ? 2 : 1), 'BAD_INDENT', message);
            indent = '';
        }
        if (type === Scalar.Scalar.BLOCK_LITERAL) {
            // Literal mode: keep line breaks as-is.
            value += sep + indent.slice(trimIndent) + content;
            sep = '\n';
        }
        else if (indent.length > trimIndent || content[0] === '\t') {
            // more-indented content within a folded block
            if (sep === ' ')
                sep = '\n';
            else if (!prevMoreIndented && sep === '\n')
                sep = '\n\n';
            value += sep + indent.slice(trimIndent) + content;
            sep = '\n';
            prevMoreIndented = true;
        }
        else if (content === '') {
            // empty line
            if (sep === '\n')
                value += '\n';
            else
                sep = '\n';
        }
        else {
            // Folded mode: adjacent content lines are joined with a space.
            value += sep + content;
            sep = ' ';
            prevMoreIndented = false;
        }
    }
    // Apply the chomping indicator to trailing newlines.
    switch (header.chomp) {
        case '-':
            break;
        case '+':
            for (let i = chompStart; i < lines.length; ++i)
                value += '\n' + lines[i][0].slice(trimIndent);
            if (value[value.length - 1] !== '\n')
                value += '\n';
            break;
        default:
            value += '\n';
    }
    const end = start + header.length + scalar.source.length;
    return { value, type, comment: header.comment, range: [start, end, end] };
}
|
||||
/**
 * Parse a block scalar's header: the mode indicator (`|` or `>`), an
 * optional explicit indentation digit, an optional chomping indicator
 * (`-` strip / `+` keep), and any trailing comment on the header line.
 *
 * Returns `{ mode, indent, chomp, comment, length }` where `length` is the
 * number of source characters consumed by the whole header line, or `null`
 * when the expected header token is missing.
 */
function parseBlockScalarHeader({ offset, props }, strict, onError) {
    /* istanbul ignore if should not happen */
    if (props[0].type !== 'block-scalar-header') {
        onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found');
        return null;
    }
    const { source } = props[0];
    const mode = source[0];
    let indent = 0;
    let chomp = '';
    let badCharAt = -1;
    // Characters after the mode indicator: at most one chomping indicator
    // and one non-zero indentation digit; anything further is an error.
    for (let i = 1; i < source.length; ++i) {
        const ch = source[i];
        if (chomp === '' && (ch === '-' || ch === '+')) {
            chomp = ch;
        }
        else {
            const digit = Number(ch);
            if (indent === 0 && digit)
                indent = digit;
            else if (badCharAt === -1)
                badCharAt = offset + i;
        }
    }
    if (badCharAt !== -1)
        onError(badCharAt, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`);
    let sawSpace = false;
    let comment = '';
    let length = source.length;
    // Consume the rest of the header line: whitespace, an optional comment,
    // and the trailing newline all count toward the header's length.
    for (const token of props.slice(1)) {
        switch (token.type) {
            case 'space':
                sawSpace = true;
            // fallthrough
            case 'newline':
                length += token.source.length;
                break;
            case 'comment':
                if (strict && !sawSpace) {
                    onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');
                }
                length += token.source.length;
                comment = token.source.substring(1);
                break;
            case 'error':
                onError(token, 'UNEXPECTED_TOKEN', token.message);
                length += token.source.length;
                break;
            /* istanbul ignore next should not happen */
            default: {
                onError(token, 'UNEXPECTED_TOKEN', `Unexpected token in block scalar header: ${token.type}`);
                const ts = token.source;
                if (ts && typeof ts === 'string')
                    length += ts.length;
            }
        }
    }
    return { mode, indent, chomp, comment, length };
}
|
||||
/** @returns Array of lines split up as `[indent, content]` */
function splitLines(source) {
    // Splitting on /\n( *)/ yields [first, indent1, content1, indent2, ...].
    const parts = source.split(/\n( *)/);
    const head = parts[0];
    const headIndent = head.match(/^( *)/)?.[1] ?? '';
    const lines = headIndent
        ? [[headIndent, head.slice(headIndent.length)]]
        : [['', head]];
    for (let i = 1; i < parts.length; i += 2)
        lines.push([parts[i], parts[i + 1]]);
    return lines;
}
|
||||
|
||||
exports.resolveBlockScalar = resolveBlockScalar;
|
6
node_modules/yaml/dist/compose/resolve-block-seq.d.ts
generated
vendored
Normal file
6
node_modules/yaml/dist/compose/resolve-block-seq.d.ts
generated
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
import { YAMLSeq } from '../nodes/YAMLSeq';
|
||||
import type { BlockSequence } from '../parse/cst';
|
||||
import type { CollectionTag } from '../schema/types';
|
||||
import type { ComposeContext, ComposeNode } from './compose-node';
|
||||
import type { ComposeErrorHandler } from './composer';
|
||||
export declare function resolveBlockSeq({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, bs: BlockSequence, onError: ComposeErrorHandler, tag?: CollectionTag): YAMLSeq.Parsed;
|
51
node_modules/yaml/dist/compose/resolve-block-seq.js
generated
vendored
Normal file
51
node_modules/yaml/dist/compose/resolve-block-seq.js
generated
vendored
Normal file
@ -0,0 +1,51 @@
|
||||
'use strict';
|
||||
|
||||
var YAMLSeq = require('../nodes/YAMLSeq.js');
|
||||
var resolveProps = require('./resolve-props.js');
|
||||
var utilFlowIndentCheck = require('./util-flow-indent-check.js');
|
||||
|
||||
/**
 * Compose a block sequence CST node into a YAMLSeq (or a custom collection
 * class supplied by `tag.nodeClass`), tracking the running source offset
 * and validating the `- ` item indicators.
 */
function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError, tag) {
    const NodeClass = tag?.nodeClass ?? YAMLSeq.YAMLSeq;
    const seq = new NodeClass(ctx.schema);
    if (ctx.atRoot)
        ctx.atRoot = false;
    if (ctx.atKey)
        ctx.atKey = false;
    let offset = bs.offset;
    // End offset of a trailing comment-only item, if any.
    let commentEnd = null;
    for (const { start, value } of bs.items) {
        const props = resolveProps.resolveProps(start, {
            indicator: 'seq-item-ind',
            next: value,
            offset,
            onError,
            parentIndent: bs.indent,
            startOnNewline: true
        });
        if (!props.found) {
            // No "-" indicator for this item.
            if (props.anchor || props.tag || value) {
                if (value && value.type === 'block-seq')
                    onError(props.end, 'BAD_INDENT', 'All sequence items must start at the same column');
                else
                    onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator');
            }
            else {
                // Comment-only item: record it on the sequence and skip.
                commentEnd = props.end;
                if (props.comment)
                    seq.comment = props.comment;
                continue;
            }
        }
        const node = value
            ? composeNode(ctx, value, props, onError)
            : composeEmptyNode(ctx, props.end, start, null, props, onError);
        if (ctx.schema.compat)
            utilFlowIndentCheck.flowIndentCheck(bs.indent, value, onError);
        offset = node.range[2];
        seq.items.push(node);
    }
    seq.range = [bs.offset, offset, commentEnd ?? offset];
    return seq;
}

exports.resolveBlockSeq = resolveBlockSeq;
|
6
node_modules/yaml/dist/compose/resolve-end.d.ts
generated
vendored
Normal file
6
node_modules/yaml/dist/compose/resolve-end.d.ts
generated
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
import type { SourceToken } from '../parse/cst';
|
||||
import type { ComposeErrorHandler } from './composer';
|
||||
export declare function resolveEnd(end: SourceToken[] | undefined, offset: number, reqSpace: boolean, onError: ComposeErrorHandler): {
|
||||
comment: string;
|
||||
offset: number;
|
||||
};
|
39
node_modules/yaml/dist/compose/resolve-end.js
generated
vendored
Normal file
39
node_modules/yaml/dist/compose/resolve-end.js
generated
vendored
Normal file
@ -0,0 +1,39 @@
|
||||
'use strict';
|
||||
|
||||
/**
 * Resolve the trailing tokens after a node's value: spaces, comments and
 * newlines. Comment text is joined (separated by the newlines between
 * them) and `offset` is advanced past every consumed token.
 *
 * @param reqSpace - When true, a comment not preceded by whitespace is
 *   reported via `onError` as MISSING_CHAR.
 * @returns The combined comment text and the final offset.
 */
function resolveEnd(end, offset, reqSpace, onError) {
    let comment = '';
    if (end) {
        let sawSpace = false;
        let pendingSep = '';
        for (const token of end) {
            const { source, type } = token;
            switch (type) {
                case 'space':
                    sawSpace = true;
                    break;
                case 'comment': {
                    if (reqSpace && !sawSpace)
                        onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');
                    // An empty comment ("#") is kept as a single space.
                    const text = source.substring(1) || ' ';
                    comment = comment ? comment + pendingSep + text : text;
                    pendingSep = '';
                    break;
                }
                case 'newline':
                    if (comment)
                        pendingSep += source;
                    sawSpace = true;
                    break;
                default:
                    onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`);
            }
            offset += source.length;
        }
    }
    return { comment, offset };
}
|
||||
|
||||
exports.resolveEnd = resolveEnd;
|
7
node_modules/yaml/dist/compose/resolve-flow-collection.d.ts
generated
vendored
Normal file
7
node_modules/yaml/dist/compose/resolve-flow-collection.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
import { YAMLMap } from '../nodes/YAMLMap';
|
||||
import { YAMLSeq } from '../nodes/YAMLSeq';
|
||||
import type { FlowCollection } from '../parse/cst';
|
||||
import type { CollectionTag } from '../schema/types';
|
||||
import type { ComposeContext, ComposeNode } from './compose-node';
|
||||
import type { ComposeErrorHandler } from './composer';
|
||||
export declare function resolveFlowCollection({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, fc: FlowCollection, onError: ComposeErrorHandler, tag?: CollectionTag): YAMLMap.Parsed<import('../index').ParsedNode, import('../index').ParsedNode | null> | YAMLSeq.Parsed<import('../index').ParsedNode>;
|
209
node_modules/yaml/dist/compose/resolve-flow-collection.js
generated
vendored
Normal file
209
node_modules/yaml/dist/compose/resolve-flow-collection.js
generated
vendored
Normal file
@ -0,0 +1,209 @@
|
||||
'use strict';
|
||||
|
||||
var identity = require('../nodes/identity.js');
|
||||
var Pair = require('../nodes/Pair.js');
|
||||
var YAMLMap = require('../nodes/YAMLMap.js');
|
||||
var YAMLSeq = require('../nodes/YAMLSeq.js');
|
||||
var resolveEnd = require('./resolve-end.js');
|
||||
var resolveProps = require('./resolve-props.js');
|
||||
var utilContainsNewline = require('./util-contains-newline.js');
|
||||
var utilMapIncludes = require('./util-map-includes.js');
|
||||
|
||||
const blockMsg = 'Block collections are not allowed within flow collections';
// True for CST tokens that are block collections (invalid inside flow).
const isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq');
/**
 * Compose a flow collection CST node (`{...}` map or `[...]` sequence)
 * into a YAMLMap or YAMLSeq (or a custom class from `tag.nodeClass`).
 *
 * In a flow sequence, a `key: value` item is wrapped in a single-pair
 * YAMLMap. Missing commas, block collections, and unterminated
 * collections are reported via `onError`.
 */
function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError, tag) {
    const isMap = fc.start.source === '{';
    const fcName = isMap ? 'flow map' : 'flow sequence';
    const NodeClass = (tag?.nodeClass ?? (isMap ? YAMLMap.YAMLMap : YAMLSeq.YAMLSeq));
    const coll = new NodeClass(ctx.schema);
    coll.flow = true;
    const atRoot = ctx.atRoot;
    if (atRoot)
        ctx.atRoot = false;
    if (ctx.atKey)
        ctx.atKey = false;
    let offset = fc.offset + fc.start.source.length;
    for (let i = 0; i < fc.items.length; ++i) {
        const collItem = fc.items[i];
        const { start, key, sep, value } = collItem;
        const props = resolveProps.resolveProps(start, {
            flow: fcName,
            indicator: 'explicit-key-ind',
            next: key ?? sep?.[0],
            offset,
            onError,
            parentIndent: fc.indent,
            startOnNewline: false
        });
        if (!props.found) {
            if (!props.anchor && !props.tag && !sep && !value) {
                // Empty item: only valid as trailing comma / comment holder.
                if (i === 0 && props.comma)
                    onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);
                else if (i < fc.items.length - 1)
                    onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`);
                if (props.comment) {
                    if (coll.comment)
                        coll.comment += '\n' + props.comment;
                    else
                        coll.comment = props.comment;
                }
                offset = props.end;
                continue;
            }
            if (!isMap && ctx.options.strict && utilContainsNewline.containsNewline(key))
                onError(key, // checked by containsNewline()
                'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');
        }
        if (i === 0) {
            if (props.comma)
                onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);
        }
        else {
            if (!props.comma)
                onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`);
            if (props.comment) {
                // A comment between the comma and this item belongs to the
                // previous item; split it off from this item's comment.
                let prevItemComment = '';
                loop: for (const st of start) {
                    switch (st.type) {
                        case 'comma':
                        case 'space':
                            break;
                        case 'comment':
                            prevItemComment = st.source.substring(1);
                            break loop;
                        default:
                            break loop;
                    }
                }
                if (prevItemComment) {
                    let prev = coll.items[coll.items.length - 1];
                    if (identity.isPair(prev))
                        prev = prev.value ?? prev.key;
                    if (prev.comment)
                        prev.comment += '\n' + prevItemComment;
                    else
                        prev.comment = prevItemComment;
                    props.comment = props.comment.substring(prevItemComment.length + 1);
                }
            }
        }
        if (!isMap && !sep && !props.found) {
            // item is a value in a seq
            // → key & sep are empty, start does not include ? or :
            const valueNode = value
                ? composeNode(ctx, value, props, onError)
                : composeEmptyNode(ctx, props.end, sep, null, props, onError);
            coll.items.push(valueNode);
            offset = valueNode.range[2];
            if (isBlock(value))
                onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);
        }
        else {
            // item is a key+value pair
            // key value
            ctx.atKey = true;
            const keyStart = props.end;
            const keyNode = key
                ? composeNode(ctx, key, props, onError)
                : composeEmptyNode(ctx, keyStart, start, null, props, onError);
            if (isBlock(key))
                onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);
            ctx.atKey = false;
            // value properties
            const valueProps = resolveProps.resolveProps(sep ?? [], {
                flow: fcName,
                indicator: 'map-value-ind',
                next: value,
                offset: keyNode.range[2],
                onError,
                parentIndent: fc.indent,
                startOnNewline: false
            });
            if (valueProps.found) {
                if (!isMap && !props.found && ctx.options.strict) {
                    if (sep)
                        for (const st of sep) {
                            if (st === valueProps.found)
                                break;
                            if (st.type === 'newline') {
                                onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');
                                break;
                            }
                        }
                    // YAML limits implicit keys to 1024 chars before ":".
                    if (props.start < valueProps.found.offset - 1024)
                        onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key');
                }
            }
            else if (value) {
                if ('source' in value && value.source && value.source[0] === ':')
                    onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`);
                else
                    onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`);
            }
            // value value
            const valueNode = value
                ? composeNode(ctx, value, valueProps, onError)
                : valueProps.found
                    ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError)
                    : null;
            if (valueNode) {
                if (isBlock(value))
                    onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);
            }
            else if (valueProps.comment) {
                if (keyNode.comment)
                    keyNode.comment += '\n' + valueProps.comment;
                else
                    keyNode.comment = valueProps.comment;
            }
            const pair = new Pair.Pair(keyNode, valueNode);
            if (ctx.options.keepSourceTokens)
                pair.srcToken = collItem;
            if (isMap) {
                const map = coll;
                if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))
                    onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
                map.items.push(pair);
            }
            else {
                // In a flow sequence, wrap the pair in a single-pair map.
                const map = new YAMLMap.YAMLMap(ctx.schema);
                map.flow = true;
                map.items.push(pair);
                const endRange = (valueNode ?? keyNode).range;
                map.range = [keyNode.range[0], endRange[1], endRange[2]];
                coll.items.push(map);
            }
            offset = valueNode ? valueNode.range[2] : valueProps.end;
        }
    }
    // Validate the closing "}" / "]" and resolve any trailing comment.
    const expectedEnd = isMap ? '}' : ']';
    const [ce, ...ee] = fc.end;
    let cePos = offset;
    if (ce && ce.source === expectedEnd)
        cePos = ce.offset + ce.source.length;
    else {
        const name = fcName[0].toUpperCase() + fcName.substring(1);
        const msg = atRoot
            ? `${name} must end with a ${expectedEnd}`
            : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`;
        onError(offset, atRoot ? 'MISSING_CHAR' : 'BAD_INDENT', msg);
        if (ce && ce.source.length !== 1)
            ee.unshift(ce);
    }
    if (ee.length > 0) {
        const end = resolveEnd.resolveEnd(ee, cePos, ctx.options.strict, onError);
        if (end.comment) {
            if (coll.comment)
                coll.comment += '\n' + end.comment;
            else
                coll.comment = end.comment;
        }
        coll.range = [fc.offset, cePos, end.offset];
    }
    else {
        coll.range = [fc.offset, cePos, cePos];
    }
    return coll;
}

exports.resolveFlowCollection = resolveFlowCollection;
|
10
node_modules/yaml/dist/compose/resolve-flow-scalar.d.ts
generated
vendored
Normal file
10
node_modules/yaml/dist/compose/resolve-flow-scalar.d.ts
generated
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
import type { Range } from '../nodes/Node';
|
||||
import { Scalar } from '../nodes/Scalar';
|
||||
import type { FlowScalar } from '../parse/cst';
|
||||
import type { ComposeErrorHandler } from './composer';
|
||||
/**
 * Resolve a flow scalar CST token (plain, single- or double-quoted) into its
 * string value, matching Scalar type (`null` on an unexpected token), trailing
 * comment, and `[start, value-end, node-end]` range.
 */
export declare function resolveFlowScalar(scalar: FlowScalar, strict: boolean, onError: ComposeErrorHandler): {
    value: string;
    type: Scalar.PLAIN | Scalar.QUOTE_DOUBLE | Scalar.QUOTE_SINGLE | null;
    comment: string;
    range: Range;
};
|
225
node_modules/yaml/dist/compose/resolve-flow-scalar.js
generated
vendored
Normal file
225
node_modules/yaml/dist/compose/resolve-flow-scalar.js
generated
vendored
Normal file
@ -0,0 +1,225 @@
|
||||
'use strict';
|
||||
|
||||
var Scalar = require('../nodes/Scalar.js');
|
||||
var resolveEnd = require('./resolve-end.js');
|
||||
|
||||
/**
 * Compose a flow scalar token into its string value, dispatching on the token
 * type to resolve quoting, escapes and line folding, then consuming trailing
 * whitespace/comment tokens via resolveEnd.
 *
 * Returns `{ value, type, comment, range }` where `range` is the
 * `[start, value-end, node-end]` character offsets; `type` is `null` and
 * `value` empty when the token is not a flow scalar.
 */
function resolveFlowScalar(scalar, strict, onError) {
    const { offset, type, source, end } = scalar;
    let _type;
    let value;
    // Helpers report positions relative to `source`; re-anchor to absolute offsets.
    const _onError = (rel, code, msg) => onError(offset + rel, code, msg);
    switch (type) {
        case 'scalar':
            _type = Scalar.Scalar.PLAIN;
            value = plainValue(source, _onError);
            break;
        case 'single-quoted-scalar':
            _type = Scalar.Scalar.QUOTE_SINGLE;
            value = singleQuotedValue(source, _onError);
            break;
        case 'double-quoted-scalar':
            _type = Scalar.Scalar.QUOTE_DOUBLE;
            value = doubleQuotedValue(source, _onError);
            break;
        /* istanbul ignore next should not happen */
        default:
            onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`);
            return {
                value: '',
                type: null,
                comment: '',
                range: [offset, offset + source.length, offset + source.length]
            };
    }
    const valueEnd = offset + source.length;
    // Trailing space/comment tokens that follow the scalar value.
    const re = resolveEnd.resolveEnd(end, valueEnd, strict, onError);
    return {
        value,
        type: _type,
        comment: re.comment,
        range: [offset, valueEnd, re.offset]
    };
}
|
||||
/**
 * Resolve the string value of a plain (unquoted) flow scalar, reporting a
 * BAD_SCALAR_START error when the source begins with a character that is
 * not permitted at the start of a plain value.
 */
function plainValue(source, onError) {
    const first = source[0];
    let badChar = '';
    if (first === '\t')
        badChar = 'a tab character';
    else if (first === ',')
        badChar = 'flow indicator character ,';
    else if (first === '%')
        badChar = 'directive indicator character %';
    else if (first === '|' || first === '>')
        badChar = `block scalar indicator ${first}`;
    else if (first === '@' || first === '`')
        badChar = `reserved character ${first}`;
    if (badChar)
        onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`);
    return foldLines(source);
}
|
||||
/**
 * Resolve the value of a single-quoted flow scalar: strip the surrounding
 * quotes, fold line breaks, and un-escape doubled '' quotes. Reports a
 * MISSING_CHAR error when the closing quote is absent.
 */
function singleQuotedValue(source, onError) {
    const closed = source.length > 1 && source[source.length - 1] === "'";
    if (!closed)
        onError(source.length, 'MISSING_CHAR', "Missing closing 'quote");
    return foldLines(source.slice(1, -1)).replace(/''/g, "'");
}
|
||||
/**
 * Fold the line breaks of a flow scalar source: a single break becomes a
 * space, and N consecutive breaks become N-1 newlines. Tabs and spaces at
 * the end of each line and at the start of continuation lines are trimmed.
 */
function foldLines(source) {
    /**
     * The negative lookbehind here and in the `nextLine` RegExp prevents
     * a polynomial search time in certain cases.
     *
     * The try-catch is for Safari, which doesn't support lookbehind yet:
     * https://caniuse.com/js-regexp-lookbehind
     */
    let firstLine, nextLine;
    try {
        firstLine = new RegExp('(.*?)(?<![ \t])[ \t]*\r?\n', 'sy');
        nextLine = new RegExp('[ \t]*(.*?)(?:(?<![ \t])[ \t]*)?\r?\n', 'sy');
    }
    catch {
        firstLine = /(.*?)[ \t]*\r?\n/sy;
        nextLine = /[ \t]*(.*?)[ \t]*\r?\n/sy;
    }
    let match = firstLine.exec(source);
    if (!match)
        return source;
    let folded = match[1];
    let sep = ' ';
    let pos = firstLine.lastIndex;
    nextLine.lastIndex = pos;
    while ((match = nextLine.exec(source))) {
        if (match[1] === '') {
            // Empty line: the first one upgrades the separator to a newline,
            // each further one is emitted directly.
            if (sep === '\n')
                folded += sep;
            else
                sep = '\n';
        }
        else {
            folded += sep + match[1];
            sep = ' ';
        }
        pos = nextLine.lastIndex;
    }
    // Whatever remains after the final line break.
    const lastLine = /[ \t]*(.*)/sy;
    lastLine.lastIndex = pos;
    match = lastLine.exec(source);
    return folded + sep + (match?.[1] ?? '');
}
|
||||
/**
 * Resolve the value of a double-quoted flow scalar: strip the quotes,
 * fold unescaped line breaks, trim whitespace adjacent to breaks, and
 * expand backslash escape sequences. Reports BAD_DQ_ESCAPE for invalid
 * escapes and MISSING_CHAR when the closing quote is absent.
 */
function doubleQuotedValue(source, onError) {
    let res = '';
    for (let i = 1; i < source.length - 1; ++i) {
        const ch = source[i];
        // CR of a CRLF pair is dropped; the LF is handled on the next iteration.
        if (ch === '\r' && source[i + 1] === '\n')
            continue;
        if (ch === '\n') {
            const { fold, offset } = foldNewline(source, i);
            res += fold;
            i = offset;
        }
        else if (ch === '\\') {
            let next = source[++i];
            const cc = escapeCodes[next];
            if (cc)
                res += cc;
            else if (next === '\n') {
                // skip escaped newlines, but still trim the following line
                next = source[i + 1];
                while (next === ' ' || next === '\t')
                    next = source[++i + 1];
            }
            else if (next === '\r' && source[i + 1] === '\n') {
                // skip escaped CRLF newlines, but still trim the following line
                next = source[++i + 1];
                while (next === ' ' || next === '\t')
                    next = source[++i + 1];
            }
            else if (next === 'x' || next === 'u' || next === 'U') {
                // \xNN, \uNNNN and \UNNNNNNNN hex character escapes
                const length = { x: 2, u: 4, U: 8 }[next];
                res += parseCharCode(source, i + 1, length, onError);
                i += length;
            }
            else {
                // Unknown escape: report it, but keep the raw text in the value.
                const raw = source.substr(i - 1, 2);
                onError(i - 1, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);
                res += raw;
            }
        }
        else if (ch === ' ' || ch === '\t') {
            // trim trailing whitespace
            const wsStart = i;
            let next = source[i + 1];
            while (next === ' ' || next === '\t')
                next = source[++i + 1];
            // Keep the whitespace run only if it's not immediately before a break.
            if (next !== '\n' && !(next === '\r' && source[i + 2] === '\n'))
                res += i > wsStart ? source.slice(wsStart, i + 1) : ch;
        }
        else {
            res += ch;
        }
    }
    if (source[source.length - 1] !== '"' || source.length === 1)
        onError(source.length, 'MISSING_CHAR', 'Missing closing "quote');
    return res;
}
|
||||
/**
 * Fold a single newline into a space, multiple newlines to N - 1 newlines.
 * Presumes `source[offset] === '\n'`
 *
 * Returns the folded string and the index of the last consumed character.
 */
function foldNewline(source, offset) {
    let fold = '';
    let next = source[offset + 1];
    while (next === ' ' || next === '\t' || next === '\n' || next === '\r') {
        // A lone CR (not part of a CRLF pair) terminates the fold.
        if (next === '\r' && source[offset + 2] !== '\n')
            break;
        if (next === '\n')
            fold += '\n';
        offset += 1;
        next = source[offset + 1];
    }
    // A single line break folds into one space.
    return { fold: fold || ' ', offset };
}
|
||||
/**
 * Escape sequences valid in double-quoted scalars: `\<key>` in the source
 * expands to the mapped character in the resolved value.
 */
const escapeCodes = {
    '0': '\0', // null character
    a: '\x07', // bell character
    b: '\b', // backspace
    e: '\x1b', // escape character
    f: '\f', // form feed
    n: '\n', // line feed
    r: '\r', // carriage return
    t: '\t', // horizontal tab
    v: '\v', // vertical tab
    N: '\u0085', // Unicode next line
    _: '\u00a0', // Unicode non-breaking space
    L: '\u2028', // Unicode line separator
    P: '\u2029', // Unicode paragraph separator
    ' ': ' ',
    '"': '"',
    '/': '/',
    '\\': '\\',
    '\t': '\t'
};
|
||||
/**
 * Parse `length` hex digits of `source` starting at `offset` into a single
 * character. On malformed input, reports BAD_DQ_ESCAPE (positioned at the
 * backslash, two characters earlier) and returns the raw escape text.
 */
function parseCharCode(source, offset, length, onError) {
    const hex = source.substr(offset, length);
    const valid = hex.length === length && /^[0-9a-fA-F]+$/.test(hex);
    const code = valid ? parseInt(hex, 16) : NaN;
    if (isNaN(code)) {
        const raw = source.substr(offset - 2, length + 2);
        onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);
        return raw;
    }
    return String.fromCodePoint(code);
}
|
||||
|
||||
exports.resolveFlowScalar = resolveFlowScalar;
|
23
node_modules/yaml/dist/compose/resolve-props.d.ts
generated
vendored
Normal file
23
node_modules/yaml/dist/compose/resolve-props.d.ts
generated
vendored
Normal file
@ -0,0 +1,23 @@
|
||||
import type { SourceToken, Token } from '../parse/cst';
|
||||
import type { ComposeErrorHandler } from './composer';
|
||||
/** Arguments for {@link resolveProps}. */
export interface ResolvePropsArg {
    /** Set when resolving props within a flow collection; used in error messages. */
    flow?: 'flow map' | 'flow sequence';
    /** The indicator token type that may validly occur among the props. */
    indicator: 'doc-start' | 'explicit-key-ind' | 'map-value-ind' | 'seq-item-ind';
    /** The token that follows `tokens`, if any. */
    next: Token | null | undefined;
    /** Fallback end offset when `tokens` is empty. */
    offset: number;
    onError: ComposeErrorHandler;
    parentIndent: number;
    startOnNewline: boolean;
}
/**
 * Scan the property tokens (anchor, tag, comments, whitespace, indicator)
 * that precede a node, reporting structural errors and returning the
 * resolved parts together with the scanned span's `start`/`end` offsets.
 */
export declare function resolveProps(tokens: SourceToken[], { flow, indicator, next, offset, onError, parentIndent, startOnNewline }: ResolvePropsArg): {
    comma: SourceToken | null;
    found: SourceToken | null;
    spaceBefore: boolean;
    comment: string;
    hasNewline: boolean;
    anchor: SourceToken | null;
    tag: SourceToken | null;
    newlineAfterProp: SourceToken | null;
    end: number;
    start: number;
};
|
148
node_modules/yaml/dist/compose/resolve-props.js
generated
vendored
Normal file
148
node_modules/yaml/dist/compose/resolve-props.js
generated
vendored
Normal file
@ -0,0 +1,148 @@
|
||||
'use strict';
|
||||
|
||||
/**
 * Scan the property tokens that precede a node — anchor, tag, comments,
 * whitespace and the expected `indicator` — validating their order and
 * spacing, and reporting MULTIPLE_ANCHORS / MULTIPLE_TAGS / BAD_PROP_ORDER /
 * MISSING_CHAR / TAB_AS_INDENT / UNEXPECTED_TOKEN errors as appropriate.
 *
 * Returns the found indicator and comma tokens, the anchor/tag tokens,
 * the collected comment text, newline bookkeeping flags, and the
 * `start`/`end` offsets of the scanned span.
 */
function resolveProps(tokens, { flow, indicator, next, offset, onError, parentIndent, startOnNewline }) {
    let spaceBefore = false;
    let atNewline = startOnNewline;
    let hasSpace = startOnNewline;
    let comment = '';
    let commentSep = '';
    let hasNewline = false;
    let reqSpace = false; // set after anchor/tag: next token must be whitespace
    let tab = null; // pending space token containing a tab, checked on next token
    let anchor = null;
    let tag = null;
    let newlineAfterProp = null;
    let comma = null;
    let found = null;
    let start = null;
    for (const token of tokens) {
        if (reqSpace) {
            if (token.type !== 'space' &&
                token.type !== 'newline' &&
                token.type !== 'comma')
                onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
            reqSpace = false;
        }
        if (tab) {
            // A tab only errors when it was used as indentation for real content.
            if (atNewline && token.type !== 'comment' && token.type !== 'newline') {
                onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
            }
            tab = null;
        }
        switch (token.type) {
            case 'space':
                // At the doc level, tabs at line start may be parsed
                // as leading white space rather than indentation.
                // In a flow collection, only the parser handles indent.
                if (!flow &&
                    (indicator !== 'doc-start' || next?.type !== 'flow-collection') &&
                    token.source.includes('\t')) {
                    tab = token;
                }
                hasSpace = true;
                break;
            case 'comment': {
                if (!hasSpace)
                    onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');
                const cb = token.source.substring(1) || ' ';
                if (!comment)
                    comment = cb;
                else
                    comment += commentSep + cb;
                commentSep = '';
                atNewline = false;
                break;
            }
            case 'newline':
                if (atNewline) {
                    if (comment)
                        comment += token.source;
                    else if (!found || indicator !== 'seq-item-ind')
                        spaceBefore = true;
                }
                else
                    commentSep += token.source;
                atNewline = true;
                hasNewline = true;
                if (anchor || tag)
                    newlineAfterProp = token;
                hasSpace = true;
                break;
            case 'anchor':
                if (anchor)
                    onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor');
                if (token.source.endsWith(':'))
                    onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true);
                anchor = token;
                start ?? (start = token.offset);
                atNewline = false;
                hasSpace = false;
                reqSpace = true;
                break;
            case 'tag': {
                if (tag)
                    onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');
                tag = token;
                start ?? (start = token.offset);
                atNewline = false;
                hasSpace = false;
                reqSpace = true;
                break;
            }
            case indicator:
                // Could here handle preceding comments differently
                if (anchor || tag)
                    onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`);
                if (found)
                    onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`);
                found = token;
                atNewline =
                    indicator === 'seq-item-ind' || indicator === 'explicit-key-ind';
                hasSpace = false;
                break;
            case 'comma':
                if (flow) {
                    if (comma)
                        onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`);
                    comma = token;
                    atNewline = false;
                    hasSpace = false;
                    break;
                }
            // else fallthrough
            default:
                onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`);
                atNewline = false;
                hasSpace = false;
        }
    }
    const last = tokens[tokens.length - 1];
    const end = last ? last.offset + last.source.length : offset;
    // An anchor/tag at the very end still needs trailing whitespace,
    // unless followed by an empty scalar or a separator.
    if (reqSpace &&
        next &&
        next.type !== 'space' &&
        next.type !== 'newline' &&
        next.type !== 'comma' &&
        (next.type !== 'scalar' || next.source !== '')) {
        onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
    }
    if (tab &&
        ((atNewline && tab.indent <= parentIndent) ||
            next?.type === 'block-map' ||
            next?.type === 'block-seq'))
        onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
    return {
        comma,
        found,
        spaceBefore,
        comment,
        hasNewline,
        anchor,
        tag,
        newlineAfterProp,
        end,
        start: start ?? end
    };
}
|
||||
|
||||
exports.resolveProps = resolveProps;
|
2
node_modules/yaml/dist/compose/util-contains-newline.d.ts
generated
vendored
Normal file
2
node_modules/yaml/dist/compose/util-contains-newline.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
import type { Token } from '../parse/cst';
|
||||
/** Whether the token spans multiple lines; `null` when `key` is nullish. */
export declare function containsNewline(key: Token | null | undefined): boolean | null;
|
36
node_modules/yaml/dist/compose/util-contains-newline.js
generated
vendored
Normal file
36
node_modules/yaml/dist/compose/util-contains-newline.js
generated
vendored
Normal file
@ -0,0 +1,36 @@
|
||||
'use strict';
|
||||
|
||||
/**
 * Determine whether a CST token spans more than one line, checking its
 * source text, end tokens, and (for flow collections) each item's parts
 * recursively. Returns `null` for a nullish input; token types with
 * unknown line structure conservatively return `true`.
 */
function containsNewline(key) {
    if (!key)
        return null;
    switch (key.type) {
        case 'alias':
        case 'scalar':
        case 'double-quoted-scalar':
        case 'single-quoted-scalar': {
            if (key.source.includes('\n'))
                return true;
            for (const st of key.end ?? [])
                if (st.type === 'newline')
                    return true;
            return false;
        }
        case 'flow-collection':
            for (const it of key.items) {
                for (const st of it.start)
                    if (st.type === 'newline')
                        return true;
                for (const st of it.sep ?? [])
                    if (st.type === 'newline')
                        return true;
                if (containsNewline(it.key) || containsNewline(it.value))
                    return true;
            }
            return false;
        default:
            return true;
    }
}
|
||||
|
||||
exports.containsNewline = containsNewline;
|
2
node_modules/yaml/dist/compose/util-empty-scalar-position.d.ts
generated
vendored
Normal file
2
node_modules/yaml/dist/compose/util-empty-scalar-position.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
import type { Token } from '../parse/cst';
|
||||
/** Offset at which an empty scalar preceding `before[pos]` should be placed. */
export declare function emptyScalarPosition(offset: number, before: Token[] | undefined, pos: number | null): number;
|
28
node_modules/yaml/dist/compose/util-empty-scalar-position.js
generated
vendored
Normal file
28
node_modules/yaml/dist/compose/util-empty-scalar-position.js
generated
vendored
Normal file
@ -0,0 +1,28 @@
|
||||
'use strict';
|
||||
|
||||
/**
 * Find the position of an empty scalar within the `before` tokens (up to
 * index `pos`, defaulting to all of them): just after the last non-empty
 * token, shifted forward past any spaces that immediately follow it.
 */
function emptyScalarPosition(offset, before, pos) {
    if (!before)
        return offset;
    let i = pos ?? before.length;
    // Walk backwards over trailing space/comment/newline tokens.
    while (i > 0) {
        const st = before[i - 1];
        if (st.type !== 'space' && st.type !== 'comment' && st.type !== 'newline')
            break;
        offset -= st.source.length;
        i -= 1;
    }
    // Technically, an empty scalar is immediately after the last non-empty
    // node, but it's more useful to place it after any whitespace.
    if (i > 0) {
        let st = before[i];
        while (st?.type === 'space') {
            offset += st.source.length;
            st = before[++i];
        }
    }
    return offset;
}
|
||||
|
||||
exports.emptyScalarPosition = emptyScalarPosition;
|
3
node_modules/yaml/dist/compose/util-flow-indent-check.d.ts
generated
vendored
Normal file
3
node_modules/yaml/dist/compose/util-flow-indent-check.d.ts
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
import type { Token } from '../parse/cst';
|
||||
import type { ComposeErrorHandler } from './composer';
|
||||
/** Report BAD_INDENT when a multi-line flow collection's closing indicator is only indented as far as its parent. */
export declare function flowIndentCheck(indent: number, fc: Token | null | undefined, onError: ComposeErrorHandler): void;
|
17
node_modules/yaml/dist/compose/util-flow-indent-check.js
generated
vendored
Normal file
17
node_modules/yaml/dist/compose/util-flow-indent-check.js
generated
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
'use strict';
|
||||
|
||||
var utilContainsNewline = require('./util-contains-newline.js');
|
||||
|
||||
/**
 * Report a BAD_INDENT warning when a multi-line flow collection's closing
 * `]` or `}` is indented no deeper than its parent node.
 */
function flowIndentCheck(indent, fc, onError) {
    if (fc?.type !== 'flow-collection')
        return;
    const end = fc.end[0];
    const isCloser = end.source === ']' || end.source === '}';
    if (end.indent === indent && isCloser && utilContainsNewline.containsNewline(fc)) {
        const msg = 'Flow end indicator should be more indented than parent';
        onError(end, 'BAD_INDENT', msg, true);
    }
}
|
||||
|
||||
exports.flowIndentCheck = flowIndentCheck;
|
4
node_modules/yaml/dist/compose/util-map-includes.d.ts
generated
vendored
Normal file
4
node_modules/yaml/dist/compose/util-map-includes.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
import type { ParsedNode } from '../nodes/Node';
|
||||
import type { Pair } from '../nodes/Pair';
|
||||
import type { ComposeContext } from './compose-node';
|
||||
/** Whether `items` already contains a pair whose key equals `search`, per the `ctx.options.uniqueKeys` setting. */
export declare function mapIncludes(ctx: ComposeContext, items: Pair<ParsedNode>[], search: ParsedNode): boolean;
|
15
node_modules/yaml/dist/compose/util-map-includes.js
generated
vendored
Normal file
15
node_modules/yaml/dist/compose/util-map-includes.js
generated
vendored
Normal file
@ -0,0 +1,15 @@
|
||||
'use strict';
|
||||
|
||||
var identity = require('../nodes/identity.js');
|
||||
|
||||
/**
 * Check whether any pair in `items` has a key equal to `search`, honouring
 * the document's `uniqueKeys` option: `false` disables the check entirely,
 * a function supplies a custom equality test, and the default compares node
 * identity or (for scalars) their values.
 */
function mapIncludes(ctx, items, search) {
    const { uniqueKeys } = ctx.options;
    if (uniqueKeys === false)
        return false;
    let isEqual;
    if (typeof uniqueKeys === 'function')
        isEqual = uniqueKeys;
    else
        isEqual = (a, b) => a === b ||
            (identity.isScalar(a) && identity.isScalar(b) && a.value === b.value);
    return items.some(pair => isEqual(pair.key, search));
}
|
||||
|
||||
exports.mapIncludes = mapIncludes;
|
141
node_modules/yaml/dist/doc/Document.d.ts
generated
vendored
Normal file
141
node_modules/yaml/dist/doc/Document.d.ts
generated
vendored
Normal file
@ -0,0 +1,141 @@
|
||||
import type { YAMLError, YAMLWarning } from '../errors';
|
||||
import { Alias } from '../nodes/Alias';
|
||||
import { NODE_TYPE } from '../nodes/identity';
|
||||
import type { Node, NodeType, ParsedNode, Range } from '../nodes/Node';
|
||||
import { Pair } from '../nodes/Pair';
|
||||
import type { Scalar } from '../nodes/Scalar';
|
||||
import type { YAMLMap } from '../nodes/YAMLMap';
|
||||
import type { YAMLSeq } from '../nodes/YAMLSeq';
|
||||
import type { CreateNodeOptions, DocumentOptions, ParseOptions, SchemaOptions, ToJSOptions, ToStringOptions } from '../options';
|
||||
import { Schema } from '../schema/Schema';
|
||||
import { Directives } from './directives';
|
||||
export type Replacer = any[] | ((key: any, value: any) => unknown);
|
||||
export declare namespace Document {
|
||||
/** @ts-ignore The typing of directives fails in TS <= 4.2 */
|
||||
interface Parsed<Contents extends ParsedNode = ParsedNode, Strict extends boolean = true> extends Document<Contents, Strict> {
|
||||
directives: Directives;
|
||||
range: Range;
|
||||
}
|
||||
}
|
||||
export declare class Document<Contents extends Node = Node, Strict extends boolean = true> {
|
||||
readonly [NODE_TYPE]: symbol;
|
||||
/** A comment before this Document */
|
||||
commentBefore: string | null;
|
||||
/** A comment immediately after this Document */
|
||||
comment: string | null;
|
||||
/** The document contents. */
|
||||
contents: Strict extends true ? Contents | null : Contents;
|
||||
directives: Strict extends true ? Directives | undefined : Directives;
|
||||
/** Errors encountered during parsing. */
|
||||
errors: YAMLError[];
|
||||
options: Required<Omit<ParseOptions & DocumentOptions, '_directives' | 'lineCounter' | 'version'>>;
|
||||
/**
|
||||
* The `[start, value-end, node-end]` character offsets for the part of the
|
||||
* source parsed into this document (undefined if not parsed). The `value-end`
|
||||
* and `node-end` positions are themselves not included in their respective
|
||||
* ranges.
|
||||
*/
|
||||
range?: Range;
|
||||
/** The schema used with the document. Use `setSchema()` to change. */
|
||||
schema: Schema;
|
||||
/** Warnings encountered during parsing. */
|
||||
warnings: YAMLWarning[];
|
||||
/**
|
||||
* @param value - The initial value for the document, which will be wrapped
|
||||
* in a Node container.
|
||||
*/
|
||||
constructor(value?: any, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions);
|
||||
constructor(value: any, replacer: null | Replacer, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions);
|
||||
/**
|
||||
* Create a deep copy of this Document and its contents.
|
||||
*
|
||||
* Custom Node values that inherit from `Object` still refer to their original instances.
|
||||
*/
|
||||
clone(): Document<Contents, Strict>;
|
||||
/** Adds a value to the document. */
|
||||
add(value: any): void;
|
||||
/** Adds a value to the document. */
|
||||
addIn(path: Iterable<unknown>, value: unknown): void;
|
||||
/**
|
||||
* Create a new `Alias` node, ensuring that the target `node` has the required anchor.
|
||||
*
|
||||
* If `node` already has an anchor, `name` is ignored.
|
||||
* Otherwise, the `node.anchor` value will be set to `name`,
|
||||
* or if an anchor with that name is already present in the document,
|
||||
* `name` will be used as a prefix for a new unique anchor.
|
||||
* If `name` is undefined, the generated anchor will use 'a' as a prefix.
|
||||
*/
|
||||
createAlias(node: Strict extends true ? Scalar | YAMLMap | YAMLSeq : Node, name?: string): Alias;
|
||||
/**
|
||||
* Convert any value into a `Node` using the current schema, recursively
|
||||
* turning objects into collections.
|
||||
*/
|
||||
createNode<T = unknown>(value: T, options?: CreateNodeOptions): NodeType<T>;
|
||||
createNode<T = unknown>(value: T, replacer: Replacer | CreateNodeOptions | null, options?: CreateNodeOptions): NodeType<T>;
|
||||
/**
|
||||
* Convert a key and a value into a `Pair` using the current schema,
|
||||
* recursively wrapping all values as `Scalar` or `Collection` nodes.
|
||||
*/
|
||||
createPair<K extends Node = Node, V extends Node = Node>(key: unknown, value: unknown, options?: CreateNodeOptions): Pair<K, V>;
|
||||
/**
|
||||
* Removes a value from the document.
|
||||
* @returns `true` if the item was found and removed.
|
||||
*/
|
||||
delete(key: unknown): boolean;
|
||||
/**
|
||||
* Removes a value from the document.
|
||||
* @returns `true` if the item was found and removed.
|
||||
*/
|
||||
deleteIn(path: Iterable<unknown> | null): boolean;
|
||||
/**
|
||||
* Returns item at `key`, or `undefined` if not found. By default unwraps
|
||||
* scalar values from their surrounding node; to disable set `keepScalar` to
|
||||
* `true` (collections are always returned intact).
|
||||
*/
|
||||
get(key: unknown, keepScalar?: boolean): Strict extends true ? unknown : any;
|
||||
/**
|
||||
* Returns item at `path`, or `undefined` if not found. By default unwraps
|
||||
* scalar values from their surrounding node; to disable set `keepScalar` to
|
||||
* `true` (collections are always returned intact).
|
||||
*/
|
||||
getIn(path: Iterable<unknown> | null, keepScalar?: boolean): Strict extends true ? unknown : any;
|
||||
/**
|
||||
* Checks if the document includes a value with the key `key`.
|
||||
*/
|
||||
has(key: unknown): boolean;
|
||||
/**
|
||||
* Checks if the document includes a value at `path`.
|
||||
*/
|
||||
hasIn(path: Iterable<unknown> | null): boolean;
|
||||
/**
|
||||
* Sets a value in this document. For `!!set`, `value` needs to be a
|
||||
* boolean to add/remove the item from the set.
|
||||
*/
|
||||
set(key: any, value: unknown): void;
|
||||
/**
|
||||
* Sets a value in this document. For `!!set`, `value` needs to be a
|
||||
* boolean to add/remove the item from the set.
|
||||
*/
|
||||
setIn(path: Iterable<unknown> | null, value: unknown): void;
|
||||
/**
|
||||
* Change the YAML version and schema used by the document.
|
||||
* A `null` version disables support for directives, explicit tags, anchors, and aliases.
|
||||
* It also requires the `schema` option to be given as a `Schema` instance value.
|
||||
*
|
||||
* Overrides all previously set schema options.
|
||||
*/
|
||||
setSchema(version: '1.1' | '1.2' | 'next' | null, options?: SchemaOptions): void;
|
||||
/** A plain JavaScript representation of the document `contents`. */
|
||||
toJS(opt?: ToJSOptions & {
|
||||
[ignored: string]: unknown;
|
||||
}): any;
|
||||
/**
|
||||
* A JSON representation of the document `contents`.
|
||||
*
|
||||
* @param jsonArg Used by `JSON.stringify` to indicate the array index or
|
||||
* property name.
|
||||
*/
|
||||
toJSON(jsonArg?: string | null, onAnchor?: ToJSOptions['onAnchor']): any;
|
||||
/** A YAML representation of the document. */
|
||||
toString(options?: ToStringOptions): string;
|
||||
}
|
337
node_modules/yaml/dist/doc/Document.js
generated
vendored
Normal file
337
node_modules/yaml/dist/doc/Document.js
generated
vendored
Normal file
@ -0,0 +1,337 @@
|
||||
'use strict';
|
||||
|
||||
var Alias = require('../nodes/Alias.js');
|
||||
var Collection = require('../nodes/Collection.js');
|
||||
var identity = require('../nodes/identity.js');
|
||||
var Pair = require('../nodes/Pair.js');
|
||||
var toJS = require('../nodes/toJS.js');
|
||||
var Schema = require('../schema/Schema.js');
|
||||
var stringifyDocument = require('../stringify/stringifyDocument.js');
|
||||
var anchors = require('./anchors.js');
|
||||
var applyReviver = require('./applyReviver.js');
|
||||
var createNode = require('./createNode.js');
|
||||
var directives = require('./directives.js');
|
||||
|
||||
class Document {
|
||||
constructor(value, replacer, options) {
|
||||
/** A comment before this Document */
|
||||
this.commentBefore = null;
|
||||
/** A comment immediately after this Document */
|
||||
this.comment = null;
|
||||
/** Errors encountered during parsing. */
|
||||
this.errors = [];
|
||||
/** Warnings encountered during parsing. */
|
||||
this.warnings = [];
|
||||
Object.defineProperty(this, identity.NODE_TYPE, { value: identity.DOC });
|
||||
let _replacer = null;
|
||||
if (typeof replacer === 'function' || Array.isArray(replacer)) {
|
||||
_replacer = replacer;
|
||||
}
|
||||
else if (options === undefined && replacer) {
|
||||
options = replacer;
|
||||
replacer = undefined;
|
||||
}
|
||||
const opt = Object.assign({
|
||||
intAsBigInt: false,
|
||||
keepSourceTokens: false,
|
||||
logLevel: 'warn',
|
||||
prettyErrors: true,
|
||||
strict: true,
|
||||
stringKeys: false,
|
||||
uniqueKeys: true,
|
||||
version: '1.2'
|
||||
}, options);
|
||||
this.options = opt;
|
||||
let { version } = opt;
|
||||
if (options?._directives) {
|
||||
this.directives = options._directives.atDocument();
|
||||
if (this.directives.yaml.explicit)
|
||||
version = this.directives.yaml.version;
|
||||
}
|
||||
else
|
||||
this.directives = new directives.Directives({ version });
|
||||
this.setSchema(version, options);
|
||||
// @ts-expect-error We can't really know that this matches Contents.
|
||||
this.contents =
|
||||
value === undefined ? null : this.createNode(value, _replacer, options);
|
||||
}
|
||||
/**
|
||||
* Create a deep copy of this Document and its contents.
|
||||
*
|
||||
* Custom Node values that inherit from `Object` still refer to their original instances.
|
||||
*/
|
||||
clone() {
|
||||
const copy = Object.create(Document.prototype, {
|
||||
[identity.NODE_TYPE]: { value: identity.DOC }
|
||||
});
|
||||
copy.commentBefore = this.commentBefore;
|
||||
copy.comment = this.comment;
|
||||
copy.errors = this.errors.slice();
|
||||
copy.warnings = this.warnings.slice();
|
||||
copy.options = Object.assign({}, this.options);
|
||||
if (this.directives)
|
||||
copy.directives = this.directives.clone();
|
||||
copy.schema = this.schema.clone();
|
||||
// @ts-expect-error We can't really know that this matches Contents.
|
||||
copy.contents = identity.isNode(this.contents)
|
||||
? this.contents.clone(copy.schema)
|
||||
: this.contents;
|
||||
if (this.range)
|
||||
copy.range = this.range.slice();
|
||||
return copy;
|
||||
}
|
||||
/** Adds a value to the document. */
|
||||
add(value) {
|
||||
if (assertCollection(this.contents))
|
||||
this.contents.add(value);
|
||||
}
|
||||
/** Adds a value to the document. */
|
||||
addIn(path, value) {
|
||||
if (assertCollection(this.contents))
|
||||
this.contents.addIn(path, value);
|
||||
}
|
||||
/**
|
||||
* Create a new `Alias` node, ensuring that the target `node` has the required anchor.
|
||||
*
|
||||
* If `node` already has an anchor, `name` is ignored.
|
||||
* Otherwise, the `node.anchor` value will be set to `name`,
|
||||
* or if an anchor with that name is already present in the document,
|
||||
* `name` will be used as a prefix for a new unique anchor.
|
||||
* If `name` is undefined, the generated anchor will use 'a' as a prefix.
|
||||
*/
|
||||
createAlias(node, name) {
|
||||
if (!node.anchor) {
|
||||
const prev = anchors.anchorNames(this);
|
||||
node.anchor =
|
||||
// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
|
||||
!name || prev.has(name) ? anchors.findNewAnchor(name || 'a', prev) : name;
|
||||
}
|
||||
return new Alias.Alias(node.anchor);
|
||||
}
|
||||
createNode(value, replacer, options) {
|
||||
let _replacer = undefined;
|
||||
if (typeof replacer === 'function') {
|
||||
value = replacer.call({ '': value }, '', value);
|
||||
_replacer = replacer;
|
||||
}
|
||||
else if (Array.isArray(replacer)) {
|
||||
const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number;
|
||||
const asStr = replacer.filter(keyToStr).map(String);
|
||||
if (asStr.length > 0)
|
||||
replacer = replacer.concat(asStr);
|
||||
_replacer = replacer;
|
||||
}
|
||||
else if (options === undefined && replacer) {
|
||||
options = replacer;
|
||||
replacer = undefined;
|
||||
}
|
||||
const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {};
|
||||
const { onAnchor, setAnchors, sourceObjects } = anchors.createNodeAnchors(this,
|
||||
// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
|
||||
anchorPrefix || 'a');
|
||||
const ctx = {
|
||||
aliasDuplicateObjects: aliasDuplicateObjects ?? true,
|
||||
keepUndefined: keepUndefined ?? false,
|
||||
onAnchor,
|
||||
onTagObj,
|
||||
replacer: _replacer,
|
||||
schema: this.schema,
|
||||
sourceObjects
|
||||
};
|
||||
const node = createNode.createNode(value, tag, ctx);
|
||||
if (flow && identity.isCollection(node))
|
||||
node.flow = true;
|
||||
setAnchors();
|
||||
return node;
|
||||
}
|
||||
/**
|
||||
* Convert a key and a value into a `Pair` using the current schema,
|
||||
* recursively wrapping all values as `Scalar` or `Collection` nodes.
|
||||
*/
|
||||
createPair(key, value, options = {}) {
|
||||
const k = this.createNode(key, null, options);
|
||||
const v = this.createNode(value, null, options);
|
||||
return new Pair.Pair(k, v);
|
||||
}
|
||||
/**
|
||||
* Removes a value from the document.
|
||||
* @returns `true` if the item was found and removed.
|
||||
*/
|
||||
delete(key) {
|
||||
return assertCollection(this.contents) ? this.contents.delete(key) : false;
|
||||
}
|
||||
/**
|
||||
* Removes a value from the document.
|
||||
* @returns `true` if the item was found and removed.
|
||||
*/
|
||||
deleteIn(path) {
|
||||
if (Collection.isEmptyPath(path)) {
|
||||
if (this.contents == null)
|
||||
return false;
|
||||
// @ts-expect-error Presumed impossible if Strict extends false
|
||||
this.contents = null;
|
||||
return true;
|
||||
}
|
||||
return assertCollection(this.contents)
|
||||
? this.contents.deleteIn(path)
|
||||
: false;
|
||||
}
|
||||
/**
|
||||
* Returns item at `key`, or `undefined` if not found. By default unwraps
|
||||
* scalar values from their surrounding node; to disable set `keepScalar` to
|
||||
* `true` (collections are always returned intact).
|
||||
*/
|
||||
get(key, keepScalar) {
|
||||
return identity.isCollection(this.contents)
|
||||
? this.contents.get(key, keepScalar)
|
||||
: undefined;
|
||||
}
|
||||
/**
|
||||
* Returns item at `path`, or `undefined` if not found. By default unwraps
|
||||
* scalar values from their surrounding node; to disable set `keepScalar` to
|
||||
* `true` (collections are always returned intact).
|
||||
*/
|
||||
getIn(path, keepScalar) {
|
||||
if (Collection.isEmptyPath(path))
|
||||
return !keepScalar && identity.isScalar(this.contents)
|
||||
? this.contents.value
|
||||
: this.contents;
|
||||
return identity.isCollection(this.contents)
|
||||
? this.contents.getIn(path, keepScalar)
|
||||
: undefined;
|
||||
}
|
||||
/**
|
||||
* Checks if the document includes a value with the key `key`.
|
||||
*/
|
||||
has(key) {
|
||||
return identity.isCollection(this.contents) ? this.contents.has(key) : false;
|
||||
}
|
||||
/**
|
||||
* Checks if the document includes a value at `path`.
|
||||
*/
|
||||
hasIn(path) {
|
||||
if (Collection.isEmptyPath(path))
|
||||
return this.contents !== undefined;
|
||||
return identity.isCollection(this.contents) ? this.contents.hasIn(path) : false;
|
||||
}
|
||||
/**
|
||||
* Sets a value in this document. For `!!set`, `value` needs to be a
|
||||
* boolean to add/remove the item from the set.
|
||||
*/
|
||||
set(key, value) {
|
||||
if (this.contents == null) {
|
||||
// @ts-expect-error We can't really know that this matches Contents.
|
||||
this.contents = Collection.collectionFromPath(this.schema, [key], value);
|
||||
}
|
||||
else if (assertCollection(this.contents)) {
|
||||
this.contents.set(key, value);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Sets a value in this document. For `!!set`, `value` needs to be a
|
||||
* boolean to add/remove the item from the set.
|
||||
*/
|
||||
setIn(path, value) {
|
||||
if (Collection.isEmptyPath(path)) {
|
||||
// @ts-expect-error We can't really know that this matches Contents.
|
||||
this.contents = value;
|
||||
}
|
||||
else if (this.contents == null) {
|
||||
// @ts-expect-error We can't really know that this matches Contents.
|
||||
this.contents = Collection.collectionFromPath(this.schema, Array.from(path), value);
|
||||
}
|
||||
else if (assertCollection(this.contents)) {
|
||||
this.contents.setIn(path, value);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Change the YAML version and schema used by the document.
|
||||
* A `null` version disables support for directives, explicit tags, anchors, and aliases.
|
||||
* It also requires the `schema` option to be given as a `Schema` instance value.
|
||||
*
|
||||
* Overrides all previously set schema options.
|
||||
*/
|
||||
setSchema(version, options = {}) {
|
||||
if (typeof version === 'number')
|
||||
version = String(version);
|
||||
let opt;
|
||||
switch (version) {
|
||||
case '1.1':
|
||||
if (this.directives)
|
||||
this.directives.yaml.version = '1.1';
|
||||
else
|
||||
this.directives = new directives.Directives({ version: '1.1' });
|
||||
opt = { resolveKnownTags: false, schema: 'yaml-1.1' };
|
||||
break;
|
||||
case '1.2':
|
||||
case 'next':
|
||||
if (this.directives)
|
||||
this.directives.yaml.version = version;
|
||||
else
|
||||
this.directives = new directives.Directives({ version });
|
||||
opt = { resolveKnownTags: true, schema: 'core' };
|
||||
break;
|
||||
case null:
|
||||
if (this.directives)
|
||||
delete this.directives;
|
||||
opt = null;
|
||||
break;
|
||||
default: {
|
||||
const sv = JSON.stringify(version);
|
||||
throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`);
|
||||
}
|
||||
}
|
||||
// Not using `instanceof Schema` to allow for duck typing
|
||||
if (options.schema instanceof Object)
|
||||
this.schema = options.schema;
|
||||
else if (opt)
|
||||
this.schema = new Schema.Schema(Object.assign(opt, options));
|
||||
else
|
||||
throw new Error(`With a null YAML version, the { schema: Schema } option is required`);
|
||||
}
|
||||
// json & jsonArg are only used from toJSON()
|
||||
toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {
|
||||
const ctx = {
|
||||
anchors: new Map(),
|
||||
doc: this,
|
||||
keep: !json,
|
||||
mapAsMap: mapAsMap === true,
|
||||
mapKeyWarned: false,
|
||||
maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100
|
||||
};
|
||||
const res = toJS.toJS(this.contents, jsonArg ?? '', ctx);
|
||||
if (typeof onAnchor === 'function')
|
||||
for (const { count, res } of ctx.anchors.values())
|
||||
onAnchor(res, count);
|
||||
return typeof reviver === 'function'
|
||||
? applyReviver.applyReviver(reviver, { '': res }, '', res)
|
||||
: res;
|
||||
}
|
||||
/**
|
||||
* A JSON representation of the document `contents`.
|
||||
*
|
||||
* @param jsonArg Used by `JSON.stringify` to indicate the array index or
|
||||
* property name.
|
||||
*/
|
||||
toJSON(jsonArg, onAnchor) {
|
||||
return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor });
|
||||
}
|
||||
/** A YAML representation of the document. */
|
||||
toString(options = {}) {
|
||||
if (this.errors.length > 0)
|
||||
throw new Error('Document with errors cannot be stringified');
|
||||
if ('indent' in options &&
|
||||
(!Number.isInteger(options.indent) || Number(options.indent) <= 0)) {
|
||||
const s = JSON.stringify(options.indent);
|
||||
throw new Error(`"indent" option must be a positive integer, not ${s}`);
|
||||
}
|
||||
return stringifyDocument.stringifyDocument(this, options);
|
||||
}
|
||||
}
|
||||
function assertCollection(contents) {
|
||||
if (identity.isCollection(contents))
|
||||
return true;
|
||||
throw new Error('Expected a YAML collection as document contents');
|
||||
}
|
||||
|
||||
exports.Document = Document;
|
24
node_modules/yaml/dist/doc/anchors.d.ts
generated
vendored
Normal file
24
node_modules/yaml/dist/doc/anchors.d.ts
generated
vendored
Normal file
@ -0,0 +1,24 @@
|
||||
import type { Node } from '../nodes/Node';
|
||||
import type { Document } from './Document';
|
||||
/**
|
||||
* Verify that the input string is a valid anchor.
|
||||
*
|
||||
* Will throw on errors.
|
||||
*/
|
||||
export declare function anchorIsValid(anchor: string): true;
|
||||
export declare function anchorNames(root: Document<Node, boolean> | Node): Set<string>;
|
||||
/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */
|
||||
export declare function findNewAnchor(prefix: string, exclude: Set<string>): string;
|
||||
export declare function createNodeAnchors(doc: Document<Node, boolean>, prefix: string): {
|
||||
onAnchor: (source: unknown) => string;
|
||||
/**
|
||||
* With circular references, the source node is only resolved after all
|
||||
* of its child nodes are. This is why anchors are set only after all of
|
||||
* the nodes have been created.
|
||||
*/
|
||||
setAnchors: () => void;
|
||||
sourceObjects: Map<unknown, {
|
||||
anchor: string | null;
|
||||
node: Node | null;
|
||||
}>;
|
||||
};
|
76
node_modules/yaml/dist/doc/anchors.js
generated
vendored
Normal file
76
node_modules/yaml/dist/doc/anchors.js
generated
vendored
Normal file
@ -0,0 +1,76 @@
|
||||
'use strict';
|
||||
|
||||
var identity = require('../nodes/identity.js');
|
||||
var visit = require('../visit.js');
|
||||
|
||||
/**
|
||||
* Verify that the input string is a valid anchor.
|
||||
*
|
||||
* Will throw on errors.
|
||||
*/
|
||||
function anchorIsValid(anchor) {
|
||||
if (/[\x00-\x19\s,[\]{}]/.test(anchor)) {
|
||||
const sa = JSON.stringify(anchor);
|
||||
const msg = `Anchor must not contain whitespace or control characters: ${sa}`;
|
||||
throw new Error(msg);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
function anchorNames(root) {
|
||||
const anchors = new Set();
|
||||
visit.visit(root, {
|
||||
Value(_key, node) {
|
||||
if (node.anchor)
|
||||
anchors.add(node.anchor);
|
||||
}
|
||||
});
|
||||
return anchors;
|
||||
}
|
||||
/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */
|
||||
function findNewAnchor(prefix, exclude) {
|
||||
for (let i = 1; true; ++i) {
|
||||
const name = `${prefix}${i}`;
|
||||
if (!exclude.has(name))
|
||||
return name;
|
||||
}
|
||||
}
|
||||
function createNodeAnchors(doc, prefix) {
|
||||
const aliasObjects = [];
|
||||
const sourceObjects = new Map();
|
||||
let prevAnchors = null;
|
||||
return {
|
||||
onAnchor: (source) => {
|
||||
aliasObjects.push(source);
|
||||
prevAnchors ?? (prevAnchors = anchorNames(doc));
|
||||
const anchor = findNewAnchor(prefix, prevAnchors);
|
||||
prevAnchors.add(anchor);
|
||||
return anchor;
|
||||
},
|
||||
/**
|
||||
* With circular references, the source node is only resolved after all
|
||||
* of its child nodes are. This is why anchors are set only after all of
|
||||
* the nodes have been created.
|
||||
*/
|
||||
setAnchors: () => {
|
||||
for (const source of aliasObjects) {
|
||||
const ref = sourceObjects.get(source);
|
||||
if (typeof ref === 'object' &&
|
||||
ref.anchor &&
|
||||
(identity.isScalar(ref.node) || identity.isCollection(ref.node))) {
|
||||
ref.node.anchor = ref.anchor;
|
||||
}
|
||||
else {
|
||||
const error = new Error('Failed to resolve repeated object (this should not happen)');
|
||||
error.source = source;
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
},
|
||||
sourceObjects
|
||||
};
|
||||
}
|
||||
|
||||
exports.anchorIsValid = anchorIsValid;
|
||||
exports.anchorNames = anchorNames;
|
||||
exports.createNodeAnchors = createNodeAnchors;
|
||||
exports.findNewAnchor = findNewAnchor;
|
9
node_modules/yaml/dist/doc/applyReviver.d.ts
generated
vendored
Normal file
9
node_modules/yaml/dist/doc/applyReviver.d.ts
generated
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
export type Reviver = (key: unknown, value: unknown) => unknown;
|
||||
/**
|
||||
* Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,
|
||||
* in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the
|
||||
* 2021 edition: https://tc39.es/ecma262/#sec-json.parse
|
||||
*
|
||||
* Includes extensions for handling Map and Set objects.
|
||||
*/
|
||||
export declare function applyReviver(reviver: Reviver, obj: unknown, key: unknown, val: any): unknown;
|
57
node_modules/yaml/dist/doc/applyReviver.js
generated
vendored
Normal file
57
node_modules/yaml/dist/doc/applyReviver.js
generated
vendored
Normal file
@ -0,0 +1,57 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,
|
||||
* in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the
|
||||
* 2021 edition: https://tc39.es/ecma262/#sec-json.parse
|
||||
*
|
||||
* Includes extensions for handling Map and Set objects.
|
||||
*/
|
||||
function applyReviver(reviver, obj, key, val) {
|
||||
if (val && typeof val === 'object') {
|
||||
if (Array.isArray(val)) {
|
||||
for (let i = 0, len = val.length; i < len; ++i) {
|
||||
const v0 = val[i];
|
||||
const v1 = applyReviver(reviver, val, String(i), v0);
|
||||
// eslint-disable-next-line @typescript-eslint/no-array-delete
|
||||
if (v1 === undefined)
|
||||
delete val[i];
|
||||
else if (v1 !== v0)
|
||||
val[i] = v1;
|
||||
}
|
||||
}
|
||||
else if (val instanceof Map) {
|
||||
for (const k of Array.from(val.keys())) {
|
||||
const v0 = val.get(k);
|
||||
const v1 = applyReviver(reviver, val, k, v0);
|
||||
if (v1 === undefined)
|
||||
val.delete(k);
|
||||
else if (v1 !== v0)
|
||||
val.set(k, v1);
|
||||
}
|
||||
}
|
||||
else if (val instanceof Set) {
|
||||
for (const v0 of Array.from(val)) {
|
||||
const v1 = applyReviver(reviver, val, v0, v0);
|
||||
if (v1 === undefined)
|
||||
val.delete(v0);
|
||||
else if (v1 !== v0) {
|
||||
val.delete(v0);
|
||||
val.add(v1);
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
for (const [k, v0] of Object.entries(val)) {
|
||||
const v1 = applyReviver(reviver, val, k, v0);
|
||||
if (v1 === undefined)
|
||||
delete val[k];
|
||||
else if (v1 !== v0)
|
||||
val[k] = v1;
|
||||
}
|
||||
}
|
||||
}
|
||||
return reviver.call(obj, key, val);
|
||||
}
|
||||
|
||||
exports.applyReviver = applyReviver;
|
17
node_modules/yaml/dist/doc/createNode.d.ts
generated
vendored
Normal file
17
node_modules/yaml/dist/doc/createNode.d.ts
generated
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
import type { Node } from '../nodes/Node';
|
||||
import type { Schema } from '../schema/Schema';
|
||||
import type { CollectionTag, ScalarTag } from '../schema/types';
|
||||
import type { Replacer } from './Document';
|
||||
export interface CreateNodeContext {
|
||||
aliasDuplicateObjects: boolean;
|
||||
keepUndefined: boolean;
|
||||
onAnchor: (source: unknown) => string;
|
||||
onTagObj?: (tagObj: ScalarTag | CollectionTag) => void;
|
||||
sourceObjects: Map<unknown, {
|
||||
anchor: string | null;
|
||||
node: Node | null;
|
||||
}>;
|
||||
replacer?: Replacer;
|
||||
schema: Schema;
|
||||
}
|
||||
export declare function createNode(value: unknown, tagName: string | undefined, ctx: CreateNodeContext): Node;
|
90
node_modules/yaml/dist/doc/createNode.js
generated
vendored
Normal file
90
node_modules/yaml/dist/doc/createNode.js
generated
vendored
Normal file
@ -0,0 +1,90 @@
|
||||
'use strict';
|
||||
|
||||
var Alias = require('../nodes/Alias.js');
|
||||
var identity = require('../nodes/identity.js');
|
||||
var Scalar = require('../nodes/Scalar.js');
|
||||
|
||||
const defaultTagPrefix = 'tag:yaml.org,2002:';
|
||||
function findTagObject(value, tagName, tags) {
|
||||
if (tagName) {
|
||||
const match = tags.filter(t => t.tag === tagName);
|
||||
const tagObj = match.find(t => !t.format) ?? match[0];
|
||||
if (!tagObj)
|
||||
throw new Error(`Tag ${tagName} not found`);
|
||||
return tagObj;
|
||||
}
|
||||
return tags.find(t => t.identify?.(value) && !t.format);
|
||||
}
|
||||
function createNode(value, tagName, ctx) {
|
||||
if (identity.isDocument(value))
|
||||
value = value.contents;
|
||||
if (identity.isNode(value))
|
||||
return value;
|
||||
if (identity.isPair(value)) {
|
||||
const map = ctx.schema[identity.MAP].createNode?.(ctx.schema, null, ctx);
|
||||
map.items.push(value);
|
||||
return map;
|
||||
}
|
||||
if (value instanceof String ||
|
||||
value instanceof Number ||
|
||||
value instanceof Boolean ||
|
||||
(typeof BigInt !== 'undefined' && value instanceof BigInt) // not supported everywhere
|
||||
) {
|
||||
// https://tc39.es/ecma262/#sec-serializejsonproperty
|
||||
value = value.valueOf();
|
||||
}
|
||||
const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx;
|
||||
// Detect duplicate references to the same object & use Alias nodes for all
|
||||
// after first. The `ref` wrapper allows for circular references to resolve.
|
||||
let ref = undefined;
|
||||
if (aliasDuplicateObjects && value && typeof value === 'object') {
|
||||
ref = sourceObjects.get(value);
|
||||
if (ref) {
|
||||
ref.anchor ?? (ref.anchor = onAnchor(value));
|
||||
return new Alias.Alias(ref.anchor);
|
||||
}
|
||||
else {
|
||||
ref = { anchor: null, node: null };
|
||||
sourceObjects.set(value, ref);
|
||||
}
|
||||
}
|
||||
if (tagName?.startsWith('!!'))
|
||||
tagName = defaultTagPrefix + tagName.slice(2);
|
||||
let tagObj = findTagObject(value, tagName, schema.tags);
|
||||
if (!tagObj) {
|
||||
if (value && typeof value.toJSON === 'function') {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-call
|
||||
value = value.toJSON();
|
||||
}
|
||||
if (!value || typeof value !== 'object') {
|
||||
const node = new Scalar.Scalar(value);
|
||||
if (ref)
|
||||
ref.node = node;
|
||||
return node;
|
||||
}
|
||||
tagObj =
|
||||
value instanceof Map
|
||||
? schema[identity.MAP]
|
||||
: Symbol.iterator in Object(value)
|
||||
? schema[identity.SEQ]
|
||||
: schema[identity.MAP];
|
||||
}
|
||||
if (onTagObj) {
|
||||
onTagObj(tagObj);
|
||||
delete ctx.onTagObj;
|
||||
}
|
||||
const node = tagObj?.createNode
|
||||
? tagObj.createNode(ctx.schema, value, ctx)
|
||||
: typeof tagObj?.nodeClass?.from === 'function'
|
||||
? tagObj.nodeClass.from(ctx.schema, value, ctx)
|
||||
: new Scalar.Scalar(value);
|
||||
if (tagName)
|
||||
node.tag = tagName;
|
||||
else if (!tagObj.default)
|
||||
node.tag = tagObj.tag;
|
||||
if (ref)
|
||||
ref.node = node;
|
||||
return node;
|
||||
}
|
||||
|
||||
exports.createNode = createNode;
|
49
node_modules/yaml/dist/doc/directives.d.ts
generated
vendored
Normal file
49
node_modules/yaml/dist/doc/directives.d.ts
generated
vendored
Normal file
@ -0,0 +1,49 @@
|
||||
import type { Document } from './Document';
|
||||
export declare class Directives {
|
||||
static defaultYaml: Directives['yaml'];
|
||||
static defaultTags: Directives['tags'];
|
||||
yaml: {
|
||||
version: '1.1' | '1.2' | 'next';
|
||||
explicit?: boolean;
|
||||
};
|
||||
tags: Record<string, string>;
|
||||
/**
|
||||
* The directives-end/doc-start marker `---`. If `null`, a marker may still be
|
||||
* included in the document's stringified representation.
|
||||
*/
|
||||
docStart: true | null;
|
||||
/** The doc-end marker `...`. */
|
||||
docEnd: boolean;
|
||||
/**
|
||||
* Used when parsing YAML 1.1, where:
|
||||
* > If the document specifies no directives, it is parsed using the same
|
||||
* > settings as the previous document. If the document does specify any
|
||||
* > directives, all directives of previous documents, if any, are ignored.
|
||||
*/
|
||||
private atNextDocument?;
|
||||
constructor(yaml?: Directives['yaml'], tags?: Directives['tags']);
|
||||
clone(): Directives;
|
||||
/**
|
||||
* During parsing, get a Directives instance for the current document and
|
||||
* update the stream state according to the current version's spec.
|
||||
*/
|
||||
atDocument(): Directives;
|
||||
/**
|
||||
* @param onError - May be called even if the action was successful
|
||||
* @returns `true` on success
|
||||
*/
|
||||
add(line: string, onError: (offset: number, message: string, warning?: boolean) => void): boolean;
|
||||
/**
|
||||
* Resolves a tag, matching handles to those defined in %TAG directives.
|
||||
*
|
||||
* @returns Resolved tag, which may also be the non-specific tag `'!'` or a
|
||||
* `'!local'` tag, or `null` if unresolvable.
|
||||
*/
|
||||
tagName(source: string, onError: (message: string) => void): string | null;
|
||||
/**
|
||||
* Given a fully resolved tag, returns its printable string form,
|
||||
* taking into account current tag prefixes and defaults.
|
||||
*/
|
||||
tagString(tag: string): string;
|
||||
toString(doc?: Document): string;
|
||||
}
|
178
node_modules/yaml/dist/doc/directives.js
generated
vendored
Normal file
178
node_modules/yaml/dist/doc/directives.js
generated
vendored
Normal file
@ -0,0 +1,178 @@
|
||||
'use strict';
|
||||
|
||||
var identity = require('../nodes/identity.js');
|
||||
var visit = require('../visit.js');
|
||||
|
||||
const escapeChars = {
|
||||
'!': '%21',
|
||||
',': '%2C',
|
||||
'[': '%5B',
|
||||
']': '%5D',
|
||||
'{': '%7B',
|
||||
'}': '%7D'
|
||||
};
|
||||
const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, ch => escapeChars[ch]);
|
||||
class Directives {
|
||||
constructor(yaml, tags) {
|
||||
/**
|
||||
* The directives-end/doc-start marker `---`. If `null`, a marker may still be
|
||||
* included in the document's stringified representation.
|
||||
*/
|
||||
this.docStart = null;
|
||||
/** The doc-end marker `...`. */
|
||||
this.docEnd = false;
|
||||
this.yaml = Object.assign({}, Directives.defaultYaml, yaml);
|
||||
this.tags = Object.assign({}, Directives.defaultTags, tags);
|
||||
}
|
||||
clone() {
|
||||
const copy = new Directives(this.yaml, this.tags);
|
||||
copy.docStart = this.docStart;
|
||||
return copy;
|
||||
}
|
||||
/**
|
||||
* During parsing, get a Directives instance for the current document and
|
||||
* update the stream state according to the current version's spec.
|
||||
*/
|
||||
atDocument() {
|
||||
const res = new Directives(this.yaml, this.tags);
|
||||
switch (this.yaml.version) {
|
||||
case '1.1':
|
||||
this.atNextDocument = true;
|
||||
break;
|
||||
case '1.2':
|
||||
this.atNextDocument = false;
|
||||
this.yaml = {
|
||||
explicit: Directives.defaultYaml.explicit,
|
||||
version: '1.2'
|
||||
};
|
||||
this.tags = Object.assign({}, Directives.defaultTags);
|
||||
break;
|
||||
}
|
||||
return res;
|
||||
}
|
||||
/**
|
||||
* @param onError - May be called even if the action was successful
|
||||
* @returns `true` on success
|
||||
*/
|
||||
add(line, onError) {
|
||||
if (this.atNextDocument) {
|
||||
this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' };
|
||||
this.tags = Object.assign({}, Directives.defaultTags);
|
||||
this.atNextDocument = false;
|
||||
}
|
||||
const parts = line.trim().split(/[ \t]+/);
|
||||
const name = parts.shift();
|
||||
switch (name) {
|
||||
case '%TAG': {
|
||||
if (parts.length !== 2) {
|
||||
onError(0, '%TAG directive should contain exactly two parts');
|
||||
if (parts.length < 2)
|
||||
return false;
|
||||
}
|
||||
const [handle, prefix] = parts;
|
||||
this.tags[handle] = prefix;
|
||||
return true;
|
||||
}
|
||||
case '%YAML': {
|
||||
this.yaml.explicit = true;
|
||||
if (parts.length !== 1) {
|
||||
onError(0, '%YAML directive should contain exactly one part');
|
||||
return false;
|
||||
}
|
||||
const [version] = parts;
|
||||
if (version === '1.1' || version === '1.2') {
|
||||
this.yaml.version = version;
|
||||
return true;
|
||||
}
|
||||
else {
|
||||
const isValid = /^\d+\.\d+$/.test(version);
|
||||
onError(6, `Unsupported YAML version ${version}`, isValid);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
default:
|
||||
onError(0, `Unknown directive ${name}`, true);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Resolves a tag, matching handles to those defined in %TAG directives.
|
||||
*
|
||||
* @returns Resolved tag, which may also be the non-specific tag `'!'` or a
|
||||
* `'!local'` tag, or `null` if unresolvable.
|
||||
*/
|
||||
tagName(source, onError) {
|
||||
if (source === '!')
|
||||
return '!'; // non-specific tag
|
||||
if (source[0] !== '!') {
|
||||
onError(`Not a valid tag: ${source}`);
|
||||
return null;
|
||||
}
|
||||
if (source[1] === '<') {
|
||||
const verbatim = source.slice(2, -1);
|
||||
if (verbatim === '!' || verbatim === '!!') {
|
||||
onError(`Verbatim tags aren't resolved, so ${source} is invalid.`);
|
||||
return null;
|
||||
}
|
||||
if (source[source.length - 1] !== '>')
|
||||
onError('Verbatim tags must end with a >');
|
||||
return verbatim;
|
||||
}
|
||||
const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/s);
|
||||
if (!suffix)
|
||||
onError(`The ${source} tag has no suffix`);
|
||||
const prefix = this.tags[handle];
|
||||
if (prefix) {
|
||||
try {
|
||||
return prefix + decodeURIComponent(suffix);
|
||||
}
|
||||
catch (error) {
|
||||
onError(String(error));
|
||||
return null;
|
||||
}
|
||||
}
|
||||
if (handle === '!')
|
||||
return source; // local tag
|
||||
onError(`Could not resolve tag: ${source}`);
|
||||
return null;
|
||||
}
|
||||
/**
|
||||
* Given a fully resolved tag, returns its printable string form,
|
||||
* taking into account current tag prefixes and defaults.
|
||||
*/
|
||||
tagString(tag) {
|
||||
for (const [handle, prefix] of Object.entries(this.tags)) {
|
||||
if (tag.startsWith(prefix))
|
||||
return handle + escapeTagName(tag.substring(prefix.length));
|
||||
}
|
||||
return tag[0] === '!' ? tag : `!<${tag}>`;
|
||||
}
|
||||
toString(doc) {
|
||||
const lines = this.yaml.explicit
|
||||
? [`%YAML ${this.yaml.version || '1.2'}`]
|
||||
: [];
|
||||
const tagEntries = Object.entries(this.tags);
|
||||
let tagNames;
|
||||
if (doc && tagEntries.length > 0 && identity.isNode(doc.contents)) {
|
||||
const tags = {};
|
||||
visit.visit(doc.contents, (_key, node) => {
|
||||
if (identity.isNode(node) && node.tag)
|
||||
tags[node.tag] = true;
|
||||
});
|
||||
tagNames = Object.keys(tags);
|
||||
}
|
||||
else
|
||||
tagNames = [];
|
||||
for (const [handle, prefix] of tagEntries) {
|
||||
if (handle === '!!' && prefix === 'tag:yaml.org,2002:')
|
||||
continue;
|
||||
if (!doc || tagNames.some(tn => tn.startsWith(prefix)))
|
||||
lines.push(`%TAG ${handle} ${prefix}`);
|
||||
}
|
||||
return lines.join('\n');
|
||||
}
|
||||
}
|
||||
Directives.defaultYaml = { explicit: false, version: '1.2' };
|
||||
Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' };
|
||||
|
||||
exports.Directives = Directives;
|
21
node_modules/yaml/dist/errors.d.ts
generated
vendored
Normal file
21
node_modules/yaml/dist/errors.d.ts
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
import type { LineCounter } from './parse/line-counter';
|
||||
export type ErrorCode = 'ALIAS_PROPS' | 'BAD_ALIAS' | 'BAD_DIRECTIVE' | 'BAD_DQ_ESCAPE' | 'BAD_INDENT' | 'BAD_PROP_ORDER' | 'BAD_SCALAR_START' | 'BLOCK_AS_IMPLICIT_KEY' | 'BLOCK_IN_FLOW' | 'DUPLICATE_KEY' | 'IMPOSSIBLE' | 'KEY_OVER_1024_CHARS' | 'MISSING_CHAR' | 'MULTILINE_IMPLICIT_KEY' | 'MULTIPLE_ANCHORS' | 'MULTIPLE_DOCS' | 'MULTIPLE_TAGS' | 'NON_STRING_KEY' | 'TAB_AS_INDENT' | 'TAG_RESOLVE_FAILED' | 'UNEXPECTED_TOKEN' | 'BAD_COLLECTION_TYPE';
|
||||
export type LinePos = {
|
||||
line: number;
|
||||
col: number;
|
||||
};
|
||||
export declare class YAMLError extends Error {
|
||||
name: 'YAMLParseError' | 'YAMLWarning';
|
||||
code: ErrorCode;
|
||||
message: string;
|
||||
pos: [number, number];
|
||||
linePos?: [LinePos] | [LinePos, LinePos];
|
||||
constructor(name: YAMLError['name'], pos: [number, number], code: ErrorCode, message: string);
|
||||
}
|
||||
export declare class YAMLParseError extends YAMLError {
|
||||
constructor(pos: [number, number], code: ErrorCode, message: string);
|
||||
}
|
||||
export declare class YAMLWarning extends YAMLError {
|
||||
constructor(pos: [number, number], code: ErrorCode, message: string);
|
||||
}
|
||||
export declare const prettifyError: (src: string, lc: LineCounter) => (error: YAMLError) => void;
|
62
node_modules/yaml/dist/errors.js
generated
vendored
Normal file
62
node_modules/yaml/dist/errors.js
generated
vendored
Normal file
@ -0,0 +1,62 @@
|
||||
'use strict';
|
||||
|
||||
class YAMLError extends Error {
|
||||
constructor(name, pos, code, message) {
|
||||
super();
|
||||
this.name = name;
|
||||
this.code = code;
|
||||
this.message = message;
|
||||
this.pos = pos;
|
||||
}
|
||||
}
|
||||
class YAMLParseError extends YAMLError {
|
||||
constructor(pos, code, message) {
|
||||
super('YAMLParseError', pos, code, message);
|
||||
}
|
||||
}
|
||||
class YAMLWarning extends YAMLError {
|
||||
constructor(pos, code, message) {
|
||||
super('YAMLWarning', pos, code, message);
|
||||
}
|
||||
}
|
||||
const prettifyError = (src, lc) => (error) => {
|
||||
if (error.pos[0] === -1)
|
||||
return;
|
||||
error.linePos = error.pos.map(pos => lc.linePos(pos));
|
||||
const { line, col } = error.linePos[0];
|
||||
error.message += ` at line ${line}, column ${col}`;
|
||||
let ci = col - 1;
|
||||
let lineStr = src
|
||||
.substring(lc.lineStarts[line - 1], lc.lineStarts[line])
|
||||
.replace(/[\n\r]+$/, '');
|
||||
// Trim to max 80 chars, keeping col position near the middle
|
||||
if (ci >= 60 && lineStr.length > 80) {
|
||||
const trimStart = Math.min(ci - 39, lineStr.length - 79);
|
||||
lineStr = '…' + lineStr.substring(trimStart);
|
||||
ci -= trimStart - 1;
|
||||
}
|
||||
if (lineStr.length > 80)
|
||||
lineStr = lineStr.substring(0, 79) + '…';
|
||||
// Include previous line in context if pointing at line start
|
||||
if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) {
|
||||
// Regexp won't match if start is trimmed
|
||||
let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]);
|
||||
if (prev.length > 80)
|
||||
prev = prev.substring(0, 79) + '…\n';
|
||||
lineStr = prev + lineStr;
|
||||
}
|
||||
if (/[^ ]/.test(lineStr)) {
|
||||
let count = 1;
|
||||
const end = error.linePos[1];
|
||||
if (end && end.line === line && end.col > col) {
|
||||
count = Math.max(1, Math.min(end.col - col, 80 - ci));
|
||||
}
|
||||
const pointer = ' '.repeat(ci) + '^'.repeat(count);
|
||||
error.message += `:\n\n${lineStr}\n${pointer}\n`;
|
||||
}
|
||||
};
|
||||
|
||||
exports.YAMLError = YAMLError;
|
||||
exports.YAMLParseError = YAMLParseError;
|
||||
exports.YAMLWarning = YAMLWarning;
|
||||
exports.prettifyError = prettifyError;
|
25
node_modules/yaml/dist/index.d.ts
generated
vendored
Normal file
25
node_modules/yaml/dist/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,25 @@
|
||||
export { Composer } from './compose/composer';
|
||||
export { Document } from './doc/Document';
|
||||
export { Schema } from './schema/Schema';
|
||||
export type { ErrorCode } from './errors';
|
||||
export { YAMLError, YAMLParseError, YAMLWarning } from './errors';
|
||||
export { Alias } from './nodes/Alias';
|
||||
export { isAlias, isCollection, isDocument, isMap, isNode, isPair, isScalar, isSeq } from './nodes/identity';
|
||||
export type { Node, ParsedNode, Range } from './nodes/Node';
|
||||
export { Pair } from './nodes/Pair';
|
||||
export { Scalar } from './nodes/Scalar';
|
||||
export { YAMLMap } from './nodes/YAMLMap';
|
||||
export { YAMLSeq } from './nodes/YAMLSeq';
|
||||
export type { CreateNodeOptions, DocumentOptions, ParseOptions, SchemaOptions, ToJSOptions, ToStringOptions } from './options';
|
||||
export * as CST from './parse/cst';
|
||||
export { Lexer } from './parse/lexer';
|
||||
export { LineCounter } from './parse/line-counter';
|
||||
export { Parser } from './parse/parser';
|
||||
export type { EmptyStream } from './public-api';
|
||||
export { parse, parseAllDocuments, parseDocument, stringify } from './public-api';
|
||||
export type { TagId, Tags } from './schema/tags';
|
||||
export type { CollectionTag, ScalarTag } from './schema/types';
|
||||
export type { YAMLOMap } from './schema/yaml-1.1/omap';
|
||||
export type { YAMLSet } from './schema/yaml-1.1/set';
|
||||
export type { asyncVisitor, asyncVisitorFn, visitor, visitorFn } from './visit';
|
||||
export { visit, visitAsync } from './visit';
|
50
node_modules/yaml/dist/index.js
generated
vendored
Normal file
50
node_modules/yaml/dist/index.js
generated
vendored
Normal file
@ -0,0 +1,50 @@
|
||||
'use strict';
|
||||
|
||||
var composer = require('./compose/composer.js');
|
||||
var Document = require('./doc/Document.js');
|
||||
var Schema = require('./schema/Schema.js');
|
||||
var errors = require('./errors.js');
|
||||
var Alias = require('./nodes/Alias.js');
|
||||
var identity = require('./nodes/identity.js');
|
||||
var Pair = require('./nodes/Pair.js');
|
||||
var Scalar = require('./nodes/Scalar.js');
|
||||
var YAMLMap = require('./nodes/YAMLMap.js');
|
||||
var YAMLSeq = require('./nodes/YAMLSeq.js');
|
||||
var cst = require('./parse/cst.js');
|
||||
var lexer = require('./parse/lexer.js');
|
||||
var lineCounter = require('./parse/line-counter.js');
|
||||
var parser = require('./parse/parser.js');
|
||||
var publicApi = require('./public-api.js');
|
||||
var visit = require('./visit.js');
|
||||
|
||||
|
||||
|
||||
exports.Composer = composer.Composer;
|
||||
exports.Document = Document.Document;
|
||||
exports.Schema = Schema.Schema;
|
||||
exports.YAMLError = errors.YAMLError;
|
||||
exports.YAMLParseError = errors.YAMLParseError;
|
||||
exports.YAMLWarning = errors.YAMLWarning;
|
||||
exports.Alias = Alias.Alias;
|
||||
exports.isAlias = identity.isAlias;
|
||||
exports.isCollection = identity.isCollection;
|
||||
exports.isDocument = identity.isDocument;
|
||||
exports.isMap = identity.isMap;
|
||||
exports.isNode = identity.isNode;
|
||||
exports.isPair = identity.isPair;
|
||||
exports.isScalar = identity.isScalar;
|
||||
exports.isSeq = identity.isSeq;
|
||||
exports.Pair = Pair.Pair;
|
||||
exports.Scalar = Scalar.Scalar;
|
||||
exports.YAMLMap = YAMLMap.YAMLMap;
|
||||
exports.YAMLSeq = YAMLSeq.YAMLSeq;
|
||||
exports.CST = cst;
|
||||
exports.Lexer = lexer.Lexer;
|
||||
exports.LineCounter = lineCounter.LineCounter;
|
||||
exports.Parser = parser.Parser;
|
||||
exports.parse = publicApi.parse;
|
||||
exports.parseAllDocuments = publicApi.parseAllDocuments;
|
||||
exports.parseDocument = publicApi.parseDocument;
|
||||
exports.stringify = publicApi.stringify;
|
||||
exports.visit = visit.visit;
|
||||
exports.visitAsync = visit.visitAsync;
|
3
node_modules/yaml/dist/log.d.ts
generated
vendored
Normal file
3
node_modules/yaml/dist/log.d.ts
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
export type LogLevelId = 'silent' | 'error' | 'warn' | 'debug';
|
||||
export declare function debug(logLevel: LogLevelId, ...messages: any[]): void;
|
||||
export declare function warn(logLevel: LogLevelId, warning: string | Error): void;
|
19
node_modules/yaml/dist/log.js
generated
vendored
Normal file
19
node_modules/yaml/dist/log.js
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
'use strict';
|
||||
|
||||
var node_process = require('process');
|
||||
|
||||
function debug(logLevel, ...messages) {
|
||||
if (logLevel === 'debug')
|
||||
console.log(...messages);
|
||||
}
|
||||
function warn(logLevel, warning) {
|
||||
if (logLevel === 'debug' || logLevel === 'warn') {
|
||||
if (typeof node_process.emitWarning === 'function')
|
||||
node_process.emitWarning(warning);
|
||||
else
|
||||
console.warn(warning);
|
||||
}
|
||||
}
|
||||
|
||||
exports.debug = debug;
|
||||
exports.warn = warn;
|
29
node_modules/yaml/dist/nodes/Alias.d.ts
generated
vendored
Normal file
29
node_modules/yaml/dist/nodes/Alias.d.ts
generated
vendored
Normal file
@ -0,0 +1,29 @@
|
||||
import type { Document } from '../doc/Document';
|
||||
import type { FlowScalar } from '../parse/cst';
|
||||
import type { StringifyContext } from '../stringify/stringify';
|
||||
import type { Range } from './Node';
|
||||
import { NodeBase } from './Node';
|
||||
import type { Scalar } from './Scalar';
|
||||
import type { ToJSContext } from './toJS';
|
||||
import type { YAMLMap } from './YAMLMap';
|
||||
import type { YAMLSeq } from './YAMLSeq';
|
||||
export declare namespace Alias {
|
||||
interface Parsed extends Alias {
|
||||
range: Range;
|
||||
srcToken?: FlowScalar & {
|
||||
type: 'alias';
|
||||
};
|
||||
}
|
||||
}
|
||||
export declare class Alias extends NodeBase {
|
||||
source: string;
|
||||
anchor?: never;
|
||||
constructor(source: string);
|
||||
/**
|
||||
* Resolve the value of this alias within `doc`, finding the last
|
||||
* instance of the `source` anchor before this node.
|
||||
*/
|
||||
resolve(doc: Document, ctx?: ToJSContext): Scalar | YAMLMap | YAMLSeq | undefined;
|
||||
toJSON(_arg?: unknown, ctx?: ToJSContext): unknown;
|
||||
toString(ctx?: StringifyContext, _onComment?: () => void, _onChompKeep?: () => void): string;
|
||||
}
|
116
node_modules/yaml/dist/nodes/Alias.js
generated
vendored
Normal file
116
node_modules/yaml/dist/nodes/Alias.js
generated
vendored
Normal file
@ -0,0 +1,116 @@
|
||||
'use strict';
|
||||
|
||||
var anchors = require('../doc/anchors.js');
|
||||
var visit = require('../visit.js');
|
||||
var identity = require('./identity.js');
|
||||
var Node = require('./Node.js');
|
||||
var toJS = require('./toJS.js');
|
||||
|
||||
class Alias extends Node.NodeBase {
|
||||
constructor(source) {
|
||||
super(identity.ALIAS);
|
||||
this.source = source;
|
||||
Object.defineProperty(this, 'tag', {
|
||||
set() {
|
||||
throw new Error('Alias nodes cannot have tags');
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Resolve the value of this alias within `doc`, finding the last
|
||||
* instance of the `source` anchor before this node.
|
||||
*/
|
||||
resolve(doc, ctx) {
|
||||
let nodes;
|
||||
if (ctx?.aliasResolveCache) {
|
||||
nodes = ctx.aliasResolveCache;
|
||||
}
|
||||
else {
|
||||
nodes = [];
|
||||
visit.visit(doc, {
|
||||
Node: (_key, node) => {
|
||||
if (identity.isAlias(node) || identity.hasAnchor(node))
|
||||
nodes.push(node);
|
||||
}
|
||||
});
|
||||
if (ctx)
|
||||
ctx.aliasResolveCache = nodes;
|
||||
}
|
||||
let found = undefined;
|
||||
for (const node of nodes) {
|
||||
if (node === this)
|
||||
break;
|
||||
if (node.anchor === this.source)
|
||||
found = node;
|
||||
}
|
||||
return found;
|
||||
}
|
||||
toJSON(_arg, ctx) {
|
||||
if (!ctx)
|
||||
return { source: this.source };
|
||||
const { anchors, doc, maxAliasCount } = ctx;
|
||||
const source = this.resolve(doc, ctx);
|
||||
if (!source) {
|
||||
const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
|
||||
throw new ReferenceError(msg);
|
||||
}
|
||||
let data = anchors.get(source);
|
||||
if (!data) {
|
||||
// Resolve anchors for Node.prototype.toJS()
|
||||
toJS.toJS(source, null, ctx);
|
||||
data = anchors.get(source);
|
||||
}
|
||||
/* istanbul ignore if */
|
||||
if (!data || data.res === undefined) {
|
||||
const msg = 'This should not happen: Alias anchor was not resolved?';
|
||||
throw new ReferenceError(msg);
|
||||
}
|
||||
if (maxAliasCount >= 0) {
|
||||
data.count += 1;
|
||||
if (data.aliasCount === 0)
|
||||
data.aliasCount = getAliasCount(doc, source, anchors);
|
||||
if (data.count * data.aliasCount > maxAliasCount) {
|
||||
const msg = 'Excessive alias count indicates a resource exhaustion attack';
|
||||
throw new ReferenceError(msg);
|
||||
}
|
||||
}
|
||||
return data.res;
|
||||
}
|
||||
toString(ctx, _onComment, _onChompKeep) {
|
||||
const src = `*${this.source}`;
|
||||
if (ctx) {
|
||||
anchors.anchorIsValid(this.source);
|
||||
if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) {
|
||||
const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
|
||||
throw new Error(msg);
|
||||
}
|
||||
if (ctx.implicitKey)
|
||||
return `${src} `;
|
||||
}
|
||||
return src;
|
||||
}
|
||||
}
|
||||
function getAliasCount(doc, node, anchors) {
|
||||
if (identity.isAlias(node)) {
|
||||
const source = node.resolve(doc);
|
||||
const anchor = anchors && source && anchors.get(source);
|
||||
return anchor ? anchor.count * anchor.aliasCount : 0;
|
||||
}
|
||||
else if (identity.isCollection(node)) {
|
||||
let count = 0;
|
||||
for (const item of node.items) {
|
||||
const c = getAliasCount(doc, item, anchors);
|
||||
if (c > count)
|
||||
count = c;
|
||||
}
|
||||
return count;
|
||||
}
|
||||
else if (identity.isPair(node)) {
|
||||
const kc = getAliasCount(doc, node.key, anchors);
|
||||
const vc = getAliasCount(doc, node.value, anchors);
|
||||
return Math.max(kc, vc);
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
exports.Alias = Alias;
|
73
node_modules/yaml/dist/nodes/Collection.d.ts
generated
vendored
Normal file
73
node_modules/yaml/dist/nodes/Collection.d.ts
generated
vendored
Normal file
@ -0,0 +1,73 @@
|
||||
import type { Schema } from '../schema/Schema';
|
||||
import { NODE_TYPE } from './identity';
|
||||
import { NodeBase } from './Node';
|
||||
export declare function collectionFromPath(schema: Schema, path: unknown[], value: unknown): import('./Node').Node;
|
||||
export declare const isEmptyPath: (path: Iterable<unknown> | null | undefined) => path is null | undefined;
|
||||
export declare abstract class Collection extends NodeBase {
|
||||
schema: Schema | undefined;
|
||||
[NODE_TYPE]: symbol;
|
||||
items: unknown[];
|
||||
/** An optional anchor on this node. Used by alias nodes. */
|
||||
anchor?: string;
|
||||
/**
|
||||
* If true, stringify this and all child nodes using flow rather than
|
||||
* block styles.
|
||||
*/
|
||||
flow?: boolean;
|
||||
constructor(type: symbol, schema?: Schema);
|
||||
/**
|
||||
* Create a copy of this collection.
|
||||
*
|
||||
* @param schema - If defined, overwrites the original's schema
|
||||
*/
|
||||
clone(schema?: Schema): Collection;
|
||||
/** Adds a value to the collection. */
|
||||
abstract add(value: unknown): void;
|
||||
/**
|
||||
* Removes a value from the collection.
|
||||
* @returns `true` if the item was found and removed.
|
||||
*/
|
||||
abstract delete(key: unknown): boolean;
|
||||
/**
|
||||
* Returns item at `key`, or `undefined` if not found. By default unwraps
|
||||
* scalar values from their surrounding node; to disable set `keepScalar` to
|
||||
* `true` (collections are always returned intact).
|
||||
*/
|
||||
abstract get(key: unknown, keepScalar?: boolean): unknown;
|
||||
/**
|
||||
* Checks if the collection includes a value with the key `key`.
|
||||
*/
|
||||
abstract has(key: unknown): boolean;
|
||||
/**
|
||||
* Sets a value in this collection. For `!!set`, `value` needs to be a
|
||||
* boolean to add/remove the item from the set.
|
||||
*/
|
||||
abstract set(key: unknown, value: unknown): void;
|
||||
/**
|
||||
* Adds a value to the collection. For `!!map` and `!!omap` the value must
|
||||
* be a Pair instance or a `{ key, value }` object, which may not have a key
|
||||
* that already exists in the map.
|
||||
*/
|
||||
addIn(path: Iterable<unknown>, value: unknown): void;
|
||||
/**
|
||||
* Removes a value from the collection.
|
||||
* @returns `true` if the item was found and removed.
|
||||
*/
|
||||
deleteIn(path: Iterable<unknown>): boolean;
|
||||
/**
|
||||
* Returns item at `key`, or `undefined` if not found. By default unwraps
|
||||
* scalar values from their surrounding node; to disable set `keepScalar` to
|
||||
* `true` (collections are always returned intact).
|
||||
*/
|
||||
getIn(path: Iterable<unknown>, keepScalar?: boolean): unknown;
|
||||
hasAllNullValues(allowScalar?: boolean): boolean;
|
||||
/**
|
||||
* Checks if the collection includes a value with the key `key`.
|
||||
*/
|
||||
hasIn(path: Iterable<unknown>): boolean;
|
||||
/**
|
||||
* Sets a value in this collection. For `!!set`, `value` needs to be a
|
||||
* boolean to add/remove the item from the set.
|
||||
*/
|
||||
setIn(path: Iterable<unknown>, value: unknown): void;
|
||||
}
|
151
node_modules/yaml/dist/nodes/Collection.js
generated
vendored
Normal file
151
node_modules/yaml/dist/nodes/Collection.js
generated
vendored
Normal file
@ -0,0 +1,151 @@
|
||||
'use strict';
|
||||
|
||||
var createNode = require('../doc/createNode.js');
|
||||
var identity = require('./identity.js');
|
||||
var Node = require('./Node.js');
|
||||
|
||||
function collectionFromPath(schema, path, value) {
|
||||
let v = value;
|
||||
for (let i = path.length - 1; i >= 0; --i) {
|
||||
const k = path[i];
|
||||
if (typeof k === 'number' && Number.isInteger(k) && k >= 0) {
|
||||
const a = [];
|
||||
a[k] = v;
|
||||
v = a;
|
||||
}
|
||||
else {
|
||||
v = new Map([[k, v]]);
|
||||
}
|
||||
}
|
||||
return createNode.createNode(v, undefined, {
|
||||
aliasDuplicateObjects: false,
|
||||
keepUndefined: false,
|
||||
onAnchor: () => {
|
||||
throw new Error('This should not happen, please report a bug.');
|
||||
},
|
||||
schema,
|
||||
sourceObjects: new Map()
|
||||
});
|
||||
}
|
||||
// Type guard is intentionally a little wrong so as to be more useful,
|
||||
// as it does not cover untypable empty non-string iterables (e.g. []).
|
||||
const isEmptyPath = (path) => path == null ||
|
||||
(typeof path === 'object' && !!path[Symbol.iterator]().next().done);
|
||||
class Collection extends Node.NodeBase {
|
||||
constructor(type, schema) {
|
||||
super(type);
|
||||
Object.defineProperty(this, 'schema', {
|
||||
value: schema,
|
||||
configurable: true,
|
||||
enumerable: false,
|
||||
writable: true
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Create a copy of this collection.
|
||||
*
|
||||
* @param schema - If defined, overwrites the original's schema
|
||||
*/
|
||||
clone(schema) {
|
||||
const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));
|
||||
if (schema)
|
||||
copy.schema = schema;
|
||||
copy.items = copy.items.map(it => identity.isNode(it) || identity.isPair(it) ? it.clone(schema) : it);
|
||||
if (this.range)
|
||||
copy.range = this.range.slice();
|
||||
return copy;
|
||||
}
|
||||
/**
|
||||
* Adds a value to the collection. For `!!map` and `!!omap` the value must
|
||||
* be a Pair instance or a `{ key, value }` object, which may not have a key
|
||||
* that already exists in the map.
|
||||
*/
|
||||
addIn(path, value) {
|
||||
if (isEmptyPath(path))
|
||||
this.add(value);
|
||||
else {
|
||||
const [key, ...rest] = path;
|
||||
const node = this.get(key, true);
|
||||
if (identity.isCollection(node))
|
||||
node.addIn(rest, value);
|
||||
else if (node === undefined && this.schema)
|
||||
this.set(key, collectionFromPath(this.schema, rest, value));
|
||||
else
|
||||
throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Removes a value from the collection.
|
||||
* @returns `true` if the item was found and removed.
|
||||
*/
|
||||
deleteIn(path) {
|
||||
const [key, ...rest] = path;
|
||||
if (rest.length === 0)
|
||||
return this.delete(key);
|
||||
const node = this.get(key, true);
|
||||
if (identity.isCollection(node))
|
||||
return node.deleteIn(rest);
|
||||
else
|
||||
throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
|
||||
}
|
||||
/**
|
||||
* Returns item at `key`, or `undefined` if not found. By default unwraps
|
||||
* scalar values from their surrounding node; to disable set `keepScalar` to
|
||||
* `true` (collections are always returned intact).
|
||||
*/
|
||||
getIn(path, keepScalar) {
|
||||
const [key, ...rest] = path;
|
||||
const node = this.get(key, true);
|
||||
if (rest.length === 0)
|
||||
return !keepScalar && identity.isScalar(node) ? node.value : node;
|
||||
else
|
||||
return identity.isCollection(node) ? node.getIn(rest, keepScalar) : undefined;
|
||||
}
|
||||
hasAllNullValues(allowScalar) {
|
||||
return this.items.every(node => {
|
||||
if (!identity.isPair(node))
|
||||
return false;
|
||||
const n = node.value;
|
||||
return (n == null ||
|
||||
(allowScalar &&
|
||||
identity.isScalar(n) &&
|
||||
n.value == null &&
|
||||
!n.commentBefore &&
|
||||
!n.comment &&
|
||||
!n.tag));
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Checks if the collection includes a value with the key `key`.
|
||||
*/
|
||||
hasIn(path) {
|
||||
const [key, ...rest] = path;
|
||||
if (rest.length === 0)
|
||||
return this.has(key);
|
||||
const node = this.get(key, true);
|
||||
return identity.isCollection(node) ? node.hasIn(rest) : false;
|
||||
}
|
||||
/**
|
||||
* Sets a value in this collection. For `!!set`, `value` needs to be a
|
||||
* boolean to add/remove the item from the set.
|
||||
*/
|
||||
setIn(path, value) {
|
||||
const [key, ...rest] = path;
|
||||
if (rest.length === 0) {
|
||||
this.set(key, value);
|
||||
}
|
||||
else {
|
||||
const node = this.get(key, true);
|
||||
if (identity.isCollection(node))
|
||||
node.setIn(rest, value);
|
||||
else if (node === undefined && this.schema)
|
||||
this.set(key, collectionFromPath(this.schema, rest, value));
|
||||
else
|
||||
throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
exports.Collection = Collection;
|
||||
exports.collectionFromPath = collectionFromPath;
|
||||
exports.isEmptyPath = isEmptyPath;
|
53
node_modules/yaml/dist/nodes/Node.d.ts
generated
vendored
Normal file
53
node_modules/yaml/dist/nodes/Node.d.ts
generated
vendored
Normal file
@ -0,0 +1,53 @@
|
||||
import type { Document } from '../doc/Document';
|
||||
import type { ToJSOptions } from '../options';
|
||||
import type { Token } from '../parse/cst';
|
||||
import type { StringifyContext } from '../stringify/stringify';
|
||||
import type { Alias } from './Alias';
|
||||
import { NODE_TYPE } from './identity';
|
||||
import type { Scalar } from './Scalar';
|
||||
import type { ToJSContext } from './toJS';
|
||||
import type { MapLike, YAMLMap } from './YAMLMap';
|
||||
import type { YAMLSeq } from './YAMLSeq';
|
||||
export type Node<T = unknown> = Alias | Scalar<T> | YAMLMap<unknown, T> | YAMLSeq<T>;
|
||||
/** Utility type mapper */
|
||||
export type NodeType<T> = T extends string | number | bigint | boolean | null | undefined ? Scalar<T> : T extends Date ? Scalar<string | Date> : T extends Array<any> ? YAMLSeq<NodeType<T[number]>> : T extends {
|
||||
[key: string]: any;
|
||||
} ? YAMLMap<NodeType<keyof T>, NodeType<T[keyof T]>> : T extends {
|
||||
[key: number]: any;
|
||||
} ? YAMLMap<NodeType<keyof T>, NodeType<T[keyof T]>> : Node;
|
||||
export type ParsedNode = Alias.Parsed | Scalar.Parsed | YAMLMap.Parsed | YAMLSeq.Parsed;
|
||||
/** `[start, value-end, node-end]` */
|
||||
export type Range = [number, number, number];
|
||||
export declare abstract class NodeBase {
|
||||
readonly [NODE_TYPE]: symbol;
|
||||
/** A comment on or immediately after this */
|
||||
comment?: string | null;
|
||||
/** A comment before this */
|
||||
commentBefore?: string | null;
|
||||
/**
|
||||
* The `[start, value-end, node-end]` character offsets for the part of the
|
||||
* source parsed into this node (undefined if not parsed). The `value-end`
|
||||
* and `node-end` positions are themselves not included in their respective
|
||||
* ranges.
|
||||
*/
|
||||
range?: Range | null;
|
||||
/** A blank line before this node and its commentBefore */
|
||||
spaceBefore?: boolean;
|
||||
/** The CST token that was composed into this node. */
|
||||
srcToken?: Token;
|
||||
/** A fully qualified tag, if required */
|
||||
tag?: string;
|
||||
/**
|
||||
* Customize the way that a key-value pair is resolved.
|
||||
* Used for YAML 1.1 !!merge << handling.
|
||||
*/
|
||||
addToJSMap?: (ctx: ToJSContext | undefined, map: MapLike, value: unknown) => void;
|
||||
/** A plain JS representation of this node */
|
||||
abstract toJSON(): any;
|
||||
abstract toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
|
||||
constructor(type: symbol);
|
||||
/** Create a copy of this node. */
|
||||
clone(): NodeBase;
|
||||
/** A plain JavaScript representation of this node. */
|
||||
toJS(doc: Document<Node, boolean>, { mapAsMap, maxAliasCount, onAnchor, reviver }?: ToJSOptions): any;
|
||||
}
|
40
node_modules/yaml/dist/nodes/Node.js
generated
vendored
Normal file
40
node_modules/yaml/dist/nodes/Node.js
generated
vendored
Normal file
@ -0,0 +1,40 @@
|
||||
'use strict';
|
||||
|
||||
var applyReviver = require('../doc/applyReviver.js');
|
||||
var identity = require('./identity.js');
|
||||
var toJS = require('./toJS.js');
|
||||
|
||||
class NodeBase {
|
||||
constructor(type) {
|
||||
Object.defineProperty(this, identity.NODE_TYPE, { value: type });
|
||||
}
|
||||
/** Create a copy of this node. */
|
||||
clone() {
|
||||
const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));
|
||||
if (this.range)
|
||||
copy.range = this.range.slice();
|
||||
return copy;
|
||||
}
|
||||
/** A plain JavaScript representation of this node. */
|
||||
toJS(doc, { mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {
|
||||
if (!identity.isDocument(doc))
|
||||
throw new TypeError('A document argument is required');
|
||||
const ctx = {
|
||||
anchors: new Map(),
|
||||
doc,
|
||||
keep: true,
|
||||
mapAsMap: mapAsMap === true,
|
||||
mapKeyWarned: false,
|
||||
maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100
|
||||
};
|
||||
const res = toJS.toJS(this, '', ctx);
|
||||
if (typeof onAnchor === 'function')
|
||||
for (const { count, res } of ctx.anchors.values())
|
||||
onAnchor(res, count);
|
||||
return typeof reviver === 'function'
|
||||
? applyReviver.applyReviver(reviver, { '': res }, '', res)
|
||||
: res;
|
||||
}
|
||||
}
|
||||
|
||||
exports.NodeBase = NodeBase;
|
22
node_modules/yaml/dist/nodes/Pair.d.ts
generated
vendored
Normal file
22
node_modules/yaml/dist/nodes/Pair.d.ts
generated
vendored
Normal file
@ -0,0 +1,22 @@
|
||||
import type { CreateNodeContext } from '../doc/createNode';
|
||||
import type { CollectionItem } from '../parse/cst';
|
||||
import type { Schema } from '../schema/Schema';
|
||||
import type { StringifyContext } from '../stringify/stringify';
|
||||
import { addPairToJSMap } from './addPairToJSMap';
|
||||
import { NODE_TYPE } from './identity';
|
||||
import type { Node } from './Node';
|
||||
import type { ToJSContext } from './toJS';
|
||||
export declare function createPair(key: unknown, value: unknown, ctx: CreateNodeContext): Pair<Node, Node>;
|
||||
export declare class Pair<K = unknown, V = unknown> {
|
||||
readonly [NODE_TYPE]: symbol;
|
||||
/** Always Node or null when parsed, but can be set to anything. */
|
||||
key: K;
|
||||
/** Always Node or null when parsed, but can be set to anything. */
|
||||
value: V | null;
|
||||
/** The CST token that was composed into this pair. */
|
||||
srcToken?: CollectionItem;
|
||||
constructor(key: K, value?: V | null);
|
||||
clone(schema?: Schema): Pair<K, V>;
|
||||
toJSON(_?: unknown, ctx?: ToJSContext): ReturnType<typeof addPairToJSMap>;
|
||||
toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
|
||||
}
|
39
node_modules/yaml/dist/nodes/Pair.js
generated
vendored
Normal file
39
node_modules/yaml/dist/nodes/Pair.js
generated
vendored
Normal file
@ -0,0 +1,39 @@
|
||||
'use strict';
|
||||
|
||||
var createNode = require('../doc/createNode.js');
|
||||
var stringifyPair = require('../stringify/stringifyPair.js');
|
||||
var addPairToJSMap = require('./addPairToJSMap.js');
|
||||
var identity = require('./identity.js');
|
||||
|
||||
function createPair(key, value, ctx) {
|
||||
const k = createNode.createNode(key, undefined, ctx);
|
||||
const v = createNode.createNode(value, undefined, ctx);
|
||||
return new Pair(k, v);
|
||||
}
|
||||
class Pair {
|
||||
constructor(key, value = null) {
|
||||
Object.defineProperty(this, identity.NODE_TYPE, { value: identity.PAIR });
|
||||
this.key = key;
|
||||
this.value = value;
|
||||
}
|
||||
clone(schema) {
|
||||
let { key, value } = this;
|
||||
if (identity.isNode(key))
|
||||
key = key.clone(schema);
|
||||
if (identity.isNode(value))
|
||||
value = value.clone(schema);
|
||||
return new Pair(key, value);
|
||||
}
|
||||
toJSON(_, ctx) {
|
||||
const pair = ctx?.mapAsMap ? new Map() : {};
|
||||
return addPairToJSMap.addPairToJSMap(ctx, pair, this);
|
||||
}
|
||||
toString(ctx, onComment, onChompKeep) {
|
||||
return ctx?.doc
|
||||
? stringifyPair.stringifyPair(this, ctx, onComment, onChompKeep)
|
||||
: JSON.stringify(this);
|
||||
}
|
||||
}
|
||||
|
||||
exports.Pair = Pair;
|
||||
exports.createPair = createPair;
|
43
node_modules/yaml/dist/nodes/Scalar.d.ts
generated
vendored
Normal file
43
node_modules/yaml/dist/nodes/Scalar.d.ts
generated
vendored
Normal file
@ -0,0 +1,43 @@
|
||||
import type { BlockScalar, FlowScalar } from '../parse/cst';
|
||||
import type { Range } from './Node';
|
||||
import { NodeBase } from './Node';
|
||||
import type { ToJSContext } from './toJS';
|
||||
export declare const isScalarValue: (value: unknown) => boolean;
|
||||
export declare namespace Scalar {
|
||||
interface Parsed extends Scalar {
|
||||
range: Range;
|
||||
source: string;
|
||||
srcToken?: FlowScalar | BlockScalar;
|
||||
}
|
||||
type BLOCK_FOLDED = 'BLOCK_FOLDED';
|
||||
type BLOCK_LITERAL = 'BLOCK_LITERAL';
|
||||
type PLAIN = 'PLAIN';
|
||||
type QUOTE_DOUBLE = 'QUOTE_DOUBLE';
|
||||
type QUOTE_SINGLE = 'QUOTE_SINGLE';
|
||||
type Type = BLOCK_FOLDED | BLOCK_LITERAL | PLAIN | QUOTE_DOUBLE | QUOTE_SINGLE;
|
||||
}
|
||||
export declare class Scalar<T = unknown> extends NodeBase {
|
||||
static readonly BLOCK_FOLDED = "BLOCK_FOLDED";
|
||||
static readonly BLOCK_LITERAL = "BLOCK_LITERAL";
|
||||
static readonly PLAIN = "PLAIN";
|
||||
static readonly QUOTE_DOUBLE = "QUOTE_DOUBLE";
|
||||
static readonly QUOTE_SINGLE = "QUOTE_SINGLE";
|
||||
value: T;
|
||||
/** An optional anchor on this node. Used by alias nodes. */
|
||||
anchor?: string;
|
||||
/**
|
||||
* By default (undefined), numbers use decimal notation.
|
||||
* The YAML 1.2 core schema only supports 'HEX' and 'OCT'.
|
||||
* The YAML 1.1 schema also supports 'BIN' and 'TIME'
|
||||
*/
|
||||
format?: string;
|
||||
/** If `value` is a number, use this value when stringifying this node. */
|
||||
minFractionDigits?: number;
|
||||
/** Set during parsing to the source string value */
|
||||
source?: string;
|
||||
/** The scalar style used for the node's string representation */
|
||||
type?: Scalar.Type;
|
||||
constructor(value: T);
|
||||
toJSON(arg?: any, ctx?: ToJSContext): any;
|
||||
toString(): string;
|
||||
}
|
27
node_modules/yaml/dist/nodes/Scalar.js
generated
vendored
Normal file
27
node_modules/yaml/dist/nodes/Scalar.js
generated
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
'use strict';
|
||||
|
||||
var identity = require('./identity.js');
|
||||
var Node = require('./Node.js');
|
||||
var toJS = require('./toJS.js');
|
||||
|
||||
const isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object');
|
||||
class Scalar extends Node.NodeBase {
|
||||
constructor(value) {
|
||||
super(identity.SCALAR);
|
||||
this.value = value;
|
||||
}
|
||||
toJSON(arg, ctx) {
|
||||
return ctx?.keep ? this.value : toJS.toJS(this.value, arg, ctx);
|
||||
}
|
||||
toString() {
|
||||
return String(this.value);
|
||||
}
|
||||
}
|
||||
Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED';
|
||||
Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL';
|
||||
Scalar.PLAIN = 'PLAIN';
|
||||
Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE';
|
||||
Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE';
|
||||
|
||||
exports.Scalar = Scalar;
|
||||
exports.isScalarValue = isScalarValue;
|
53
node_modules/yaml/dist/nodes/YAMLMap.d.ts
generated
vendored
Normal file
53
node_modules/yaml/dist/nodes/YAMLMap.d.ts
generated
vendored
Normal file
@ -0,0 +1,53 @@
|
||||
import type { BlockMap, FlowCollection } from '../parse/cst';
|
||||
import type { Schema } from '../schema/Schema';
|
||||
import type { StringifyContext } from '../stringify/stringify';
|
||||
import type { CreateNodeContext } from '../util';
|
||||
import { Collection } from './Collection';
|
||||
import type { ParsedNode, Range } from './Node';
|
||||
import { Pair } from './Pair';
|
||||
import type { Scalar } from './Scalar';
|
||||
import type { ToJSContext } from './toJS';
|
||||
export type MapLike = Map<unknown, unknown> | Set<unknown> | Record<string | number | symbol, unknown>;
|
||||
export declare function findPair<K = unknown, V = unknown>(items: Iterable<Pair<K, V>>, key: unknown): Pair<K, V> | undefined;
|
||||
export declare namespace YAMLMap {
|
||||
interface Parsed<K extends ParsedNode = ParsedNode, V extends ParsedNode | null = ParsedNode | null> extends YAMLMap<K, V> {
|
||||
items: Pair<K, V>[];
|
||||
range: Range;
|
||||
srcToken?: BlockMap | FlowCollection;
|
||||
}
|
||||
}
|
||||
export declare class YAMLMap<K = unknown, V = unknown> extends Collection {
|
||||
static get tagName(): 'tag:yaml.org,2002:map';
|
||||
items: Pair<K, V>[];
|
||||
constructor(schema?: Schema);
|
||||
/**
|
||||
* A generic collection parsing method that can be extended
|
||||
* to other node classes that inherit from YAMLMap
|
||||
*/
|
||||
static from(schema: Schema, obj: unknown, ctx: CreateNodeContext): YAMLMap;
|
||||
/**
|
||||
* Adds a value to the collection.
|
||||
*
|
||||
* @param overwrite - If not set `true`, using a key that is already in the
|
||||
* collection will throw. Otherwise, overwrites the previous value.
|
||||
*/
|
||||
add(pair: Pair<K, V> | {
|
||||
key: K;
|
||||
value: V;
|
||||
}, overwrite?: boolean): void;
|
||||
delete(key: unknown): boolean;
|
||||
get(key: unknown, keepScalar: true): Scalar<V> | undefined;
|
||||
get(key: unknown, keepScalar?: false): V | undefined;
|
||||
get(key: unknown, keepScalar?: boolean): V | Scalar<V> | undefined;
|
||||
has(key: unknown): boolean;
|
||||
set(key: K, value: V): void;
|
||||
/**
|
||||
* @param ctx - Conversion context, originally set in Document#toJS()
|
||||
* @param {Class} Type - If set, forces the returned collection type
|
||||
* @returns Instance of Type, Map, or Object
|
||||
*/
|
||||
toJSON<T extends MapLike = Map<unknown, unknown>>(_?: unknown, ctx?: ToJSContext, Type?: {
|
||||
new (): T;
|
||||
}): any;
|
||||
toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
|
||||
}
|
147
node_modules/yaml/dist/nodes/YAMLMap.js
generated
vendored
Normal file
147
node_modules/yaml/dist/nodes/YAMLMap.js
generated
vendored
Normal file
@ -0,0 +1,147 @@
|
||||
'use strict';
|
||||
|
||||
var stringifyCollection = require('../stringify/stringifyCollection.js');
|
||||
var addPairToJSMap = require('./addPairToJSMap.js');
|
||||
var Collection = require('./Collection.js');
|
||||
var identity = require('./identity.js');
|
||||
var Pair = require('./Pair.js');
|
||||
var Scalar = require('./Scalar.js');
|
||||
|
||||
function findPair(items, key) {
|
||||
const k = identity.isScalar(key) ? key.value : key;
|
||||
for (const it of items) {
|
||||
if (identity.isPair(it)) {
|
||||
if (it.key === key || it.key === k)
|
||||
return it;
|
||||
if (identity.isScalar(it.key) && it.key.value === k)
|
||||
return it;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
/** Collection node representing a YAML mapping of key/value pairs. */
class YAMLMap extends Collection.Collection {
    static get tagName() {
        return 'tag:yaml.org,2002:map';
    }
    constructor(schema) {
        super(identity.MAP, schema);
        // Pairs in document order; may be re-sorted by schema.sortMapEntries.
        this.items = [];
    }
    /**
     * A generic collection parsing method that can be extended
     * to other node classes that inherit from YAMLMap
     */
    static from(schema, obj, ctx) {
        const { keepUndefined, replacer } = ctx;
        const map = new this(schema);
        // Add one entry, honouring a JSON.stringify-style replacer and the
        // keepUndefined option (which retains entries with undefined values).
        const add = (key, value) => {
            if (typeof replacer === 'function')
                value = replacer.call(obj, key, value);
            else if (Array.isArray(replacer) && !replacer.includes(key))
                return;
            if (value !== undefined || keepUndefined)
                map.items.push(Pair.createPair(key, value, ctx));
        };
        if (obj instanceof Map) {
            for (const [key, value] of obj)
                add(key, value);
        }
        else if (obj && typeof obj === 'object') {
            for (const key of Object.keys(obj))
                add(key, obj[key]);
        }
        if (typeof schema.sortMapEntries === 'function') {
            map.items.sort(schema.sortMapEntries);
        }
        return map;
    }
    /**
     * Adds a value to the collection.
     *
     * @param overwrite - If not set `true`, using a key that is already in the
     * collection will throw. Otherwise, overwrites the previous value.
     */
    add(pair, overwrite) {
        // Normalise the argument to a Pair node.
        let _pair;
        if (identity.isPair(pair))
            _pair = pair;
        else if (!pair || typeof pair !== 'object' || !('key' in pair)) {
            // In TypeScript, this never happens.
            _pair = new Pair.Pair(pair, pair?.value);
        }
        else
            _pair = new Pair.Pair(pair.key, pair.value);
        const prev = findPair(this.items, _pair.key);
        const sortEntries = this.schema?.sortMapEntries;
        if (prev) {
            if (!overwrite)
                throw new Error(`Key ${_pair.key} already set`);
            // For scalars, keep the old node & its comments and anchors
            if (identity.isScalar(prev.value) && Scalar.isScalarValue(_pair.value))
                prev.value.value = _pair.value;
            else
                prev.value = _pair.value;
        }
        else if (sortEntries) {
            // Insert at the first position that keeps the items sorted.
            const i = this.items.findIndex(item => sortEntries(_pair, item) < 0);
            if (i === -1)
                this.items.push(_pair);
            else
                this.items.splice(i, 0, _pair);
        }
        else {
            this.items.push(_pair);
        }
    }
    /**
     * Removes the pair matching `key` from the collection.
     * @returns `true` if a pair was found and removed.
     */
    delete(key) {
        const it = findPair(this.items, key);
        if (!it)
            return false;
        const del = this.items.splice(this.items.indexOf(it), 1);
        return del.length > 0;
    }
    /**
     * Returns the value for `key`, or `undefined` if not found. By default
     * unwraps scalar values; set `keepScalar` to `true` to get the node itself.
     */
    get(key, keepScalar) {
        const it = findPair(this.items, key);
        const node = it?.value;
        return (!keepScalar && identity.isScalar(node) ? node.value : node) ?? undefined;
    }
    /** Checks if the collection includes a pair with the key `key`. */
    has(key) {
        return !!findPair(this.items, key);
    }
    /** Sets `value` for `key`, overwriting any previous value. */
    set(key, value) {
        this.add(new Pair.Pair(key, value), true);
    }
    /**
     * @param ctx - Conversion context, originally set in Document#toJS()
     * @param {Class} Type - If set, forces the returned collection type
     * @returns Instance of Type, Map, or Object
     */
    toJSON(_, ctx, Type) {
        const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {};
        // Register the container before resolving items, so aliases to this
        // map encountered during conversion can reuse it.
        if (ctx?.onCreate)
            ctx.onCreate(map);
        for (const item of this.items)
            addPairToJSMap.addPairToJSMap(ctx, map, item);
        return map;
    }
    /** Stringifies this map as YAML, or as JSON if no context is given. */
    toString(ctx, onComment, onChompKeep) {
        if (!ctx)
            return JSON.stringify(this);
        for (const item of this.items) {
            if (!identity.isPair(item))
                throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`);
        }
        if (!ctx.allNullValues && this.hasAllNullValues(false))
            ctx = Object.assign({}, ctx, { allNullValues: true });
        return stringifyCollection.stringifyCollection(this, ctx, {
            blockItemPrefix: '',
            flowChars: { start: '{', end: '}' },
            itemIndent: ctx.indent || '',
            onChompKeep,
            onComment
        });
    }
}
|
||||
|
||||
// CommonJS exports (nodes/YAMLMap.js)
exports.YAMLMap = YAMLMap;
exports.findPair = findPair;
|
60
node_modules/yaml/dist/nodes/YAMLSeq.d.ts
generated
vendored
Normal file
60
node_modules/yaml/dist/nodes/YAMLSeq.d.ts
generated
vendored
Normal file
@ -0,0 +1,60 @@
|
||||
import type { CreateNodeContext } from '../doc/createNode';
|
||||
import type { BlockSequence, FlowCollection } from '../parse/cst';
|
||||
import type { Schema } from '../schema/Schema';
|
||||
import type { StringifyContext } from '../stringify/stringify';
|
||||
import { Collection } from './Collection';
|
||||
import type { ParsedNode, Range } from './Node';
|
||||
import type { Pair } from './Pair';
|
||||
import type { Scalar } from './Scalar';
|
||||
import type { ToJSContext } from './toJS';
|
||||
export declare namespace YAMLSeq {
    /** A YAMLSeq resolved from parsed source, including its position range. */
    interface Parsed<T extends ParsedNode | Pair<ParsedNode, ParsedNode | null> = ParsedNode> extends YAMLSeq<T> {
        items: T[];
        range: Range;
        /** The CST token composed into this node (set if `keepSourceTokens` is enabled). */
        srcToken?: BlockSequence | FlowCollection;
    }
}
/** Collection node representing a YAML sequence of items. */
export declare class YAMLSeq<T = unknown> extends Collection {
    static get tagName(): 'tag:yaml.org,2002:seq';
    items: T[];
    constructor(schema?: Schema);
    /** Appends `value` to the end of the sequence. */
    add(value: T): void;
    /**
     * Removes a value from the collection.
     *
     * `key` must contain a representation of an integer for this to succeed.
     * It may be wrapped in a `Scalar`.
     *
     * @returns `true` if the item was found and removed.
     */
    delete(key: unknown): boolean;
    /**
     * Returns item at `key`, or `undefined` if not found. By default unwraps
     * scalar values from their surrounding node; to disable set `keepScalar` to
     * `true` (collections are always returned intact).
     *
     * `key` must contain a representation of an integer for this to succeed.
     * It may be wrapped in a `Scalar`.
     */
    get(key: unknown, keepScalar: true): Scalar<T> | undefined;
    get(key: unknown, keepScalar?: false): T | undefined;
    get(key: unknown, keepScalar?: boolean): T | Scalar<T> | undefined;
    /**
     * Checks if the collection includes a value with the key `key`.
     *
     * `key` must contain a representation of an integer for this to succeed.
     * It may be wrapped in a `Scalar`.
     */
    has(key: unknown): boolean;
    /**
     * Sets a value in this collection. For `!!set`, `value` needs to be a
     * boolean to add/remove the item from the set.
     *
     * If `key` does not contain a representation of an integer, this will throw.
     * It may be wrapped in a `Scalar`.
     */
    set(key: unknown, value: T): void;
    /** Converts the sequence to an array of native JS values. */
    toJSON(_?: unknown, ctx?: ToJSContext): unknown[];
    /** Stringifies the sequence as YAML, or as JSON if `ctx` is not given. */
    toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
    /** Creates a YAMLSeq from an iterable `obj`, applying `ctx.replacer` if set. */
    static from(schema: Schema, obj: unknown, ctx: CreateNodeContext): YAMLSeq;
}
|
115
node_modules/yaml/dist/nodes/YAMLSeq.js
generated
vendored
Normal file
115
node_modules/yaml/dist/nodes/YAMLSeq.js
generated
vendored
Normal file
@ -0,0 +1,115 @@
|
||||
'use strict';
|
||||
|
||||
var createNode = require('../doc/createNode.js');
|
||||
var stringifyCollection = require('../stringify/stringifyCollection.js');
|
||||
var Collection = require('./Collection.js');
|
||||
var identity = require('./identity.js');
|
||||
var Scalar = require('./Scalar.js');
|
||||
var toJS = require('./toJS.js');
|
||||
|
||||
/** Collection node representing a YAML sequence of items. */
class YAMLSeq extends Collection.Collection {
    static get tagName() {
        return 'tag:yaml.org,2002:seq';
    }
    constructor(schema) {
        super(identity.SEQ, schema);
        // Sequence items in document order.
        this.items = [];
    }
    /** Appends `value` to the end of the sequence. */
    add(value) {
        this.items.push(value);
    }
    /**
     * Removes a value from the collection.
     *
     * `key` must contain a representation of an integer for this to succeed.
     * It may be wrapped in a `Scalar`.
     *
     * @returns `true` if the item was found and removed.
     */
    delete(key) {
        const idx = asItemIndex(key);
        if (typeof idx !== 'number')
            return false;
        const del = this.items.splice(idx, 1);
        return del.length > 0;
    }
    /**
     * Returns the item at `key`, or `undefined` if not found. By default
     * unwraps scalar values; set `keepScalar` to `true` to get the node itself.
     */
    get(key, keepScalar) {
        const idx = asItemIndex(key);
        if (typeof idx !== 'number')
            return undefined;
        const it = this.items[idx];
        return !keepScalar && identity.isScalar(it) ? it.value : it;
    }
    /**
     * Checks if the collection includes a value with the key `key`.
     *
     * `key` must contain a representation of an integer for this to succeed.
     * It may be wrapped in a `Scalar`.
     */
    has(key) {
        const idx = asItemIndex(key);
        return typeof idx === 'number' && idx < this.items.length;
    }
    /**
     * Sets a value in this collection. For `!!set`, `value` needs to be a
     * boolean to add/remove the item from the set.
     *
     * If `key` does not contain a representation of an integer, this will throw.
     * It may be wrapped in a `Scalar`.
     */
    set(key, value) {
        const idx = asItemIndex(key);
        if (typeof idx !== 'number')
            throw new Error(`Expected a valid index, not ${key}.`);
        const prev = this.items[idx];
        // For scalars, keep the old node (with its comments and anchors) and
        // only replace its value.
        if (identity.isScalar(prev) && Scalar.isScalarValue(value))
            prev.value = value;
        else
            this.items[idx] = value;
    }
    /** Converts the sequence to an array of native JS values. */
    toJSON(_, ctx) {
        const seq = [];
        // Register the container before resolving items, so aliases to this
        // sequence encountered during conversion can reuse it.
        if (ctx?.onCreate)
            ctx.onCreate(seq);
        let i = 0;
        for (const item of this.items)
            seq.push(toJS.toJS(item, String(i++), ctx));
        return seq;
    }
    /** Stringifies the sequence as YAML, or as JSON if no context is given. */
    toString(ctx, onComment, onChompKeep) {
        if (!ctx)
            return JSON.stringify(this);
        return stringifyCollection.stringifyCollection(this, ctx, {
            blockItemPrefix: '- ',
            flowChars: { start: '[', end: ']' },
            itemIndent: (ctx.indent || '') + '  ',
            onChompKeep,
            onComment
        });
    }
    /** Creates a YAMLSeq from an iterable `obj`, applying `ctx.replacer` if set. */
    static from(schema, obj, ctx) {
        const { replacer } = ctx;
        const seq = new this(schema);
        if (obj && Symbol.iterator in Object(obj)) {
            let i = 0;
            for (let it of obj) {
                if (typeof replacer === 'function') {
                    // Sets pass the value itself as the replacer key; other
                    // iterables pass the stringified index.
                    const key = obj instanceof Set ? it : String(i++);
                    it = replacer.call(obj, key, it);
                }
                seq.items.push(createNode.createNode(it, undefined, ctx));
            }
        }
        return seq;
    }
}
|
||||
/**
 * Convert a sequence key into a non-negative integer index.
 *
 * Accepts a number, a numeric string, or a Scalar wrapping either.
 *
 * @returns The index, or `null` if `key` does not represent one.
 */
function asItemIndex(key) {
    const raw = identity.isScalar(key) ? key.value : key;
    // Coerce numeric strings; the truthiness guard keeps '' from becoming 0.
    const num = raw && typeof raw === 'string' ? Number(raw) : raw;
    return typeof num === 'number' && Number.isInteger(num) && num >= 0
        ? num
        : null;
}
|
||||
|
||||
// CommonJS export (nodes/YAMLSeq.js)
exports.YAMLSeq = YAMLSeq;
|
4
node_modules/yaml/dist/nodes/addPairToJSMap.d.ts
generated
vendored
Normal file
4
node_modules/yaml/dist/nodes/addPairToJSMap.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
import type { Pair } from './Pair';
|
||||
import type { ToJSContext } from './toJS';
|
||||
import type { MapLike } from './YAMLMap';
|
||||
/** Resolve the key and value of a Pair to JS and add them to `map`. */
export declare function addPairToJSMap(ctx: ToJSContext | undefined, map: MapLike, { key, value }: Pair): MapLike;
|
65
node_modules/yaml/dist/nodes/addPairToJSMap.js
generated
vendored
Normal file
65
node_modules/yaml/dist/nodes/addPairToJSMap.js
generated
vendored
Normal file
@ -0,0 +1,65 @@
|
||||
'use strict';
|
||||
|
||||
var log = require('../log.js');
|
||||
var merge = require('../schema/yaml-1.1/merge.js');
|
||||
var stringify = require('../stringify/stringify.js');
|
||||
var identity = require('./identity.js');
|
||||
var toJS = require('./toJS.js');
|
||||
|
||||
/**
 * Resolve a pair's key and value to JS and add them to `map`, which may be
 * a Map, a Set (the value is ignored), or a plain object.
 */
function addPairToJSMap(ctx, map, { key, value }) {
    // A key node may define its own insertion behaviour (e.g. merge keys).
    if (identity.isNode(key) && key.addToJSMap)
        key.addToJSMap(ctx, map, value);
    // TODO: Should drop this special case for bare << handling
    else if (merge.isMergeKey(ctx, key))
        merge.addMergeToJSMap(ctx, map, value);
    else {
        const jsKey = toJS.toJS(key, '', ctx);
        if (map instanceof Map) {
            map.set(jsKey, toJS.toJS(value, jsKey, ctx));
        }
        else if (map instanceof Set) {
            map.add(jsKey);
        }
        else {
            // Plain object: keys must be strings.
            const stringKey = stringifyKey(key, jsKey, ctx);
            const jsValue = toJS.toJS(value, stringKey, ctx);
            if (stringKey in map)
                // NOTE(review): defineProperty rather than assignment when the
                // key already exists — presumably to bypass inherited setters
                // and non-writable properties; confirm against upstream intent.
                Object.defineProperty(map, stringKey, {
                    value: jsValue,
                    writable: true,
                    enumerable: true,
                    configurable: true
                });
            else
                map[stringKey] = jsValue;
        }
    }
    return map;
}
|
||||
/**
 * Derive the string to use as a plain-object key for `key`.
 *
 * Non-object keys are stringified directly; node keys with object values are
 * stringified as single-line flow YAML (warning once per conversion), with a
 * JSON.stringify fallback for everything else.
 */
function stringifyKey(key, jsKey, ctx) {
    if (jsKey === null)
        return '';
    // eslint-disable-next-line @typescript-eslint/no-base-to-string
    if (typeof jsKey !== 'object')
        return String(jsKey);
    if (identity.isNode(key) && ctx?.doc) {
        const strCtx = stringify.createStringifyContext(ctx.doc, {});
        // Make all anchors seen so far available to the key stringifier.
        strCtx.anchors = new Set();
        for (const node of ctx.anchors.keys())
            strCtx.anchors.add(node.anchor);
        strCtx.inFlow = true;
        strCtx.inStringifyKey = true;
        const strKey = key.toString(strCtx);
        // Warn only once per conversion, truncating long keys for the log.
        if (!ctx.mapKeyWarned) {
            let jsonStr = JSON.stringify(strKey);
            if (jsonStr.length > 40)
                jsonStr = jsonStr.substring(0, 36) + '..."';
            log.warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`);
            ctx.mapKeyWarned = true;
        }
        return strKey;
    }
    return JSON.stringify(jsKey);
}
|
||||
|
||||
// CommonJS export (nodes/addPairToJSMap.js)
exports.addPairToJSMap = addPairToJSMap;
|
23
node_modules/yaml/dist/nodes/identity.d.ts
generated
vendored
Normal file
23
node_modules/yaml/dist/nodes/identity.d.ts
generated
vendored
Normal file
@ -0,0 +1,23 @@
|
||||
import type { Document } from '../doc/Document';
|
||||
import type { Alias } from './Alias';
|
||||
import type { Node } from './Node';
|
||||
import type { Pair } from './Pair';
|
||||
import type { Scalar } from './Scalar';
|
||||
import type { YAMLMap } from './YAMLMap';
|
||||
import type { YAMLSeq } from './YAMLSeq';
|
||||
/* Node-type tag symbols (registered in the global registry via Symbol.for)
   and the corresponding type-guard predicates for YAML document nodes. */
export declare const ALIAS: unique symbol;
export declare const DOC: unique symbol;
export declare const MAP: unique symbol;
export declare const PAIR: unique symbol;
export declare const SCALAR: unique symbol;
export declare const SEQ: unique symbol;
/** Property key under which a node stores its type tag. */
export declare const NODE_TYPE: unique symbol;
export declare const isAlias: (node: any) => node is Alias;
export declare const isDocument: <T extends Node = Node>(node: any) => node is Document<T>;
export declare const isMap: <K = unknown, V = unknown>(node: any) => node is YAMLMap<K, V>;
export declare const isPair: <K = unknown, V = unknown>(node: any) => node is Pair<K, V>;
export declare const isScalar: <T = unknown>(node: any) => node is Scalar<T>;
export declare const isSeq: <T = unknown>(node: any) => node is YAMLSeq<T>;
/** True for maps and sequences. */
export declare function isCollection<K = unknown, V = unknown>(node: any): node is YAMLMap<K, V> | YAMLSeq<V>;
/** True for aliases, maps, scalars and sequences; false for documents and pairs. */
export declare function isNode<T = unknown>(node: any): node is Node<T>;
/** True if `node` is a scalar or collection with a non-empty anchor. */
export declare const hasAnchor: <K = unknown, V = unknown>(node: unknown) => node is Scalar<V> | YAMLMap<K, V> | YAMLSeq<V>;
|
53
node_modules/yaml/dist/nodes/identity.js
generated
vendored
Normal file
53
node_modules/yaml/dist/nodes/identity.js
generated
vendored
Normal file
@ -0,0 +1,53 @@
|
||||
'use strict';
|
||||
|
||||
// Node-type tags, registered in the global symbol registry (Symbol.for) and
// stored on each node under NODE_TYPE.
const ALIAS = Symbol.for('yaml.alias');
const DOC = Symbol.for('yaml.document');
const MAP = Symbol.for('yaml.map');
const PAIR = Symbol.for('yaml.pair');
const SCALAR = Symbol.for('yaml.scalar');
const SEQ = Symbol.for('yaml.seq');
const NODE_TYPE = Symbol.for('yaml.node.type');
// Type guards, each checking the NODE_TYPE tag of a candidate object.
const isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS;
const isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC;
const isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP;
const isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR;
const isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR;
const isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ;
/** True for maps and sequences. */
function isCollection(node) {
    if (node && typeof node === 'object')
        switch (node[NODE_TYPE]) {
            case MAP:
            case SEQ:
                return true;
        }
    return false;
}
/** True for aliases, maps, scalars and sequences; false for documents and pairs. */
function isNode(node) {
    if (node && typeof node === 'object')
        switch (node[NODE_TYPE]) {
            case ALIAS:
            case MAP:
            case SCALAR:
            case SEQ:
                return true;
        }
    return false;
}
// Only scalars and collections may carry an anchor.
const hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor;

exports.ALIAS = ALIAS;
exports.DOC = DOC;
exports.MAP = MAP;
exports.NODE_TYPE = NODE_TYPE;
exports.PAIR = PAIR;
exports.SCALAR = SCALAR;
exports.SEQ = SEQ;
exports.hasAnchor = hasAnchor;
exports.isAlias = isAlias;
exports.isCollection = isCollection;
exports.isDocument = isDocument;
exports.isMap = isMap;
exports.isNode = isNode;
exports.isPair = isPair;
exports.isScalar = isScalar;
exports.isSeq = isSeq;
|
29
node_modules/yaml/dist/nodes/toJS.d.ts
generated
vendored
Normal file
29
node_modules/yaml/dist/nodes/toJS.d.ts
generated
vendored
Normal file
@ -0,0 +1,29 @@
|
||||
import type { Document } from '../doc/Document';
|
||||
import type { Node } from './Node';
|
||||
/** Per-anchor bookkeeping used while converting a document to JS. */
export interface AnchorData {
    // NOTE(review): appears to count alias dereferences, checked against
    // maxAliasCount — confirm against Alias.toJSON.
    aliasCount: number;
    count: number;
    /** The resolved JS value of the anchored node, set via ToJSContext.onCreate. */
    res: unknown;
}
export interface ToJSContext {
    anchors: Map<Node, AnchorData>;
    /** Cached anchor and alias nodes in the order they occur in the document */
    aliasResolveCache?: Node[];
    doc: Document<Node, boolean>;
    /** If true, keep values (e.g. bigint) that JSON cannot represent. */
    keep: boolean;
    /** Use Map rather than plain objects to represent mappings. */
    mapAsMap: boolean;
    /** Set once a collection-valued map key has triggered its warning. */
    mapKeyWarned: boolean;
    maxAliasCount: number;
    /** Called with a created collection before its contents are resolved. */
    onCreate?: (res: unknown) => void;
}
/**
 * Recursively convert any node or its contents to native JavaScript
 *
 * @param value - The input value
 * @param arg - If `value` defines a `toJSON()` method, use this
 * as its first argument
 * @param ctx - Conversion context, originally set in Document#toJS(). If
 * `{ keep: true }` is not set, output should be suitable for JSON
 * stringification.
 */
export declare function toJS(value: any, arg: string | null, ctx?: ToJSContext): any;
|
39
node_modules/yaml/dist/nodes/toJS.js
generated
vendored
Normal file
39
node_modules/yaml/dist/nodes/toJS.js
generated
vendored
Normal file
@ -0,0 +1,39 @@
|
||||
'use strict';
|
||||
|
||||
var identity = require('./identity.js');
|
||||
|
||||
/**
 * Recursively convert any node or its contents to native JavaScript
 *
 * @param value - The input value
 * @param arg - If `value` defines a `toJSON()` method, use this
 * as its first argument
 * @param ctx - Conversion context, originally set in Document#toJS(). If
 * `{ keep: true }` is not set, output should be suitable for JSON
 * stringification.
 */
function toJS(value, arg, ctx) {
    // eslint-disable-next-line @typescript-eslint/no-unsafe-return
    if (Array.isArray(value))
        return value.map((v, i) => toJS(v, String(i), ctx));
    if (value && typeof value.toJSON === 'function') {
        // eslint-disable-next-line @typescript-eslint/no-unsafe-call
        if (!ctx || !identity.hasAnchor(value))
            return value.toJSON(arg, ctx);
        // Anchored node: register it so aliases can reuse the result, and
        // capture the created value via onCreate before its contents are
        // resolved (collections call onCreate first in their toJSON).
        const data = { aliasCount: 0, count: 1, res: undefined };
        ctx.anchors.set(value, data);
        ctx.onCreate = res => {
            data.res = res;
            delete ctx.onCreate;
        };
        const res = value.toJSON(arg, ctx);
        // If toJSON never called onCreate (e.g. a scalar), record the result here.
        if (ctx.onCreate)
            ctx.onCreate(res);
        return res;
    }
    // JSON cannot represent BigInt; downcast to number unless keep is set.
    if (typeof value === 'bigint' && !ctx?.keep)
        return Number(value);
    return value;
}
|
||||
|
||||
// CommonJS export (nodes/toJS.js)
exports.toJS = toJS;
|
344
node_modules/yaml/dist/options.d.ts
generated
vendored
Normal file
344
node_modules/yaml/dist/options.d.ts
generated
vendored
Normal file
@ -0,0 +1,344 @@
|
||||
import type { Reviver } from './doc/applyReviver';
|
||||
import type { Directives } from './doc/directives';
|
||||
import type { LogLevelId } from './log';
|
||||
import type { ParsedNode } from './nodes/Node';
|
||||
import type { Pair } from './nodes/Pair';
|
||||
import type { Scalar } from './nodes/Scalar';
|
||||
import type { LineCounter } from './parse/line-counter';
|
||||
import type { Schema } from './schema/Schema';
|
||||
import type { Tags } from './schema/tags';
|
||||
import type { CollectionTag, ScalarTag } from './schema/types';
|
||||
export type ParseOptions = {
|
||||
/**
|
||||
* Whether integers should be parsed into BigInt rather than number values.
|
||||
*
|
||||
* Default: `false`
|
||||
*
|
||||
* https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/BigInt
|
||||
*/
|
||||
intAsBigInt?: boolean;
|
||||
/**
|
||||
* Include a `srcToken` value on each parsed `Node`, containing the CST token
|
||||
* that was composed into this node.
|
||||
*
|
||||
* Default: `false`
|
||||
*/
|
||||
keepSourceTokens?: boolean;
|
||||
/**
|
||||
* If set, newlines will be tracked, to allow for `lineCounter.linePos(offset)`
|
||||
* to provide the `{ line, col }` positions within the input.
|
||||
*/
|
||||
lineCounter?: LineCounter;
|
||||
/**
|
||||
* Include line/col position & node type directly in parse errors.
|
||||
*
|
||||
* Default: `true`
|
||||
*/
|
||||
prettyErrors?: boolean;
|
||||
/**
|
||||
* Detect and report errors that are required by the YAML 1.2 spec,
|
||||
* but are caused by unambiguous content.
|
||||
*
|
||||
* Default: `true`
|
||||
*/
|
||||
strict?: boolean;
|
||||
/**
|
||||
* Parse all mapping keys as strings. Treat all non-scalar keys as errors.
|
||||
*
|
||||
* Default: `false`
|
||||
*/
|
||||
stringKeys?: boolean;
|
||||
/**
|
||||
* YAML requires map keys to be unique. By default, this is checked by
|
||||
* comparing scalar values with `===`; deep equality is not checked for
|
||||
* aliases or collections. If merge keys are enabled by the schema,
|
||||
* multiple `<<` keys are allowed.
|
||||
*
|
||||
* Set `false` to disable, or provide your own comparator function to
|
||||
* customise. The comparator will be passed two `ParsedNode` values, and
|
||||
* is expected to return a `boolean` indicating their equality.
|
||||
*
|
||||
* Default: `true`
|
||||
*/
|
||||
uniqueKeys?: boolean | ((a: ParsedNode, b: ParsedNode) => boolean);
|
||||
};
|
||||
export type DocumentOptions = {
|
||||
/**
|
||||
* @internal
|
||||
* Used internally by Composer. If set and includes an explicit version,
|
||||
* that overrides the `version` option.
|
||||
*/
|
||||
_directives?: Directives;
|
||||
/**
|
||||
* Control the logging level during parsing
|
||||
*
|
||||
* Default: `'warn'`
|
||||
*/
|
||||
logLevel?: LogLevelId;
|
||||
/**
|
||||
* The YAML version used by documents without a `%YAML` directive.
|
||||
*
|
||||
* Default: `"1.2"`
|
||||
*/
|
||||
version?: '1.1' | '1.2' | 'next';
|
||||
};
|
||||
export type SchemaOptions = {
|
||||
/**
|
||||
* When parsing, warn about compatibility issues with the given schema.
|
||||
* When stringifying, use scalar styles that are parsed correctly
|
||||
* by the `compat` schema as well as the actual schema.
|
||||
*
|
||||
* Default: `null`
|
||||
*/
|
||||
compat?: string | Tags | null;
|
||||
/**
|
||||
* Array of additional tags to include in the schema, or a function that may
|
||||
* modify the schema's base tag array.
|
||||
*/
|
||||
customTags?: Tags | ((tags: Tags) => Tags) | null;
|
||||
/**
|
||||
* Enable support for `<<` merge keys.
|
||||
*
|
||||
* Default: `false` for YAML 1.2, `true` for earlier versions
|
||||
*/
|
||||
merge?: boolean;
|
||||
/**
|
||||
* When using the `'core'` schema, support parsing values with these
|
||||
* explicit YAML 1.1 tags:
|
||||
*
|
||||
* `!!binary`, `!!omap`, `!!pairs`, `!!set`, `!!timestamp`.
|
||||
*
|
||||
 * Default: `true`
|
||||
*/
|
||||
resolveKnownTags?: boolean;
|
||||
/**
|
||||
* The base schema to use.
|
||||
*
|
||||
* The core library has built-in support for the following:
|
||||
* - `'failsafe'`: A minimal schema that parses all scalars as strings
|
||||
* - `'core'`: The YAML 1.2 core schema
|
||||
* - `'json'`: The YAML 1.2 JSON schema, with minimal rules for JSON compatibility
|
||||
* - `'yaml-1.1'`: The YAML 1.1 schema
|
||||
*
|
||||
* If using another (custom) schema, the `customTags` array needs to
|
||||
* fully define the schema's tags.
|
||||
*
|
||||
* Default: `'core'` for YAML 1.2, `'yaml-1.1'` for earlier versions
|
||||
*/
|
||||
schema?: string | Schema;
|
||||
/**
|
||||
* When adding to or stringifying a map, sort the entries.
|
||||
* If `true`, sort by comparing key values with `<`.
|
||||
* Does not affect item order when parsing.
|
||||
*
|
||||
* Default: `false`
|
||||
*/
|
||||
sortMapEntries?: boolean | ((a: Pair, b: Pair) => number);
|
||||
/**
|
||||
* Override default values for `toString()` options.
|
||||
*/
|
||||
toStringDefaults?: ToStringOptions;
|
||||
};
|
||||
export type CreateNodeOptions = {
|
||||
/**
|
||||
* During node construction, use anchors and aliases to keep strictly equal
|
||||
* non-null objects as equivalent in YAML.
|
||||
*
|
||||
* Default: `true`
|
||||
*/
|
||||
aliasDuplicateObjects?: boolean;
|
||||
/**
|
||||
* Default prefix for anchors.
|
||||
*
|
||||
* Default: `'a'`, resulting in anchors `a1`, `a2`, etc.
|
||||
*/
|
||||
anchorPrefix?: string;
|
||||
/** Force the top-level collection node to use flow style. */
|
||||
flow?: boolean;
|
||||
/**
|
||||
* Keep `undefined` object values when creating mappings, rather than
|
||||
* discarding them.
|
||||
*
|
||||
* Default: `false`
|
||||
*/
|
||||
keepUndefined?: boolean | null;
|
||||
onTagObj?: (tagObj: ScalarTag | CollectionTag) => void;
|
||||
/**
|
||||
* Specify the top-level collection type, e.g. `"!!omap"`. Note that this
|
||||
* requires the corresponding tag to be available in this document's schema.
|
||||
*/
|
||||
tag?: string;
|
||||
};
|
||||
export type ToJSOptions = {
|
||||
/**
|
||||
* Use Map rather than Object to represent mappings.
|
||||
*
|
||||
* Default: `false`
|
||||
*/
|
||||
mapAsMap?: boolean;
|
||||
/**
|
||||
* Prevent exponential entity expansion attacks by limiting data aliasing count;
|
||||
* set to `-1` to disable checks; `0` disallows all alias nodes.
|
||||
*
|
||||
* Default: `100`
|
||||
*/
|
||||
maxAliasCount?: number;
|
||||
/**
|
||||
* If defined, called with the resolved `value` and reference `count` for
|
||||
* each anchor in the document.
|
||||
*/
|
||||
onAnchor?: (value: unknown, count: number) => void;
|
||||
/**
|
||||
* Optional function that may filter or modify the output JS value
|
||||
*
|
||||
* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#using_the_reviver_parameter
|
||||
*/
|
||||
reviver?: Reviver;
|
||||
};
|
||||
export type ToStringOptions = {
|
||||
/**
|
||||
* Use block quote styles for scalar values where applicable.
|
||||
* Set to `false` to disable block quotes completely.
|
||||
*
|
||||
* Default: `true`
|
||||
*/
|
||||
blockQuote?: boolean | 'folded' | 'literal';
|
||||
/**
|
||||
* Enforce `'block'` or `'flow'` style on maps and sequences.
|
||||
* Empty collections will always be stringified as `{}` or `[]`.
|
||||
*
|
||||
* Default: `'any'`, allowing each node to set its style separately
|
||||
* with its `flow: boolean` (default `false`) property.
|
||||
*/
|
||||
collectionStyle?: 'any' | 'block' | 'flow';
|
||||
/**
|
||||
* Comment stringifier.
|
||||
* Output should be valid for the current schema.
|
||||
*
|
||||
* By default, empty comment lines are left empty,
|
||||
* lines consisting of a single space are replaced by `#`,
|
||||
* and all other lines are prefixed with a `#`.
|
||||
*/
|
||||
commentString?: (comment: string) => string;
|
||||
/**
|
||||
* The default type of string literal used to stringify implicit key values.
|
||||
* Output may use other types if required to fully represent the value.
|
||||
*
|
||||
* If `null`, the value of `defaultStringType` is used.
|
||||
*
|
||||
* Default: `null`
|
||||
*/
|
||||
defaultKeyType?: Scalar.Type | null;
|
||||
/**
|
||||
* The default type of string literal used to stringify values in general.
|
||||
* Output may use other types if required to fully represent the value.
|
||||
*
|
||||
* Default: `'PLAIN'`
|
||||
*/
|
||||
defaultStringType?: Scalar.Type;
|
||||
/**
|
||||
* Include directives in the output.
|
||||
*
|
||||
* - If `true`, at least the document-start marker `---` is always included.
|
||||
* This does not force the `%YAML` directive to be included. To do that,
|
||||
* set `doc.directives.yaml.explicit = true`.
|
||||
* - If `false`, no directives or marker is ever included. If using the `%TAG`
|
||||
* directive, you are expected to include it manually in the stream before
|
||||
* its use.
|
||||
* - If `null`, directives and marker may be included if required.
|
||||
*
|
||||
* Default: `null`
|
||||
*/
|
||||
directives?: boolean | null;
|
||||
/**
|
||||
* Restrict double-quoted strings to use JSON-compatible syntax.
|
||||
*
|
||||
* Default: `false`
|
||||
*/
|
||||
doubleQuotedAsJSON?: boolean;
|
||||
/**
|
||||
* Minimum length for double-quoted strings to use multiple lines to
|
||||
* represent the value. Ignored if `doubleQuotedAsJSON` is set.
|
||||
*
|
||||
* Default: `40`
|
||||
*/
|
||||
doubleQuotedMinMultiLineLength?: number;
|
||||
/**
|
||||
* String representation for `false`.
|
||||
* With the core schema, use `'false'`, `'False'`, or `'FALSE'`.
|
||||
*
|
||||
* Default: `'false'`
|
||||
*/
|
||||
falseStr?: string;
|
||||
/**
|
||||
* When true, a single space of padding will be added inside the delimiters
|
||||
* of non-empty single-line flow collections.
|
||||
*
|
||||
* Default: `true`
|
||||
*/
|
||||
flowCollectionPadding?: boolean;
|
||||
/**
|
||||
* The number of spaces to use when indenting code.
|
||||
*
|
||||
* Default: `2`
|
||||
*/
|
||||
indent?: number;
|
||||
/**
|
||||
* Whether block sequences should be indented.
|
||||
*
|
||||
* Default: `true`
|
||||
*/
|
||||
indentSeq?: boolean;
|
||||
/**
|
||||
* Maximum line width (set to `0` to disable folding).
|
||||
*
|
||||
* This is a soft limit, as only double-quoted semantics allow for inserting
|
||||
* a line break in the middle of a word, as well as being influenced by the
|
||||
* `minContentWidth` option.
|
||||
*
|
||||
* Default: `80`
|
||||
*/
|
||||
lineWidth?: number;
|
||||
/**
|
||||
* Minimum line width for highly-indented content (set to `0` to disable).
|
||||
*
|
||||
* Default: `20`
|
||||
*/
|
||||
minContentWidth?: number;
|
||||
/**
|
||||
* String representation for `null`.
|
||||
* With the core schema, use `'null'`, `'Null'`, `'NULL'`, `'~'`, or an empty
|
||||
* string `''`.
|
||||
*
|
||||
* Default: `'null'`
|
||||
*/
|
||||
nullStr?: string;
|
||||
/**
|
||||
* Require keys to be scalars and to use implicit rather than explicit notation.
|
||||
*
|
||||
* Default: `false`
|
||||
*/
|
||||
simpleKeys?: boolean;
|
||||
/**
|
||||
* Use 'single quote' rather than "double quote" where applicable.
|
||||
* Set to `false` to disable single quotes completely.
|
||||
*
|
||||
* Default: `null`
|
||||
*/
|
||||
singleQuote?: boolean | null;
|
||||
/**
|
||||
* String representation for `true`.
|
||||
* With the core schema, use `'true'`, `'True'`, or `'TRUE'`.
|
||||
*
|
||||
* Default: `'true'`
|
||||
*/
|
||||
trueStr?: string;
|
||||
/**
|
||||
* The anchor used by an alias must be defined before the alias node. As it's
|
||||
* possible for the document to be modified manually, the order may be
|
||||
* verified during stringification.
|
||||
*
|
||||
 * Default: `true`
|
||||
*/
|
||||
verifyAliasOrder?: boolean;
|
||||
};
|
64
node_modules/yaml/dist/parse/cst-scalar.d.ts
generated
vendored
Normal file
64
node_modules/yaml/dist/parse/cst-scalar.d.ts
generated
vendored
Normal file
@ -0,0 +1,64 @@
|
||||
import type { ErrorCode } from '../errors';
|
||||
import type { Range } from '../nodes/Node';
|
||||
import type { Scalar } from '../nodes/Scalar';
|
||||
import type { BlockScalar, FlowScalar, SourceToken, Token } from './cst';
|
||||
/**
|
||||
* If `token` is a CST flow or block scalar, determine its string value and a few other attributes.
|
||||
* Otherwise, return `null`.
|
||||
*/
|
||||
export declare function resolveAsScalar(token: FlowScalar | BlockScalar, strict?: boolean, onError?: (offset: number, code: ErrorCode, message: string) => void): {
|
||||
value: string;
|
||||
type: Scalar.Type | null;
|
||||
comment: string;
|
||||
range: Range;
|
||||
};
|
||||
export declare function resolveAsScalar(token: Token | null | undefined, strict?: boolean, onError?: (offset: number, code: ErrorCode, message: string) => void): {
|
||||
value: string;
|
||||
type: Scalar.Type | null;
|
||||
comment: string;
|
||||
range: Range;
|
||||
} | null;
|
||||
/**
|
||||
* Create a new scalar token with `value`
|
||||
*
|
||||
* Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
|
||||
* as this function does not support any schema operations and won't check for such conflicts.
|
||||
*
|
||||
* @param value The string representation of the value, which will have its content properly indented.
|
||||
* @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.
|
||||
* @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
|
||||
* @param context.indent The indent level of the token.
|
||||
* @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.
|
||||
* @param context.offset The offset position of the token.
|
||||
* @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
|
||||
*/
|
||||
export declare function createScalarToken(value: string, context: {
|
||||
end?: SourceToken[];
|
||||
implicitKey?: boolean;
|
||||
indent: number;
|
||||
inFlow?: boolean;
|
||||
offset?: number;
|
||||
type?: Scalar.Type;
|
||||
}): BlockScalar | FlowScalar;
|
||||
/**
|
||||
* Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.
|
||||
*
|
||||
* Best efforts are made to retain any comments previously associated with the `token`,
|
||||
* though all contents within a collection's `items` will be overwritten.
|
||||
*
|
||||
* Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
|
||||
* as this function does not support any schema operations and won't check for such conflicts.
|
||||
*
|
||||
* @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key.
|
||||
* @param value The string representation of the value, which will have its content properly indented.
|
||||
* @param context.afterKey In most cases, values after a key should have an additional level of indentation.
|
||||
* @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
|
||||
* @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.
|
||||
* @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
|
||||
*/
|
||||
export declare function setScalarValue(token: Token, value: string, context?: {
|
||||
afterKey?: boolean;
|
||||
implicitKey?: boolean;
|
||||
inFlow?: boolean;
|
||||
type?: Scalar.Type;
|
||||
}): void;
|
218
node_modules/yaml/dist/parse/cst-scalar.js
generated
vendored
Normal file
218
node_modules/yaml/dist/parse/cst-scalar.js
generated
vendored
Normal file
@ -0,0 +1,218 @@
|
||||
'use strict';
|
||||
|
||||
var resolveBlockScalar = require('../compose/resolve-block-scalar.js');
|
||||
var resolveFlowScalar = require('../compose/resolve-flow-scalar.js');
|
||||
var errors = require('../errors.js');
|
||||
var stringifyString = require('../stringify/stringifyString.js');
|
||||
|
||||
function resolveAsScalar(token, strict = true, onError) {
|
||||
if (token) {
|
||||
const _onError = (pos, code, message) => {
|
||||
const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? pos[0] : pos.offset;
|
||||
if (onError)
|
||||
onError(offset, code, message);
|
||||
else
|
||||
throw new errors.YAMLParseError([offset, offset + 1], code, message);
|
||||
};
|
||||
switch (token.type) {
|
||||
case 'scalar':
|
||||
case 'single-quoted-scalar':
|
||||
case 'double-quoted-scalar':
|
||||
return resolveFlowScalar.resolveFlowScalar(token, strict, _onError);
|
||||
case 'block-scalar':
|
||||
return resolveBlockScalar.resolveBlockScalar({ options: { strict } }, token, _onError);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
/**
|
||||
* Create a new scalar token with `value`
|
||||
*
|
||||
* Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
|
||||
* as this function does not support any schema operations and won't check for such conflicts.
|
||||
*
|
||||
* @param value The string representation of the value, which will have its content properly indented.
|
||||
* @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.
|
||||
* @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
|
||||
* @param context.indent The indent level of the token.
|
||||
* @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.
|
||||
* @param context.offset The offset position of the token.
|
||||
* @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
|
||||
*/
|
||||
function createScalarToken(value, context) {
|
||||
const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context;
|
||||
const source = stringifyString.stringifyString({ type, value }, {
|
||||
implicitKey,
|
||||
indent: indent > 0 ? ' '.repeat(indent) : '',
|
||||
inFlow,
|
||||
options: { blockQuote: true, lineWidth: -1 }
|
||||
});
|
||||
const end = context.end ?? [
|
||||
{ type: 'newline', offset: -1, indent, source: '\n' }
|
||||
];
|
||||
switch (source[0]) {
|
||||
case '|':
|
||||
case '>': {
|
||||
const he = source.indexOf('\n');
|
||||
const head = source.substring(0, he);
|
||||
const body = source.substring(he + 1) + '\n';
|
||||
const props = [
|
||||
{ type: 'block-scalar-header', offset, indent, source: head }
|
||||
];
|
||||
if (!addEndtoBlockProps(props, end))
|
||||
props.push({ type: 'newline', offset: -1, indent, source: '\n' });
|
||||
return { type: 'block-scalar', offset, indent, props, source: body };
|
||||
}
|
||||
case '"':
|
||||
return { type: 'double-quoted-scalar', offset, indent, source, end };
|
||||
case "'":
|
||||
return { type: 'single-quoted-scalar', offset, indent, source, end };
|
||||
default:
|
||||
return { type: 'scalar', offset, indent, source, end };
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.
|
||||
*
|
||||
* Best efforts are made to retain any comments previously associated with the `token`,
|
||||
* though all contents within a collection's `items` will be overwritten.
|
||||
*
|
||||
* Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
|
||||
* as this function does not support any schema operations and won't check for such conflicts.
|
||||
*
|
||||
* @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key.
|
||||
* @param value The string representation of the value, which will have its content properly indented.
|
||||
* @param context.afterKey In most cases, values after a key should have an additional level of indentation.
|
||||
* @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
|
||||
* @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.
|
||||
* @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
|
||||
*/
|
||||
function setScalarValue(token, value, context = {}) {
|
||||
let { afterKey = false, implicitKey = false, inFlow = false, type } = context;
|
||||
let indent = 'indent' in token ? token.indent : null;
|
||||
if (afterKey && typeof indent === 'number')
|
||||
indent += 2;
|
||||
if (!type)
|
||||
switch (token.type) {
|
||||
case 'single-quoted-scalar':
|
||||
type = 'QUOTE_SINGLE';
|
||||
break;
|
||||
case 'double-quoted-scalar':
|
||||
type = 'QUOTE_DOUBLE';
|
||||
break;
|
||||
case 'block-scalar': {
|
||||
const header = token.props[0];
|
||||
if (header.type !== 'block-scalar-header')
|
||||
throw new Error('Invalid block scalar header');
|
||||
type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL';
|
||||
break;
|
||||
}
|
||||
default:
|
||||
type = 'PLAIN';
|
||||
}
|
||||
const source = stringifyString.stringifyString({ type, value }, {
|
||||
implicitKey: implicitKey || indent === null,
|
||||
indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '',
|
||||
inFlow,
|
||||
options: { blockQuote: true, lineWidth: -1 }
|
||||
});
|
||||
switch (source[0]) {
|
||||
case '|':
|
||||
case '>':
|
||||
setBlockScalarValue(token, source);
|
||||
break;
|
||||
case '"':
|
||||
setFlowScalarValue(token, source, 'double-quoted-scalar');
|
||||
break;
|
||||
case "'":
|
||||
setFlowScalarValue(token, source, 'single-quoted-scalar');
|
||||
break;
|
||||
default:
|
||||
setFlowScalarValue(token, source, 'scalar');
|
||||
}
|
||||
}
|
||||
function setBlockScalarValue(token, source) {
|
||||
const he = source.indexOf('\n');
|
||||
const head = source.substring(0, he);
|
||||
const body = source.substring(he + 1) + '\n';
|
||||
if (token.type === 'block-scalar') {
|
||||
const header = token.props[0];
|
||||
if (header.type !== 'block-scalar-header')
|
||||
throw new Error('Invalid block scalar header');
|
||||
header.source = head;
|
||||
token.source = body;
|
||||
}
|
||||
else {
|
||||
const { offset } = token;
|
||||
const indent = 'indent' in token ? token.indent : -1;
|
||||
const props = [
|
||||
{ type: 'block-scalar-header', offset, indent, source: head }
|
||||
];
|
||||
if (!addEndtoBlockProps(props, 'end' in token ? token.end : undefined))
|
||||
props.push({ type: 'newline', offset: -1, indent, source: '\n' });
|
||||
for (const key of Object.keys(token))
|
||||
if (key !== 'type' && key !== 'offset')
|
||||
delete token[key];
|
||||
Object.assign(token, { type: 'block-scalar', indent, props, source: body });
|
||||
}
|
||||
}
|
||||
/** @returns `true` if last token is a newline */
|
||||
function addEndtoBlockProps(props, end) {
|
||||
if (end)
|
||||
for (const st of end)
|
||||
switch (st.type) {
|
||||
case 'space':
|
||||
case 'comment':
|
||||
props.push(st);
|
||||
break;
|
||||
case 'newline':
|
||||
props.push(st);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function setFlowScalarValue(token, source, type) {
|
||||
switch (token.type) {
|
||||
case 'scalar':
|
||||
case 'double-quoted-scalar':
|
||||
case 'single-quoted-scalar':
|
||||
token.type = type;
|
||||
token.source = source;
|
||||
break;
|
||||
case 'block-scalar': {
|
||||
const end = token.props.slice(1);
|
||||
let oa = source.length;
|
||||
if (token.props[0].type === 'block-scalar-header')
|
||||
oa -= token.props[0].source.length;
|
||||
for (const tok of end)
|
||||
tok.offset += oa;
|
||||
delete token.props;
|
||||
Object.assign(token, { type, source, end });
|
||||
break;
|
||||
}
|
||||
case 'block-map':
|
||||
case 'block-seq': {
|
||||
const offset = token.offset + source.length;
|
||||
const nl = { type: 'newline', offset, indent: token.indent, source: '\n' };
|
||||
delete token.items;
|
||||
Object.assign(token, { type, source, end: [nl] });
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
const indent = 'indent' in token ? token.indent : -1;
|
||||
const end = 'end' in token && Array.isArray(token.end)
|
||||
? token.end.filter(st => st.type === 'space' ||
|
||||
st.type === 'comment' ||
|
||||
st.type === 'newline')
|
||||
: [];
|
||||
for (const key of Object.keys(token))
|
||||
if (key !== 'type' && key !== 'offset')
|
||||
delete token[key];
|
||||
Object.assign(token, { type, indent, source, end });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
exports.createScalarToken = createScalarToken;
|
||||
exports.resolveAsScalar = resolveAsScalar;
|
||||
exports.setScalarValue = setScalarValue;
|
8
node_modules/yaml/dist/parse/cst-stringify.d.ts
generated
vendored
Normal file
8
node_modules/yaml/dist/parse/cst-stringify.d.ts
generated
vendored
Normal file
@ -0,0 +1,8 @@
|
||||
import type { CollectionItem, Token } from './cst';
|
||||
/**
|
||||
* Stringify a CST document, token, or collection item
|
||||
*
|
||||
* Fair warning: This applies no validation whatsoever, and
|
||||
* simply concatenates the sources in their logical order.
|
||||
*/
|
||||
export declare const stringify: (cst: Token | CollectionItem) => string;
|
63
node_modules/yaml/dist/parse/cst-stringify.js
generated
vendored
Normal file
63
node_modules/yaml/dist/parse/cst-stringify.js
generated
vendored
Normal file
@ -0,0 +1,63 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Stringify a CST document, token, or collection item
|
||||
*
|
||||
* Fair warning: This applies no validation whatsoever, and
|
||||
* simply concatenates the sources in their logical order.
|
||||
*/
|
||||
const stringify = (cst) => 'type' in cst ? stringifyToken(cst) : stringifyItem(cst);
|
||||
function stringifyToken(token) {
|
||||
switch (token.type) {
|
||||
case 'block-scalar': {
|
||||
let res = '';
|
||||
for (const tok of token.props)
|
||||
res += stringifyToken(tok);
|
||||
return res + token.source;
|
||||
}
|
||||
case 'block-map':
|
||||
case 'block-seq': {
|
||||
let res = '';
|
||||
for (const item of token.items)
|
||||
res += stringifyItem(item);
|
||||
return res;
|
||||
}
|
||||
case 'flow-collection': {
|
||||
let res = token.start.source;
|
||||
for (const item of token.items)
|
||||
res += stringifyItem(item);
|
||||
for (const st of token.end)
|
||||
res += st.source;
|
||||
return res;
|
||||
}
|
||||
case 'document': {
|
||||
let res = stringifyItem(token);
|
||||
if (token.end)
|
||||
for (const st of token.end)
|
||||
res += st.source;
|
||||
return res;
|
||||
}
|
||||
default: {
|
||||
let res = token.source;
|
||||
if ('end' in token && token.end)
|
||||
for (const st of token.end)
|
||||
res += st.source;
|
||||
return res;
|
||||
}
|
||||
}
|
||||
}
|
||||
function stringifyItem({ start, key, sep, value }) {
|
||||
let res = '';
|
||||
for (const st of start)
|
||||
res += st.source;
|
||||
if (key)
|
||||
res += stringifyToken(key);
|
||||
if (sep)
|
||||
for (const st of sep)
|
||||
res += st.source;
|
||||
if (value)
|
||||
res += stringifyToken(value);
|
||||
return res;
|
||||
}
|
||||
|
||||
exports.stringify = stringify;
|
39
node_modules/yaml/dist/parse/cst-visit.d.ts
generated
vendored
Normal file
39
node_modules/yaml/dist/parse/cst-visit.d.ts
generated
vendored
Normal file
@ -0,0 +1,39 @@
|
||||
import type { BlockMap, BlockSequence, CollectionItem, Document, FlowCollection } from './cst';
|
||||
export type VisitPath = readonly ['key' | 'value', number][];
|
||||
export type Visitor = (item: CollectionItem, path: VisitPath) => number | symbol | Visitor | void;
|
||||
/**
|
||||
* Apply a visitor to a CST document or item.
|
||||
*
|
||||
* Walks through the tree (depth-first) starting from the root, calling a
|
||||
* `visitor` function with two arguments when entering each item:
|
||||
* - `item`: The current item, which included the following members:
|
||||
* - `start: SourceToken[]` – Source tokens before the key or value,
|
||||
* possibly including its anchor or tag.
|
||||
* - `key?: Token | null` – Set for pair values. May then be `null`, if
|
||||
* the key before the `:` separator is empty.
|
||||
* - `sep?: SourceToken[]` – Source tokens between the key and the value,
|
||||
* which should include the `:` map value indicator if `value` is set.
|
||||
* - `value?: Token` – The value of a sequence item, or of a map pair.
|
||||
* - `path`: The steps from the root to the current node, as an array of
|
||||
* `['key' | 'value', number]` tuples.
|
||||
*
|
||||
* The return value of the visitor may be used to control the traversal:
|
||||
* - `undefined` (default): Do nothing and continue
|
||||
* - `visit.SKIP`: Do not visit the children of this token, continue with
|
||||
* next sibling
|
||||
* - `visit.BREAK`: Terminate traversal completely
|
||||
* - `visit.REMOVE`: Remove the current item, then continue with the next one
|
||||
* - `number`: Set the index of the next step. This is useful especially if
|
||||
* the index of the current token has changed.
|
||||
* - `function`: Define the next visitor for this item. After the original
|
||||
* visitor is called on item entry, next visitors are called after handling
|
||||
* a non-empty `key` and when exiting the item.
|
||||
*/
|
||||
export declare function visit(cst: Document | CollectionItem, visitor: Visitor): void;
|
||||
export declare namespace visit {
|
||||
var BREAK: symbol;
|
||||
var SKIP: symbol;
|
||||
var REMOVE: symbol;
|
||||
var itemAtPath: (cst: Document | CollectionItem, path: VisitPath) => CollectionItem | undefined;
|
||||
var parentCollection: (cst: Document | CollectionItem, path: VisitPath) => BlockMap | BlockSequence | FlowCollection;
|
||||
}
|
99
node_modules/yaml/dist/parse/cst-visit.js
generated
vendored
Normal file
99
node_modules/yaml/dist/parse/cst-visit.js
generated
vendored
Normal file
@ -0,0 +1,99 @@
|
||||
'use strict';
|
||||
|
||||
const BREAK = Symbol('break visit');
|
||||
const SKIP = Symbol('skip children');
|
||||
const REMOVE = Symbol('remove item');
|
||||
/**
|
||||
* Apply a visitor to a CST document or item.
|
||||
*
|
||||
* Walks through the tree (depth-first) starting from the root, calling a
|
||||
* `visitor` function with two arguments when entering each item:
|
||||
* - `item`: The current item, which included the following members:
|
||||
* - `start: SourceToken[]` – Source tokens before the key or value,
|
||||
* possibly including its anchor or tag.
|
||||
* - `key?: Token | null` – Set for pair values. May then be `null`, if
|
||||
* the key before the `:` separator is empty.
|
||||
* - `sep?: SourceToken[]` – Source tokens between the key and the value,
|
||||
* which should include the `:` map value indicator if `value` is set.
|
||||
* - `value?: Token` – The value of a sequence item, or of a map pair.
|
||||
* - `path`: The steps from the root to the current node, as an array of
|
||||
* `['key' | 'value', number]` tuples.
|
||||
*
|
||||
* The return value of the visitor may be used to control the traversal:
|
||||
* - `undefined` (default): Do nothing and continue
|
||||
* - `visit.SKIP`: Do not visit the children of this token, continue with
|
||||
* next sibling
|
||||
* - `visit.BREAK`: Terminate traversal completely
|
||||
* - `visit.REMOVE`: Remove the current item, then continue with the next one
|
||||
* - `number`: Set the index of the next step. This is useful especially if
|
||||
* the index of the current token has changed.
|
||||
* - `function`: Define the next visitor for this item. After the original
|
||||
* visitor is called on item entry, next visitors are called after handling
|
||||
* a non-empty `key` and when exiting the item.
|
||||
*/
|
||||
function visit(cst, visitor) {
|
||||
if ('type' in cst && cst.type === 'document')
|
||||
cst = { start: cst.start, value: cst.value };
|
||||
_visit(Object.freeze([]), cst, visitor);
|
||||
}
|
||||
// Without the `as symbol` casts, TS declares these in the `visit`
|
||||
// namespace using `var`, but then complains about that because
|
||||
// `unique symbol` must be `const`.
|
||||
/** Terminate visit traversal completely */
|
||||
visit.BREAK = BREAK;
|
||||
/** Do not visit the children of the current item */
|
||||
visit.SKIP = SKIP;
|
||||
/** Remove the current item */
|
||||
visit.REMOVE = REMOVE;
|
||||
/** Find the item at `path` from `cst` as the root */
|
||||
visit.itemAtPath = (cst, path) => {
|
||||
let item = cst;
|
||||
for (const [field, index] of path) {
|
||||
const tok = item?.[field];
|
||||
if (tok && 'items' in tok) {
|
||||
item = tok.items[index];
|
||||
}
|
||||
else
|
||||
return undefined;
|
||||
}
|
||||
return item;
|
||||
};
|
||||
/**
|
||||
* Get the immediate parent collection of the item at `path` from `cst` as the root.
|
||||
*
|
||||
* Throws an error if the collection is not found, which should never happen if the item itself exists.
|
||||
*/
|
||||
visit.parentCollection = (cst, path) => {
|
||||
const parent = visit.itemAtPath(cst, path.slice(0, -1));
|
||||
const field = path[path.length - 1][0];
|
||||
const coll = parent?.[field];
|
||||
if (coll && 'items' in coll)
|
||||
return coll;
|
||||
throw new Error('Parent collection not found');
|
||||
};
|
||||
function _visit(path, item, visitor) {
|
||||
let ctrl = visitor(item, path);
|
||||
if (typeof ctrl === 'symbol')
|
||||
return ctrl;
|
||||
for (const field of ['key', 'value']) {
|
||||
const token = item[field];
|
||||
if (token && 'items' in token) {
|
||||
for (let i = 0; i < token.items.length; ++i) {
|
||||
const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor);
|
||||
if (typeof ci === 'number')
|
||||
i = ci - 1;
|
||||
else if (ci === BREAK)
|
||||
return BREAK;
|
||||
else if (ci === REMOVE) {
|
||||
token.items.splice(i, 1);
|
||||
i -= 1;
|
||||
}
|
||||
}
|
||||
if (typeof ctrl === 'function' && field === 'key')
|
||||
ctrl = ctrl(item, path);
|
||||
}
|
||||
}
|
||||
return typeof ctrl === 'function' ? ctrl(item, path) : ctrl;
|
||||
}
|
||||
|
||||
exports.visit = visit;
|
109
node_modules/yaml/dist/parse/cst.d.ts
generated
vendored
Normal file
109
node_modules/yaml/dist/parse/cst.d.ts
generated
vendored
Normal file
@ -0,0 +1,109 @@
|
||||
export { createScalarToken, resolveAsScalar, setScalarValue } from './cst-scalar';
|
||||
export { stringify } from './cst-stringify';
|
||||
export type { Visitor, VisitPath } from './cst-visit';
|
||||
export { visit } from './cst-visit';
|
||||
export interface SourceToken {
|
||||
type: 'byte-order-mark' | 'doc-mode' | 'doc-start' | 'space' | 'comment' | 'newline' | 'directive-line' | 'anchor' | 'tag' | 'seq-item-ind' | 'explicit-key-ind' | 'map-value-ind' | 'flow-map-start' | 'flow-map-end' | 'flow-seq-start' | 'flow-seq-end' | 'flow-error-end' | 'comma' | 'block-scalar-header';
|
||||
offset: number;
|
||||
indent: number;
|
||||
source: string;
|
||||
}
|
||||
export interface ErrorToken {
|
||||
type: 'error';
|
||||
offset: number;
|
||||
source: string;
|
||||
message: string;
|
||||
}
|
||||
export interface Directive {
|
||||
type: 'directive';
|
||||
offset: number;
|
||||
source: string;
|
||||
}
|
||||
export interface Document {
|
||||
type: 'document';
|
||||
offset: number;
|
||||
start: SourceToken[];
|
||||
value?: Token;
|
||||
end?: SourceToken[];
|
||||
}
|
||||
export interface DocumentEnd {
|
||||
type: 'doc-end';
|
||||
offset: number;
|
||||
source: string;
|
||||
end?: SourceToken[];
|
||||
}
|
||||
export interface FlowScalar {
|
||||
type: 'alias' | 'scalar' | 'single-quoted-scalar' | 'double-quoted-scalar';
|
||||
offset: number;
|
||||
indent: number;
|
||||
source: string;
|
||||
end?: SourceToken[];
|
||||
}
|
||||
export interface BlockScalar {
|
||||
type: 'block-scalar';
|
||||
offset: number;
|
||||
indent: number;
|
||||
props: Token[];
|
||||
source: string;
|
||||
}
|
||||
export interface BlockMap {
|
||||
type: 'block-map';
|
||||
offset: number;
|
||||
indent: number;
|
||||
items: Array<{
|
||||
start: SourceToken[];
|
||||
explicitKey?: true;
|
||||
key?: never;
|
||||
sep?: never;
|
||||
value?: never;
|
||||
} | {
|
||||
start: SourceToken[];
|
||||
explicitKey?: true;
|
||||
key: Token | null;
|
||||
sep: SourceToken[];
|
||||
value?: Token;
|
||||
}>;
|
||||
}
|
||||
export interface BlockSequence {
|
||||
type: 'block-seq';
|
||||
offset: number;
|
||||
indent: number;
|
||||
items: Array<{
|
||||
start: SourceToken[];
|
||||
key?: never;
|
||||
sep?: never;
|
||||
value?: Token;
|
||||
}>;
|
||||
}
|
||||
export type CollectionItem = {
|
||||
start: SourceToken[];
|
||||
key?: Token | null;
|
||||
sep?: SourceToken[];
|
||||
value?: Token;
|
||||
};
|
||||
export interface FlowCollection {
|
||||
type: 'flow-collection';
|
||||
offset: number;
|
||||
indent: number;
|
||||
start: SourceToken;
|
||||
items: CollectionItem[];
|
||||
end: SourceToken[];
|
||||
}
|
||||
export type Token = SourceToken | ErrorToken | Directive | Document | DocumentEnd | FlowScalar | BlockScalar | BlockMap | BlockSequence | FlowCollection;
|
||||
export type TokenType = SourceToken['type'] | DocumentEnd['type'] | FlowScalar['type'];
|
||||
/** The byte order mark */
|
||||
export declare const BOM = "\uFEFF";
|
||||
/** Start of doc-mode */
|
||||
export declare const DOCUMENT = "\u0002";
|
||||
/** Unexpected end of flow-mode */
|
||||
export declare const FLOW_END = "\u0018";
|
||||
/** Next token is a scalar value */
|
||||
export declare const SCALAR = "\u001F";
|
||||
/** @returns `true` if `token` is a flow or block collection */
|
||||
export declare const isCollection: (token: Token | null | undefined) => token is BlockMap | BlockSequence | FlowCollection;
|
||||
/** @returns `true` if `token` is a flow or block scalar; not an alias */
|
||||
export declare const isScalar: (token: Token | null | undefined) => token is FlowScalar | BlockScalar;
|
||||
/** Get a printable representation of a lexer token */
|
||||
export declare function prettyToken(token: string): string;
|
||||
/** Identify the type of a lexer token. May return `null` for unknown tokens. */
|
||||
export declare function tokenType(source: string): TokenType | null;
|
112
node_modules/yaml/dist/parse/cst.js
generated
vendored
Normal file
112
node_modules/yaml/dist/parse/cst.js
generated
vendored
Normal file
@ -0,0 +1,112 @@
|
||||
'use strict';
|
||||
|
||||
var cstScalar = require('./cst-scalar.js');
|
||||
var cstStringify = require('./cst-stringify.js');
|
||||
var cstVisit = require('./cst-visit.js');
|
||||
|
||||
/** The byte order mark */
|
||||
const BOM = '\u{FEFF}';
|
||||
/** Start of doc-mode */
|
||||
const DOCUMENT = '\x02'; // C0: Start of Text
|
||||
/** Unexpected end of flow-mode */
|
||||
const FLOW_END = '\x18'; // C0: Cancel
|
||||
/** Next token is a scalar value */
|
||||
const SCALAR = '\x1f'; // C0: Unit Separator
|
||||
/** @returns `true` if `token` is a flow or block collection */
|
||||
const isCollection = (token) => !!token && 'items' in token;
|
||||
/** @returns `true` if `token` is a flow or block scalar; not an alias */
|
||||
const isScalar = (token) => !!token &&
|
||||
(token.type === 'scalar' ||
|
||||
token.type === 'single-quoted-scalar' ||
|
||||
token.type === 'double-quoted-scalar' ||
|
||||
token.type === 'block-scalar');
|
||||
/* istanbul ignore next */
|
||||
/** Get a printable representation of a lexer token */
|
||||
function prettyToken(token) {
|
||||
switch (token) {
|
||||
case BOM:
|
||||
return '<BOM>';
|
||||
case DOCUMENT:
|
||||
return '<DOC>';
|
||||
case FLOW_END:
|
||||
return '<FLOW_END>';
|
||||
case SCALAR:
|
||||
return '<SCALAR>';
|
||||
default:
|
||||
return JSON.stringify(token);
|
||||
}
|
||||
}
|
||||
/** Identify the type of a lexer token. May return `null` for unknown tokens. */
|
||||
function tokenType(source) {
|
||||
switch (source) {
|
||||
case BOM:
|
||||
return 'byte-order-mark';
|
||||
case DOCUMENT:
|
||||
return 'doc-mode';
|
||||
case FLOW_END:
|
||||
return 'flow-error-end';
|
||||
case SCALAR:
|
||||
return 'scalar';
|
||||
case '---':
|
||||
return 'doc-start';
|
||||
case '...':
|
||||
return 'doc-end';
|
||||
case '':
|
||||
case '\n':
|
||||
case '\r\n':
|
||||
return 'newline';
|
||||
case '-':
|
||||
return 'seq-item-ind';
|
||||
case '?':
|
||||
return 'explicit-key-ind';
|
||||
case ':':
|
||||
return 'map-value-ind';
|
||||
case '{':
|
||||
return 'flow-map-start';
|
||||
case '}':
|
||||
return 'flow-map-end';
|
||||
case '[':
|
||||
return 'flow-seq-start';
|
||||
case ']':
|
||||
return 'flow-seq-end';
|
||||
case ',':
|
||||
return 'comma';
|
||||
}
|
||||
switch (source[0]) {
|
||||
case ' ':
|
||||
case '\t':
|
||||
return 'space';
|
||||
case '#':
|
||||
return 'comment';
|
||||
case '%':
|
||||
return 'directive-line';
|
||||
case '*':
|
||||
return 'alias';
|
||||
case '&':
|
||||
return 'anchor';
|
||||
case '!':
|
||||
return 'tag';
|
||||
case "'":
|
||||
return 'single-quoted-scalar';
|
||||
case '"':
|
||||
return 'double-quoted-scalar';
|
||||
case '|':
|
||||
case '>':
|
||||
return 'block-scalar-header';
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
exports.createScalarToken = cstScalar.createScalarToken;
|
||||
exports.resolveAsScalar = cstScalar.resolveAsScalar;
|
||||
exports.setScalarValue = cstScalar.setScalarValue;
|
||||
exports.stringify = cstStringify.stringify;
|
||||
exports.visit = cstVisit.visit;
|
||||
exports.BOM = BOM;
|
||||
exports.DOCUMENT = DOCUMENT;
|
||||
exports.FLOW_END = FLOW_END;
|
||||
exports.SCALAR = SCALAR;
|
||||
exports.isCollection = isCollection;
|
||||
exports.isScalar = isScalar;
|
||||
exports.prettyToken = prettyToken;
|
||||
exports.tokenType = tokenType;
|
87
node_modules/yaml/dist/parse/lexer.d.ts
generated
vendored
Normal file
87
node_modules/yaml/dist/parse/lexer.d.ts
generated
vendored
Normal file
@ -0,0 +1,87 @@
|
||||
/**
|
||||
* Splits an input string into lexical tokens, i.e. smaller strings that are
|
||||
* easily identifiable by `tokens.tokenType()`.
|
||||
*
|
||||
* Lexing starts always in a "stream" context. Incomplete input may be buffered
|
||||
* until a complete token can be emitted.
|
||||
*
|
||||
* In addition to slices of the original input, the following control characters
|
||||
* may also be emitted:
|
||||
*
|
||||
* - `\x02` (Start of Text): A document starts with the next token
|
||||
* - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error)
|
||||
* - `\x1f` (Unit Separator): Next token is a scalar value
|
||||
* - `\u{FEFF}` (Byte order mark): Emitted separately outside documents
|
||||
*/
|
||||
export declare class Lexer {
|
||||
/**
|
||||
* Flag indicating whether the end of the current buffer marks the end of
|
||||
* all input
|
||||
*/
|
||||
private atEnd;
|
||||
/**
|
||||
* Explicit indent set in block scalar header, as an offset from the current
|
||||
* minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not
|
||||
* explicitly set.
|
||||
*/
|
||||
private blockScalarIndent;
|
||||
/**
|
||||
* Block scalars that include a + (keep) chomping indicator in their header
|
||||
* include trailing empty lines, which are otherwise excluded from the
|
||||
* scalar's contents.
|
||||
*/
|
||||
private blockScalarKeep;
|
||||
/** Current input */
|
||||
private buffer;
|
||||
/**
|
||||
* Flag noting whether the map value indicator : can immediately follow this
|
||||
* node within a flow context.
|
||||
*/
|
||||
private flowKey;
|
||||
/** Count of surrounding flow collection levels. */
|
||||
private flowLevel;
|
||||
/**
|
||||
* Minimum level of indentation required for next lines to be parsed as a
|
||||
* part of the current scalar value.
|
||||
*/
|
||||
private indentNext;
|
||||
/** Indentation level of the current line. */
|
||||
private indentValue;
|
||||
/** Position of the next \n character. */
|
||||
private lineEndPos;
|
||||
/** Stores the state of the lexer if reaching the end of incpomplete input */
|
||||
private next;
|
||||
/** A pointer to `buffer`; the current position of the lexer. */
|
||||
private pos;
|
||||
/**
|
||||
* Generate YAML tokens from the `source` string. If `incomplete`,
|
||||
* a part of the last line may be left as a buffer for the next call.
|
||||
*
|
||||
* @returns A generator of lexical tokens
|
||||
*/
|
||||
lex(source: string, incomplete?: boolean): Generator<string, void>;
|
||||
private atLineEnd;
|
||||
private charAt;
|
||||
private continueScalar;
|
||||
private getLine;
|
||||
private hasChars;
|
||||
private setNext;
|
||||
private peek;
|
||||
private parseNext;
|
||||
private parseStream;
|
||||
private parseLineStart;
|
||||
private parseBlockStart;
|
||||
private parseDocument;
|
||||
private parseFlowCollection;
|
||||
private parseQuotedScalar;
|
||||
private parseBlockScalarHeader;
|
||||
private parseBlockScalar;
|
||||
private parsePlainScalar;
|
||||
private pushCount;
|
||||
private pushToIndex;
|
||||
private pushIndicators;
|
||||
private pushTag;
|
||||
private pushNewline;
|
||||
private pushSpaces;
|
||||
private pushUntil;
|
||||
}
|
719
node_modules/yaml/dist/parse/lexer.js
generated
vendored
Normal file
719
node_modules/yaml/dist/parse/lexer.js
generated
vendored
Normal file
@ -0,0 +1,719 @@
|
||||
'use strict';
|
||||
|
||||
var cst = require('./cst.js');
|
||||
|
||||
/*
|
||||
START -> stream
|
||||
|
||||
stream
|
||||
directive -> line-end -> stream
|
||||
indent + line-end -> stream
|
||||
[else] -> line-start
|
||||
|
||||
line-end
|
||||
comment -> line-end
|
||||
newline -> .
|
||||
input-end -> END
|
||||
|
||||
line-start
|
||||
doc-start -> doc
|
||||
doc-end -> stream
|
||||
[else] -> indent -> block-start
|
||||
|
||||
block-start
|
||||
seq-item-start -> block-start
|
||||
explicit-key-start -> block-start
|
||||
map-value-start -> block-start
|
||||
[else] -> doc
|
||||
|
||||
doc
|
||||
line-end -> line-start
|
||||
spaces -> doc
|
||||
anchor -> doc
|
||||
tag -> doc
|
||||
flow-start -> flow -> doc
|
||||
flow-end -> error -> doc
|
||||
seq-item-start -> error -> doc
|
||||
explicit-key-start -> error -> doc
|
||||
map-value-start -> doc
|
||||
alias -> doc
|
||||
quote-start -> quoted-scalar -> doc
|
||||
block-scalar-header -> line-end -> block-scalar(min) -> line-start
|
||||
[else] -> plain-scalar(false, min) -> doc
|
||||
|
||||
flow
|
||||
line-end -> flow
|
||||
spaces -> flow
|
||||
anchor -> flow
|
||||
tag -> flow
|
||||
flow-start -> flow -> flow
|
||||
flow-end -> .
|
||||
seq-item-start -> error -> flow
|
||||
explicit-key-start -> flow
|
||||
map-value-start -> flow
|
||||
alias -> flow
|
||||
quote-start -> quoted-scalar -> flow
|
||||
comma -> flow
|
||||
[else] -> plain-scalar(true, 0) -> flow
|
||||
|
||||
quoted-scalar
|
||||
quote-end -> .
|
||||
[else] -> quoted-scalar
|
||||
|
||||
block-scalar(min)
|
||||
newline + peek(indent < min) -> .
|
||||
[else] -> block-scalar(min)
|
||||
|
||||
plain-scalar(is-flow, min)
|
||||
scalar-end(is-flow) -> .
|
||||
peek(newline + (indent < min)) -> .
|
||||
[else] -> plain-scalar(min)
|
||||
*/
|
||||
function isEmpty(ch) {
|
||||
switch (ch) {
|
||||
case undefined:
|
||||
case ' ':
|
||||
case '\n':
|
||||
case '\r':
|
||||
case '\t':
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
const hexDigits = new Set('0123456789ABCDEFabcdef');
|
||||
const tagChars = new Set("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()");
|
||||
const flowIndicatorChars = new Set(',[]{}');
|
||||
const invalidAnchorChars = new Set(' ,[]{}\n\r\t');
|
||||
const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.has(ch);
|
||||
/**
|
||||
* Splits an input string into lexical tokens, i.e. smaller strings that are
|
||||
* easily identifiable by `tokens.tokenType()`.
|
||||
*
|
||||
* Lexing starts always in a "stream" context. Incomplete input may be buffered
|
||||
* until a complete token can be emitted.
|
||||
*
|
||||
* In addition to slices of the original input, the following control characters
|
||||
* may also be emitted:
|
||||
*
|
||||
* - `\x02` (Start of Text): A document starts with the next token
|
||||
* - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error)
|
||||
* - `\x1f` (Unit Separator): Next token is a scalar value
|
||||
* - `\u{FEFF}` (Byte order mark): Emitted separately outside documents
|
||||
*/
|
||||
class Lexer {
|
||||
constructor() {
|
||||
/**
|
||||
* Flag indicating whether the end of the current buffer marks the end of
|
||||
* all input
|
||||
*/
|
||||
this.atEnd = false;
|
||||
/**
|
||||
* Explicit indent set in block scalar header, as an offset from the current
|
||||
* minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not
|
||||
* explicitly set.
|
||||
*/
|
||||
this.blockScalarIndent = -1;
|
||||
/**
|
||||
* Block scalars that include a + (keep) chomping indicator in their header
|
||||
* include trailing empty lines, which are otherwise excluded from the
|
||||
* scalar's contents.
|
||||
*/
|
||||
this.blockScalarKeep = false;
|
||||
/** Current input */
|
||||
this.buffer = '';
|
||||
/**
|
||||
* Flag noting whether the map value indicator : can immediately follow this
|
||||
* node within a flow context.
|
||||
*/
|
||||
this.flowKey = false;
|
||||
/** Count of surrounding flow collection levels. */
|
||||
this.flowLevel = 0;
|
||||
/**
|
||||
* Minimum level of indentation required for next lines to be parsed as a
|
||||
* part of the current scalar value.
|
||||
*/
|
||||
this.indentNext = 0;
|
||||
/** Indentation level of the current line. */
|
||||
this.indentValue = 0;
|
||||
/** Position of the next \n character. */
|
||||
this.lineEndPos = null;
|
||||
/** Stores the state of the lexer if reaching the end of incpomplete input */
|
||||
this.next = null;
|
||||
/** A pointer to `buffer`; the current position of the lexer. */
|
||||
this.pos = 0;
|
||||
}
|
||||
/**
|
||||
* Generate YAML tokens from the `source` string. If `incomplete`,
|
||||
* a part of the last line may be left as a buffer for the next call.
|
||||
*
|
||||
* @returns A generator of lexical tokens
|
||||
*/
|
||||
*lex(source, incomplete = false) {
|
||||
if (source) {
|
||||
if (typeof source !== 'string')
|
||||
throw TypeError('source is not a string');
|
||||
this.buffer = this.buffer ? this.buffer + source : source;
|
||||
this.lineEndPos = null;
|
||||
}
|
||||
this.atEnd = !incomplete;
|
||||
let next = this.next ?? 'stream';
|
||||
while (next && (incomplete || this.hasChars(1)))
|
||||
next = yield* this.parseNext(next);
|
||||
}
|
||||
atLineEnd() {
|
||||
let i = this.pos;
|
||||
let ch = this.buffer[i];
|
||||
while (ch === ' ' || ch === '\t')
|
||||
ch = this.buffer[++i];
|
||||
if (!ch || ch === '#' || ch === '\n')
|
||||
return true;
|
||||
if (ch === '\r')
|
||||
return this.buffer[i + 1] === '\n';
|
||||
return false;
|
||||
}
|
||||
charAt(n) {
|
||||
return this.buffer[this.pos + n];
|
||||
}
|
||||
continueScalar(offset) {
|
||||
let ch = this.buffer[offset];
|
||||
if (this.indentNext > 0) {
|
||||
let indent = 0;
|
||||
while (ch === ' ')
|
||||
ch = this.buffer[++indent + offset];
|
||||
if (ch === '\r') {
|
||||
const next = this.buffer[indent + offset + 1];
|
||||
if (next === '\n' || (!next && !this.atEnd))
|
||||
return offset + indent + 1;
|
||||
}
|
||||
return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd)
|
||||
? offset + indent
|
||||
: -1;
|
||||
}
|
||||
if (ch === '-' || ch === '.') {
|
||||
const dt = this.buffer.substr(offset, 3);
|
||||
if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3]))
|
||||
return -1;
|
||||
}
|
||||
return offset;
|
||||
}
|
||||
getLine() {
|
||||
let end = this.lineEndPos;
|
||||
if (typeof end !== 'number' || (end !== -1 && end < this.pos)) {
|
||||
end = this.buffer.indexOf('\n', this.pos);
|
||||
this.lineEndPos = end;
|
||||
}
|
||||
if (end === -1)
|
||||
return this.atEnd ? this.buffer.substring(this.pos) : null;
|
||||
if (this.buffer[end - 1] === '\r')
|
||||
end -= 1;
|
||||
return this.buffer.substring(this.pos, end);
|
||||
}
|
||||
hasChars(n) {
|
||||
return this.pos + n <= this.buffer.length;
|
||||
}
|
||||
setNext(state) {
|
||||
this.buffer = this.buffer.substring(this.pos);
|
||||
this.pos = 0;
|
||||
this.lineEndPos = null;
|
||||
this.next = state;
|
||||
return null;
|
||||
}
|
||||
peek(n) {
|
||||
return this.buffer.substr(this.pos, n);
|
||||
}
|
||||
*parseNext(next) {
|
||||
switch (next) {
|
||||
case 'stream':
|
||||
return yield* this.parseStream();
|
||||
case 'line-start':
|
||||
return yield* this.parseLineStart();
|
||||
case 'block-start':
|
||||
return yield* this.parseBlockStart();
|
||||
case 'doc':
|
||||
return yield* this.parseDocument();
|
||||
case 'flow':
|
||||
return yield* this.parseFlowCollection();
|
||||
case 'quoted-scalar':
|
||||
return yield* this.parseQuotedScalar();
|
||||
case 'block-scalar':
|
||||
return yield* this.parseBlockScalar();
|
||||
case 'plain-scalar':
|
||||
return yield* this.parsePlainScalar();
|
||||
}
|
||||
}
|
||||
*parseStream() {
|
||||
let line = this.getLine();
|
||||
if (line === null)
|
||||
return this.setNext('stream');
|
||||
if (line[0] === cst.BOM) {
|
||||
yield* this.pushCount(1);
|
||||
line = line.substring(1);
|
||||
}
|
||||
if (line[0] === '%') {
|
||||
let dirEnd = line.length;
|
||||
let cs = line.indexOf('#');
|
||||
while (cs !== -1) {
|
||||
const ch = line[cs - 1];
|
||||
if (ch === ' ' || ch === '\t') {
|
||||
dirEnd = cs - 1;
|
||||
break;
|
||||
}
|
||||
else {
|
||||
cs = line.indexOf('#', cs + 1);
|
||||
}
|
||||
}
|
||||
while (true) {
|
||||
const ch = line[dirEnd - 1];
|
||||
if (ch === ' ' || ch === '\t')
|
||||
dirEnd -= 1;
|
||||
else
|
||||
break;
|
||||
}
|
||||
const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true));
|
||||
yield* this.pushCount(line.length - n); // possible comment
|
||||
this.pushNewline();
|
||||
return 'stream';
|
||||
}
|
||||
if (this.atLineEnd()) {
|
||||
const sp = yield* this.pushSpaces(true);
|
||||
yield* this.pushCount(line.length - sp);
|
||||
yield* this.pushNewline();
|
||||
return 'stream';
|
||||
}
|
||||
yield cst.DOCUMENT;
|
||||
return yield* this.parseLineStart();
|
||||
}
|
||||
*parseLineStart() {
|
||||
const ch = this.charAt(0);
|
||||
if (!ch && !this.atEnd)
|
||||
return this.setNext('line-start');
|
||||
if (ch === '-' || ch === '.') {
|
||||
if (!this.atEnd && !this.hasChars(4))
|
||||
return this.setNext('line-start');
|
||||
const s = this.peek(3);
|
||||
if ((s === '---' || s === '...') && isEmpty(this.charAt(3))) {
|
||||
yield* this.pushCount(3);
|
||||
this.indentValue = 0;
|
||||
this.indentNext = 0;
|
||||
return s === '---' ? 'doc' : 'stream';
|
||||
}
|
||||
}
|
||||
this.indentValue = yield* this.pushSpaces(false);
|
||||
if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1)))
|
||||
this.indentNext = this.indentValue;
|
||||
return yield* this.parseBlockStart();
|
||||
}
|
||||
*parseBlockStart() {
|
||||
const [ch0, ch1] = this.peek(2);
|
||||
if (!ch1 && !this.atEnd)
|
||||
return this.setNext('block-start');
|
||||
if ((ch0 === '-' || ch0 === '?' || ch0 === ':') && isEmpty(ch1)) {
|
||||
const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true));
|
||||
this.indentNext = this.indentValue + 1;
|
||||
this.indentValue += n;
|
||||
return yield* this.parseBlockStart();
|
||||
}
|
||||
return 'doc';
|
||||
}
|
||||
*parseDocument() {
|
||||
yield* this.pushSpaces(true);
|
||||
const line = this.getLine();
|
||||
if (line === null)
|
||||
return this.setNext('doc');
|
||||
let n = yield* this.pushIndicators();
|
||||
switch (line[n]) {
|
||||
case '#':
|
||||
yield* this.pushCount(line.length - n);
|
||||
// fallthrough
|
||||
case undefined:
|
||||
yield* this.pushNewline();
|
||||
return yield* this.parseLineStart();
|
||||
case '{':
|
||||
case '[':
|
||||
yield* this.pushCount(1);
|
||||
this.flowKey = false;
|
||||
this.flowLevel = 1;
|
||||
return 'flow';
|
||||
case '}':
|
||||
case ']':
|
||||
// this is an error
|
||||
yield* this.pushCount(1);
|
||||
return 'doc';
|
||||
case '*':
|
||||
yield* this.pushUntil(isNotAnchorChar);
|
||||
return 'doc';
|
||||
case '"':
|
||||
case "'":
|
||||
return yield* this.parseQuotedScalar();
|
||||
case '|':
|
||||
case '>':
|
||||
n += yield* this.parseBlockScalarHeader();
|
||||
n += yield* this.pushSpaces(true);
|
||||
yield* this.pushCount(line.length - n);
|
||||
yield* this.pushNewline();
|
||||
return yield* this.parseBlockScalar();
|
||||
default:
|
||||
return yield* this.parsePlainScalar();
|
||||
}
|
||||
}
|
||||
*parseFlowCollection() {
|
||||
let nl, sp;
|
||||
let indent = -1;
|
||||
do {
|
||||
nl = yield* this.pushNewline();
|
||||
if (nl > 0) {
|
||||
sp = yield* this.pushSpaces(false);
|
||||
this.indentValue = indent = sp;
|
||||
}
|
||||
else {
|
||||
sp = 0;
|
||||
}
|
||||
sp += yield* this.pushSpaces(true);
|
||||
} while (nl + sp > 0);
|
||||
const line = this.getLine();
|
||||
if (line === null)
|
||||
return this.setNext('flow');
|
||||
if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') ||
|
||||
(indent === 0 &&
|
||||
(line.startsWith('---') || line.startsWith('...')) &&
|
||||
isEmpty(line[3]))) {
|
||||
// Allowing for the terminal ] or } at the same (rather than greater)
|
||||
// indent level as the initial [ or { is technically invalid, but
|
||||
// failing here would be surprising to users.
|
||||
const atFlowEndMarker = indent === this.indentNext - 1 &&
|
||||
this.flowLevel === 1 &&
|
||||
(line[0] === ']' || line[0] === '}');
|
||||
if (!atFlowEndMarker) {
|
||||
// this is an error
|
||||
this.flowLevel = 0;
|
||||
yield cst.FLOW_END;
|
||||
return yield* this.parseLineStart();
|
||||
}
|
||||
}
|
||||
let n = 0;
|
||||
while (line[n] === ',') {
|
||||
n += yield* this.pushCount(1);
|
||||
n += yield* this.pushSpaces(true);
|
||||
this.flowKey = false;
|
||||
}
|
||||
n += yield* this.pushIndicators();
|
||||
switch (line[n]) {
|
||||
case undefined:
|
||||
return 'flow';
|
||||
case '#':
|
||||
yield* this.pushCount(line.length - n);
|
||||
return 'flow';
|
||||
case '{':
|
||||
case '[':
|
||||
yield* this.pushCount(1);
|
||||
this.flowKey = false;
|
||||
this.flowLevel += 1;
|
||||
return 'flow';
|
||||
case '}':
|
||||
case ']':
|
||||
yield* this.pushCount(1);
|
||||
this.flowKey = true;
|
||||
this.flowLevel -= 1;
|
||||
return this.flowLevel ? 'flow' : 'doc';
|
||||
case '*':
|
||||
yield* this.pushUntil(isNotAnchorChar);
|
||||
return 'flow';
|
||||
case '"':
|
||||
case "'":
|
||||
this.flowKey = true;
|
||||
return yield* this.parseQuotedScalar();
|
||||
case ':': {
|
||||
const next = this.charAt(1);
|
||||
if (this.flowKey || isEmpty(next) || next === ',') {
|
||||
this.flowKey = false;
|
||||
yield* this.pushCount(1);
|
||||
yield* this.pushSpaces(true);
|
||||
return 'flow';
|
||||
}
|
||||
}
|
||||
// fallthrough
|
||||
default:
|
||||
this.flowKey = false;
|
||||
return yield* this.parsePlainScalar();
|
||||
}
|
||||
}
|
||||
*parseQuotedScalar() {
|
||||
const quote = this.charAt(0);
|
||||
let end = this.buffer.indexOf(quote, this.pos + 1);
|
||||
if (quote === "'") {
|
||||
while (end !== -1 && this.buffer[end + 1] === "'")
|
||||
end = this.buffer.indexOf("'", end + 2);
|
||||
}
|
||||
else {
|
||||
// double-quote
|
||||
while (end !== -1) {
|
||||
let n = 0;
|
||||
while (this.buffer[end - 1 - n] === '\\')
|
||||
n += 1;
|
||||
if (n % 2 === 0)
|
||||
break;
|
||||
end = this.buffer.indexOf('"', end + 1);
|
||||
}
|
||||
}
|
||||
// Only looking for newlines within the quotes
|
||||
const qb = this.buffer.substring(0, end);
|
||||
let nl = qb.indexOf('\n', this.pos);
|
||||
if (nl !== -1) {
|
||||
while (nl !== -1) {
|
||||
const cs = this.continueScalar(nl + 1);
|
||||
if (cs === -1)
|
||||
break;
|
||||
nl = qb.indexOf('\n', cs);
|
||||
}
|
||||
if (nl !== -1) {
|
||||
// this is an error caused by an unexpected unindent
|
||||
end = nl - (qb[nl - 1] === '\r' ? 2 : 1);
|
||||
}
|
||||
}
|
||||
if (end === -1) {
|
||||
if (!this.atEnd)
|
||||
return this.setNext('quoted-scalar');
|
||||
end = this.buffer.length;
|
||||
}
|
||||
yield* this.pushToIndex(end + 1, false);
|
||||
return this.flowLevel ? 'flow' : 'doc';
|
||||
}
|
||||
*parseBlockScalarHeader() {
|
||||
this.blockScalarIndent = -1;
|
||||
this.blockScalarKeep = false;
|
||||
let i = this.pos;
|
||||
while (true) {
|
||||
const ch = this.buffer[++i];
|
||||
if (ch === '+')
|
||||
this.blockScalarKeep = true;
|
||||
else if (ch > '0' && ch <= '9')
|
||||
this.blockScalarIndent = Number(ch) - 1;
|
||||
else if (ch !== '-')
|
||||
break;
|
||||
}
|
||||
return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#');
|
||||
}
|
||||
*parseBlockScalar() {
|
||||
let nl = this.pos - 1; // may be -1 if this.pos === 0
|
||||
let indent = 0;
|
||||
let ch;
|
||||
loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) {
|
||||
switch (ch) {
|
||||
case ' ':
|
||||
indent += 1;
|
||||
break;
|
||||
case '\n':
|
||||
nl = i;
|
||||
indent = 0;
|
||||
break;
|
||||
case '\r': {
|
||||
const next = this.buffer[i + 1];
|
||||
if (!next && !this.atEnd)
|
||||
return this.setNext('block-scalar');
|
||||
if (next === '\n')
|
||||
break;
|
||||
} // fallthrough
|
||||
default:
|
||||
break loop;
|
||||
}
|
||||
}
|
||||
if (!ch && !this.atEnd)
|
||||
return this.setNext('block-scalar');
|
||||
if (indent >= this.indentNext) {
|
||||
if (this.blockScalarIndent === -1)
|
||||
this.indentNext = indent;
|
||||
else {
|
||||
this.indentNext =
|
||||
this.blockScalarIndent + (this.indentNext === 0 ? 1 : this.indentNext);
|
||||
}
|
||||
do {
|
||||
const cs = this.continueScalar(nl + 1);
|
||||
if (cs === -1)
|
||||
break;
|
||||
nl = this.buffer.indexOf('\n', cs);
|
||||
} while (nl !== -1);
|
||||
if (nl === -1) {
|
||||
if (!this.atEnd)
|
||||
return this.setNext('block-scalar');
|
||||
nl = this.buffer.length;
|
||||
}
|
||||
}
|
||||
// Trailing insufficiently indented tabs are invalid.
|
||||
// To catch that during parsing, we include them in the block scalar value.
|
||||
let i = nl + 1;
|
||||
ch = this.buffer[i];
|
||||
while (ch === ' ')
|
||||
ch = this.buffer[++i];
|
||||
if (ch === '\t') {
|
||||
while (ch === '\t' || ch === ' ' || ch === '\r' || ch === '\n')
|
||||
ch = this.buffer[++i];
|
||||
nl = i - 1;
|
||||
}
|
||||
else if (!this.blockScalarKeep) {
|
||||
do {
|
||||
let i = nl - 1;
|
||||
let ch = this.buffer[i];
|
||||
if (ch === '\r')
|
||||
ch = this.buffer[--i];
|
||||
const lastChar = i; // Drop the line if last char not more indented
|
||||
while (ch === ' ')
|
||||
ch = this.buffer[--i];
|
||||
if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar)
|
||||
nl = i;
|
||||
else
|
||||
break;
|
||||
} while (true);
|
||||
}
|
||||
yield cst.SCALAR;
|
||||
yield* this.pushToIndex(nl + 1, true);
|
||||
return yield* this.parseLineStart();
|
||||
}
|
||||
*parsePlainScalar() {
|
||||
const inFlow = this.flowLevel > 0;
|
||||
let end = this.pos - 1;
|
||||
let i = this.pos - 1;
|
||||
let ch;
|
||||
while ((ch = this.buffer[++i])) {
|
||||
if (ch === ':') {
|
||||
const next = this.buffer[i + 1];
|
||||
if (isEmpty(next) || (inFlow && flowIndicatorChars.has(next)))
|
||||
break;
|
||||
end = i;
|
||||
}
|
||||
else if (isEmpty(ch)) {
|
||||
let next = this.buffer[i + 1];
|
||||
if (ch === '\r') {
|
||||
if (next === '\n') {
|
||||
i += 1;
|
||||
ch = '\n';
|
||||
next = this.buffer[i + 1];
|
||||
}
|
||||
else
|
||||
end = i;
|
||||
}
|
||||
if (next === '#' || (inFlow && flowIndicatorChars.has(next)))
|
||||
break;
|
||||
if (ch === '\n') {
|
||||
const cs = this.continueScalar(i + 1);
|
||||
if (cs === -1)
|
||||
break;
|
||||
i = Math.max(i, cs - 2); // to advance, but still account for ' #'
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (inFlow && flowIndicatorChars.has(ch))
|
||||
break;
|
||||
end = i;
|
||||
}
|
||||
}
|
||||
if (!ch && !this.atEnd)
|
||||
return this.setNext('plain-scalar');
|
||||
yield cst.SCALAR;
|
||||
yield* this.pushToIndex(end + 1, true);
|
||||
return inFlow ? 'flow' : 'doc';
|
||||
}
|
||||
*pushCount(n) {
|
||||
if (n > 0) {
|
||||
yield this.buffer.substr(this.pos, n);
|
||||
this.pos += n;
|
||||
return n;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
*pushToIndex(i, allowEmpty) {
|
||||
const s = this.buffer.slice(this.pos, i);
|
||||
if (s) {
|
||||
yield s;
|
||||
this.pos += s.length;
|
||||
return s.length;
|
||||
}
|
||||
else if (allowEmpty)
|
||||
yield '';
|
||||
return 0;
|
||||
}
|
||||
*pushIndicators() {
|
||||
switch (this.charAt(0)) {
|
||||
case '!':
|
||||
return ((yield* this.pushTag()) +
|
||||
(yield* this.pushSpaces(true)) +
|
||||
(yield* this.pushIndicators()));
|
||||
case '&':
|
||||
return ((yield* this.pushUntil(isNotAnchorChar)) +
|
||||
(yield* this.pushSpaces(true)) +
|
||||
(yield* this.pushIndicators()));
|
||||
case '-': // this is an error
|
||||
case '?': // this is an error outside flow collections
|
||||
case ':': {
|
||||
const inFlow = this.flowLevel > 0;
|
||||
const ch1 = this.charAt(1);
|
||||
if (isEmpty(ch1) || (inFlow && flowIndicatorChars.has(ch1))) {
|
||||
if (!inFlow)
|
||||
this.indentNext = this.indentValue + 1;
|
||||
else if (this.flowKey)
|
||||
this.flowKey = false;
|
||||
return ((yield* this.pushCount(1)) +
|
||||
(yield* this.pushSpaces(true)) +
|
||||
(yield* this.pushIndicators()));
|
||||
}
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
*pushTag() {
|
||||
if (this.charAt(1) === '<') {
|
||||
let i = this.pos + 2;
|
||||
let ch = this.buffer[i];
|
||||
while (!isEmpty(ch) && ch !== '>')
|
||||
ch = this.buffer[++i];
|
||||
return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false);
|
||||
}
|
||||
else {
|
||||
let i = this.pos + 1;
|
||||
let ch = this.buffer[i];
|
||||
while (ch) {
|
||||
if (tagChars.has(ch))
|
||||
ch = this.buffer[++i];
|
||||
else if (ch === '%' &&
|
||||
hexDigits.has(this.buffer[i + 1]) &&
|
||||
hexDigits.has(this.buffer[i + 2])) {
|
||||
ch = this.buffer[(i += 3)];
|
||||
}
|
||||
else
|
||||
break;
|
||||
}
|
||||
return yield* this.pushToIndex(i, false);
|
||||
}
|
||||
}
|
||||
*pushNewline() {
|
||||
const ch = this.buffer[this.pos];
|
||||
if (ch === '\n')
|
||||
return yield* this.pushCount(1);
|
||||
else if (ch === '\r' && this.charAt(1) === '\n')
|
||||
return yield* this.pushCount(2);
|
||||
else
|
||||
return 0;
|
||||
}
|
||||
*pushSpaces(allowTabs) {
|
||||
let i = this.pos - 1;
|
||||
let ch;
|
||||
do {
|
||||
ch = this.buffer[++i];
|
||||
} while (ch === ' ' || (allowTabs && ch === '\t'));
|
||||
const n = i - this.pos;
|
||||
if (n > 0) {
|
||||
yield this.buffer.substr(this.pos, n);
|
||||
this.pos = i;
|
||||
}
|
||||
return n;
|
||||
}
|
||||
*pushUntil(test) {
|
||||
let i = this.pos;
|
||||
let ch = this.buffer[i];
|
||||
while (!test(ch))
|
||||
ch = this.buffer[++i];
|
||||
return yield* this.pushToIndex(i, false);
|
||||
}
|
||||
}
|
||||
|
||||
exports.Lexer = Lexer;
|
22
node_modules/yaml/dist/parse/line-counter.d.ts
generated
vendored
Normal file
22
node_modules/yaml/dist/parse/line-counter.d.ts
generated
vendored
Normal file
@ -0,0 +1,22 @@
|
||||
/**
|
||||
* Tracks newlines during parsing in order to provide an efficient API for
|
||||
* determining the one-indexed `{ line, col }` position for any offset
|
||||
* within the input.
|
||||
*/
|
||||
export declare class LineCounter {
|
||||
lineStarts: number[];
|
||||
/**
|
||||
* Should be called in ascending order. Otherwise, call
|
||||
* `lineCounter.lineStarts.sort()` before calling `linePos()`.
|
||||
*/
|
||||
addNewLine: (offset: number) => number;
|
||||
/**
|
||||
* Performs a binary search and returns the 1-indexed { line, col }
|
||||
* position of `offset`. If `line === 0`, `addNewLine` has never been
|
||||
* called or `offset` is before the first known newline.
|
||||
*/
|
||||
linePos: (offset: number) => {
|
||||
line: number;
|
||||
col: number;
|
||||
};
|
||||
}
|
41
node_modules/yaml/dist/parse/line-counter.js
generated
vendored
Normal file
41
node_modules/yaml/dist/parse/line-counter.js
generated
vendored
Normal file
@ -0,0 +1,41 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Tracks newlines during parsing in order to provide an efficient API for
|
||||
* determining the one-indexed `{ line, col }` position for any offset
|
||||
* within the input.
|
||||
*/
|
||||
class LineCounter {
|
||||
constructor() {
|
||||
this.lineStarts = [];
|
||||
/**
|
||||
* Should be called in ascending order. Otherwise, call
|
||||
* `lineCounter.lineStarts.sort()` before calling `linePos()`.
|
||||
*/
|
||||
this.addNewLine = (offset) => this.lineStarts.push(offset);
|
||||
/**
|
||||
* Performs a binary search and returns the 1-indexed { line, col }
|
||||
* position of `offset`. If `line === 0`, `addNewLine` has never been
|
||||
* called or `offset` is before the first known newline.
|
||||
*/
|
||||
this.linePos = (offset) => {
|
||||
let low = 0;
|
||||
let high = this.lineStarts.length;
|
||||
while (low < high) {
|
||||
const mid = (low + high) >> 1; // Math.floor((low + high) / 2)
|
||||
if (this.lineStarts[mid] < offset)
|
||||
low = mid + 1;
|
||||
else
|
||||
high = mid;
|
||||
}
|
||||
if (this.lineStarts[low] === offset)
|
||||
return { line: low + 1, col: 1 };
|
||||
if (low === 0)
|
||||
return { line: 0, col: offset };
|
||||
const start = this.lineStarts[low - 1];
|
||||
return { line: low, col: offset - start + 1 };
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
exports.LineCounter = LineCounter;
|
84
node_modules/yaml/dist/parse/parser.d.ts
generated
vendored
Normal file
84
node_modules/yaml/dist/parse/parser.d.ts
generated
vendored
Normal file
@ -0,0 +1,84 @@
|
||||
import type { Token } from './cst';
|
||||
/**
|
||||
* A YAML concrete syntax tree (CST) parser
|
||||
*
|
||||
* ```ts
|
||||
* const src: string = ...
|
||||
* for (const token of new Parser().parse(src)) {
|
||||
* // token: Token
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* To use the parser with a user-provided lexer:
|
||||
*
|
||||
* ```ts
|
||||
* function* parse(source: string, lexer: Lexer) {
|
||||
* const parser = new Parser()
|
||||
* for (const lexeme of lexer.lex(source))
|
||||
* yield* parser.next(lexeme)
|
||||
* yield* parser.end()
|
||||
* }
|
||||
*
|
||||
* const src: string = ...
|
||||
* const lexer = new Lexer()
|
||||
* for (const token of parse(src, lexer)) {
|
||||
* // token: Token
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export declare class Parser {
|
||||
private onNewLine?;
|
||||
/** If true, space and sequence indicators count as indentation */
|
||||
private atNewLine;
|
||||
/** If true, next token is a scalar value */
|
||||
private atScalar;
|
||||
/** Current indentation level */
|
||||
private indent;
|
||||
/** Current offset since the start of parsing */
|
||||
offset: number;
|
||||
/** On the same line with a block map key */
|
||||
private onKeyLine;
|
||||
/** Top indicates the node that's currently being built */
|
||||
stack: Token[];
|
||||
/** The source of the current token, set in parse() */
|
||||
private source;
|
||||
/** The type of the current token, set in parse() */
|
||||
private type;
|
||||
/**
|
||||
* @param onNewLine - If defined, called separately with the start position of
|
||||
* each new line (in `parse()`, including the start of input).
|
||||
*/
|
||||
constructor(onNewLine?: (offset: number) => void);
|
||||
/**
|
||||
* Parse `source` as a YAML stream.
|
||||
* If `incomplete`, a part of the last line may be left as a buffer for the next call.
|
||||
*
|
||||
* Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.
|
||||
*
|
||||
* @returns A generator of tokens representing each directive, document, and other structure.
|
||||
*/
|
||||
parse(source: string, incomplete?: boolean): Generator<Token, void>;
|
||||
/**
|
||||
* Advance the parser by the `source` of one lexical token.
|
||||
*/
|
||||
next(source: string): Generator<Token, void>;
|
||||
private lexer;
|
||||
/** Call at end of input to push out any remaining constructions */
|
||||
end(): Generator<Token, void>;
|
||||
private get sourceToken();
|
||||
private step;
|
||||
private peek;
|
||||
private pop;
|
||||
private stream;
|
||||
private document;
|
||||
private scalar;
|
||||
private blockScalar;
|
||||
private blockMap;
|
||||
private blockSequence;
|
||||
private flowCollection;
|
||||
private flowScalar;
|
||||
private startBlockValue;
|
||||
private atIndentedComment;
|
||||
private documentEnd;
|
||||
private lineEnd;
|
||||
}
|
972
node_modules/yaml/dist/parse/parser.js
generated
vendored
Normal file
972
node_modules/yaml/dist/parse/parser.js
generated
vendored
Normal file
@ -0,0 +1,972 @@
|
||||
'use strict';
|
||||
|
||||
var node_process = require('process');
|
||||
var cst = require('./cst.js');
|
||||
var lexer = require('./lexer.js');
|
||||
|
||||
function includesToken(list, type) {
|
||||
for (let i = 0; i < list.length; ++i)
|
||||
if (list[i].type === type)
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
function findNonEmptyIndex(list) {
|
||||
for (let i = 0; i < list.length; ++i) {
|
||||
switch (list[i].type) {
|
||||
case 'space':
|
||||
case 'comment':
|
||||
case 'newline':
|
||||
break;
|
||||
default:
|
||||
return i;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
function isFlowToken(token) {
|
||||
switch (token?.type) {
|
||||
case 'alias':
|
||||
case 'scalar':
|
||||
case 'single-quoted-scalar':
|
||||
case 'double-quoted-scalar':
|
||||
case 'flow-collection':
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
function getPrevProps(parent) {
|
||||
switch (parent.type) {
|
||||
case 'document':
|
||||
return parent.start;
|
||||
case 'block-map': {
|
||||
const it = parent.items[parent.items.length - 1];
|
||||
return it.sep ?? it.start;
|
||||
}
|
||||
case 'block-seq':
|
||||
return parent.items[parent.items.length - 1].start;
|
||||
/* istanbul ignore next should not happen */
|
||||
default:
|
||||
return [];
|
||||
}
|
||||
}
|
||||
/** Note: May modify input array */
|
||||
function getFirstKeyStartProps(prev) {
|
||||
if (prev.length === 0)
|
||||
return [];
|
||||
let i = prev.length;
|
||||
loop: while (--i >= 0) {
|
||||
switch (prev[i].type) {
|
||||
case 'doc-start':
|
||||
case 'explicit-key-ind':
|
||||
case 'map-value-ind':
|
||||
case 'seq-item-ind':
|
||||
case 'newline':
|
||||
break loop;
|
||||
}
|
||||
}
|
||||
while (prev[++i]?.type === 'space') {
|
||||
/* loop */
|
||||
}
|
||||
return prev.splice(i, prev.length);
|
||||
}
|
||||
function fixFlowSeqItems(fc) {
|
||||
if (fc.start.type === 'flow-seq-start') {
|
||||
for (const it of fc.items) {
|
||||
if (it.sep &&
|
||||
!it.value &&
|
||||
!includesToken(it.start, 'explicit-key-ind') &&
|
||||
!includesToken(it.sep, 'map-value-ind')) {
|
||||
if (it.key)
|
||||
it.value = it.key;
|
||||
delete it.key;
|
||||
if (isFlowToken(it.value)) {
|
||||
if (it.value.end)
|
||||
Array.prototype.push.apply(it.value.end, it.sep);
|
||||
else
|
||||
it.value.end = it.sep;
|
||||
}
|
||||
else
|
||||
Array.prototype.push.apply(it.start, it.sep);
|
||||
delete it.sep;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* A YAML concrete syntax tree (CST) parser
|
||||
*
|
||||
* ```ts
|
||||
* const src: string = ...
|
||||
* for (const token of new Parser().parse(src)) {
|
||||
* // token: Token
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* To use the parser with a user-provided lexer:
|
||||
*
|
||||
* ```ts
|
||||
* function* parse(source: string, lexer: Lexer) {
|
||||
* const parser = new Parser()
|
||||
* for (const lexeme of lexer.lex(source))
|
||||
* yield* parser.next(lexeme)
|
||||
* yield* parser.end()
|
||||
* }
|
||||
*
|
||||
* const src: string = ...
|
||||
* const lexer = new Lexer()
|
||||
* for (const token of parse(src, lexer)) {
|
||||
* // token: Token
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
class Parser {
|
||||
/**
|
||||
* @param onNewLine - If defined, called separately with the start position of
|
||||
* each new line (in `parse()`, including the start of input).
|
||||
*/
|
||||
constructor(onNewLine) {
|
||||
/** If true, space and sequence indicators count as indentation */
|
||||
this.atNewLine = true;
|
||||
/** If true, next token is a scalar value */
|
||||
this.atScalar = false;
|
||||
/** Current indentation level */
|
||||
this.indent = 0;
|
||||
/** Current offset since the start of parsing */
|
||||
this.offset = 0;
|
||||
/** On the same line with a block map key */
|
||||
this.onKeyLine = false;
|
||||
/** Top indicates the node that's currently being built */
|
||||
this.stack = [];
|
||||
/** The source of the current token, set in parse() */
|
||||
this.source = '';
|
||||
/** The type of the current token, set in parse() */
|
||||
this.type = '';
|
||||
// Must be defined after `next()`
|
||||
this.lexer = new lexer.Lexer();
|
||||
this.onNewLine = onNewLine;
|
||||
}
|
||||
/**
|
||||
* Parse `source` as a YAML stream.
|
||||
* If `incomplete`, a part of the last line may be left as a buffer for the next call.
|
||||
*
|
||||
* Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.
|
||||
*
|
||||
* @returns A generator of tokens representing each directive, document, and other structure.
|
||||
*/
|
||||
*parse(source, incomplete = false) {
|
||||
if (this.onNewLine && this.offset === 0)
|
||||
this.onNewLine(0);
|
||||
for (const lexeme of this.lexer.lex(source, incomplete))
|
||||
yield* this.next(lexeme);
|
||||
if (!incomplete)
|
||||
yield* this.end();
|
||||
}
|
||||
/**
|
||||
* Advance the parser by the `source` of one lexical token.
|
||||
*/
|
||||
*next(source) {
|
||||
this.source = source;
|
||||
if (node_process.env.LOG_TOKENS)
|
||||
console.log('|', cst.prettyToken(source));
|
||||
if (this.atScalar) {
|
||||
this.atScalar = false;
|
||||
yield* this.step();
|
||||
this.offset += source.length;
|
||||
return;
|
||||
}
|
||||
const type = cst.tokenType(source);
|
||||
if (!type) {
|
||||
const message = `Not a YAML token: ${source}`;
|
||||
yield* this.pop({ type: 'error', offset: this.offset, message, source });
|
||||
this.offset += source.length;
|
||||
}
|
||||
else if (type === 'scalar') {
|
||||
this.atNewLine = false;
|
||||
this.atScalar = true;
|
||||
this.type = 'scalar';
|
||||
}
|
||||
else {
|
||||
this.type = type;
|
||||
yield* this.step();
|
||||
switch (type) {
|
||||
case 'newline':
|
||||
this.atNewLine = true;
|
||||
this.indent = 0;
|
||||
if (this.onNewLine)
|
||||
this.onNewLine(this.offset + source.length);
|
||||
break;
|
||||
case 'space':
|
||||
if (this.atNewLine && source[0] === ' ')
|
||||
this.indent += source.length;
|
||||
break;
|
||||
case 'explicit-key-ind':
|
||||
case 'map-value-ind':
|
||||
case 'seq-item-ind':
|
||||
if (this.atNewLine)
|
||||
this.indent += source.length;
|
||||
break;
|
||||
case 'doc-mode':
|
||||
case 'flow-error-end':
|
||||
return;
|
||||
default:
|
||||
this.atNewLine = false;
|
||||
}
|
||||
this.offset += source.length;
|
||||
}
|
||||
}
|
||||
/** Call at end of input to push out any remaining constructions */
|
||||
*end() {
|
||||
while (this.stack.length > 0)
|
||||
yield* this.pop();
|
||||
}
|
||||
get sourceToken() {
|
||||
const st = {
|
||||
type: this.type,
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
source: this.source
|
||||
};
|
||||
return st;
|
||||
}
|
||||
*step() {
|
||||
const top = this.peek(1);
|
||||
if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) {
|
||||
while (this.stack.length > 0)
|
||||
yield* this.pop();
|
||||
this.stack.push({
|
||||
type: 'doc-end',
|
||||
offset: this.offset,
|
||||
source: this.source
|
||||
});
|
||||
return;
|
||||
}
|
||||
if (!top)
|
||||
return yield* this.stream();
|
||||
switch (top.type) {
|
||||
case 'document':
|
||||
return yield* this.document(top);
|
||||
case 'alias':
|
||||
case 'scalar':
|
||||
case 'single-quoted-scalar':
|
||||
case 'double-quoted-scalar':
|
||||
return yield* this.scalar(top);
|
||||
case 'block-scalar':
|
||||
return yield* this.blockScalar(top);
|
||||
case 'block-map':
|
||||
return yield* this.blockMap(top);
|
||||
case 'block-seq':
|
||||
return yield* this.blockSequence(top);
|
||||
case 'flow-collection':
|
||||
return yield* this.flowCollection(top);
|
||||
case 'doc-end':
|
||||
return yield* this.documentEnd(top);
|
||||
}
|
||||
/* istanbul ignore next should not happen */
|
||||
yield* this.pop();
|
||||
}
|
||||
peek(n) {
|
||||
return this.stack[this.stack.length - n];
|
||||
}
|
||||
*pop(error) {
|
||||
const token = error ?? this.stack.pop();
|
||||
/* istanbul ignore if should not happen */
|
||||
if (!token) {
|
||||
const message = 'Tried to pop an empty stack';
|
||||
yield { type: 'error', offset: this.offset, source: '', message };
|
||||
}
|
||||
else if (this.stack.length === 0) {
|
||||
yield token;
|
||||
}
|
||||
else {
|
||||
const top = this.peek(1);
|
||||
if (token.type === 'block-scalar') {
|
||||
// Block scalars use their parent rather than header indent
|
||||
token.indent = 'indent' in top ? top.indent : 0;
|
||||
}
|
||||
else if (token.type === 'flow-collection' && top.type === 'document') {
|
||||
// Ignore all indent for top-level flow collections
|
||||
token.indent = 0;
|
||||
}
|
||||
if (token.type === 'flow-collection')
|
||||
fixFlowSeqItems(token);
|
||||
switch (top.type) {
|
||||
case 'document':
|
||||
top.value = token;
|
||||
break;
|
||||
case 'block-scalar':
|
||||
top.props.push(token); // error
|
||||
break;
|
||||
case 'block-map': {
|
||||
const it = top.items[top.items.length - 1];
|
||||
if (it.value) {
|
||||
top.items.push({ start: [], key: token, sep: [] });
|
||||
this.onKeyLine = true;
|
||||
return;
|
||||
}
|
||||
else if (it.sep) {
|
||||
it.value = token;
|
||||
}
|
||||
else {
|
||||
Object.assign(it, { key: token, sep: [] });
|
||||
this.onKeyLine = !it.explicitKey;
|
||||
return;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'block-seq': {
|
||||
const it = top.items[top.items.length - 1];
|
||||
if (it.value)
|
||||
top.items.push({ start: [], value: token });
|
||||
else
|
||||
it.value = token;
|
||||
break;
|
||||
}
|
||||
case 'flow-collection': {
|
||||
const it = top.items[top.items.length - 1];
|
||||
if (!it || it.value)
|
||||
top.items.push({ start: [], key: token, sep: [] });
|
||||
else if (it.sep)
|
||||
it.value = token;
|
||||
else
|
||||
Object.assign(it, { key: token, sep: [] });
|
||||
return;
|
||||
}
|
||||
/* istanbul ignore next should not happen */
|
||||
default:
|
||||
yield* this.pop();
|
||||
yield* this.pop(token);
|
||||
}
|
||||
if ((top.type === 'document' ||
|
||||
top.type === 'block-map' ||
|
||||
top.type === 'block-seq') &&
|
||||
(token.type === 'block-map' || token.type === 'block-seq')) {
|
||||
const last = token.items[token.items.length - 1];
|
||||
if (last &&
|
||||
!last.sep &&
|
||||
!last.value &&
|
||||
last.start.length > 0 &&
|
||||
findNonEmptyIndex(last.start) === -1 &&
|
||||
(token.indent === 0 ||
|
||||
last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) {
|
||||
if (top.type === 'document')
|
||||
top.end = last.start;
|
||||
else
|
||||
top.items.push({ start: last.start });
|
||||
token.items.splice(-1, 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
*stream() {
|
||||
switch (this.type) {
|
||||
case 'directive-line':
|
||||
yield { type: 'directive', offset: this.offset, source: this.source };
|
||||
return;
|
||||
case 'byte-order-mark':
|
||||
case 'space':
|
||||
case 'comment':
|
||||
case 'newline':
|
||||
yield this.sourceToken;
|
||||
return;
|
||||
case 'doc-mode':
|
||||
case 'doc-start': {
|
||||
const doc = {
|
||||
type: 'document',
|
||||
offset: this.offset,
|
||||
start: []
|
||||
};
|
||||
if (this.type === 'doc-start')
|
||||
doc.start.push(this.sourceToken);
|
||||
this.stack.push(doc);
|
||||
return;
|
||||
}
|
||||
}
|
||||
yield {
|
||||
type: 'error',
|
||||
offset: this.offset,
|
||||
message: `Unexpected ${this.type} token in YAML stream`,
|
||||
source: this.source
|
||||
};
|
||||
}
|
||||
*document(doc) {
|
||||
if (doc.value)
|
||||
return yield* this.lineEnd(doc);
|
||||
switch (this.type) {
|
||||
case 'doc-start': {
|
||||
if (findNonEmptyIndex(doc.start) !== -1) {
|
||||
yield* this.pop();
|
||||
yield* this.step();
|
||||
}
|
||||
else
|
||||
doc.start.push(this.sourceToken);
|
||||
return;
|
||||
}
|
||||
case 'anchor':
|
||||
case 'tag':
|
||||
case 'space':
|
||||
case 'comment':
|
||||
case 'newline':
|
||||
doc.start.push(this.sourceToken);
|
||||
return;
|
||||
}
|
||||
const bv = this.startBlockValue(doc);
|
||||
if (bv)
|
||||
this.stack.push(bv);
|
||||
else {
|
||||
yield {
|
||||
type: 'error',
|
||||
offset: this.offset,
|
||||
message: `Unexpected ${this.type} token in YAML document`,
|
||||
source: this.source
|
||||
};
|
||||
}
|
||||
}
|
||||
*scalar(scalar) {
|
||||
if (this.type === 'map-value-ind') {
|
||||
const prev = getPrevProps(this.peek(2));
|
||||
const start = getFirstKeyStartProps(prev);
|
||||
let sep;
|
||||
if (scalar.end) {
|
||||
sep = scalar.end;
|
||||
sep.push(this.sourceToken);
|
||||
delete scalar.end;
|
||||
}
|
||||
else
|
||||
sep = [this.sourceToken];
|
||||
const map = {
|
||||
type: 'block-map',
|
||||
offset: scalar.offset,
|
||||
indent: scalar.indent,
|
||||
items: [{ start, key: scalar, sep }]
|
||||
};
|
||||
this.onKeyLine = true;
|
||||
this.stack[this.stack.length - 1] = map;
|
||||
}
|
||||
else
|
||||
yield* this.lineEnd(scalar);
|
||||
}
|
||||
*blockScalar(scalar) {
|
||||
switch (this.type) {
|
||||
case 'space':
|
||||
case 'comment':
|
||||
case 'newline':
|
||||
scalar.props.push(this.sourceToken);
|
||||
return;
|
||||
case 'scalar':
|
||||
scalar.source = this.source;
|
||||
// block-scalar source includes trailing newline
|
||||
this.atNewLine = true;
|
||||
this.indent = 0;
|
||||
if (this.onNewLine) {
|
||||
let nl = this.source.indexOf('\n') + 1;
|
||||
while (nl !== 0) {
|
||||
this.onNewLine(this.offset + nl);
|
||||
nl = this.source.indexOf('\n', nl) + 1;
|
||||
}
|
||||
}
|
||||
yield* this.pop();
|
||||
break;
|
||||
/* istanbul ignore next should not happen */
|
||||
default:
|
||||
yield* this.pop();
|
||||
yield* this.step();
|
||||
}
|
||||
}
|
||||
*blockMap(map) {
|
||||
const it = map.items[map.items.length - 1];
|
||||
// it.sep is true-ish if pair already has key or : separator
|
||||
switch (this.type) {
|
||||
case 'newline':
|
||||
this.onKeyLine = false;
|
||||
if (it.value) {
|
||||
const end = 'end' in it.value ? it.value.end : undefined;
|
||||
const last = Array.isArray(end) ? end[end.length - 1] : undefined;
|
||||
if (last?.type === 'comment')
|
||||
end?.push(this.sourceToken);
|
||||
else
|
||||
map.items.push({ start: [this.sourceToken] });
|
||||
}
|
||||
else if (it.sep) {
|
||||
it.sep.push(this.sourceToken);
|
||||
}
|
||||
else {
|
||||
it.start.push(this.sourceToken);
|
||||
}
|
||||
return;
|
||||
case 'space':
|
||||
case 'comment':
|
||||
if (it.value) {
|
||||
map.items.push({ start: [this.sourceToken] });
|
||||
}
|
||||
else if (it.sep) {
|
||||
it.sep.push(this.sourceToken);
|
||||
}
|
||||
else {
|
||||
if (this.atIndentedComment(it.start, map.indent)) {
|
||||
const prev = map.items[map.items.length - 2];
|
||||
const end = prev?.value?.end;
|
||||
if (Array.isArray(end)) {
|
||||
Array.prototype.push.apply(end, it.start);
|
||||
end.push(this.sourceToken);
|
||||
map.items.pop();
|
||||
return;
|
||||
}
|
||||
}
|
||||
it.start.push(this.sourceToken);
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (this.indent >= map.indent) {
|
||||
const atMapIndent = !this.onKeyLine && this.indent === map.indent;
|
||||
const atNextItem = atMapIndent &&
|
||||
(it.sep || it.explicitKey) &&
|
||||
this.type !== 'seq-item-ind';
|
||||
// For empty nodes, assign newline-separated not indented empty tokens to following node
|
||||
let start = [];
|
||||
if (atNextItem && it.sep && !it.value) {
|
||||
const nl = [];
|
||||
for (let i = 0; i < it.sep.length; ++i) {
|
||||
const st = it.sep[i];
|
||||
switch (st.type) {
|
||||
case 'newline':
|
||||
nl.push(i);
|
||||
break;
|
||||
case 'space':
|
||||
break;
|
||||
case 'comment':
|
||||
if (st.indent > map.indent)
|
||||
nl.length = 0;
|
||||
break;
|
||||
default:
|
||||
nl.length = 0;
|
||||
}
|
||||
}
|
||||
if (nl.length >= 2)
|
||||
start = it.sep.splice(nl[1]);
|
||||
}
|
||||
switch (this.type) {
|
||||
case 'anchor':
|
||||
case 'tag':
|
||||
if (atNextItem || it.value) {
|
||||
start.push(this.sourceToken);
|
||||
map.items.push({ start });
|
||||
this.onKeyLine = true;
|
||||
}
|
||||
else if (it.sep) {
|
||||
it.sep.push(this.sourceToken);
|
||||
}
|
||||
else {
|
||||
it.start.push(this.sourceToken);
|
||||
}
|
||||
return;
|
||||
case 'explicit-key-ind':
|
||||
if (!it.sep && !it.explicitKey) {
|
||||
it.start.push(this.sourceToken);
|
||||
it.explicitKey = true;
|
||||
}
|
||||
else if (atNextItem || it.value) {
|
||||
start.push(this.sourceToken);
|
||||
map.items.push({ start, explicitKey: true });
|
||||
}
|
||||
else {
|
||||
this.stack.push({
|
||||
type: 'block-map',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
items: [{ start: [this.sourceToken], explicitKey: true }]
|
||||
});
|
||||
}
|
||||
this.onKeyLine = true;
|
||||
return;
|
||||
case 'map-value-ind':
|
||||
if (it.explicitKey) {
|
||||
if (!it.sep) {
|
||||
if (includesToken(it.start, 'newline')) {
|
||||
Object.assign(it, { key: null, sep: [this.sourceToken] });
|
||||
}
|
||||
else {
|
||||
const start = getFirstKeyStartProps(it.start);
|
||||
this.stack.push({
|
||||
type: 'block-map',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
items: [{ start, key: null, sep: [this.sourceToken] }]
|
||||
});
|
||||
}
|
||||
}
|
||||
else if (it.value) {
|
||||
map.items.push({ start: [], key: null, sep: [this.sourceToken] });
|
||||
}
|
||||
else if (includesToken(it.sep, 'map-value-ind')) {
|
||||
this.stack.push({
|
||||
type: 'block-map',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
items: [{ start, key: null, sep: [this.sourceToken] }]
|
||||
});
|
||||
}
|
||||
else if (isFlowToken(it.key) &&
|
||||
!includesToken(it.sep, 'newline')) {
|
||||
const start = getFirstKeyStartProps(it.start);
|
||||
const key = it.key;
|
||||
const sep = it.sep;
|
||||
sep.push(this.sourceToken);
|
||||
// @ts-expect-error type guard is wrong here
|
||||
delete it.key;
|
||||
// @ts-expect-error type guard is wrong here
|
||||
delete it.sep;
|
||||
this.stack.push({
|
||||
type: 'block-map',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
items: [{ start, key, sep }]
|
||||
});
|
||||
}
|
||||
else if (start.length > 0) {
|
||||
// Not actually at next item
|
||||
it.sep = it.sep.concat(start, this.sourceToken);
|
||||
}
|
||||
else {
|
||||
it.sep.push(this.sourceToken);
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (!it.sep) {
|
||||
Object.assign(it, { key: null, sep: [this.sourceToken] });
|
||||
}
|
||||
else if (it.value || atNextItem) {
|
||||
map.items.push({ start, key: null, sep: [this.sourceToken] });
|
||||
}
|
||||
else if (includesToken(it.sep, 'map-value-ind')) {
|
||||
this.stack.push({
|
||||
type: 'block-map',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
items: [{ start: [], key: null, sep: [this.sourceToken] }]
|
||||
});
|
||||
}
|
||||
else {
|
||||
it.sep.push(this.sourceToken);
|
||||
}
|
||||
}
|
||||
this.onKeyLine = true;
|
||||
return;
|
||||
case 'alias':
|
||||
case 'scalar':
|
||||
case 'single-quoted-scalar':
|
||||
case 'double-quoted-scalar': {
|
||||
const fs = this.flowScalar(this.type);
|
||||
if (atNextItem || it.value) {
|
||||
map.items.push({ start, key: fs, sep: [] });
|
||||
this.onKeyLine = true;
|
||||
}
|
||||
else if (it.sep) {
|
||||
this.stack.push(fs);
|
||||
}
|
||||
else {
|
||||
Object.assign(it, { key: fs, sep: [] });
|
||||
this.onKeyLine = true;
|
||||
}
|
||||
return;
|
||||
}
|
||||
default: {
|
||||
const bv = this.startBlockValue(map);
|
||||
if (bv) {
|
||||
if (bv.type === 'block-seq') {
|
||||
if (!it.explicitKey &&
|
||||
it.sep &&
|
||||
!includesToken(it.sep, 'newline')) {
|
||||
yield* this.pop({
|
||||
type: 'error',
|
||||
offset: this.offset,
|
||||
message: 'Unexpected block-seq-ind on same line with key',
|
||||
source: this.source
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
else if (atMapIndent) {
|
||||
map.items.push({ start });
|
||||
}
|
||||
this.stack.push(bv);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
yield* this.pop();
|
||||
yield* this.step();
|
||||
}
|
||||
*blockSequence(seq) {
|
||||
const it = seq.items[seq.items.length - 1];
|
||||
switch (this.type) {
|
||||
case 'newline':
|
||||
if (it.value) {
|
||||
const end = 'end' in it.value ? it.value.end : undefined;
|
||||
const last = Array.isArray(end) ? end[end.length - 1] : undefined;
|
||||
if (last?.type === 'comment')
|
||||
end?.push(this.sourceToken);
|
||||
else
|
||||
seq.items.push({ start: [this.sourceToken] });
|
||||
}
|
||||
else
|
||||
it.start.push(this.sourceToken);
|
||||
return;
|
||||
case 'space':
|
||||
case 'comment':
|
||||
if (it.value)
|
||||
seq.items.push({ start: [this.sourceToken] });
|
||||
else {
|
||||
if (this.atIndentedComment(it.start, seq.indent)) {
|
||||
const prev = seq.items[seq.items.length - 2];
|
||||
const end = prev?.value?.end;
|
||||
if (Array.isArray(end)) {
|
||||
Array.prototype.push.apply(end, it.start);
|
||||
end.push(this.sourceToken);
|
||||
seq.items.pop();
|
||||
return;
|
||||
}
|
||||
}
|
||||
it.start.push(this.sourceToken);
|
||||
}
|
||||
return;
|
||||
case 'anchor':
|
||||
case 'tag':
|
||||
if (it.value || this.indent <= seq.indent)
|
||||
break;
|
||||
it.start.push(this.sourceToken);
|
||||
return;
|
||||
case 'seq-item-ind':
|
||||
if (this.indent !== seq.indent)
|
||||
break;
|
||||
if (it.value || includesToken(it.start, 'seq-item-ind'))
|
||||
seq.items.push({ start: [this.sourceToken] });
|
||||
else
|
||||
it.start.push(this.sourceToken);
|
||||
return;
|
||||
}
|
||||
if (this.indent > seq.indent) {
|
||||
const bv = this.startBlockValue(seq);
|
||||
if (bv) {
|
||||
this.stack.push(bv);
|
||||
return;
|
||||
}
|
||||
}
|
||||
yield* this.pop();
|
||||
yield* this.step();
|
||||
}
|
||||
*flowCollection(fc) {
|
||||
const it = fc.items[fc.items.length - 1];
|
||||
if (this.type === 'flow-error-end') {
|
||||
let top;
|
||||
do {
|
||||
yield* this.pop();
|
||||
top = this.peek(1);
|
||||
} while (top && top.type === 'flow-collection');
|
||||
}
|
||||
else if (fc.end.length === 0) {
|
||||
switch (this.type) {
|
||||
case 'comma':
|
||||
case 'explicit-key-ind':
|
||||
if (!it || it.sep)
|
||||
fc.items.push({ start: [this.sourceToken] });
|
||||
else
|
||||
it.start.push(this.sourceToken);
|
||||
return;
|
||||
case 'map-value-ind':
|
||||
if (!it || it.value)
|
||||
fc.items.push({ start: [], key: null, sep: [this.sourceToken] });
|
||||
else if (it.sep)
|
||||
it.sep.push(this.sourceToken);
|
||||
else
|
||||
Object.assign(it, { key: null, sep: [this.sourceToken] });
|
||||
return;
|
||||
case 'space':
|
||||
case 'comment':
|
||||
case 'newline':
|
||||
case 'anchor':
|
||||
case 'tag':
|
||||
if (!it || it.value)
|
||||
fc.items.push({ start: [this.sourceToken] });
|
||||
else if (it.sep)
|
||||
it.sep.push(this.sourceToken);
|
||||
else
|
||||
it.start.push(this.sourceToken);
|
||||
return;
|
||||
case 'alias':
|
||||
case 'scalar':
|
||||
case 'single-quoted-scalar':
|
||||
case 'double-quoted-scalar': {
|
||||
const fs = this.flowScalar(this.type);
|
||||
if (!it || it.value)
|
||||
fc.items.push({ start: [], key: fs, sep: [] });
|
||||
else if (it.sep)
|
||||
this.stack.push(fs);
|
||||
else
|
||||
Object.assign(it, { key: fs, sep: [] });
|
||||
return;
|
||||
}
|
||||
case 'flow-map-end':
|
||||
case 'flow-seq-end':
|
||||
fc.end.push(this.sourceToken);
|
||||
return;
|
||||
}
|
||||
const bv = this.startBlockValue(fc);
|
||||
/* istanbul ignore else should not happen */
|
||||
if (bv)
|
||||
this.stack.push(bv);
|
||||
else {
|
||||
yield* this.pop();
|
||||
yield* this.step();
|
||||
}
|
||||
}
|
||||
else {
|
||||
const parent = this.peek(2);
|
||||
if (parent.type === 'block-map' &&
|
||||
((this.type === 'map-value-ind' && parent.indent === fc.indent) ||
|
||||
(this.type === 'newline' &&
|
||||
!parent.items[parent.items.length - 1].sep))) {
|
||||
yield* this.pop();
|
||||
yield* this.step();
|
||||
}
|
||||
else if (this.type === 'map-value-ind' &&
|
||||
parent.type !== 'flow-collection') {
|
||||
const prev = getPrevProps(parent);
|
||||
const start = getFirstKeyStartProps(prev);
|
||||
fixFlowSeqItems(fc);
|
||||
const sep = fc.end.splice(1, fc.end.length);
|
||||
sep.push(this.sourceToken);
|
||||
const map = {
|
||||
type: 'block-map',
|
||||
offset: fc.offset,
|
||||
indent: fc.indent,
|
||||
items: [{ start, key: fc, sep }]
|
||||
};
|
||||
this.onKeyLine = true;
|
||||
this.stack[this.stack.length - 1] = map;
|
||||
}
|
||||
else {
|
||||
yield* this.lineEnd(fc);
|
||||
}
|
||||
}
|
||||
}
|
||||
flowScalar(type) {
|
||||
if (this.onNewLine) {
|
||||
let nl = this.source.indexOf('\n') + 1;
|
||||
while (nl !== 0) {
|
||||
this.onNewLine(this.offset + nl);
|
||||
nl = this.source.indexOf('\n', nl) + 1;
|
||||
}
|
||||
}
|
||||
return {
|
||||
type,
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
source: this.source
|
||||
};
|
||||
}
|
||||
startBlockValue(parent) {
|
||||
switch (this.type) {
|
||||
case 'alias':
|
||||
case 'scalar':
|
||||
case 'single-quoted-scalar':
|
||||
case 'double-quoted-scalar':
|
||||
return this.flowScalar(this.type);
|
||||
case 'block-scalar-header':
|
||||
return {
|
||||
type: 'block-scalar',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
props: [this.sourceToken],
|
||||
source: ''
|
||||
};
|
||||
case 'flow-map-start':
|
||||
case 'flow-seq-start':
|
||||
return {
|
||||
type: 'flow-collection',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
start: this.sourceToken,
|
||||
items: [],
|
||||
end: []
|
||||
};
|
||||
case 'seq-item-ind':
|
||||
return {
|
||||
type: 'block-seq',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
items: [{ start: [this.sourceToken] }]
|
||||
};
|
||||
case 'explicit-key-ind': {
|
||||
this.onKeyLine = true;
|
||||
const prev = getPrevProps(parent);
|
||||
const start = getFirstKeyStartProps(prev);
|
||||
start.push(this.sourceToken);
|
||||
return {
|
||||
type: 'block-map',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
items: [{ start, explicitKey: true }]
|
||||
};
|
||||
}
|
||||
case 'map-value-ind': {
|
||||
this.onKeyLine = true;
|
||||
const prev = getPrevProps(parent);
|
||||
const start = getFirstKeyStartProps(prev);
|
||||
return {
|
||||
type: 'block-map',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
items: [{ start, key: null, sep: [this.sourceToken] }]
|
||||
};
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
atIndentedComment(start, indent) {
|
||||
if (this.type !== 'comment')
|
||||
return false;
|
||||
if (this.indent <= indent)
|
||||
return false;
|
||||
return start.every(st => st.type === 'newline' || st.type === 'space');
|
||||
}
|
||||
*documentEnd(docEnd) {
|
||||
if (this.type !== 'doc-mode') {
|
||||
if (docEnd.end)
|
||||
docEnd.end.push(this.sourceToken);
|
||||
else
|
||||
docEnd.end = [this.sourceToken];
|
||||
if (this.type === 'newline')
|
||||
yield* this.pop();
|
||||
}
|
||||
}
|
||||
*lineEnd(token) {
|
||||
switch (this.type) {
|
||||
case 'comma':
|
||||
case 'doc-start':
|
||||
case 'doc-end':
|
||||
case 'flow-seq-end':
|
||||
case 'flow-map-end':
|
||||
case 'map-value-ind':
|
||||
yield* this.pop();
|
||||
yield* this.step();
|
||||
break;
|
||||
case 'newline':
|
||||
this.onKeyLine = false;
|
||||
// fallthrough
|
||||
case 'space':
|
||||
case 'comment':
|
||||
default:
|
||||
// all other values are errors
|
||||
if (token.end)
|
||||
token.end.push(this.sourceToken);
|
||||
else
|
||||
token.end = [this.sourceToken];
|
||||
if (this.type === 'newline')
|
||||
yield* this.pop();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
exports.Parser = Parser;
|
44
node_modules/yaml/dist/public-api.d.ts
generated
vendored
Normal file
44
node_modules/yaml/dist/public-api.d.ts
generated
vendored
Normal file
@ -0,0 +1,44 @@
|
||||
import { Composer } from './compose/composer';
|
||||
import type { Reviver } from './doc/applyReviver';
|
||||
import type { Replacer } from './doc/Document';
|
||||
import { Document } from './doc/Document';
|
||||
import type { Node, ParsedNode } from './nodes/Node';
|
||||
import type { CreateNodeOptions, DocumentOptions, ParseOptions, SchemaOptions, ToJSOptions, ToStringOptions } from './options';
|
||||
export interface EmptyStream extends Array<Document.Parsed>, ReturnType<Composer['streamInfo']> {
|
||||
empty: true;
|
||||
}
|
||||
/**
|
||||
* Parse the input as a stream of YAML documents.
|
||||
*
|
||||
* Documents should be separated from each other by `...` or `---` marker lines.
|
||||
*
|
||||
* @returns If an empty `docs` array is returned, it will be of type
|
||||
* EmptyStream and contain additional stream information. In
|
||||
* TypeScript, you should use `'empty' in docs` as a type guard for it.
|
||||
*/
|
||||
export declare function parseAllDocuments<Contents extends Node = ParsedNode, Strict extends boolean = true>(source: string, options?: ParseOptions & DocumentOptions & SchemaOptions): Array<Contents extends ParsedNode ? Document.Parsed<Contents, Strict> : Document<Contents, Strict>> | EmptyStream;
|
||||
/** Parse an input string into a single YAML.Document */
|
||||
export declare function parseDocument<Contents extends Node = ParsedNode, Strict extends boolean = true>(source: string, options?: ParseOptions & DocumentOptions & SchemaOptions): Contents extends ParsedNode ? Document.Parsed<Contents, Strict> : Document<Contents, Strict>;
|
||||
/**
|
||||
* Parse an input string into JavaScript.
|
||||
*
|
||||
* Only supports input consisting of a single YAML document; for multi-document
|
||||
* support you should use `YAML.parseAllDocuments`. May throw on error, and may
|
||||
* log warnings using `console.warn`.
|
||||
*
|
||||
* @param str - A string with YAML formatting.
|
||||
* @param reviver - A reviver function, as in `JSON.parse()`
|
||||
* @returns The value will match the type of the root value of the parsed YAML
|
||||
* document, so Maps become objects, Sequences arrays, and scalars result in
|
||||
* nulls, booleans, numbers and strings.
|
||||
*/
|
||||
export declare function parse(src: string, options?: ParseOptions & DocumentOptions & SchemaOptions & ToJSOptions): any;
|
||||
export declare function parse(src: string, reviver: Reviver, options?: ParseOptions & DocumentOptions & SchemaOptions & ToJSOptions): any;
|
||||
/**
|
||||
* Stringify a value as a YAML document.
|
||||
*
|
||||
* @param replacer - A replacer array or function, as in `JSON.stringify()`
|
||||
* @returns Will always include `\n` as the last character, as is expected of YAML documents.
|
||||
*/
|
||||
export declare function stringify(value: any, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions & ToStringOptions): string;
|
||||
export declare function stringify(value: any, replacer?: Replacer | null, options?: string | number | (DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions & ToStringOptions)): string;
|
107
node_modules/yaml/dist/public-api.js
generated
vendored
Normal file
107
node_modules/yaml/dist/public-api.js
generated
vendored
Normal file
@ -0,0 +1,107 @@
|
||||
'use strict';
|
||||
|
||||
var composer = require('./compose/composer.js');
|
||||
var Document = require('./doc/Document.js');
|
||||
var errors = require('./errors.js');
|
||||
var log = require('./log.js');
|
||||
var identity = require('./nodes/identity.js');
|
||||
var lineCounter = require('./parse/line-counter.js');
|
||||
var parser = require('./parse/parser.js');
|
||||
|
||||
function parseOptions(options) {
|
||||
const prettyErrors = options.prettyErrors !== false;
|
||||
const lineCounter$1 = options.lineCounter || (prettyErrors && new lineCounter.LineCounter()) || null;
|
||||
return { lineCounter: lineCounter$1, prettyErrors };
|
||||
}
|
||||
/**
|
||||
* Parse the input as a stream of YAML documents.
|
||||
*
|
||||
* Documents should be separated from each other by `...` or `---` marker lines.
|
||||
*
|
||||
* @returns If an empty `docs` array is returned, it will be of type
|
||||
* EmptyStream and contain additional stream information. In
|
||||
* TypeScript, you should use `'empty' in docs` as a type guard for it.
|
||||
*/
|
||||
function parseAllDocuments(source, options = {}) {
|
||||
const { lineCounter, prettyErrors } = parseOptions(options);
|
||||
const parser$1 = new parser.Parser(lineCounter?.addNewLine);
|
||||
const composer$1 = new composer.Composer(options);
|
||||
const docs = Array.from(composer$1.compose(parser$1.parse(source)));
|
||||
if (prettyErrors && lineCounter)
|
||||
for (const doc of docs) {
|
||||
doc.errors.forEach(errors.prettifyError(source, lineCounter));
|
||||
doc.warnings.forEach(errors.prettifyError(source, lineCounter));
|
||||
}
|
||||
if (docs.length > 0)
|
||||
return docs;
|
||||
return Object.assign([], { empty: true }, composer$1.streamInfo());
|
||||
}
|
||||
/** Parse an input string into a single YAML.Document */
|
||||
function parseDocument(source, options = {}) {
|
||||
const { lineCounter, prettyErrors } = parseOptions(options);
|
||||
const parser$1 = new parser.Parser(lineCounter?.addNewLine);
|
||||
const composer$1 = new composer.Composer(options);
|
||||
// `doc` is always set by compose.end(true) at the very latest
|
||||
let doc = null;
|
||||
for (const _doc of composer$1.compose(parser$1.parse(source), true, source.length)) {
|
||||
if (!doc)
|
||||
doc = _doc;
|
||||
else if (doc.options.logLevel !== 'silent') {
|
||||
doc.errors.push(new errors.YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()'));
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (prettyErrors && lineCounter) {
|
||||
doc.errors.forEach(errors.prettifyError(source, lineCounter));
|
||||
doc.warnings.forEach(errors.prettifyError(source, lineCounter));
|
||||
}
|
||||
return doc;
|
||||
}
|
||||
function parse(src, reviver, options) {
|
||||
let _reviver = undefined;
|
||||
if (typeof reviver === 'function') {
|
||||
_reviver = reviver;
|
||||
}
|
||||
else if (options === undefined && reviver && typeof reviver === 'object') {
|
||||
options = reviver;
|
||||
}
|
||||
const doc = parseDocument(src, options);
|
||||
if (!doc)
|
||||
return null;
|
||||
doc.warnings.forEach(warning => log.warn(doc.options.logLevel, warning));
|
||||
if (doc.errors.length > 0) {
|
||||
if (doc.options.logLevel !== 'silent')
|
||||
throw doc.errors[0];
|
||||
else
|
||||
doc.errors = [];
|
||||
}
|
||||
return doc.toJS(Object.assign({ reviver: _reviver }, options));
|
||||
}
|
||||
/**
 * Stringify a value (or an existing Document) as YAML.
 *
 * @param value - Value to stringify; an existing Document is stringified
 *   directly when no replacer is given.
 * @param {Function|any[]|object} [replacer] - JSON.stringify-style replacer
 *   function or property allow-list; a plain object here (with `options`
 *   omitted) is treated as the options bag.
 * @param {object|string|number} [options] - Options bag, or a number/string
 *   standing in for the indent width (mirroring JSON.stringify's `space`).
 * @returns {string|undefined} YAML text, or `undefined` for an undefined
 *   value unless `keepUndefined` is set.
 */
function stringify(value, replacer, options) {
    // Disambiguate the overloaded second argument.
    let rep = null;
    if (typeof replacer === 'function' || Array.isArray(replacer)) rep = replacer;
    else if (replacer && options === undefined) options = replacer;

    // JSON.stringify compatibility: a string `space` contributes its length,
    // and a numeric `space` becomes an indent clamped to the range 1..8.
    if (typeof options === 'string') options = options.length;
    if (typeof options === 'number') {
        const width = Math.round(options);
        if (width < 1) options = undefined;
        else options = { indent: width > 8 ? 8 : width };
    }

    if (value === undefined) {
        // `keepUndefined` may live on either the options bag or the
        // replacer-as-options object.
        const { keepUndefined } = options ?? replacer ?? {};
        if (!keepUndefined) return undefined;
    }

    // An existing Document with no replacer can be stringified as-is.
    if (!rep && identity.isDocument(value)) return value.toString(options);
    return new Document.Document(value, rep, options).toString(options);
}
|
||||
|
||||
exports.parse = parse;
|
||||
exports.parseAllDocuments = parseAllDocuments;
|
||||
exports.parseDocument = parseDocument;
|
||||
exports.stringify = stringify;
|
17
node_modules/yaml/dist/schema/Schema.d.ts
generated
vendored
Normal file
17
node_modules/yaml/dist/schema/Schema.d.ts
generated
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
import { MAP, SCALAR, SEQ } from '../nodes/identity';
|
||||
import type { Pair } from '../nodes/Pair';
|
||||
import type { SchemaOptions, ToStringOptions } from '../options';
|
||||
import type { CollectionTag, ScalarTag } from './types';
|
||||
/** Tag-resolution schema used when composing and stringifying YAML documents. */
export declare class Schema {
    /** Compatibility tags considered when stringifying, or `null` when unset. */
    compat: Array<CollectionTag | ScalarTag> | null;
    /** Tags resolvable by name even when not part of the active schema. */
    knownTags: Record<string, CollectionTag | ScalarTag>;
    /** Schema name, e.g. `'core'`. */
    name: string;
    /** Comparator applied to map entries on creation, or `null` for no sorting. */
    sortMapEntries: ((a: Pair, b: Pair) => number) | null;
    /** The active tags of this schema. */
    tags: Array<CollectionTag | ScalarTag>;
    /** Default options applied by `toString()`, or `null` when unset. */
    toStringOptions: Readonly<ToStringOptions> | null;
    /** Fallback tag for block/flow mappings. */
    readonly [MAP]: CollectionTag;
    /** Fallback tag for scalar values. */
    readonly [SCALAR]: ScalarTag;
    /** Fallback tag for block/flow sequences. */
    readonly [SEQ]: CollectionTag;
    constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }: SchemaOptions);
    /** Shallow copy; the `tags` array is cloned so it may be mutated safely. */
    clone(): Schema;
}
|
39
node_modules/yaml/dist/schema/Schema.js
generated
vendored
Normal file
39
node_modules/yaml/dist/schema/Schema.js
generated
vendored
Normal file
@ -0,0 +1,39 @@
|
||||
'use strict';
|
||||
|
||||
var identity = require('../nodes/identity.js');
|
||||
var map = require('./common/map.js');
|
||||
var seq = require('./common/seq.js');
|
||||
var string = require('./common/string.js');
|
||||
var tags = require('./tags.js');
|
||||
|
||||
/** Default comparator for `sortMapEntries: true`: orders pairs by key. */
function sortMapEntriesByKey(a, b) {
    if (a.key < b.key) return -1;
    if (a.key > b.key) return 1;
    return 0;
}

/** Tag-resolution schema used when composing and stringifying documents. */
class Schema {
    constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) {
        // `compat` may be an explicit tag array or a schema name to look up.
        if (Array.isArray(compat)) this.compat = tags.getTags(compat, 'compat');
        else this.compat = compat ? tags.getTags(null, compat) : null;

        this.name = (typeof schema === 'string' && schema) || 'core';
        this.knownTags = resolveKnownTags ? tags.coreKnownTags : {};
        this.tags = tags.getTags(customTags, this.name, merge);
        this.toStringOptions = toStringDefaults ?? null;

        // Non-enumerable fallback tags, keyed by the node-kind symbols.
        Object.defineProperty(this, identity.MAP, { value: map.map });
        Object.defineProperty(this, identity.SCALAR, { value: string.string });
        Object.defineProperty(this, identity.SEQ, { value: seq.seq });

        // Used by createMap(); `true` selects the default key comparator.
        if (typeof sortMapEntries === 'function') this.sortMapEntries = sortMapEntries;
        else this.sortMapEntries = sortMapEntries === true ? sortMapEntriesByKey : null;
    }
    /** Shallow copy; `tags` is cloned so the copy may be mutated safely. */
    clone() {
        const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this));
        copy.tags = [...this.tags];
        return copy;
    }
}
|
||||
|
||||
exports.Schema = Schema;
|
2
node_modules/yaml/dist/schema/common/map.d.ts
generated
vendored
Normal file
2
node_modules/yaml/dist/schema/common/map.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
import type { CollectionTag } from '../types';
|
||||
/** Collection tag for `tag:yaml.org,2002:map` (generic mappings). */
export declare const map: CollectionTag;
|
19
node_modules/yaml/dist/schema/common/map.js
generated
vendored
Normal file
19
node_modules/yaml/dist/schema/common/map.js
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
'use strict';
|
||||
|
||||
var identity = require('../../nodes/identity.js');
|
||||
var YAMLMap = require('../../nodes/YAMLMap.js');
|
||||
|
||||
/** Collection tag for `tag:yaml.org,2002:map` (generic mappings). */
const map = {
    collection: 'map',
    default: true,
    nodeClass: YAMLMap.YAMLMap,
    tag: 'tag:yaml.org,2002:map',
    createNode: (schema, obj, ctx) => YAMLMap.YAMLMap.from(schema, obj, ctx),
    resolve(node, onError) {
        // Report (but do not throw) when the tag is applied to a non-mapping;
        // the node is returned unchanged either way.
        if (!identity.isMap(node)) onError('Expected a mapping for this tag');
        return node;
    }
};
|
||||
|
||||
exports.map = map;
|
4
node_modules/yaml/dist/schema/common/null.d.ts
generated
vendored
Normal file
4
node_modules/yaml/dist/schema/common/null.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
import type { ScalarTag } from '../types';
|
||||
/** Scalar tag for `tag:yaml.org,2002:null`; `test` matches its source forms. */
export declare const nullTag: ScalarTag & {
    test: RegExp;
};
|
17
node_modules/yaml/dist/schema/common/null.js
generated
vendored
Normal file
17
node_modules/yaml/dist/schema/common/null.js
generated
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
'use strict';
|
||||
|
||||
var Scalar = require('../../nodes/Scalar.js');
|
||||
|
||||
/** Scalar tag for `tag:yaml.org,2002:null`. */
const nullTag = {
    identify: value => value == null,
    createNode: () => new Scalar.Scalar(null),
    default: true,
    tag: 'tag:yaml.org,2002:null',
    test: /^(?:~|[Nn]ull|NULL)?$/,
    resolve: () => new Scalar.Scalar(null),
    stringify({ source }, ctx) {
        // Preserve the original spelling (~, null, Null, NULL, or empty)
        // when it is still a valid null form; otherwise use the configured one.
        if (typeof source === 'string' && nullTag.test.test(source)) return source;
        return ctx.options.nullStr;
    }
};
|
||||
|
||||
exports.nullTag = nullTag;
|
2
node_modules/yaml/dist/schema/common/seq.d.ts
generated
vendored
Normal file
2
node_modules/yaml/dist/schema/common/seq.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
import type { CollectionTag } from '../types';
|
||||
/** Collection tag for `tag:yaml.org,2002:seq` (generic sequences). */
export declare const seq: CollectionTag;
|
19
node_modules/yaml/dist/schema/common/seq.js
generated
vendored
Normal file
19
node_modules/yaml/dist/schema/common/seq.js
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
'use strict';
|
||||
|
||||
var identity = require('../../nodes/identity.js');
|
||||
var YAMLSeq = require('../../nodes/YAMLSeq.js');
|
||||
|
||||
/** Collection tag for `tag:yaml.org,2002:seq` (generic sequences). */
const seq = {
    collection: 'seq',
    default: true,
    nodeClass: YAMLSeq.YAMLSeq,
    tag: 'tag:yaml.org,2002:seq',
    createNode: (schema, obj, ctx) => YAMLSeq.YAMLSeq.from(schema, obj, ctx),
    resolve(node, onError) {
        // Report (but do not throw) when the tag is applied to a non-sequence;
        // the node is returned unchanged either way.
        if (!identity.isSeq(node)) onError('Expected a sequence for this tag');
        return node;
    }
};
|
||||
|
||||
exports.seq = seq;
|
2
node_modules/yaml/dist/schema/common/string.d.ts
generated
vendored
Normal file
2
node_modules/yaml/dist/schema/common/string.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
import type { ScalarTag } from '../types';
|
||||
/** Scalar tag for `tag:yaml.org,2002:str` (plain strings). */
export declare const string: ScalarTag;
|
16
node_modules/yaml/dist/schema/common/string.js
generated
vendored
Normal file
16
node_modules/yaml/dist/schema/common/string.js
generated
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
'use strict';
|
||||
|
||||
var stringifyString = require('../../stringify/stringifyString.js');
|
||||
|
||||
/** Scalar tag for `tag:yaml.org,2002:str` (plain strings). */
const string = {
    identify: value => typeof value === 'string',
    default: true,
    tag: 'tag:yaml.org,2002:str',
    resolve: str => str,
    stringify(item, ctx, onComment, onChompKeep) {
        // Mark the context so the stringifier treats the value as a real
        // string (affects quoting of e.g. number-like scalars).
        const strCtx = Object.assign({ actualString: true }, ctx);
        return stringifyString.stringifyString(item, strCtx, onComment, onChompKeep);
    }
};
|
||||
|
||||
exports.string = string;
|
4
node_modules/yaml/dist/schema/core/bool.d.ts
generated
vendored
Normal file
4
node_modules/yaml/dist/schema/core/bool.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
import type { ScalarTag } from '../types';
|
||||
/** Scalar tag for `tag:yaml.org,2002:bool`; `test` matches its source forms. */
export declare const boolTag: ScalarTag & {
    test: RegExp;
};
|
21
node_modules/yaml/dist/schema/core/bool.js
generated
vendored
Normal file
21
node_modules/yaml/dist/schema/core/bool.js
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
'use strict';
|
||||
|
||||
var Scalar = require('../../nodes/Scalar.js');
|
||||
|
||||
/** Scalar tag for `tag:yaml.org,2002:bool`. */
const boolTag = {
    identify: value => typeof value === 'boolean',
    default: true,
    tag: 'tag:yaml.org,2002:bool',
    test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,
    resolve(str) {
        // Any accepted true form starts with 't' or 'T'.
        const c = str[0];
        return new Scalar.Scalar(c === 't' || c === 'T');
    },
    stringify({ source, value }, ctx) {
        // Keep the original spelling when it is still a valid bool form
        // that agrees with the current value.
        if (source && boolTag.test.test(source)) {
            const fromSource = source[0] === 't' || source[0] === 'T';
            if (value === fromSource) return source;
        }
        return value ? ctx.options.trueStr : ctx.options.falseStr;
    }
};
|
||||
|
||||
exports.boolTag = boolTag;
|
4
node_modules/yaml/dist/schema/core/float.d.ts
generated
vendored
Normal file
4
node_modules/yaml/dist/schema/core/float.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
import type { ScalarTag } from '../types';
|
||||
/** Scalar tag handling the `.nan`/`.inf` forms of `tag:yaml.org,2002:float`. */
export declare const floatNaN: ScalarTag;
/** Scalar tag handling exponent-notation floats. */
export declare const floatExp: ScalarTag;
/** Scalar tag for plain decimal floats. */
export declare const float: ScalarTag;
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user