nik
2025-10-03 22:27:28 +03:00
parent 829fad0e17
commit 871cf7e792
16520 changed files with 2967597 additions and 3 deletions

62
node_modules/langium/lib/serializer/hydrator.d.ts generated vendored Normal file

@@ -0,0 +1,62 @@
/******************************************************************************
* Copyright 2024 TypeFox GmbH
* This program and the accompanying materials are made available under the
* terms of the MIT License, which is available in the project root.
******************************************************************************/
import type { TokenType } from 'chevrotain';
import { type AbstractElement, type Grammar } from '../languages/generated/ast.js';
import type { Linker } from '../references/linker.js';
import type { Lexer } from '../parser/lexer.js';
import type { LangiumCoreServices } from '../services.js';
import type { ParseResult } from '../parser/langium-parser.js';
import type { Reference, AstNode, CstNode, LeafCstNode } from '../syntax-tree.js';
import { BiMap } from '../utils/collections.js';
import type { LexingReport } from '../parser/token-builder.js';
/**
* The hydrator service is responsible for allowing AST parse results to be sent across worker threads.
*/
export interface Hydrator {
/**
* Converts a parse result to a plain object. The resulting object can be sent across worker threads.
*/
dehydrate(result: ParseResult<AstNode>): ParseResult<object>;
/**
* Converts a plain object to a parse result. The included AST node can then be used in the main thread.
* Calling this method on objects that have not been dehydrated first will result in undefined behavior.
*/
hydrate<T extends AstNode = AstNode>(result: ParseResult<object>): ParseResult<T>;
}
export interface DehydrateContext {
astNodes: Map<AstNode, any>;
cstNodes: Map<CstNode, any>;
}
export interface HydrateContext {
astNodes: Map<any, AstNode>;
cstNodes: Map<any, CstNode>;
}
export declare class DefaultHydrator implements Hydrator {
protected readonly grammar: Grammar;
protected readonly lexer: Lexer;
protected readonly linker: Linker;
protected readonly grammarElementIdMap: BiMap<AbstractElement, number>;
protected readonly tokenTypeIdMap: BiMap<number, TokenType>;
constructor(services: LangiumCoreServices);
dehydrate(result: ParseResult<AstNode>): ParseResult<object>;
protected dehydrateLexerReport(lexerReport: LexingReport): LexingReport;
protected createDehyrationContext(node: AstNode): DehydrateContext;
protected dehydrateAstNode(node: AstNode, context: DehydrateContext): object;
protected dehydrateReference(reference: Reference, context: DehydrateContext): any;
protected dehydrateCstNode(node: CstNode, context: DehydrateContext): any;
hydrate<T extends AstNode = AstNode>(result: ParseResult<object>): ParseResult<T>;
protected createHydrationContext(node: any): HydrateContext;
protected hydrateAstNode(node: any, context: HydrateContext): AstNode;
protected setParent(node: any, parent: any): any;
protected hydrateReference(reference: any, node: AstNode, name: string, context: HydrateContext): Reference;
protected hydrateCstNode(cstNode: any, context: HydrateContext, num?: number): CstNode;
protected hydrateCstLeafNode(cstNode: any): LeafCstNode;
protected getTokenType(name: string): TokenType;
protected getGrammarElementId(node: AbstractElement | undefined): number | undefined;
protected getGrammarElement(id: number): AbstractElement | undefined;
protected createGrammarElementIdMap(): void;
}
//# sourceMappingURL=hydrator.d.ts.map
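
A minimal usage sketch for the Hydrator interface above, assuming a Node.js worker-thread setup: the worker dehydrates its parse result before posting it, and the main thread hydrates the plain object back into real AST/CST nodes. How the `hydrator` instances are obtained from the language services is left out here and is an assumption of the sketch.

// Hypothetical sketch only; `Hydrator`, `ParseResult` and `AstNode` are assumed to be
// re-exported from the top-level langium entry point (the serializer index shown
// further below re-exports the hydrator module).
import { parentPort, Worker } from 'node:worker_threads';
import type { AstNode, Hydrator, ParseResult } from 'langium';

// Worker side: plain dehydrated objects survive structured cloning; live AST/CST
// class instances and Reference objects do not.
export function postParseResult(hydrator: Hydrator, result: ParseResult<AstNode>): void {
    parentPort?.postMessage(hydrator.dehydrate(result));
}

// Main-thread side: rebuild real AST and CST nodes from the received plain object.
export function onParseResult(hydrator: Hydrator, worker: Worker, use: (root: AstNode) => void): void {
    worker.on('message', (plain: ParseResult<object>) => {
        use(hydrator.hydrate(plain).value);
    });
}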

1
node_modules/langium/lib/serializer/hydrator.d.ts.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"hydrator.d.ts","sourceRoot":"","sources":["../../src/serializer/hydrator.ts"],"names":[],"mappings":"AAAA;;;;gFAIgF;AAIhF,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,YAAY,CAAC;AAE5C,OAAO,EAAqB,KAAK,eAAe,EAAE,KAAK,OAAO,EAAE,MAAM,+BAA+B,CAAC;AACtG,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,yBAAyB,CAAC;AACtD,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,oBAAoB,CAAC;AAChD,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,gBAAgB,CAAC;AAC1D,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,6BAA6B,CAAC;AAC/D,OAAO,KAAK,EAAE,SAAS,EAAE,OAAO,EAAE,OAAO,EAAE,WAAW,EAAwC,MAAM,mBAAmB,CAAC;AAGxH,OAAO,EAAE,KAAK,EAAE,MAAM,yBAAyB,CAAC;AAEhD,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,4BAA4B,CAAC;AAE/D;;GAEG;AACH,MAAM,WAAW,QAAQ;IACrB;;OAEG;IACH,SAAS,CAAC,MAAM,EAAE,WAAW,CAAC,OAAO,CAAC,GAAG,WAAW,CAAC,MAAM,CAAC,CAAC;IAC7D;;;OAGG;IACH,OAAO,CAAC,CAAC,SAAS,OAAO,GAAG,OAAO,EAAE,MAAM,EAAE,WAAW,CAAC,MAAM,CAAC,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;CACrF;AAED,MAAM,WAAW,gBAAgB;IAC7B,QAAQ,EAAE,GAAG,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;IAC5B,QAAQ,EAAE,GAAG,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;CAC/B;AAED,MAAM,WAAW,cAAc;IAC3B,QAAQ,EAAE,GAAG,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;IAC5B,QAAQ,EAAE,GAAG,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;CAC/B;AAED,qBAAa,eAAgB,YAAW,QAAQ;IAE5C,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IACpC,SAAS,CAAC,QAAQ,CAAC,KAAK,EAAE,KAAK,CAAC;IAChC,SAAS,CAAC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC;IAElC,SAAS,CAAC,QAAQ,CAAC,mBAAmB,iCAAwC;IAC9E,SAAS,CAAC,QAAQ,CAAC,cAAc,2BAAkC;gBAEvD,QAAQ,EAAE,mBAAmB;IAMzC,SAAS,CAAC,MAAM,EAAE,WAAW,CAAC,OAAO,CAAC,GAAG,WAAW,CAAC,MAAM,CAAC;IAW5D,SAAS,CAAC,oBAAoB,CAAC,WAAW,EAAE,YAAY,GAAG,YAAY;IAKvE,SAAS,CAAC,uBAAuB,CAAC,IAAI,EAAE,OAAO,GAAG,gBAAgB;IAiBlE,SAAS,CAAC,gBAAgB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,gBAAgB,GAAG,MAAM;IAmC5E,SAAS,CAAC,kBAAkB,CAAC,SAAS,EAAE,SAAS,EAAE,OAAO,EAAE,gBAAgB,GAAG,GAAG;IASlF,SAAS,CAAC,gBAAgB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,gBAAgB,GAAG,GAAG;IAwBzE,OAAO,CAAC,CAAC,SAAS,OAAO,GAAG,OAAO,EAAE,MAAM,EAAE,WAAW,CAAC,MAAM,CAAC,GAAG,WAAW,CAAC,CAAC,CAAC;IAcjF,SAAS,CAAC,sBAAsB,CAAC,IAAI,EAAE,GAAG,GAAG,cAAc;IA8B3D,SAAS,CAAC,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,OAAO,EAAE,cAAc,GAAG,OAAO;IAmCrE,SAAS,CAAC,SAAS,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,GAAG,GAAG;IAKhD,SAAS,CAAC,gBAAgB,CAAC,SAAS,EAAE,GAAG,EAAE,IAAI,EAAE,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,cAAc,GAAG,SAAS;IAI3G,SAAS,CAAC,cAAc,CAAC,OAAO,EAAE,GAAG,EAAE,OAAO,EAAE,cAAc,EAAE,GAAG,SAAI,GAAG,OAAO;IAejF,SAAS,CAAC,kBAAkB,CAAC,OAAO,EAAE,GAAG,GAAG,WAAW;IA4BvD,SAAS,CAAC,YAAY,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS;IAI/C,SAAS,CAAC,mBAAmB,CAAC,IAAI,EAAE,eAAe,GAAG,SAAS,GAAG,MAAM,GAAG,SAAS;IAUpF,SAAS,CAAC,iBAAiB,CAAC,EAAE,EAAE,MAAM,GAAG,eAAe,GAAG,SAAS;IAQpE,SAAS,CAAC,yBAAyB,IAAI,IAAI;CAS9C"}

275
node_modules/langium/lib/serializer/hydrator.js generated vendored Normal file

@@ -0,0 +1,275 @@
/******************************************************************************
* Copyright 2024 TypeFox GmbH
* This program and the accompanying materials are made available under the
* terms of the MIT License, which is available in the project root.
******************************************************************************/
import { CompositeCstNodeImpl, LeafCstNodeImpl, RootCstNodeImpl } from '../parser/cst-node-builder.js';
import { isAbstractElement } from '../languages/generated/ast.js';
import { isRootCstNode, isCompositeCstNode, isLeafCstNode, isAstNode, isReference } from '../syntax-tree.js';
import { streamAst } from '../utils/ast-utils.js';
import { BiMap } from '../utils/collections.js';
import { streamCst } from '../utils/cst-utils.js';
export class DefaultHydrator {
constructor(services) {
this.grammarElementIdMap = new BiMap();
this.tokenTypeIdMap = new BiMap();
this.grammar = services.Grammar;
this.lexer = services.parser.Lexer;
this.linker = services.references.Linker;
}
dehydrate(result) {
return {
lexerErrors: result.lexerErrors,
lexerReport: result.lexerReport ? this.dehydrateLexerReport(result.lexerReport) : undefined,
// We need to create shallow copies of the errors
// The original errors inherit from the `Error` class, which is not transferable across worker threads
parserErrors: result.parserErrors.map(e => (Object.assign(Object.assign({}, e), { message: e.message }))),
value: this.dehydrateAstNode(result.value, this.createDehyrationContext(result.value))
};
}
dehydrateLexerReport(lexerReport) {
// By default, lexer reports are serializable
return lexerReport;
}
createDehyrationContext(node) {
const astNodes = new Map();
const cstNodes = new Map();
for (const astNode of streamAst(node)) {
astNodes.set(astNode, {});
}
if (node.$cstNode) {
for (const cstNode of streamCst(node.$cstNode)) {
cstNodes.set(cstNode, {});
}
}
return {
astNodes,
cstNodes
};
}
dehydrateAstNode(node, context) {
const obj = context.astNodes.get(node);
obj.$type = node.$type;
obj.$containerIndex = node.$containerIndex;
obj.$containerProperty = node.$containerProperty;
if (node.$cstNode !== undefined) {
obj.$cstNode = this.dehydrateCstNode(node.$cstNode, context);
}
for (const [name, value] of Object.entries(node)) {
if (name.startsWith('$')) {
continue;
}
if (Array.isArray(value)) {
const arr = [];
obj[name] = arr;
for (const item of value) {
if (isAstNode(item)) {
arr.push(this.dehydrateAstNode(item, context));
}
else if (isReference(item)) {
arr.push(this.dehydrateReference(item, context));
}
else {
arr.push(item);
}
}
}
else if (isAstNode(value)) {
obj[name] = this.dehydrateAstNode(value, context);
}
else if (isReference(value)) {
obj[name] = this.dehydrateReference(value, context);
}
else if (value !== undefined) {
obj[name] = value;
}
}
return obj;
}
dehydrateReference(reference, context) {
const obj = {};
obj.$refText = reference.$refText;
if (reference.$refNode) {
obj.$refNode = context.cstNodes.get(reference.$refNode);
}
return obj;
}
dehydrateCstNode(node, context) {
const cstNode = context.cstNodes.get(node);
if (isRootCstNode(node)) {
cstNode.fullText = node.fullText;
}
else {
// Note: This returns undefined for hidden nodes (i.e. comments)
cstNode.grammarSource = this.getGrammarElementId(node.grammarSource);
}
cstNode.hidden = node.hidden;
cstNode.astNode = context.astNodes.get(node.astNode);
if (isCompositeCstNode(node)) {
cstNode.content = node.content.map(child => this.dehydrateCstNode(child, context));
}
else if (isLeafCstNode(node)) {
cstNode.tokenType = node.tokenType.name;
cstNode.offset = node.offset;
cstNode.length = node.length;
cstNode.startLine = node.range.start.line;
cstNode.startColumn = node.range.start.character;
cstNode.endLine = node.range.end.line;
cstNode.endColumn = node.range.end.character;
}
return cstNode;
}
hydrate(result) {
const node = result.value;
const context = this.createHydrationContext(node);
if ('$cstNode' in node) {
this.hydrateCstNode(node.$cstNode, context);
}
return {
lexerErrors: result.lexerErrors,
lexerReport: result.lexerReport,
parserErrors: result.parserErrors,
value: this.hydrateAstNode(node, context)
};
}
createHydrationContext(node) {
const astNodes = new Map();
const cstNodes = new Map();
for (const astNode of streamAst(node)) {
astNodes.set(astNode, {});
}
let root;
if (node.$cstNode) {
for (const cstNode of streamCst(node.$cstNode)) {
let cst;
if ('fullText' in cstNode) {
cst = new RootCstNodeImpl(cstNode.fullText);
root = cst;
}
else if ('content' in cstNode) {
cst = new CompositeCstNodeImpl();
}
else if ('tokenType' in cstNode) {
cst = this.hydrateCstLeafNode(cstNode);
}
if (cst) {
cstNodes.set(cstNode, cst);
cst.root = root;
}
}
}
return {
astNodes,
cstNodes
};
}
hydrateAstNode(node, context) {
const astNode = context.astNodes.get(node);
astNode.$type = node.$type;
astNode.$containerIndex = node.$containerIndex;
astNode.$containerProperty = node.$containerProperty;
if (node.$cstNode) {
astNode.$cstNode = context.cstNodes.get(node.$cstNode);
}
for (const [name, value] of Object.entries(node)) {
if (name.startsWith('$')) {
continue;
}
if (Array.isArray(value)) {
const arr = [];
astNode[name] = arr;
for (const item of value) {
if (isAstNode(item)) {
arr.push(this.setParent(this.hydrateAstNode(item, context), astNode));
}
else if (isReference(item)) {
arr.push(this.hydrateReference(item, astNode, name, context));
}
else {
arr.push(item);
}
}
}
else if (isAstNode(value)) {
astNode[name] = this.setParent(this.hydrateAstNode(value, context), astNode);
}
else if (isReference(value)) {
astNode[name] = this.hydrateReference(value, astNode, name, context);
}
else if (value !== undefined) {
astNode[name] = value;
}
}
return astNode;
}
setParent(node, parent) {
node.$container = parent;
return node;
}
hydrateReference(reference, node, name, context) {
return this.linker.buildReference(node, name, context.cstNodes.get(reference.$refNode), reference.$refText);
}
hydrateCstNode(cstNode, context, num = 0) {
const cstNodeObj = context.cstNodes.get(cstNode);
if (typeof cstNode.grammarSource === 'number') {
cstNodeObj.grammarSource = this.getGrammarElement(cstNode.grammarSource);
}
cstNodeObj.astNode = context.astNodes.get(cstNode.astNode);
if (isCompositeCstNode(cstNodeObj)) {
for (const child of cstNode.content) {
const hydrated = this.hydrateCstNode(child, context, num++);
cstNodeObj.content.push(hydrated);
}
}
return cstNodeObj;
}
hydrateCstLeafNode(cstNode) {
const tokenType = this.getTokenType(cstNode.tokenType);
const offset = cstNode.offset;
const length = cstNode.length;
const startLine = cstNode.startLine;
const startColumn = cstNode.startColumn;
const endLine = cstNode.endLine;
const endColumn = cstNode.endColumn;
const hidden = cstNode.hidden;
const node = new LeafCstNodeImpl(offset, length, {
start: {
line: startLine,
character: startColumn
},
end: {
line: endLine,
character: endColumn
}
}, tokenType, hidden);
return node;
}
getTokenType(name) {
return this.lexer.definition[name];
}
getGrammarElementId(node) {
if (!node) {
return undefined;
}
if (this.grammarElementIdMap.size === 0) {
this.createGrammarElementIdMap();
}
return this.grammarElementIdMap.get(node);
}
getGrammarElement(id) {
if (this.grammarElementIdMap.size === 0) {
this.createGrammarElementIdMap();
}
const element = this.grammarElementIdMap.getKey(id);
return element;
}
createGrammarElementIdMap() {
let id = 0;
for (const element of streamAst(this.grammar)) {
if (isAbstractElement(element)) {
this.grammarElementIdMap.set(element, id++);
}
}
}
}
//# sourceMappingURL=hydrator.js.map
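
One detail of the implementation above that is easy to miss: createDehyrationContext and createHydrationContext first allocate an empty target object for every AST and CST node, and the nodes are only filled in afterwards. Allocating all targets up front is what lets shared and cyclic links (such as the astNode/$cstNode back-references) point at objects that already exist. The toy sketch below shows the same allocate-then-fill pattern in isolation; ToyNode and copyGraph are made up for illustration and are not Langium APIs.

// Toy illustration of the two-pass pattern used by DefaultHydrator (not a Langium API).
interface ToyNode { id: string; next?: ToyNode }

export function copyGraph(root: ToyNode): ToyNode {
    const targets = new Map<ToyNode, ToyNode>();
    // Pass 1: allocate an empty shell per reachable node; the walk stops on cycles.
    for (let n: ToyNode | undefined = root; n && !targets.has(n); n = n.next) {
        targets.set(n, { id: n.id });
    }
    // Pass 2: wire the shells together. Every link target was already allocated,
    // so shared and cyclic structure is preserved without special casing.
    for (const [source, copy] of targets) {
        if (source.next) {
            copy.next = targets.get(source.next);
        }
    }
    return targets.get(root)!;
}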

1
node_modules/langium/lib/serializer/hydrator.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

8
node_modules/langium/lib/serializer/index.d.ts generated vendored Normal file

@@ -0,0 +1,8 @@
/******************************************************************************
* Copyright 2023 TypeFox GmbH
* This program and the accompanying materials are made available under the
* terms of the MIT License, which is available in the project root.
******************************************************************************/
export * from './hydrator.js';
export * from './json-serializer.js';
//# sourceMappingURL=index.d.ts.map

1
node_modules/langium/lib/serializer/index.d.ts.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/serializer/index.ts"],"names":[],"mappings":"AAAA;;;;gFAIgF;AAEhF,cAAc,eAAe,CAAC;AAC9B,cAAc,sBAAsB,CAAC"}

8
node_modules/langium/lib/serializer/index.js generated vendored Normal file

@@ -0,0 +1,8 @@
/******************************************************************************
* Copyright 2023 TypeFox GmbH
* This program and the accompanying materials are made available under the
* terms of the MIT License, which is available in the project root.
******************************************************************************/
export * from './hydrator.js';
export * from './json-serializer.js';
//# sourceMappingURL=index.js.map

1
node_modules/langium/lib/serializer/index.js.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/serializer/index.ts"],"names":[],"mappings":"AAAA;;;;gFAIgF;AAEhF,cAAc,eAAe,CAAC;AAC9B,cAAc,sBAAsB,CAAC"}

109
node_modules/langium/lib/serializer/json-serializer.d.ts generated vendored Normal file

@@ -0,0 +1,109 @@
/******************************************************************************
* Copyright 2021 TypeFox GmbH
* This program and the accompanying materials are made available under the
* terms of the MIT License, which is available in the project root.
******************************************************************************/
import { URI } from 'vscode-uri';
import type { CommentProvider } from '../documentation/comment-provider.js';
import type { NameProvider } from '../references/name-provider.js';
import type { LangiumCoreServices } from '../services.js';
import type { AstNode, GenericAstNode, Reference } from '../syntax-tree.js';
import type { AstNodeLocator } from '../workspace/ast-node-locator.js';
import type { DocumentSegment, LangiumDocument, LangiumDocuments } from '../workspace/documents.js';
export interface JsonSerializeOptions {
/** The space parameter for `JSON.stringify`, controlling whether and how to pretty-print the output. */
space?: string | number;
/** Whether to include the `$refText` property for references (the name used to identify the target node). */
refText?: boolean;
/** Whether to include the `$sourceText` property, which holds the full source text from which an AST node was parsed. */
sourceText?: boolean;
/** Whether to include the `$textRegion` property, which holds information to trace AST node properties to their respective source text regions. */
textRegions?: boolean;
/** Whether to include the `$comment` property, which holds comments according to the CommentProvider service. */
comments?: boolean;
/** The replacer parameter for `JSON.stringify`; the default replacer given as parameter should be used to apply basic replacements. */
replacer?: (key: string, value: unknown, defaultReplacer: (key: string, value: unknown) => unknown) => unknown;
/** Used to convert and serialize URIs when the target of a cross-reference is in a different document. */
uriConverter?: (uri: URI, reference: Reference) => string;
}
export interface JsonDeserializeOptions {
/** Used to parse and convert URIs when the target of a cross-reference is in a different document. */
uriConverter?: (uri: string) => URI;
}
/**
* {@link AstNode}s that may carry information on their definition area within the DSL text.
*/
export interface AstNodeWithTextRegion extends AstNode {
$sourceText?: string;
$textRegion?: AstNodeRegionWithAssignments;
}
/**
* {@link AstNode}s that may carry a semantically relevant comment.
*/
export interface AstNodeWithComment extends AstNode {
$comment?: string;
}
export declare function isAstNodeWithComment(node: AstNode): node is AstNodeWithComment;
/**
* A {@link DocumentSegment} representing the definition area of an AstNode within the DSL text.
* Usually contains text region information on all assigned property values of the AstNode,
* and may contain the defining file's URI as string.
*/
export interface AstNodeRegionWithAssignments extends DocumentSegment {
/**
* A record containing an entry for each assigned property of the AstNode.
* The key is equal to the property name and the value is an array of the property values'
* text regions, regardless of whether the property is a single value or list property.
*/
assignments?: Record<string, DocumentSegment[]>;
/**
* The AstNode defining file's URI as string
*/
documentURI?: string;
}
/**
* Utility service for transforming an `AstNode` into a JSON string and vice versa.
*/
export interface JsonSerializer {
/**
* Serialize an `AstNode` into a JSON `string`.
* @param node The `AstNode` to be serialized.
* @param options Serialization options
*/
serialize(node: AstNode, options?: JsonSerializeOptions): string;
/**
* Deserialize (parse) a JSON `string` into an `AstNode`.
*/
deserialize<T extends AstNode = AstNode>(content: string, options?: JsonDeserializeOptions): T;
}
/**
* A cross-reference in the serialized JSON representation of an AstNode.
*/
interface IntermediateReference {
/** URI pointing to the target element. This is either `#${path}` if the target is in the same document, or `${documentURI}#${path}` otherwise. */
$ref?: string;
/** The actual text used to look up the reference target in the surrounding scope. */
$refText?: string;
/** If any problem occurred while resolving the reference, it is described by this property. */
$error?: string;
}
export declare class DefaultJsonSerializer implements JsonSerializer {
/** The set of AstNode properties to be ignored by the serializer. */
ignoreProperties: Set<string>;
/** The document that is currently processed by the serializer; this is used by the replacer function. */
protected currentDocument: LangiumDocument | undefined;
protected readonly langiumDocuments: LangiumDocuments;
protected readonly astNodeLocator: AstNodeLocator;
protected readonly nameProvider: NameProvider;
protected readonly commentProvider: CommentProvider;
constructor(services: LangiumCoreServices);
serialize(node: AstNode, options?: JsonSerializeOptions): string;
deserialize<T extends AstNode = AstNode>(content: string, options?: JsonDeserializeOptions): T;
protected replacer(key: string, value: unknown, { refText, sourceText, textRegions, comments, uriConverter }: JsonSerializeOptions): unknown;
protected addAstNodeRegionWithAssignmentsTo(node: AstNodeWithTextRegion): AstNodeWithTextRegion | undefined;
protected linkNode(node: GenericAstNode, root: AstNode, options: JsonDeserializeOptions, container?: AstNode, containerProperty?: string, containerIndex?: number): void;
protected reviveReference(container: AstNode, property: string, root: AstNode, reference: IntermediateReference, options: JsonDeserializeOptions): Reference | undefined;
protected getRefNode(root: AstNode, uri: string, uriConverter?: (uri: string) => URI): AstNode | string;
}
export {};
//# sourceMappingURL=json-serializer.d.ts.map
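
The serialization options above are easiest to see in a small round-trip. The sketch below is hypothetical usage; obtaining the JsonSerializer instance from the language services is not shown and is an assumption of the example.

// Hypothetical usage sketch; `JsonSerializer` and `AstNode` are assumed to be
// re-exported from the top-level langium entry point.
import type { AstNode, JsonSerializer } from 'langium';

export function roundTrip<T extends AstNode>(serializer: JsonSerializer, rootNode: T): T {
    const json = serializer.serialize(rootNode, {
        space: 2,           // pretty-print with two-space indentation
        refText: true,      // keep $refText on serialized cross-references
        textRegions: true,  // add $textRegion data for tracing back to the source text
    });
    // deserialize() revives cross-references and restores $container/$containerProperty.
    return serializer.deserialize<T>(json);
}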

1
node_modules/langium/lib/serializer/json-serializer.d.ts.map generated vendored Normal file

@@ -0,0 +1 @@
{"version":3,"file":"json-serializer.d.ts","sourceRoot":"","sources":["../../src/serializer/json-serializer.ts"],"names":[],"mappings":"AAAA;;;;gFAIgF;AAEhF,OAAO,EAAE,GAAG,EAAE,MAAM,YAAY,CAAC;AACjC,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,sCAAsC,CAAC;AAC5E,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,gCAAgC,CAAC;AACnE,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,gBAAgB,CAAC;AAC1D,OAAO,KAAK,EAAE,OAAO,EAAW,cAAc,EAAW,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAI9F,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,kCAAkC,CAAC;AACvE,OAAO,KAAK,EAAE,eAAe,EAAE,eAAe,EAAE,gBAAgB,EAAE,MAAM,2BAA2B,CAAC;AAEpG,MAAM,WAAW,oBAAoB;IACjC,wGAAwG;IACxG,KAAK,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;IACxB,6GAA6G;IAC7G,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB,yHAAyH;IACzH,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB,mJAAmJ;IACnJ,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,iHAAiH;IACjH,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,uIAAuI;IACvI,QAAQ,CAAC,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,eAAe,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,KAAK,OAAO,KAAK,OAAO,CAAA;IAC9G,0GAA0G;IAC1G,YAAY,CAAC,EAAE,CAAC,GAAG,EAAE,GAAG,EAAE,SAAS,EAAE,SAAS,KAAK,MAAM,CAAA;CAC5D;AAED,MAAM,WAAW,sBAAsB;IACnC,sGAAsG;IACtG,YAAY,CAAC,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,GAAG,CAAA;CACtC;AAED;;GAEG;AACH,MAAM,WAAW,qBAAsB,SAAQ,OAAO;IAClD,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,4BAA4B,CAAC;CAC9C;AAED;;GAEG;AACH,MAAM,WAAW,kBAAmB,SAAQ,OAAO;IAC/C,QAAQ,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,wBAAgB,oBAAoB,CAAC,IAAI,EAAE,OAAO,GAAG,IAAI,IAAI,kBAAkB,CAE9E;AAED;;;;GAIG;AACH,MAAM,WAAW,4BAA6B,SAAQ,eAAe;IACjE;;;;OAIG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,eAAe,EAAE,CAAC,CAAC;IAChD;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;CACxB;AAED;;GAEG;AACH,MAAM,WAAW,cAAc;IAC3B;;;;OAIG;IACH,SAAS,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,oBAAoB,GAAG,MAAM,CAAC;IACjE;;OAEG;IACH,WAAW,CAAC,CAAC,SAAS,OAAO,GAAG,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,sBAAsB,GAAG,CAAC,CAAC;CAClG;AAED;;GAEG;AACH,UAAU,qBAAqB;IAC3B,kJAAkJ;IAClJ,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,qFAAqF;IACrF,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,+FAA+F;IAC/F,MAAM,CAAC,EAAE,MAAM,CAAA;CAClB;AAMD,qBAAa,qBAAsB,YAAW,cAAc;IAExD,qEAAqE;IACrE,gBAAgB,cAA6F;IAE7G,0GAA0G;IAC1G,SAAS,CAAC,eAAe,EAAE,eAAe,GAAG,SAAS,CAAC;IAEvD,SAAS,CAAC,QAAQ,CAAC,gBAAgB,EAAE,gBAAgB,CAAC;IACtD,SAAS,CAAC,QAAQ,CAAC,cAAc,EAAE,cAAc,CAAC;IAClD,SAAS,CAAC,QAAQ,CAAC,YAAY,EAAE,YAAY,CAAC;IAC9C,SAAS,CAAC,QAAQ,CAAC,eAAe,EAAE,eAAe,CAAC;gBAExC,QAAQ,EAAE,mBAAmB;IAOzC,SAAS,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE,oBAAoB,GAAG,MAAM;IAchE,WAAW,CAAC,CAAC,SAAS,OAAO,GAAG,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,sBAAsB,GAAG,CAAC;IAO9F,SAAS,CAAC,QAAQ,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,OAAO,EAAE,UAAU,EAAE,WAAW,EAAE,QAAQ,EAAE,YAAY,EAAE,EAAE,oBAAoB,GAAG,OAAO;IAqD5I,SAAS,CAAC,iCAAiC,CAAC,IAAI,EAAE,qBAAqB;IAwBvE,SAAS,CAAC,QAAQ,CAAC,IAAI,EAAE,cAAc,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,sBAAsB,EAAE,SAAS,CAAC,EAAE,OAAO,EAAE,iBAAiB,CAAC,EAAE,MAAM,EAAE,cAAc,CAAC,EAAE,MAAM;IAuBjK,SAAS,CAAC,eAAe,CAAC,SAAS,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,SAAS,EAAE,qBAAqB,EAAE,OAAO,EAAE,sBAAsB,GAAG,SAAS,GAAG,SAAS;IAiCxK,SAAS,CAAC,UAAU,CAAC,IAAI,EAAE,OAAO,EAAE,GAAG,EAAE,MAAM,EAAE,YAAY,CAAC,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,GAAG,GAAG,OAAO,GAAG,MAAM;CAoC1G"}

218
node_modules/langium/lib/serializer/json-serializer.js generated vendored Normal file

@@ -0,0 +1,218 @@
/******************************************************************************
* Copyright 2021 TypeFox GmbH
* This program and the accompanying materials are made available under the
* terms of the MIT License, which is available in the project root.
******************************************************************************/
import { URI } from 'vscode-uri';
import { isAstNode, isReference } from '../syntax-tree.js';
import { getDocument } from '../utils/ast-utils.js';
import { findNodesForProperty } from '../utils/grammar-utils.js';
export function isAstNodeWithComment(node) {
return typeof node.$comment === 'string';
}
function isIntermediateReference(obj) {
return typeof obj === 'object' && !!obj && ('$ref' in obj || '$error' in obj);
}
export class DefaultJsonSerializer {
constructor(services) {
/** The set of AstNode properties to be ignored by the serializer. */
this.ignoreProperties = new Set(['$container', '$containerProperty', '$containerIndex', '$document', '$cstNode']);
this.langiumDocuments = services.shared.workspace.LangiumDocuments;
this.astNodeLocator = services.workspace.AstNodeLocator;
this.nameProvider = services.references.NameProvider;
this.commentProvider = services.documentation.CommentProvider;
}
serialize(node, options) {
const serializeOptions = options !== null && options !== void 0 ? options : {};
const specificReplacer = options === null || options === void 0 ? void 0 : options.replacer;
const defaultReplacer = (key, value) => this.replacer(key, value, serializeOptions);
const replacer = specificReplacer ? (key, value) => specificReplacer(key, value, defaultReplacer) : defaultReplacer;
try {
this.currentDocument = getDocument(node);
return JSON.stringify(node, replacer, options === null || options === void 0 ? void 0 : options.space);
}
finally {
this.currentDocument = undefined;
}
}
deserialize(content, options) {
const deserializeOptions = options !== null && options !== void 0 ? options : {};
const root = JSON.parse(content);
this.linkNode(root, root, deserializeOptions);
return root;
}
replacer(key, value, { refText, sourceText, textRegions, comments, uriConverter }) {
var _a, _b, _c, _d;
if (this.ignoreProperties.has(key)) {
return undefined;
}
else if (isReference(value)) {
const refValue = value.ref;
const $refText = refText ? value.$refText : undefined;
if (refValue) {
const targetDocument = getDocument(refValue);
let targetUri = '';
if (this.currentDocument && this.currentDocument !== targetDocument) {
if (uriConverter) {
targetUri = uriConverter(targetDocument.uri, value);
}
else {
targetUri = targetDocument.uri.toString();
}
}
const targetPath = this.astNodeLocator.getAstNodePath(refValue);
return {
$ref: `${targetUri}#${targetPath}`,
$refText
};
}
else {
return {
$error: (_b = (_a = value.error) === null || _a === void 0 ? void 0 : _a.message) !== null && _b !== void 0 ? _b : 'Could not resolve reference',
$refText
};
}
}
else if (isAstNode(value)) {
let astNode = undefined;
if (textRegions) {
astNode = this.addAstNodeRegionWithAssignmentsTo(Object.assign({}, value));
if ((!key || value.$document) && (astNode === null || astNode === void 0 ? void 0 : astNode.$textRegion)) {
// The document URI is added to the root node of the resulting JSON tree
astNode.$textRegion.documentURI = (_c = this.currentDocument) === null || _c === void 0 ? void 0 : _c.uri.toString();
}
}
if (sourceText && !key) {
astNode !== null && astNode !== void 0 ? astNode : (astNode = Object.assign({}, value));
astNode.$sourceText = (_d = value.$cstNode) === null || _d === void 0 ? void 0 : _d.text;
}
if (comments) {
astNode !== null && astNode !== void 0 ? astNode : (astNode = Object.assign({}, value));
const comment = this.commentProvider.getComment(value);
if (comment) {
astNode.$comment = comment.replace(/\r/g, '');
}
}
return astNode !== null && astNode !== void 0 ? astNode : value;
}
else {
return value;
}
}
addAstNodeRegionWithAssignmentsTo(node) {
const createDocumentSegment = cstNode => ({
offset: cstNode.offset,
end: cstNode.end,
length: cstNode.length,
range: cstNode.range,
});
if (node.$cstNode) {
const textRegion = node.$textRegion = createDocumentSegment(node.$cstNode);
const assignments = textRegion.assignments = {};
Object.keys(node).filter(key => !key.startsWith('$')).forEach(key => {
const propertyAssignments = findNodesForProperty(node.$cstNode, key).map(createDocumentSegment);
if (propertyAssignments.length !== 0) {
assignments[key] = propertyAssignments;
}
});
return node;
}
return undefined;
}
linkNode(node, root, options, container, containerProperty, containerIndex) {
for (const [propertyName, item] of Object.entries(node)) {
if (Array.isArray(item)) {
for (let index = 0; index < item.length; index++) {
const element = item[index];
if (isIntermediateReference(element)) {
item[index] = this.reviveReference(node, propertyName, root, element, options);
}
else if (isAstNode(element)) {
this.linkNode(element, root, options, node, propertyName, index);
}
}
}
else if (isIntermediateReference(item)) {
node[propertyName] = this.reviveReference(node, propertyName, root, item, options);
}
else if (isAstNode(item)) {
this.linkNode(item, root, options, node, propertyName);
}
}
const mutable = node;
mutable.$container = container;
mutable.$containerProperty = containerProperty;
mutable.$containerIndex = containerIndex;
}
reviveReference(container, property, root, reference, options) {
let refText = reference.$refText;
let error = reference.$error;
if (reference.$ref) {
const ref = this.getRefNode(root, reference.$ref, options.uriConverter);
if (isAstNode(ref)) {
if (!refText) {
refText = this.nameProvider.getName(ref);
}
return {
$refText: refText !== null && refText !== void 0 ? refText : '',
ref
};
}
else {
error = ref;
}
}
if (error) {
const ref = {
$refText: refText !== null && refText !== void 0 ? refText : ''
};
ref.error = {
container,
property,
message: error,
reference: ref
};
return ref;
}
else {
return undefined;
}
}
getRefNode(root, uri, uriConverter) {
try {
const fragmentIndex = uri.indexOf('#');
if (fragmentIndex === 0) {
const node = this.astNodeLocator.getAstNode(root, uri.substring(1));
if (!node) {
return 'Could not resolve path: ' + uri;
}
return node;
}
if (fragmentIndex < 0) {
const documentUri = uriConverter ? uriConverter(uri) : URI.parse(uri);
const document = this.langiumDocuments.getDocument(documentUri);
if (!document) {
return 'Could not find document for URI: ' + uri;
}
return document.parseResult.value;
}
const documentUri = uriConverter ? uriConverter(uri.substring(0, fragmentIndex)) : URI.parse(uri.substring(0, fragmentIndex));
const document = this.langiumDocuments.getDocument(documentUri);
if (!document) {
return 'Could not find document for URI: ' + uri;
}
if (fragmentIndex === uri.length - 1) {
return document.parseResult.value;
}
const node = this.astNodeLocator.getAstNode(document.parseResult.value, uri.substring(fragmentIndex + 1));
if (!node) {
return 'Could not resolve URI: ' + uri;
}
return node;
}
catch (err) {
return String(err);
}
}
}
//# sourceMappingURL=json-serializer.js.map
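
A non-obvious point in the implementation above is the replacer chain in serialize(): a user-supplied replacer receives the default replacer and should delegate to it, otherwise ignored properties, references, and text regions are no longer handled. Likewise, a uriConverter passed to serialize() should be paired with a matching uriConverter on deserialize() so cross-document references resolve again. The sketch below is hypothetical; `$myCache` is a made-up property name used only for illustration.

// Hypothetical sketch: a custom replacer delegating to the default one, plus a paired
// uriConverter for serialize/deserialize. Not part of the vendored code above.
import { URI } from 'vscode-uri';
import type { AstNode, JsonSerializer } from 'langium';

export function copyWithoutCache(serializer: JsonSerializer, rootNode: AstNode): AstNode {
    const json = serializer.serialize(rootNode, {
        replacer: (key, value, defaultReplacer) =>
            key === '$myCache' ? undefined : defaultReplacer(key, value),
        uriConverter: uri => uri.toString(),             // cross-document target -> string
    });
    return serializer.deserialize(json, {
        uriConverter: uriString => URI.parse(uriString), // ...and back to a URI
    });
}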

node_modules/langium/lib/serializer/json-serializer.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long