mirror of
https://github.com/discordjs/discord.js.git
synced 2026-03-12 09:33:32 +01:00
build: package api-extractor and -model (#9920)
* fix(ExcerptText): don't display import("discord-api-types/v10"). in return type
* Squashed 'packages/api-extractor-model/' content from commit 39ecb196c
git-subtree-dir: packages/api-extractor-model
git-subtree-split: 39ecb196ca210bdf84ba6c9cadb1bb93571849d7
* Squashed 'packages/api-extractor/' content from commit 341ad6c51
git-subtree-dir: packages/api-extractor
git-subtree-split: 341ad6c51b01656d4f73b74ad4bdb3095f9262c4
* feat(api-extractor): add api-extractor and -model
* fix: package.json docs script
* fix(SourceLink): use <> instead of function syntax
* fix: make packages private
* fix: rest params showing in docs, added labels
* fix: missed two files
* fix: cpy-cli & pnpm-lock
* fix: increase icon size
* fix: icon size again
This commit is contained in:
1134
packages/api-extractor/src/generators/ApiModelGenerator.ts
Normal file
1134
packages/api-extractor/src/generators/ApiModelGenerator.ts
Normal file
File diff suppressed because it is too large
Load Diff
539
packages/api-extractor/src/generators/ApiReportGenerator.ts
Normal file
539
packages/api-extractor/src/generators/ApiReportGenerator.ts
Normal file
@@ -0,0 +1,539 @@
|
||||
/* eslint-disable no-case-declarations */
|
||||
/* eslint-disable @typescript-eslint/require-array-sort-compare */
|
||||
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
|
||||
// See LICENSE in the project root for license information.
|
||||
|
||||
import { ReleaseTag, releaseTagGetTagName } from '@discordjs/api-extractor-model';
|
||||
import { Text, InternalError } from '@rushstack/node-core-library';
|
||||
import * as ts from 'typescript';
|
||||
import { AstDeclaration } from '../analyzer/AstDeclaration.js';
|
||||
import type { AstEntity } from '../analyzer/AstEntity.js';
|
||||
import { AstImport } from '../analyzer/AstImport.js';
|
||||
import type { AstModuleExportInfo } from '../analyzer/AstModule.js';
|
||||
import { AstNamespaceImport } from '../analyzer/AstNamespaceImport.js';
|
||||
import { AstSymbol } from '../analyzer/AstSymbol.js';
|
||||
import { SourceFileLocationFormatter } from '../analyzer/SourceFileLocationFormatter.js';
|
||||
import { Span } from '../analyzer/Span.js';
|
||||
import { TypeScriptHelpers } from '../analyzer/TypeScriptHelpers.js';
|
||||
import type { ExtractorMessage } from '../api/ExtractorMessage.js';
|
||||
import { ExtractorMessageId } from '../api/ExtractorMessageId.js';
|
||||
import type { ApiItemMetadata } from '../collector/ApiItemMetadata.js';
|
||||
import { Collector } from '../collector/Collector.js';
|
||||
import type { CollectorEntity } from '../collector/CollectorEntity.js';
|
||||
import { DtsEmitHelpers } from './DtsEmitHelpers.js';
|
||||
import { IndentedWriter } from './IndentedWriter.js';
|
||||
|
||||
export class ApiReportGenerator {
|
||||
private static _trimSpacesRegExp: RegExp = / +$/gm;
|
||||
|
||||
/**
|
||||
* Compares the contents of two API files that were created using ApiFileGenerator,
|
||||
* and returns true if they are equivalent. Note that these files are not normally edited
|
||||
* by a human; the "equivalence" comparison here is intended to ignore spurious changes that
|
||||
* might be introduced by a tool, e.g. Git newline normalization or an editor that strips
|
||||
* whitespace when saving.
|
||||
*/
|
||||
public static areEquivalentApiFileContents(actualFileContent: string, expectedFileContent: string): boolean {
|
||||
// NOTE: "\s" also matches "\r" and "\n"
|
||||
const normalizedActual: string = actualFileContent.replaceAll(/\s+/g, ' ');
|
||||
const normalizedExpected: string = expectedFileContent.replaceAll(/\s+/g, ' ');
|
||||
return normalizedActual === normalizedExpected;
|
||||
}
|
||||
|
||||
public static generateReviewFileContent(collector: Collector): string {
|
||||
const writer: IndentedWriter = new IndentedWriter();
|
||||
writer.trimLeadingSpaces = true;
|
||||
|
||||
writer.writeLine(
|
||||
[
|
||||
`## API Report File for "${collector.workingPackage.name}"`,
|
||||
``,
|
||||
`> Do not edit this file. It is a report generated by [API Extractor](https://api-extractor.com/).`,
|
||||
``,
|
||||
].join('\n'),
|
||||
);
|
||||
|
||||
// Write the opening delimiter for the Markdown code fence
|
||||
writer.writeLine('```ts\n');
|
||||
|
||||
// Emit the triple slash directives
|
||||
for (const typeDirectiveReference of Array.from(collector.dtsTypeReferenceDirectives).sort()) {
|
||||
// https://github.com/microsoft/TypeScript/blob/611ebc7aadd7a44a4c0447698bfda9222a78cb66/src/compiler/declarationEmitter.ts#L162
|
||||
writer.writeLine(`/// <reference types="${typeDirectiveReference}" />`);
|
||||
}
|
||||
|
||||
for (const libDirectiveReference of Array.from(collector.dtsLibReferenceDirectives).sort()) {
|
||||
writer.writeLine(`/// <reference lib="${libDirectiveReference}" />`);
|
||||
}
|
||||
|
||||
writer.ensureSkippedLine();
|
||||
|
||||
// Emit the imports
|
||||
for (const entity of collector.entities) {
|
||||
if (entity.astEntity instanceof AstImport) {
|
||||
DtsEmitHelpers.emitImport(writer, entity, entity.astEntity);
|
||||
}
|
||||
}
|
||||
|
||||
writer.ensureSkippedLine();
|
||||
|
||||
// Emit the regular declarations
|
||||
for (const entity of collector.entities) {
|
||||
const astEntity: AstEntity = entity.astEntity;
|
||||
if (entity.consumable || collector.extractorConfig.apiReportIncludeForgottenExports) {
|
||||
// First, collect the list of export names for this symbol. When reporting messages with
|
||||
// ExtractorMessage.properties.exportName, this will enable us to emit the warning comments alongside
|
||||
// the associated export statement.
|
||||
interface IExportToEmit {
|
||||
readonly associatedMessages: ExtractorMessage[];
|
||||
readonly exportName: string;
|
||||
}
|
||||
const exportsToEmit: Map<string, IExportToEmit> = new Map<string, IExportToEmit>();
|
||||
|
||||
for (const exportName of entity.exportNames) {
|
||||
if (!entity.shouldInlineExport) {
|
||||
exportsToEmit.set(exportName, { exportName, associatedMessages: [] });
|
||||
}
|
||||
}
|
||||
|
||||
if (astEntity instanceof AstSymbol) {
|
||||
// Emit all the declarations for this entity
|
||||
for (const astDeclaration of astEntity.astDeclarations || []) {
|
||||
// Get the messages associated with this declaration
|
||||
const fetchedMessages: ExtractorMessage[] =
|
||||
collector.messageRouter.fetchAssociatedMessagesForReviewFile(astDeclaration);
|
||||
|
||||
// Peel off the messages associated with an export statement and store them
|
||||
// in IExportToEmit.associatedMessages (to be processed later). The remaining messages will
|
||||
// added to messagesToReport, to be emitted next to the declaration instead of the export statement.
|
||||
const messagesToReport: ExtractorMessage[] = [];
|
||||
for (const message of fetchedMessages) {
|
||||
if (message.properties.exportName) {
|
||||
const exportToEmit: IExportToEmit | undefined = exportsToEmit.get(message.properties.exportName);
|
||||
if (exportToEmit) {
|
||||
exportToEmit.associatedMessages.push(message);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
messagesToReport.push(message);
|
||||
}
|
||||
|
||||
writer.ensureSkippedLine();
|
||||
writer.write(ApiReportGenerator._getAedocSynopsis(collector, astDeclaration, messagesToReport));
|
||||
|
||||
const span: Span = new Span(astDeclaration.declaration);
|
||||
|
||||
const apiItemMetadata: ApiItemMetadata = collector.fetchApiItemMetadata(astDeclaration);
|
||||
if (apiItemMetadata.isPreapproved) {
|
||||
ApiReportGenerator._modifySpanForPreapproved(span);
|
||||
} else {
|
||||
ApiReportGenerator._modifySpan(collector, span, entity, astDeclaration, false);
|
||||
}
|
||||
|
||||
span.writeModifiedText(writer);
|
||||
writer.ensureNewLine();
|
||||
}
|
||||
}
|
||||
|
||||
if (astEntity instanceof AstNamespaceImport) {
|
||||
const astModuleExportInfo: AstModuleExportInfo = astEntity.fetchAstModuleExportInfo(collector);
|
||||
|
||||
if (entity.nameForEmit === undefined) {
|
||||
// This should never happen
|
||||
throw new InternalError('referencedEntry.nameForEmit is undefined');
|
||||
}
|
||||
|
||||
if (astModuleExportInfo.starExportedExternalModules.size > 0) {
|
||||
// We could support this, but we would need to find a way to safely represent it.
|
||||
throw new Error(
|
||||
`The ${entity.nameForEmit} namespace import includes a star export, which is not supported:\n` +
|
||||
SourceFileLocationFormatter.formatDeclaration(astEntity.declaration),
|
||||
);
|
||||
}
|
||||
|
||||
// Emit a synthetic declaration for the namespace. It will look like this:
|
||||
//
|
||||
// declare namespace example {
|
||||
// export {
|
||||
// f1,
|
||||
// f2
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// Note that we do not try to relocate f1()/f2() to be inside the namespace because other type
|
||||
// signatures may reference them directly (without using the namespace qualifier).
|
||||
|
||||
writer.ensureSkippedLine();
|
||||
writer.writeLine(`declare namespace ${entity.nameForEmit} {`);
|
||||
|
||||
// all local exports of local imported module are just references to top-level declarations
|
||||
writer.increaseIndent();
|
||||
writer.writeLine('export {');
|
||||
writer.increaseIndent();
|
||||
|
||||
const exportClauses: string[] = [];
|
||||
for (const [exportedName, exportedEntity] of astModuleExportInfo.exportedLocalEntities) {
|
||||
const collectorEntity: CollectorEntity | undefined = collector.tryGetCollectorEntity(exportedEntity);
|
||||
if (collectorEntity === undefined) {
|
||||
// This should never happen
|
||||
// top-level exports of local imported module should be added as collector entities before
|
||||
throw new InternalError(
|
||||
`Cannot find collector entity for ${entity.nameForEmit}.${exportedEntity.localName}`,
|
||||
);
|
||||
}
|
||||
|
||||
if (collectorEntity.nameForEmit === exportedName) {
|
||||
exportClauses.push(collectorEntity.nameForEmit);
|
||||
} else {
|
||||
exportClauses.push(`${collectorEntity.nameForEmit} as ${exportedName}`);
|
||||
}
|
||||
}
|
||||
|
||||
writer.writeLine(exportClauses.join(',\n'));
|
||||
|
||||
writer.decreaseIndent();
|
||||
writer.writeLine('}'); // end of "export { ... }"
|
||||
writer.decreaseIndent();
|
||||
writer.writeLine('}'); // end of "declare namespace { ... }"
|
||||
}
|
||||
|
||||
// Now emit the export statements for this entity.
|
||||
for (const exportToEmit of exportsToEmit.values()) {
|
||||
// Write any associated messages
|
||||
if (exportToEmit.associatedMessages.length > 0) {
|
||||
writer.ensureSkippedLine();
|
||||
for (const message of exportToEmit.associatedMessages) {
|
||||
ApiReportGenerator._writeLineAsComments(writer, 'Warning: ' + message.formatMessageWithoutLocation());
|
||||
}
|
||||
}
|
||||
|
||||
DtsEmitHelpers.emitNamedExport(writer, exportToEmit.exportName, entity);
|
||||
}
|
||||
|
||||
writer.ensureSkippedLine();
|
||||
}
|
||||
}
|
||||
|
||||
DtsEmitHelpers.emitStarExports(writer, collector);
|
||||
|
||||
// Write the unassociated warnings at the bottom of the file
|
||||
const unassociatedMessages: ExtractorMessage[] = collector.messageRouter.fetchUnassociatedMessagesForReviewFile();
|
||||
if (unassociatedMessages.length > 0) {
|
||||
writer.ensureSkippedLine();
|
||||
ApiReportGenerator._writeLineAsComments(writer, 'Warnings were encountered during analysis:');
|
||||
ApiReportGenerator._writeLineAsComments(writer, '');
|
||||
for (const unassociatedMessage of unassociatedMessages) {
|
||||
ApiReportGenerator._writeLineAsComments(
|
||||
writer,
|
||||
unassociatedMessage.formatMessageWithLocation(collector.workingPackage.packageFolder),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (collector.workingPackage.tsdocComment === undefined) {
|
||||
writer.ensureSkippedLine();
|
||||
ApiReportGenerator._writeLineAsComments(writer, '(No @packageDocumentation comment for this package)');
|
||||
}
|
||||
|
||||
// Write the closing delimiter for the Markdown code fence
|
||||
writer.ensureSkippedLine();
|
||||
writer.writeLine('```');
|
||||
|
||||
// Remove any trailing spaces
|
||||
return writer.toString().replace(ApiReportGenerator._trimSpacesRegExp, '');
|
||||
}
|
||||
|
||||
/**
|
||||
* Before writing out a declaration, _modifySpan() applies various fixups to make it nice.
|
||||
*/
|
||||
private static _modifySpan(
|
||||
collector: Collector,
|
||||
span: Span,
|
||||
entity: CollectorEntity,
|
||||
astDeclaration: AstDeclaration,
|
||||
insideTypeLiteral: boolean,
|
||||
): void {
|
||||
// Should we process this declaration at all?
|
||||
|
||||
if ((astDeclaration.modifierFlags & ts.ModifierFlags.Private) !== 0) {
|
||||
span.modification.skipAll();
|
||||
return;
|
||||
}
|
||||
|
||||
const previousSpan: Span | undefined = span.previousSibling;
|
||||
|
||||
let recurseChildren = true;
|
||||
let sortChildren = false;
|
||||
|
||||
switch (span.kind) {
|
||||
case ts.SyntaxKind.JSDocComment:
|
||||
span.modification.skipAll();
|
||||
// For now, we don't transform JSDoc comment nodes at all
|
||||
recurseChildren = false;
|
||||
break;
|
||||
|
||||
case ts.SyntaxKind.ExportKeyword:
|
||||
case ts.SyntaxKind.DefaultKeyword:
|
||||
case ts.SyntaxKind.DeclareKeyword:
|
||||
// Delete any explicit "export" or "declare" keywords -- we will re-add them below
|
||||
span.modification.skipAll();
|
||||
break;
|
||||
|
||||
case ts.SyntaxKind.InterfaceKeyword:
|
||||
case ts.SyntaxKind.ClassKeyword:
|
||||
case ts.SyntaxKind.EnumKeyword:
|
||||
case ts.SyntaxKind.NamespaceKeyword:
|
||||
case ts.SyntaxKind.ModuleKeyword:
|
||||
case ts.SyntaxKind.TypeKeyword:
|
||||
case ts.SyntaxKind.FunctionKeyword:
|
||||
// Replace the stuff we possibly deleted above
|
||||
let replacedModifiers = '';
|
||||
|
||||
if (entity.shouldInlineExport) {
|
||||
replacedModifiers = 'export ' + replacedModifiers;
|
||||
}
|
||||
|
||||
if (previousSpan && previousSpan.kind === ts.SyntaxKind.SyntaxList) {
|
||||
// If there is a previous span of type SyntaxList, then apply it before any other modifiers
|
||||
// (e.g. "abstract") that appear there.
|
||||
previousSpan.modification.prefix = replacedModifiers + previousSpan.modification.prefix;
|
||||
} else {
|
||||
// Otherwise just stick it in front of this span
|
||||
span.modification.prefix = replacedModifiers + span.modification.prefix;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case ts.SyntaxKind.SyntaxList:
|
||||
if (
|
||||
span.parent &&
|
||||
(AstDeclaration.isSupportedSyntaxKind(span.parent.kind) || span.parent.kind === ts.SyntaxKind.ModuleBlock)
|
||||
) {
|
||||
// If the immediate parent is an API declaration, and the immediate children are API declarations,
|
||||
// then sort the children alphabetically
|
||||
// Namespaces are special because their chain goes ModuleDeclaration -> ModuleBlock -> SyntaxList
|
||||
sortChildren = true;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case ts.SyntaxKind.VariableDeclaration:
|
||||
if (!span.parent) {
|
||||
// The VariableDeclaration node is part of a VariableDeclarationList, however
|
||||
// the Entry.followedSymbol points to the VariableDeclaration part because
|
||||
// multiple definitions might share the same VariableDeclarationList.
|
||||
//
|
||||
// Since we are emitting a separate declaration for each one, we need to look upwards
|
||||
// in the ts.Node tree and write a copy of the enclosing VariableDeclarationList
|
||||
// content (e.g. "var" from "var x=1, y=2").
|
||||
const list: ts.VariableDeclarationList | undefined = TypeScriptHelpers.matchAncestor(span.node, [
|
||||
ts.SyntaxKind.VariableDeclarationList,
|
||||
ts.SyntaxKind.VariableDeclaration,
|
||||
]);
|
||||
if (!list) {
|
||||
// This should not happen unless the compiler API changes somehow
|
||||
throw new InternalError('Unsupported variable declaration');
|
||||
}
|
||||
|
||||
const listPrefix: string = list.getSourceFile().text.slice(list.getStart(), list.declarations[0]!.getStart());
|
||||
span.modification.prefix = listPrefix + span.modification.prefix;
|
||||
span.modification.suffix = ';';
|
||||
|
||||
if (entity.shouldInlineExport) {
|
||||
span.modification.prefix = 'export ' + span.modification.prefix;
|
||||
}
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case ts.SyntaxKind.Identifier:
|
||||
const referencedEntity: CollectorEntity | undefined = collector.tryGetEntityForNode(span.node as ts.Identifier);
|
||||
|
||||
if (referencedEntity) {
|
||||
if (!referencedEntity.nameForEmit) {
|
||||
// This should never happen
|
||||
throw new InternalError('referencedEntry.nameForEmit is undefined');
|
||||
}
|
||||
|
||||
span.modification.prefix = referencedEntity.nameForEmit;
|
||||
// For debugging:
|
||||
// span.modification.prefix += '/*R=FIX*/';
|
||||
} else {
|
||||
// For debugging:
|
||||
// span.modification.prefix += '/*R=KEEP*/';
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case ts.SyntaxKind.TypeLiteral:
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
insideTypeLiteral = true;
|
||||
break;
|
||||
|
||||
case ts.SyntaxKind.ImportType:
|
||||
DtsEmitHelpers.modifyImportTypeSpan(collector, span, astDeclaration, (childSpan, childAstDeclaration) => {
|
||||
ApiReportGenerator._modifySpan(collector, childSpan, entity, childAstDeclaration, insideTypeLiteral);
|
||||
});
|
||||
break;
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
if (recurseChildren) {
|
||||
for (const child of span.children) {
|
||||
let childAstDeclaration: AstDeclaration = astDeclaration;
|
||||
|
||||
if (AstDeclaration.isSupportedSyntaxKind(child.kind)) {
|
||||
childAstDeclaration = collector.astSymbolTable.getChildAstDeclarationByNode(child.node, astDeclaration);
|
||||
|
||||
if (sortChildren) {
|
||||
span.modification.sortChildren = true;
|
||||
child.modification.sortKey = Collector.getSortKeyIgnoringUnderscore(
|
||||
childAstDeclaration.astSymbol.localName,
|
||||
);
|
||||
}
|
||||
|
||||
if (!insideTypeLiteral) {
|
||||
const messagesToReport: ExtractorMessage[] =
|
||||
collector.messageRouter.fetchAssociatedMessagesForReviewFile(childAstDeclaration);
|
||||
const aedocSynopsis: string = ApiReportGenerator._getAedocSynopsis(
|
||||
collector,
|
||||
childAstDeclaration,
|
||||
messagesToReport,
|
||||
);
|
||||
|
||||
child.modification.prefix = aedocSynopsis + child.modification.prefix;
|
||||
}
|
||||
}
|
||||
|
||||
ApiReportGenerator._modifySpan(collector, child, entity, childAstDeclaration, insideTypeLiteral);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* For declarations marked as `@preapproved`, this is used instead of _modifySpan().
|
||||
*/
|
||||
private static _modifySpanForPreapproved(span: Span): void {
|
||||
// Match something like this:
|
||||
//
|
||||
// ClassDeclaration:
|
||||
// SyntaxList:
|
||||
// ExportKeyword: pre=[export] sep=[ ]
|
||||
// DeclareKeyword: pre=[declare] sep=[ ]
|
||||
// ClassKeyword: pre=[class] sep=[ ]
|
||||
// Identifier: pre=[_PreapprovedClass] sep=[ ]
|
||||
// FirstPunctuation: pre=[{] sep=[\n\n ]
|
||||
// SyntaxList:
|
||||
// ...
|
||||
// CloseBraceToken: pre=[}]
|
||||
//
|
||||
// or this:
|
||||
// ModuleDeclaration:
|
||||
// SyntaxList:
|
||||
// ExportKeyword: pre=[export] sep=[ ]
|
||||
// DeclareKeyword: pre=[declare] sep=[ ]
|
||||
// NamespaceKeyword: pre=[namespace] sep=[ ]
|
||||
// Identifier: pre=[_PreapprovedNamespace] sep=[ ]
|
||||
// ModuleBlock:
|
||||
// FirstPunctuation: pre=[{] sep=[\n\n ]
|
||||
// SyntaxList:
|
||||
// ...
|
||||
// CloseBraceToken: pre=[}]
|
||||
//
|
||||
// And reduce it to something like this:
|
||||
//
|
||||
// // @internal (undocumented)
|
||||
// class _PreapprovedClass { /* (preapproved) */ }
|
||||
//
|
||||
|
||||
let skipRest = false;
|
||||
for (const child of span.children) {
|
||||
if (skipRest || child.kind === ts.SyntaxKind.SyntaxList || child.kind === ts.SyntaxKind.JSDocComment) {
|
||||
child.modification.skipAll();
|
||||
}
|
||||
|
||||
if (child.kind === ts.SyntaxKind.Identifier) {
|
||||
skipRest = true;
|
||||
child.modification.omitSeparatorAfter = true;
|
||||
child.modification.suffix = ' { /* (preapproved) */ }';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes a synopsis of the AEDoc comments, which indicates the release tag,
|
||||
* whether the item has been documented, and any warnings that were detected
|
||||
* by the analysis.
|
||||
*/
|
||||
private static _getAedocSynopsis(
|
||||
collector: Collector,
|
||||
astDeclaration: AstDeclaration,
|
||||
messagesToReport: ExtractorMessage[],
|
||||
): string {
|
||||
const writer: IndentedWriter = new IndentedWriter();
|
||||
|
||||
for (const message of messagesToReport) {
|
||||
ApiReportGenerator._writeLineAsComments(writer, 'Warning: ' + message.formatMessageWithoutLocation());
|
||||
}
|
||||
|
||||
if (!collector.isAncillaryDeclaration(astDeclaration)) {
|
||||
const footerParts: string[] = [];
|
||||
const apiItemMetadata: ApiItemMetadata = collector.fetchApiItemMetadata(astDeclaration);
|
||||
if (!apiItemMetadata.releaseTagSameAsParent && apiItemMetadata.effectiveReleaseTag !== ReleaseTag.None) {
|
||||
footerParts.push(releaseTagGetTagName(apiItemMetadata.effectiveReleaseTag));
|
||||
}
|
||||
|
||||
if (apiItemMetadata.isSealed) {
|
||||
footerParts.push('@sealed');
|
||||
}
|
||||
|
||||
if (apiItemMetadata.isVirtual) {
|
||||
footerParts.push('@virtual');
|
||||
}
|
||||
|
||||
if (apiItemMetadata.isOverride) {
|
||||
footerParts.push('@override');
|
||||
}
|
||||
|
||||
if (apiItemMetadata.isEventProperty) {
|
||||
footerParts.push('@eventProperty');
|
||||
}
|
||||
|
||||
if (apiItemMetadata.tsdocComment?.deprecatedBlock) {
|
||||
footerParts.push('@deprecated');
|
||||
}
|
||||
|
||||
if (apiItemMetadata.undocumented) {
|
||||
footerParts.push('(undocumented)');
|
||||
|
||||
collector.messageRouter.addAnalyzerIssue(
|
||||
ExtractorMessageId.Undocumented,
|
||||
`Missing documentation for "${astDeclaration.astSymbol.localName}".`,
|
||||
astDeclaration,
|
||||
);
|
||||
}
|
||||
|
||||
if (footerParts.length > 0) {
|
||||
if (messagesToReport.length > 0) {
|
||||
ApiReportGenerator._writeLineAsComments(writer, ''); // skip a line after the warnings
|
||||
}
|
||||
|
||||
ApiReportGenerator._writeLineAsComments(writer, footerParts.join(' '));
|
||||
}
|
||||
}
|
||||
|
||||
return writer.toString();
|
||||
}
|
||||
|
||||
private static _writeLineAsComments(writer: IndentedWriter, line: string): void {
|
||||
const lines: string[] = Text.convertToLf(line).split('\n');
|
||||
for (const realLine of lines) {
|
||||
writer.write('// ');
|
||||
writer.write(realLine);
|
||||
writer.writeLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,376 @@
|
||||
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
|
||||
// See LICENSE in the project root for license information.
|
||||
|
||||
import { Navigation, Meaning } from '@discordjs/api-extractor-model';
|
||||
import {
|
||||
DeclarationReference,
|
||||
ModuleSource,
|
||||
GlobalSource,
|
||||
} from '@microsoft/tsdoc/lib-commonjs/beta/DeclarationReference.js';
|
||||
import { type INodePackageJson, InternalError } from '@rushstack/node-core-library';
|
||||
import * as ts from 'typescript';
|
||||
import { AstNamespaceImport } from '../analyzer/AstNamespaceImport.js';
|
||||
import { TypeScriptHelpers } from '../analyzer/TypeScriptHelpers.js';
|
||||
import { TypeScriptInternals } from '../analyzer/TypeScriptInternals.js';
|
||||
import type { Collector } from '../collector/Collector.js';
|
||||
import type { CollectorEntity } from '../collector/CollectorEntity.js';
|
||||
|
||||
export class DeclarationReferenceGenerator {
|
||||
public static readonly unknownReference: string = '?';
|
||||
|
||||
private readonly _collector: Collector;
|
||||
|
||||
/**
 * @param collector - The collector whose type checker, program, and entity tables
 * are consulted when resolving declaration references.
 */
public constructor(collector: Collector) {
	this._collector = collector;
}
|
||||
|
||||
/**
|
||||
* Gets the UID for a TypeScript Identifier that references a type.
|
||||
*/
|
||||
public getDeclarationReferenceForIdentifier(node: ts.Identifier): DeclarationReference | undefined {
|
||||
const symbol: ts.Symbol | undefined = this._collector.typeChecker.getSymbolAtLocation(node);
|
||||
if (symbol !== undefined) {
|
||||
const isExpression: boolean = DeclarationReferenceGenerator._isInExpressionContext(node);
|
||||
return (
|
||||
this.getDeclarationReferenceForSymbol(symbol, isExpression ? ts.SymbolFlags.Value : ts.SymbolFlags.Type) ??
|
||||
this.getDeclarationReferenceForSymbol(symbol, isExpression ? ts.SymbolFlags.Type : ts.SymbolFlags.Value) ??
|
||||
this.getDeclarationReferenceForSymbol(symbol, ts.SymbolFlags.Namespace)
|
||||
);
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the DeclarationReference for a TypeScript Symbol for a given meaning.
|
||||
*/
|
||||
public getDeclarationReferenceForSymbol(
|
||||
symbol: ts.Symbol,
|
||||
meaning: ts.SymbolFlags,
|
||||
): DeclarationReference | undefined {
|
||||
return this._symbolToDeclarationReference(symbol, meaning, /* includeModuleSymbols*/ false);
|
||||
}
|
||||
|
||||
// Returns true when the node appears in a position where it denotes a value rather
// than a type (inside a `typeof` query or a computed property name), walking up
// through qualified names.
private static _isInExpressionContext(node: ts.Node): boolean {
	const parentKind: ts.SyntaxKind = node.parent.kind;

	if (parentKind === ts.SyntaxKind.TypeQuery || parentKind === ts.SyntaxKind.ComputedPropertyName) {
		return true;
	}

	if (parentKind === ts.SyntaxKind.QualifiedName) {
		// e.g. the "a" in "typeof a.b.c" — keep climbing the qualified name chain
		return DeclarationReferenceGenerator._isInExpressionContext(node.parent);
	}

	return false;
}
|
||||
|
||||
// Returns true when the symbol is a value module whose value declaration is an
// entire source file (i.e. the symbol represents a module, not a member of one).
private static _isExternalModuleSymbol(symbol: ts.Symbol): boolean {
	if (!(symbol.flags & ts.SymbolFlags.ValueModule)) {
		return false;
	}

	const valueDeclaration: ts.Declaration | undefined = symbol.valueDeclaration;
	return valueDeclaration !== undefined && ts.isSourceFile(valueDeclaration);
}
|
||||
|
||||
// Two symbols are considered the same when they are reference-identical, or when
// both have a value declaration and those declarations are the same node.
private static _isSameSymbol(left: ts.Symbol | undefined, right: ts.Symbol): boolean {
	if (left === right) {
		return true;
	}

	if (left?.valueDeclaration && right.valueDeclaration) {
		return left.valueDeclaration === right.valueDeclaration;
	}

	return false;
}
|
||||
|
||||
/**
 * Determines the navigation step (Members, Exports, or Locals) used to reach the given
 * symbol from its parent in a declaration reference.
 *
 * NOTE: the checks below are order-dependent: global/external-library symbols are
 * classified first, then consumable collector entities, then non-module parents, and
 * only what remains is treated as a local.
 */
private _getNavigationToSymbol(symbol: ts.Symbol): Navigation {
	const declaration: ts.Declaration | undefined = TypeScriptHelpers.tryGetADeclaration(symbol);
	const sourceFile: ts.SourceFile | undefined = declaration?.getSourceFile();
	const parent: ts.Symbol | undefined = TypeScriptInternals.getSymbolParent(symbol);

	// If it's global or from an external library, then use either Members or Exports. It's not possible for
	// global symbols or external library symbols to be Locals.
	const isGlobal: boolean = Boolean(sourceFile) && !ts.isExternalModule(sourceFile!);
	const isFromExternalLibrary: boolean =
		Boolean(sourceFile) && this._collector.program.isSourceFileFromExternalLibrary(sourceFile!);
	if (isGlobal || isFromExternalLibrary) {
		if (
			parent?.members &&
			DeclarationReferenceGenerator._isSameSymbol(parent.members.get(symbol.escapedName), symbol)
		) {
			return Navigation.Members;
		}

		return Navigation.Exports;
	}

	// Otherwise, this symbol is from the current package. If we've found an associated consumable
	// `CollectorEntity`, then use Exports. We use `consumable` here instead of `exported` because
	// if the symbol is exported from a non-consumable `AstNamespaceImport`, we don't want to use
	// Exports. We should use Locals instead.
	const entity: CollectorEntity | undefined = this._collector.tryGetEntityForSymbol(symbol);
	if (entity?.consumable) {
		return Navigation.Exports;
	}

	// If its parent symbol is not a source file, then use either Exports or Members. If the parent symbol
	// is a source file, but it wasn't exported from the package entry point (in the check above), then the
	// symbol is a local, so fall through below.
	if (parent && !DeclarationReferenceGenerator._isExternalModuleSymbol(parent)) {
		if (
			parent.members &&
			DeclarationReferenceGenerator._isSameSymbol(parent.members.get(symbol.escapedName), symbol)
		) {
			return Navigation.Members;
		}

		return Navigation.Exports;
	}

	// Otherwise, we have a local symbol, so use a Locals navigation. These are either:
	//
	// 1. Symbols that are exported from a file module but not the package entry point.
	// 2. Symbols that are not exported from their parent module.
	return Navigation.Locals;
}
|
||||
|
||||
/**
 * Maps a TypeScript symbol onto the corresponding DeclarationReference `Meaning`,
 * restricted to the requested `meaning` flags, or `undefined` if no supported
 * meaning applies.
 *
 * NOTE: the flag masks are tested in a fixed priority order; each test only matches
 * when the flag is present in both the symbol's flags and the requested meaning.
 */
private static _getMeaningOfSymbol(symbol: ts.Symbol, meaning: ts.SymbolFlags): Meaning | undefined {
	if (symbol.flags & meaning & ts.SymbolFlags.Class) {
		return Meaning.Class;
	}

	if (symbol.flags & meaning & ts.SymbolFlags.Enum) {
		return Meaning.Enum;
	}

	if (symbol.flags & meaning & ts.SymbolFlags.Interface) {
		return Meaning.Interface;
	}

	if (symbol.flags & meaning & ts.SymbolFlags.TypeAlias) {
		return Meaning.TypeAlias;
	}

	if (symbol.flags & meaning & ts.SymbolFlags.Function) {
		return Meaning.Function;
	}

	if (symbol.flags & meaning & ts.SymbolFlags.Variable) {
		return Meaning.Variable;
	}

	if (symbol.flags & meaning & ts.SymbolFlags.Module) {
		return Meaning.Namespace;
	}

	if (symbol.flags & meaning & ts.SymbolFlags.ClassMember) {
		return Meaning.Member;
	}

	if (symbol.flags & meaning & ts.SymbolFlags.Constructor) {
		return Meaning.Constructor;
	}

	if (symbol.flags & meaning & ts.SymbolFlags.EnumMember) {
		return Meaning.Member;
	}

	if (symbol.flags & meaning & ts.SymbolFlags.Signature) {
		// Signatures are distinguished by TypeScript's internal symbol names.
		if (symbol.escapedName === ts.InternalSymbolName.Call) {
			return Meaning.CallSignature;
		}

		if (symbol.escapedName === ts.InternalSymbolName.New) {
			return Meaning.ConstructSignature;
		}

		if (symbol.escapedName === ts.InternalSymbolName.Index) {
			return Meaning.IndexSignature;
		}
	}

	if (symbol.flags & meaning & ts.SymbolFlags.TypeParameter) {
		// This should have already been handled in `getDeclarationReferenceOfSymbol`.
		throw new InternalError('Not supported.');
	}

	return undefined;
}
|
||||
|
||||
	/**
	 * Builds a DeclarationReference for `symbol`.
	 *
	 * Follows export-value and alias indirection to the underlying symbol, computes the parent
	 * reference that qualifies it, then chooses the local name plus navigation/meaning components.
	 *
	 * @param symbol - The symbol to generate a reference for
	 * @param meaning - ts.SymbolFlags used to select the "meaning" component of the reference
	 * @param includeModuleSymbols - When false, returns `undefined` if the symbol resolves to an
	 * external module (instead of emitting a module-level reference)
	 * @returns The declaration reference, or `undefined` when none can (or should) be generated
	 */
	private _symbolToDeclarationReference(
		symbol: ts.Symbol,
		meaning: ts.SymbolFlags,
		includeModuleSymbols: boolean,
	): DeclarationReference | undefined {
		const declaration: ts.Node | undefined = TypeScriptHelpers.tryGetADeclaration(symbol);
		const sourceFile: ts.SourceFile | undefined = declaration?.getSourceFile();

		// Resolve an "export value" (e.g. `export = x`) to the underlying exported symbol.
		let followedSymbol: ts.Symbol = symbol;
		if (followedSymbol.flags & ts.SymbolFlags.ExportValue) {
			followedSymbol = this._collector.typeChecker.getExportSymbolOfSymbol(followedSymbol);
		}

		// Resolve import aliases to the symbol they refer to.
		if (followedSymbol.flags & ts.SymbolFlags.Alias) {
			followedSymbol = this._collector.typeChecker.getAliasedSymbol(followedSymbol);

			// Without this logic, we end up following the symbol `ns` in `import * as ns from './file'` to
			// the actual file `file.ts`. We don't want to do this, so revert to the original symbol.
			if (followedSymbol.flags & ts.SymbolFlags.ValueModule) {
				followedSymbol = symbol;
			}
		}

		// A module symbol becomes a module-level reference (or nothing, if the caller opted out).
		if (DeclarationReferenceGenerator._isExternalModuleSymbol(followedSymbol)) {
			if (!includeModuleSymbols) {
				return undefined;
			}

			return new DeclarationReference(this._sourceFileToModuleSource(sourceFile));
		}

		// Do not generate a declaration reference for a type parameter.
		if (followedSymbol.flags & ts.SymbolFlags.TypeParameter) {
			return undefined;
		}

		let parentRef: DeclarationReference | undefined = this._getParentReference(followedSymbol);
		if (!parentRef) {
			return undefined;
		}

		// Prefer the collector's emitted name, which may differ from the symbol's own name
		// (e.g. renamed to avoid collisions in the rollup).
		let localName: string = followedSymbol.name;
		const entity: CollectorEntity | undefined = this._collector.tryGetEntityForSymbol(followedSymbol);
		if (entity?.nameForEmit) {
			localName = entity.nameForEmit;
		}

		if (followedSymbol.escapedName === ts.InternalSymbolName.Constructor) {
			localName = 'constructor';
		} else {
			const wellKnownName: string | undefined = TypeScriptHelpers.tryDecodeWellKnownSymbolName(
				followedSymbol.escapedName,
			);
			if (wellKnownName) {
				// TypeScript binds well-known ECMAScript symbols like 'Symbol.iterator' as '__@iterator'.
				// This converts a string like '__@iterator' into the property name '[Symbol.iterator]'.
				localName = wellKnownName;
			} else if (TypeScriptHelpers.isUniqueSymbolName(followedSymbol.escapedName)) {
				// For a `unique symbol` member, try each declaration's computed property name and use
				// the first late-bound name that can be determined.
				for (const decl of followedSymbol.declarations ?? []) {
					const declName: ts.DeclarationName | undefined = ts.getNameOfDeclaration(decl);
					if (declName && ts.isComputedPropertyName(declName)) {
						const lateName: string | undefined = TypeScriptHelpers.tryGetLateBoundName(declName);
						if (lateName !== undefined) {
							localName = lateName;
							break;
						}
					}
				}
			}
		}

		const navigation: Navigation = this._getNavigationToSymbol(followedSymbol);

		// If the symbol is a global, ensure the source is global.
		if (sourceFile && !ts.isExternalModule(sourceFile) && parentRef.source !== GlobalSource.instance) {
			parentRef = new DeclarationReference(GlobalSource.instance);
		}

		return parentRef
			.addNavigationStep(navigation as any, localName)
			.withMeaning(DeclarationReferenceGenerator._getMeaningOfSymbol(followedSymbol, meaning) as any);
	}
|
||||
|
||||
	/**
	 * Computes the DeclarationReference of the scope that should qualify `symbol`'s own reference:
	 * the entry point, an exporting namespace import, a parent symbol, or the module/global source.
	 *
	 * @param symbol - The symbol whose parent scope is being determined
	 * @returns The parent reference, or `undefined` when a parent reference cannot be produced
	 */
	private _getParentReference(symbol: ts.Symbol): DeclarationReference | undefined {
		const declaration: ts.Node | undefined = TypeScriptHelpers.tryGetADeclaration(symbol);
		const sourceFile: ts.SourceFile | undefined = declaration?.getSourceFile();

		// Note that it's possible for a symbol to be exported from an entry point as well as one or more
		// namespaces. In that case, it's not clear what to choose as its parent. Today's logic is neither
		// perfect nor particularly stable to API items being renamed and shuffled around.
		const entity: CollectorEntity | undefined = this._collector.tryGetEntityForSymbol(symbol);
		if (entity) {
			if (entity.exportedFromEntryPoint) {
				return new DeclarationReference(this._sourceFileToModuleSource(sourceFile));
			}

			// If the symbol is only reachable through a namespace import, qualify it with that namespace.
			const firstExportingConsumableParent: CollectorEntity | undefined = entity.getFirstExportingConsumableParent();
			if (firstExportingConsumableParent && firstExportingConsumableParent.astEntity instanceof AstNamespaceImport) {
				const parentSymbol: ts.Symbol | undefined = TypeScriptInternals.tryGetSymbolForDeclaration(
					firstExportingConsumableParent.astEntity.declaration,
					this._collector.typeChecker,
				);
				if (parentSymbol) {
					return this._symbolToDeclarationReference(parentSymbol, parentSymbol.flags, /* includeModuleSymbols*/ true);
				}
			}
		}

		// Next, try to find a parent symbol via the symbol tree.
		const parentSymbol: ts.Symbol | undefined = TypeScriptInternals.getSymbolParent(symbol);
		if (parentSymbol) {
			return this._symbolToDeclarationReference(parentSymbol, parentSymbol.flags, /* includeModuleSymbols*/ true);
		}

		// If that doesn't work, try to find a parent symbol via the node tree. As far as we can tell,
		// this logic is only needed for local symbols within namespaces. For example:
		//
		// ```
		// export namespace n {
		//   type SomeType = number;
		//   export function someFunction(): SomeType { return 5; }
		// }
		// ```
		//
		// In the example above, `SomeType` doesn't have a parent symbol per the TS internal API above,
		// but its reference still needs to be qualified with the parent reference for `n`.
		const grandParent: ts.Node | undefined = declaration?.parent?.parent;
		if (grandParent && ts.isModuleDeclaration(grandParent)) {
			const grandParentSymbol: ts.Symbol | undefined = TypeScriptInternals.tryGetSymbolForDeclaration(
				grandParent,
				this._collector.typeChecker,
			);
			if (grandParentSymbol) {
				return this._symbolToDeclarationReference(
					grandParentSymbol,
					grandParentSymbol.flags,
					/* includeModuleSymbols*/ true,
				);
			}
		}

		// At this point, we have a local symbol in a module.
		if (sourceFile && ts.isExternalModule(sourceFile)) {
			return new DeclarationReference(this._sourceFileToModuleSource(sourceFile));
		} else {
			return new DeclarationReference(GlobalSource.instance);
		}
	}
|
||||
|
||||
private _getPackageName(sourceFile: ts.SourceFile): string {
|
||||
if (this._collector.program.isSourceFileFromExternalLibrary(sourceFile)) {
|
||||
const packageJson: INodePackageJson | undefined = this._collector.packageJsonLookup.tryLoadNodePackageJsonFor(
|
||||
sourceFile.fileName,
|
||||
);
|
||||
|
||||
if (packageJson?.name) {
|
||||
return packageJson.name;
|
||||
}
|
||||
|
||||
return DeclarationReferenceGenerator.unknownReference;
|
||||
}
|
||||
|
||||
return this._collector.workingPackage.name;
|
||||
}
|
||||
|
||||
private _sourceFileToModuleSource(sourceFile: ts.SourceFile | undefined): GlobalSource | ModuleSource {
|
||||
if (sourceFile && ts.isExternalModule(sourceFile)) {
|
||||
const packageName: string = this._getPackageName(sourceFile);
|
||||
|
||||
if (this._collector.bundledPackageNames.has(packageName)) {
|
||||
// The api-extractor.json config file has a "bundledPackages" setting, which causes imports from
|
||||
// certain NPM packages to be treated as part of the working project. In this case, we need to
|
||||
// substitute the working package name.
|
||||
return new ModuleSource(this._collector.workingPackage.name);
|
||||
} else {
|
||||
return new ModuleSource(packageName);
|
||||
}
|
||||
}
|
||||
|
||||
return GlobalSource.instance;
|
||||
}
|
||||
}
|
||||
159
packages/api-extractor/src/generators/DtsEmitHelpers.ts
Normal file
159
packages/api-extractor/src/generators/DtsEmitHelpers.ts
Normal file
@@ -0,0 +1,159 @@
|
||||
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
|
||||
// See LICENSE in the project root for license information.
|
||||
|
||||
import { InternalError } from '@rushstack/node-core-library';
|
||||
import * as ts from 'typescript';
|
||||
import { AstDeclaration } from '../analyzer/AstDeclaration.js';
|
||||
import { AstImport, AstImportKind } from '../analyzer/AstImport.js';
|
||||
import { SourceFileLocationFormatter } from '../analyzer/SourceFileLocationFormatter.js';
|
||||
import type { Span } from '../analyzer/Span.js';
|
||||
import type { Collector } from '../collector/Collector.js';
|
||||
import type { CollectorEntity } from '../collector/CollectorEntity.js';
|
||||
import type { IndentedWriter } from './IndentedWriter.js';
|
||||
|
||||
/**
|
||||
* Some common code shared between DtsRollupGenerator and ApiReportGenerator.
|
||||
*/
|
||||
export class DtsEmitHelpers {
	/**
	 * Writes the `import ...` statement for one collected import, choosing the syntax that matches
	 * its AstImportKind and applying any rename the collector assigned (`nameForEmit`).
	 *
	 * @param writer - Receives the emitted text
	 * @param collectorEntity - The collected entity whose emitted name may differ from the export name
	 * @param astImport - Describes the import's kind, module path, and export name
	 */
	public static emitImport(writer: IndentedWriter, collectorEntity: CollectorEntity, astImport: AstImport): void {
		// Use "import type" when every usage of this import is type-only.
		const importPrefix: string = astImport.isTypeOnlyEverywhere ? 'import type' : 'import';

		switch (astImport.importKind) {
			case AstImportKind.DefaultImport:
				// `import X from '...'` or, when renamed, `import { default as Y } from '...'`
				if (collectorEntity.nameForEmit === astImport.exportName) {
					writer.write(`${importPrefix} ${astImport.exportName}`);
				} else {
					writer.write(`${importPrefix} { default as ${collectorEntity.nameForEmit} }`);
				}

				writer.writeLine(` from '${astImport.modulePath}';`);
				break;
			case AstImportKind.NamedImport:
				// `import { X } from '...'` or `import { X as Y } from '...'`
				if (collectorEntity.nameForEmit === astImport.exportName) {
					writer.write(`${importPrefix} { ${astImport.exportName} }`);
				} else {
					writer.write(`${importPrefix} { ${astImport.exportName} as ${collectorEntity.nameForEmit} }`);
				}

				writer.writeLine(` from '${astImport.modulePath}';`);
				break;
			case AstImportKind.StarImport:
				// `import * as ns from '...'`
				writer.writeLine(`${importPrefix} * as ${collectorEntity.nameForEmit} from '${astImport.modulePath}';`);
				break;
			case AstImportKind.EqualsImport:
				// `import x = require('...')`
				writer.writeLine(`${importPrefix} ${collectorEntity.nameForEmit} = require('${astImport.modulePath}');`);
				break;
			case AstImportKind.ImportType:
				// An inline `import('...').A.B` type: only the top-level name of the qualifier chain
				// is imported; nested qualifiers are re-applied at the usage site.
				if (astImport.exportName) {
					const topExportName: string = astImport.exportName.split('.')[0]!;
					if (collectorEntity.nameForEmit === topExportName) {
						writer.write(`${importPrefix} { ${topExportName} }`);
					} else {
						writer.write(`${importPrefix} { ${topExportName} as ${collectorEntity.nameForEmit} }`);
					}

					writer.writeLine(` from '${astImport.modulePath}';`);
				} else {
					// `import('...')` with no qualifier imports the whole module namespace.
					writer.writeLine(`${importPrefix} * as ${collectorEntity.nameForEmit} from '${astImport.modulePath}';`);
				}

				break;
			default:
				throw new InternalError('Unimplemented AstImportKind');
		}
	}

	/**
	 * Writes a named (or default) export statement mapping the emitted name back to the
	 * publicly exported name.
	 *
	 * @param writer - Receives the emitted text
	 * @param exportName - The name under which the entity is exported
	 * @param collectorEntity - The collected entity whose `nameForEmit` may differ from `exportName`
	 */
	public static emitNamedExport(writer: IndentedWriter, exportName: string, collectorEntity: CollectorEntity): void {
		if (exportName === ts.InternalSymbolName.Default) {
			writer.writeLine(`export default ${collectorEntity.nameForEmit};`);
		} else if (collectorEntity.nameForEmit === exportName) {
			writer.writeLine(`export { ${exportName} }`);
		} else {
			writer.writeLine(`export { ${collectorEntity.nameForEmit} as ${exportName} }`);
		}
	}

	/**
	 * Writes `export * from "..."` statements for every star-exported external module path
	 * the collector recorded, preceded by a blank line. Writes nothing when there are none.
	 */
	public static emitStarExports(writer: IndentedWriter, collector: Collector): void {
		if (collector.starExportedExternalModulePaths.length > 0) {
			writer.writeLine();
			for (const starExportedExternalModulePath of collector.starExportedExternalModulePaths) {
				writer.writeLine(`export * from "${starExportedExternalModulePath}";`);
			}
		}
	}

	/**
	 * Rewrites an `import("...").X<T>` type node span so that it refers to the rolled-up entity name
	 * instead of the inline import, recursively processing any type arguments via `modifyNestedSpan`.
	 * Leaves the span untouched when the node does not resolve to a collected entity.
	 *
	 * @param collector - Used to resolve the node to a CollectorEntity and child declarations
	 * @param span - The Span covering the ImportType node to be rewritten
	 * @param astDeclaration - The declaration that contains this span
	 * @param modifyNestedSpan - Callback applied to each type-argument child span
	 */
	public static modifyImportTypeSpan(
		collector: Collector,
		span: Span,
		astDeclaration: AstDeclaration,
		modifyNestedSpan: (childSpan: Span, childAstDeclaration: AstDeclaration) => void,
	): void {
		const node: ts.ImportTypeNode = span.node as ts.ImportTypeNode;
		const referencedEntity: CollectorEntity | undefined = collector.tryGetEntityForNode(node);

		if (referencedEntity) {
			if (!referencedEntity.nameForEmit) {
				// This should never happen
				// NOTE(review): message says 'referencedEntry' but the identifier is `referencedEntity` — likely a typo
				throw new InternalError('referencedEntry.nameForEmit is undefined');
			}

			let typeArgumentsText = '';

			if (node.typeArguments && node.typeArguments.length > 0) {
				// Type arguments have to be processed and written to the document.
				// Locate the `<` and `>` tokens delimiting the type argument list within the span's children.
				const lessThanTokenPos: number = span.children.findIndex(
					(childSpan) => childSpan.node.kind === ts.SyntaxKind.LessThanToken,
				);
				const greaterThanTokenPos: number = span.children.findIndex(
					(childSpan) => childSpan.node.kind === ts.SyntaxKind.GreaterThanToken,
				);

				if (lessThanTokenPos < 0 || greaterThanTokenPos <= lessThanTokenPos) {
					throw new InternalError(
						`Invalid type arguments: ${node.getText()}\n` + SourceFileLocationFormatter.formatDeclaration(node),
					);
				}

				const typeArgumentsSpans: Span[] = span.children.slice(lessThanTokenPos + 1, greaterThanTokenPos);

				// Apply modifications to Span elements of typeArguments
				for (const childSpan of typeArgumentsSpans) {
					// Descend to the child declaration when the syntax kind supports one; otherwise
					// keep attributing modifications to the current declaration.
					const childAstDeclaration: AstDeclaration = AstDeclaration.isSupportedSyntaxKind(childSpan.kind)
						? collector.astSymbolTable.getChildAstDeclarationByNode(childSpan.node, astDeclaration)
						: astDeclaration;

					modifyNestedSpan(childSpan, childAstDeclaration);
				}

				const typeArgumentsStrings: string[] = typeArgumentsSpans.map((childSpan) => childSpan.getModifiedText());
				typeArgumentsText = `<${typeArgumentsStrings.join(', ')}>`;
			}

			// Preserve any trailing whitespace of the original span so surrounding layout is kept.
			const separatorAfter: string = /(?<separator>\s*)$/.exec(span.getText())?.groups?.separator ?? '';

			if (
				referencedEntity.astEntity instanceof AstImport &&
				referencedEntity.astEntity.importKind === AstImportKind.ImportType &&
				referencedEntity.astEntity.exportName
			) {
				// For an ImportType with a namespace chain, only the top namespace is imported.
				// Must add the original nested qualifiers to the rolled up import.
				const qualifiersText: string = node.qualifier?.getText() ?? '';
				const nestedQualifiersStart: number = qualifiersText.indexOf('.');
				// Including the leading "."
				const nestedQualifiersText: string =
					nestedQualifiersStart >= 0 ? qualifiersText.slice(Math.max(0, nestedQualifiersStart)) : '';

				const replacement = `${referencedEntity.nameForEmit}${nestedQualifiersText}${typeArgumentsText}${separatorAfter}`;

				span.modification.skipAll();
				span.modification.prefix = replacement;
			} else {
				// Replace with internal symbol or AstImport
				span.modification.skipAll();
				span.modification.prefix = `${referencedEntity.nameForEmit}${typeArgumentsText}${separatorAfter}`;
			}
		}
	}
}
|
||||
473
packages/api-extractor/src/generators/DtsRollupGenerator.ts
Normal file
473
packages/api-extractor/src/generators/DtsRollupGenerator.ts
Normal file
@@ -0,0 +1,473 @@
|
||||
/* eslint-disable no-case-declarations */
|
||||
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
|
||||
// See LICENSE in the project root for license information.
|
||||
|
||||
import { ReleaseTag } from '@discordjs/api-extractor-model';
|
||||
import { FileSystem, type NewlineKind, InternalError } from '@rushstack/node-core-library';
|
||||
import * as ts from 'typescript';
|
||||
import { AstDeclaration } from '../analyzer/AstDeclaration.js';
|
||||
import type { AstEntity } from '../analyzer/AstEntity.js';
|
||||
import { AstImport } from '../analyzer/AstImport.js';
|
||||
import type { AstModuleExportInfo } from '../analyzer/AstModule.js';
|
||||
import { AstNamespaceImport } from '../analyzer/AstNamespaceImport.js';
|
||||
import { AstSymbol } from '../analyzer/AstSymbol.js';
|
||||
import { SourceFileLocationFormatter } from '../analyzer/SourceFileLocationFormatter.js';
|
||||
import { IndentDocCommentScope, Span, type SpanModification } from '../analyzer/Span.js';
|
||||
import { TypeScriptHelpers } from '../analyzer/TypeScriptHelpers.js';
|
||||
import type { ApiItemMetadata } from '../collector/ApiItemMetadata.js';
|
||||
import type { Collector } from '../collector/Collector.js';
|
||||
import type { CollectorEntity } from '../collector/CollectorEntity.js';
|
||||
import type { DeclarationMetadata } from '../collector/DeclarationMetadata.js';
|
||||
import type { SymbolMetadata } from '../collector/SymbolMetadata.js';
|
||||
import { DtsEmitHelpers } from './DtsEmitHelpers.js';
|
||||
import { IndentedWriter } from './IndentedWriter.js';
|
||||
|
||||
/**
 * Used with DtsRollupGenerator.writeTypingsFile()
 *
 * Selects which release tags are trimmed from the generated rollup; see
 * `DtsRollupGenerator._shouldIncludeReleaseTag` for the exact mapping.
 */
export enum DtsRollupKind {
	/**
	 * Generate a *.d.ts file for an internal release, or for the trimming=false mode.
	 * This output file will contain all definitions that are reachable from the entry point.
	 */
	InternalRelease,

	/**
	 * Generate a *.d.ts file for a preview (alpha) release.
	 * This output file will contain all definitions that are reachable from the entry point,
	 * except definitions marked as \@internal.
	 */
	AlphaRelease,

	/**
	 * Generate a *.d.ts file for a preview (beta) release.
	 * This output file will contain all definitions that are reachable from the entry point,
	 * except definitions marked as \@alpha or \@internal.
	 */
	BetaRelease,

	/**
	 * Generate a *.d.ts file for a public release.
	 * This output file will contain all definitions that are reachable from the entry point,
	 * except definitions marked as \@beta, \@alpha, or \@internal.
	 */
	PublicRelease,
}
|
||||
|
||||
export class DtsRollupGenerator {
|
||||
/**
|
||||
* Generates the typings file and writes it to disk.
|
||||
*
|
||||
* @param collector - The Collector
|
||||
* @param dtsFilename - The *.d.ts output filename
|
||||
*/
|
||||
public static writeTypingsFile(
|
||||
collector: Collector,
|
||||
dtsFilename: string,
|
||||
dtsKind: DtsRollupKind,
|
||||
newlineKind: NewlineKind,
|
||||
): void {
|
||||
const writer: IndentedWriter = new IndentedWriter();
|
||||
writer.trimLeadingSpaces = true;
|
||||
|
||||
DtsRollupGenerator._generateTypingsFileContent(collector, writer, dtsKind);
|
||||
|
||||
FileSystem.writeFile(dtsFilename, writer.toString(), {
|
||||
convertLineEndings: newlineKind,
|
||||
ensureFolderExists: true,
|
||||
});
|
||||
}
|
||||
|
||||
private static _generateTypingsFileContent(
|
||||
collector: Collector,
|
||||
writer: IndentedWriter,
|
||||
dtsKind: DtsRollupKind,
|
||||
): void {
|
||||
// Emit the @packageDocumentation comment at the top of the file
|
||||
if (collector.workingPackage.tsdocParserContext) {
|
||||
writer.trimLeadingSpaces = false;
|
||||
writer.writeLine(collector.workingPackage.tsdocParserContext.sourceRange.toString());
|
||||
writer.trimLeadingSpaces = true;
|
||||
writer.ensureSkippedLine();
|
||||
}
|
||||
|
||||
// Emit the triple slash directives
|
||||
for (const typeDirectiveReference of collector.dtsTypeReferenceDirectives) {
|
||||
// https://github.com/microsoft/TypeScript/blob/611ebc7aadd7a44a4c0447698bfda9222a78cb66/src/compiler/declarationEmitter.ts#L162
|
||||
writer.writeLine(`/// <reference types="${typeDirectiveReference}" />`);
|
||||
}
|
||||
|
||||
for (const libDirectiveReference of collector.dtsLibReferenceDirectives) {
|
||||
writer.writeLine(`/// <reference lib="${libDirectiveReference}" />`);
|
||||
}
|
||||
|
||||
writer.ensureSkippedLine();
|
||||
|
||||
// Emit the imports
|
||||
for (const entity of collector.entities) {
|
||||
if (entity.astEntity instanceof AstImport) {
|
||||
const astImport: AstImport = entity.astEntity;
|
||||
|
||||
// For example, if the imported API comes from an external package that supports AEDoc,
|
||||
// and it was marked as `@internal`, then don't emit it.
|
||||
const symbolMetadata: SymbolMetadata | undefined = collector.tryFetchMetadataForAstEntity(astImport);
|
||||
const maxEffectiveReleaseTag: ReleaseTag = symbolMetadata
|
||||
? symbolMetadata.maxEffectiveReleaseTag
|
||||
: ReleaseTag.None;
|
||||
|
||||
if (this._shouldIncludeReleaseTag(maxEffectiveReleaseTag, dtsKind)) {
|
||||
DtsEmitHelpers.emitImport(writer, entity, astImport);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
writer.ensureSkippedLine();
|
||||
|
||||
// Emit the regular declarations
|
||||
for (const entity of collector.entities) {
|
||||
const astEntity: AstEntity = entity.astEntity;
|
||||
const symbolMetadata: SymbolMetadata | undefined = collector.tryFetchMetadataForAstEntity(astEntity);
|
||||
const maxEffectiveReleaseTag: ReleaseTag = symbolMetadata
|
||||
? symbolMetadata.maxEffectiveReleaseTag
|
||||
: ReleaseTag.None;
|
||||
|
||||
if (!this._shouldIncludeReleaseTag(maxEffectiveReleaseTag, dtsKind)) {
|
||||
if (!collector.extractorConfig.omitTrimmingComments) {
|
||||
writer.ensureSkippedLine();
|
||||
writer.writeLine(`/* Excluded from this release type: ${entity.nameForEmit} */`);
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
if (astEntity instanceof AstSymbol) {
|
||||
// Emit all the declarations for this entry
|
||||
for (const astDeclaration of astEntity.astDeclarations || []) {
|
||||
const apiItemMetadata: ApiItemMetadata = collector.fetchApiItemMetadata(astDeclaration);
|
||||
|
||||
if (this._shouldIncludeReleaseTag(apiItemMetadata.effectiveReleaseTag, dtsKind)) {
|
||||
const span: Span = new Span(astDeclaration.declaration);
|
||||
DtsRollupGenerator._modifySpan(collector, span, entity, astDeclaration, dtsKind);
|
||||
writer.ensureSkippedLine();
|
||||
span.writeModifiedText(writer);
|
||||
writer.ensureNewLine();
|
||||
} else if (!collector.extractorConfig.omitTrimmingComments) {
|
||||
writer.ensureSkippedLine();
|
||||
writer.writeLine(`/* Excluded declaration from this release type: ${entity.nameForEmit} */`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (astEntity instanceof AstNamespaceImport) {
|
||||
const astModuleExportInfo: AstModuleExportInfo = astEntity.fetchAstModuleExportInfo(collector);
|
||||
|
||||
if (entity.nameForEmit === undefined) {
|
||||
// This should never happen
|
||||
throw new InternalError('referencedEntry.nameForEmit is undefined');
|
||||
}
|
||||
|
||||
if (astModuleExportInfo.starExportedExternalModules.size > 0) {
|
||||
// We could support this, but we would need to find a way to safely represent it.
|
||||
throw new Error(
|
||||
`The ${entity.nameForEmit} namespace import includes a start export, which is not supported:\n` +
|
||||
SourceFileLocationFormatter.formatDeclaration(astEntity.declaration),
|
||||
);
|
||||
}
|
||||
|
||||
// Emit a synthetic declaration for the namespace. It will look like this:
|
||||
//
|
||||
// declare namespace example {
|
||||
// export {
|
||||
// f1,
|
||||
// f2
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// Note that we do not try to relocate f1()/f2() to be inside the namespace because other type
|
||||
// signatures may reference them directly (without using the namespace qualifier).
|
||||
|
||||
writer.ensureSkippedLine();
|
||||
if (entity.shouldInlineExport) {
|
||||
writer.write('export ');
|
||||
}
|
||||
|
||||
writer.writeLine(`declare namespace ${entity.nameForEmit} {`);
|
||||
|
||||
// all local exports of local imported module are just references to top-level declarations
|
||||
writer.increaseIndent();
|
||||
writer.writeLine('export {');
|
||||
writer.increaseIndent();
|
||||
|
||||
const exportClauses: string[] = [];
|
||||
for (const [exportedName, exportedEntity] of astModuleExportInfo.exportedLocalEntities) {
|
||||
const collectorEntity: CollectorEntity | undefined = collector.tryGetCollectorEntity(exportedEntity);
|
||||
if (collectorEntity === undefined) {
|
||||
// This should never happen
|
||||
// top-level exports of local imported module should be added as collector entities before
|
||||
throw new InternalError(
|
||||
`Cannot find collector entity for ${entity.nameForEmit}.${exportedEntity.localName}`,
|
||||
);
|
||||
}
|
||||
|
||||
if (collectorEntity.nameForEmit === exportedName) {
|
||||
exportClauses.push(collectorEntity.nameForEmit);
|
||||
} else {
|
||||
exportClauses.push(`${collectorEntity.nameForEmit} as ${exportedName}`);
|
||||
}
|
||||
}
|
||||
|
||||
writer.writeLine(exportClauses.join(',\n'));
|
||||
|
||||
writer.decreaseIndent();
|
||||
writer.writeLine('}'); // end of "export { ... }"
|
||||
writer.decreaseIndent();
|
||||
writer.writeLine('}'); // end of "declare namespace { ... }"
|
||||
}
|
||||
|
||||
if (!entity.shouldInlineExport) {
|
||||
for (const exportName of entity.exportNames) {
|
||||
DtsEmitHelpers.emitNamedExport(writer, exportName, entity);
|
||||
}
|
||||
}
|
||||
|
||||
writer.ensureSkippedLine();
|
||||
}
|
||||
|
||||
DtsEmitHelpers.emitStarExports(writer, collector);
|
||||
|
||||
// Emit "export { }" which is a special directive that prevents consumers from importing declarations
|
||||
// that don't have an explicit "export" modifier.
|
||||
writer.ensureSkippedLine();
|
||||
writer.writeLine('export { }');
|
||||
}
|
||||
|
||||
/**
|
||||
* Before writing out a declaration, _modifySpan() applies various fixups to make it nice.
|
||||
*/
|
||||
private static _modifySpan(
|
||||
collector: Collector,
|
||||
span: Span,
|
||||
entity: CollectorEntity,
|
||||
astDeclaration: AstDeclaration,
|
||||
dtsKind: DtsRollupKind,
|
||||
): void {
|
||||
const previousSpan: Span | undefined = span.previousSibling;
|
||||
|
||||
let recurseChildren = true;
|
||||
switch (span.kind) {
|
||||
case ts.SyntaxKind.JSDocComment:
|
||||
// If the @packageDocumentation comment seems to be attached to one of the regular API items,
|
||||
// omit it. It gets explictly emitted at the top of the file.
|
||||
if (/[\s*]@packagedocumentation[\s*]/gi.test(span.node.getText())) {
|
||||
span.modification.skipAll();
|
||||
}
|
||||
|
||||
// For now, we don't transform JSDoc comment nodes at all
|
||||
recurseChildren = false;
|
||||
break;
|
||||
|
||||
case ts.SyntaxKind.ExportKeyword:
|
||||
case ts.SyntaxKind.DefaultKeyword:
|
||||
case ts.SyntaxKind.DeclareKeyword:
|
||||
// Delete any explicit "export" or "declare" keywords -- we will re-add them below
|
||||
span.modification.skipAll();
|
||||
break;
|
||||
|
||||
case ts.SyntaxKind.InterfaceKeyword:
|
||||
case ts.SyntaxKind.ClassKeyword:
|
||||
case ts.SyntaxKind.EnumKeyword:
|
||||
case ts.SyntaxKind.NamespaceKeyword:
|
||||
case ts.SyntaxKind.ModuleKeyword:
|
||||
case ts.SyntaxKind.TypeKeyword:
|
||||
case ts.SyntaxKind.FunctionKeyword:
|
||||
// Replace the stuff we possibly deleted above
|
||||
let replacedModifiers = '';
|
||||
|
||||
// Add a declare statement for root declarations (but not for nested declarations)
|
||||
if (!astDeclaration.parent) {
|
||||
replacedModifiers += 'declare ';
|
||||
}
|
||||
|
||||
if (entity.shouldInlineExport) {
|
||||
replacedModifiers = 'export ' + replacedModifiers;
|
||||
}
|
||||
|
||||
if (previousSpan && previousSpan.kind === ts.SyntaxKind.SyntaxList) {
|
||||
// If there is a previous span of type SyntaxList, then apply it before any other modifiers
|
||||
// (e.g. "abstract") that appear there.
|
||||
previousSpan.modification.prefix = replacedModifiers + previousSpan.modification.prefix;
|
||||
} else {
|
||||
// Otherwise just stick it in front of this span
|
||||
span.modification.prefix = replacedModifiers + span.modification.prefix;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case ts.SyntaxKind.VariableDeclaration:
|
||||
// Is this a top-level variable declaration?
|
||||
// (The logic below does not apply to variable declarations that are part of an explicit "namespace" block,
|
||||
// since the compiler prefers not to emit "declare" or "export" keywords for those declarations.)
|
||||
if (!span.parent) {
|
||||
// The VariableDeclaration node is part of a VariableDeclarationList, however
|
||||
// the Entry.followedSymbol points to the VariableDeclaration part because
|
||||
// multiple definitions might share the same VariableDeclarationList.
|
||||
//
|
||||
// Since we are emitting a separate declaration for each one, we need to look upwards
|
||||
// in the ts.Node tree and write a copy of the enclosing VariableDeclarationList
|
||||
// content (e.g. "var" from "var x=1, y=2").
|
||||
const list: ts.VariableDeclarationList | undefined = TypeScriptHelpers.matchAncestor(span.node, [
|
||||
ts.SyntaxKind.VariableDeclarationList,
|
||||
ts.SyntaxKind.VariableDeclaration,
|
||||
]);
|
||||
if (!list) {
|
||||
// This should not happen unless the compiler API changes somehow
|
||||
throw new InternalError('Unsupported variable declaration');
|
||||
}
|
||||
|
||||
const listPrefix: string = list.getSourceFile().text.slice(list.getStart(), list.declarations[0]!.getStart());
|
||||
span.modification.prefix = 'declare ' + listPrefix + span.modification.prefix;
|
||||
span.modification.suffix = ';';
|
||||
|
||||
if (entity.shouldInlineExport) {
|
||||
span.modification.prefix = 'export ' + span.modification.prefix;
|
||||
}
|
||||
|
||||
const declarationMetadata: DeclarationMetadata = collector.fetchDeclarationMetadata(astDeclaration);
|
||||
if (declarationMetadata.tsdocParserContext) {
|
||||
// Typically the comment for a variable declaration is attached to the outer variable statement
|
||||
// (which may possibly contain multiple variable declarations), so it's not part of the Span.
|
||||
// Instead we need to manually inject it.
|
||||
let originalComment: string = declarationMetadata.tsdocParserContext.sourceRange.toString();
|
||||
if (!/\r?\n\s*$/.test(originalComment)) {
|
||||
originalComment += '\n';
|
||||
}
|
||||
|
||||
span.modification.indentDocComment = IndentDocCommentScope.PrefixOnly;
|
||||
span.modification.prefix = originalComment + span.modification.prefix;
|
||||
}
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case ts.SyntaxKind.Identifier:
|
||||
{
|
||||
const referencedEntity: CollectorEntity | undefined = collector.tryGetEntityForNode(
|
||||
span.node as ts.Identifier,
|
||||
);
|
||||
|
||||
if (referencedEntity) {
|
||||
if (!referencedEntity.nameForEmit) {
|
||||
// This should never happen
|
||||
throw new InternalError('referencedEntry.nameForEmit is undefined');
|
||||
}
|
||||
|
||||
span.modification.prefix = referencedEntity.nameForEmit;
|
||||
// For debugging:
|
||||
// span.modification.prefix += '/*R=FIX*/';
|
||||
} else {
|
||||
// For debugging:
|
||||
// span.modification.prefix += '/*R=KEEP*/';
|
||||
}
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case ts.SyntaxKind.ImportType:
|
||||
DtsEmitHelpers.modifyImportTypeSpan(collector, span, astDeclaration, (childSpan, childAstDeclaration) => {
|
||||
DtsRollupGenerator._modifySpan(collector, childSpan, entity, childAstDeclaration, dtsKind);
|
||||
});
|
||||
break;
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
if (recurseChildren) {
|
||||
for (const child of span.children) {
|
||||
let childAstDeclaration: AstDeclaration = astDeclaration;
|
||||
|
||||
// Should we trim this node?
|
||||
let trimmed = false;
|
||||
if (AstDeclaration.isSupportedSyntaxKind(child.kind)) {
|
||||
childAstDeclaration = collector.astSymbolTable.getChildAstDeclarationByNode(child.node, astDeclaration);
|
||||
const releaseTag: ReleaseTag = collector.fetchApiItemMetadata(childAstDeclaration).effectiveReleaseTag;
|
||||
|
||||
if (!this._shouldIncludeReleaseTag(releaseTag, dtsKind)) {
|
||||
let nodeToTrim: Span = child;
|
||||
|
||||
// If we are trimming a variable statement, then we need to trim the outer VariableDeclarationList
|
||||
// as well.
|
||||
if (child.kind === ts.SyntaxKind.VariableDeclaration) {
|
||||
const variableStatement: Span | undefined = child.findFirstParent(ts.SyntaxKind.VariableStatement);
|
||||
if (variableStatement !== undefined) {
|
||||
nodeToTrim = variableStatement;
|
||||
}
|
||||
}
|
||||
|
||||
const modification: SpanModification = nodeToTrim.modification;
|
||||
|
||||
// Yes, trim it and stop here
|
||||
const name: string = childAstDeclaration.astSymbol.localName;
|
||||
modification.omitChildren = true;
|
||||
|
||||
if (collector.extractorConfig.omitTrimmingComments) {
|
||||
modification.prefix = '';
|
||||
} else {
|
||||
modification.prefix = `/* Excluded from this release type: ${name} */`;
|
||||
}
|
||||
|
||||
modification.suffix = '';
|
||||
|
||||
if (nodeToTrim.children.length > 0) {
|
||||
// If there are grandchildren, then keep the last grandchild's separator,
|
||||
// since it often has useful whitespace
|
||||
modification.suffix = nodeToTrim.children[nodeToTrim.children.length - 1]!.separator;
|
||||
}
|
||||
|
||||
if (
|
||||
nodeToTrim.nextSibling && // If the thing we are trimming is followed by a comma, then trim the comma also.
|
||||
// An example would be an enum member.
|
||||
nodeToTrim.nextSibling.kind === ts.SyntaxKind.CommaToken
|
||||
) {
|
||||
// Keep its separator since it often has useful whitespace
|
||||
modification.suffix += nodeToTrim.nextSibling.separator;
|
||||
nodeToTrim.nextSibling.modification.skipAll();
|
||||
}
|
||||
|
||||
trimmed = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (!trimmed) {
|
||||
DtsRollupGenerator._modifySpan(collector, child, entity, childAstDeclaration, dtsKind);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Decides whether a declaration tagged with `releaseTag` belongs in the rollup flavor
 * identified by `dtsKind`.
 *
 * @param releaseTag - The effective release tag of the declaration being considered
 * @param dtsKind - Which .d.ts rollup flavor is currently being emitted
 * @returns `true` if the declaration should be kept in this rollup
 * @throws Error if `dtsKind` is not a recognized rollup flavor
 */
private static _shouldIncludeReleaseTag(releaseTag: ReleaseTag, dtsKind: DtsRollupKind): boolean {
  // The internal rollup keeps everything.
  if (dtsKind === DtsRollupKind.InternalRelease) {
    return true;
  }

  // NOTE: A release tag of "None" never provides enough information to trim the item,
  // so it is always retained for every flavor below.
  if (dtsKind === DtsRollupKind.AlphaRelease) {
    return (
      releaseTag === ReleaseTag.Alpha ||
      releaseTag === ReleaseTag.Beta ||
      releaseTag === ReleaseTag.Public ||
      releaseTag === ReleaseTag.None
    );
  }

  if (dtsKind === DtsRollupKind.BetaRelease) {
    return releaseTag === ReleaseTag.Beta || releaseTag === ReleaseTag.Public || releaseTag === ReleaseTag.None;
  }

  if (dtsKind === DtsRollupKind.PublicRelease) {
    return releaseTag === ReleaseTag.Public || releaseTag === ReleaseTag.None;
  }

  throw new Error(`${DtsRollupKind[dtsKind]} is not implemented`);
}
|
||||
}
|
||||
338
packages/api-extractor/src/generators/ExcerptBuilder.ts
Normal file
338
packages/api-extractor/src/generators/ExcerptBuilder.ts
Normal file
@@ -0,0 +1,338 @@
|
||||
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
|
||||
// See LICENSE in the project root for license information.
|
||||
|
||||
import { ExcerptTokenKind, type IExcerptToken, type IExcerptTokenRange } from '@discordjs/api-extractor-model';
|
||||
import type { DeclarationReference } from '@microsoft/tsdoc/lib-commonjs/beta/DeclarationReference';
|
||||
import * as ts from 'typescript';
|
||||
import type { AstDeclaration } from '../analyzer/AstDeclaration.js';
|
||||
import { Span } from '../analyzer/Span.js';
|
||||
import type { DeclarationReferenceGenerator } from './DeclarationReferenceGenerator.js';
|
||||
|
||||
/**
 * Used to provide ExcerptBuilder with a list of nodes whose token range we want to capture.
 */
export interface IExcerptBuilderNodeToCapture {
  /**
   * The node to capture, or `undefined` when this entry has nothing to capture
   * (such entries are skipped by `ExcerptBuilder.addDeclaration`).
   */
  node: ts.Node | undefined;
  /**
   * The token range whose startIndex/endIndex will be overwritten with the indexes for the
   * tokens corresponding to {@link IExcerptBuilderNodeToCapture.node}.
   */
  tokenRange: IExcerptTokenRange;
}
|
||||
|
||||
/**
 * Internal state for ExcerptBuilder, threaded through the recursive `_buildSpan()` traversal.
 */
interface IBuildSpanState {
  /**
   * Tracks whether the last appended token was a separator. If so, and we're in the middle of
   * capturing a token range, then omit the separator from the range.
   */
  lastAppendedTokenIsSeparator: boolean;

  /**
   * Used to resolve identifier tokens in the excerpt to hyperlinkable declaration references.
   */
  referenceGenerator: DeclarationReferenceGenerator;

  /**
   * The AST node that we will traverse to extract tokens
   */
  startingNode: ts.Node;

  /**
   * Normally, the excerpt will include all child nodes for `startingNode`; whereas if `stopBeforeChildKind`
   * is specified, then the node traversal will stop before (i.e. excluding) the first immediate child
   * of `startingNode` with the specified syntax kind.
   *
   * @remarks
   * For example, suppose the signature is `interface X: Y { z: string }`. The token `{` has syntax kind
   * `ts.SyntaxKind.FirstPunctuation`, so we can specify that to truncate the excerpt to `interface X: Y`.
   */
  stopBeforeChildKind: ts.SyntaxKind | undefined;

  /**
   * Maps an AST node to the token range that should be overwritten with the start/end indexes
   * of the excerpt tokens generated for that node.
   */
  tokenRangesByNode: Map<ts.Node, IExcerptTokenRange>;
}
|
||||
|
||||
/**
 * Converts an `AstDeclaration` signature into a flat list of excerpt tokens
 * (`Content` text and hyperlinkable `Reference` tokens), recording token ranges
 * for any requested child nodes.
 */
export class ExcerptBuilder {
  /**
   * Appends a blank line to the `excerptTokens` list.
   *
   * @param excerptTokens - The target token list to append to
   */
  public static addBlankLine(excerptTokens: IExcerptToken[]): void {
    let newlines = '\n\n';
    // If the existing text already ended with a newline, then only append one newline
    if (excerptTokens.length > 0) {
      const previousText: string = excerptTokens[excerptTokens.length - 1]!.text;
      if (previousText.endsWith('\n')) {
        newlines = '\n';
      }
    }

    excerptTokens.push({ kind: ExcerptTokenKind.Content, text: newlines });
  }

  /**
   * Appends the signature for the specified `AstDeclaration` to the `excerptTokens` list.
   *
   * @param excerptTokens - The target token list to append to
   * @param astDeclaration - The declaration
   * @param nodesToCapture - A list of child nodes whose token ranges we want to capture
   * @param referenceGenerator - Resolves identifiers to hyperlinkable declaration references
   */
  public static addDeclaration(
    excerptTokens: IExcerptToken[],
    astDeclaration: AstDeclaration,
    nodesToCapture: IExcerptBuilderNodeToCapture[],
    referenceGenerator: DeclarationReferenceGenerator,
  ): void {
    // For container-like declarations, truncate the excerpt before the body block
    // so that only the "header" of the signature is captured.
    let stopBeforeChildKind: ts.SyntaxKind | undefined;

    switch (astDeclaration.declaration.kind) {
      case ts.SyntaxKind.ClassDeclaration:
      case ts.SyntaxKind.EnumDeclaration:
      case ts.SyntaxKind.InterfaceDeclaration:
        // FirstPunctuation = "{"
        stopBeforeChildKind = ts.SyntaxKind.FirstPunctuation;
        break;
      case ts.SyntaxKind.ModuleDeclaration:
        // ModuleBlock = the "{ ... }" block
        stopBeforeChildKind = ts.SyntaxKind.ModuleBlock;
        break;
      default:
        break;
    }

    const span: Span = new Span(astDeclaration.declaration);

    // Index the capture requests by node so _buildSpan() can find them during traversal.
    const tokenRangesByNode: Map<ts.Node, IExcerptTokenRange> = new Map<ts.Node, IExcerptTokenRange>();
    for (const excerpt of nodesToCapture || []) {
      if (excerpt.node) {
        tokenRangesByNode.set(excerpt.node, excerpt.tokenRange);
      }
    }

    ExcerptBuilder._buildSpan(excerptTokens, span, {
      referenceGenerator,
      startingNode: span.node,
      stopBeforeChildKind,
      tokenRangesByNode,
      lastAppendedTokenIsSeparator: false,
    });
    // Merge adjacent tokens where possible, keeping the captured ranges consistent.
    ExcerptBuilder._condenseTokens(excerptTokens, [...tokenRangesByNode.values()]);
  }

  /**
   * Creates a token range whose startIndex/endIndex are initialized to zero.
   * The real indexes are filled in later by `addDeclaration()`.
   */
  public static createEmptyTokenRange(): IExcerptTokenRange {
    return { startIndex: 0, endIndex: 0 };
  }

  /**
   * Recursively walks `span`, appending its text as excerpt tokens and recording token ranges
   * for any nodes registered in `state.tokenRangesByNode`.
   *
   * @returns `false` if traversal stopped early because `state.stopBeforeChildKind` was reached;
   * otherwise `true`
   */
  private static _buildSpan(excerptTokens: IExcerptToken[], span: Span, state: IBuildSpanState): boolean {
    if (span.kind === ts.SyntaxKind.JSDocComment) {
      // Discard any comments
      return true;
    }

    // Can this node start an excerpt?
    const capturedTokenRange: IExcerptTokenRange | undefined = state.tokenRangesByNode.get(span.node);
    let excerptStartIndex = 0;

    if (capturedTokenRange) {
      // We will assign capturedTokenRange.startIndex to be the index of the next token to be appended
      excerptStartIndex = excerptTokens.length;
    }

    if (span.prefix) {
      let canonicalReference: DeclarationReference | undefined;

      if (span.kind === ts.SyntaxKind.Identifier) {
        const name: ts.Identifier = span.node as ts.Identifier;
        // Don't hyperlink an identifier that is itself the name being declared
        if (!ExcerptBuilder._isDeclarationName(name)) {
          canonicalReference = state.referenceGenerator.getDeclarationReferenceForIdentifier(name);
        }
      }

      if (canonicalReference) {
        ExcerptBuilder._appendToken(excerptTokens, ExcerptTokenKind.Reference, span.prefix, canonicalReference);
      } else {
        ExcerptBuilder._appendToken(excerptTokens, ExcerptTokenKind.Content, span.prefix);
      }

      state.lastAppendedTokenIsSeparator = false;
    }

    for (const child of span.children) {
      if (span.node === state.startingNode && state.stopBeforeChildKind && child.kind === state.stopBeforeChildKind) {
        // We reached a child whose kind is stopBeforeChildKind, so stop traversing
        return false;
      }

      if (!this._buildSpan(excerptTokens, child, state)) {
        return false;
      }
    }

    if (span.suffix) {
      ExcerptBuilder._appendToken(excerptTokens, ExcerptTokenKind.Content, span.suffix);
      state.lastAppendedTokenIsSeparator = false;
    }

    if (span.separator) {
      ExcerptBuilder._appendToken(excerptTokens, ExcerptTokenKind.Content, span.separator);
      state.lastAppendedTokenIsSeparator = true;
    }

    // Are we building an excerpt? If so, set its range
    if (capturedTokenRange) {
      capturedTokenRange.startIndex = excerptStartIndex;

      // We will assign capturedTokenRange.endIndex to be the index after the last token
      // that was appended so far. However, if the last appended token was a separator, omit
      // it from the range.
      let excerptEndIndex: number = excerptTokens.length;
      if (state.lastAppendedTokenIsSeparator) {
        excerptEndIndex--;
      }

      capturedTokenRange.endIndex = excerptEndIndex;
    }

    return true;
  }

  /**
   * Appends one token to `excerptTokens`, attaching `canonicalReference` (stringified) when
   * provided. Zero-length text is discarded.
   */
  private static _appendToken(
    excerptTokens: IExcerptToken[],
    excerptTokenKind: ExcerptTokenKind,
    text: string,
    canonicalReference?: DeclarationReference,
  ): void {
    if (text.length === 0) {
      return;
    }

    const excerptToken: IExcerptToken = { kind: excerptTokenKind, text };
    if (canonicalReference !== undefined) {
      excerptToken.canonicalReference = canonicalReference.toString();
    }

    excerptTokens.push(excerptToken);
  }

  /**
   * Condenses the provided excerpt tokens by merging tokens where possible. Updates the provided token ranges to
   * remain accurate after token merging.
   *
   * @remarks
   * For example, suppose we have excerpt tokens ["A", "B", "C"] and a token range [0, 2]. If the excerpt tokens
   * are condensed to ["AB", "C"], then the token range would be updated to [0, 1]. Note that merges are only
   * performed if they are compatible with the provided token ranges. In the example above, if our token range was
   * originally [0, 1], we would not be able to merge tokens "A" and "B".
   */
  private static _condenseTokens(excerptTokens: IExcerptToken[], tokenRanges: IExcerptTokenRange[]): void {
    // This set is used to quickly lookup a start or end index.
    const startOrEndIndices: Set<number> = new Set();
    for (const tokenRange of tokenRanges) {
      startOrEndIndices.add(tokenRange.startIndex);
      startOrEndIndices.add(tokenRange.endIndex);
    }

    for (let currentIndex = 1; currentIndex < excerptTokens.length; ++currentIndex) {
      // The inner loop keeps merging at the same position until no further merge applies there.
      while (currentIndex < excerptTokens.length) {
        const prevPrevToken: IExcerptToken | undefined = excerptTokens[currentIndex - 2]; // May be undefined
        const prevToken: IExcerptToken = excerptTokens[currentIndex - 1]!;
        const currentToken: IExcerptToken = excerptTokens[currentIndex]!;

        // The number of excerpt tokens that are merged in this iteration. We need this to determine
        // how to update the start and end indices of our token ranges.
        let mergeCount: number;

        // There are two types of merges that can occur. We only perform these merges if they are
        // compatible with all of our token ranges.
        if (
          prevPrevToken &&
          prevPrevToken.kind === ExcerptTokenKind.Reference &&
          prevToken.kind === ExcerptTokenKind.Content &&
          prevToken.text.trim() === '.' &&
          currentToken.kind === ExcerptTokenKind.Reference &&
          !startOrEndIndices.has(currentIndex) &&
          !startOrEndIndices.has(currentIndex - 1)
        ) {
          // If the current token is a reference token, the previous token is a ".", and the previous-
          // previous token is a reference token, then merge all three tokens into a reference token.
          //
          // For example: Given ["MyNamespace" (R), ".", "MyClass" (R)], tokens "." and "MyClass" might
          // be merged into "MyNamespace". The condensed token would be ["MyNamespace.MyClass" (R)].
          prevPrevToken.text += prevToken.text + currentToken.text;
          prevPrevToken.canonicalReference = currentToken.canonicalReference;
          mergeCount = 2;
          currentIndex--;
        } else if (
          // If the current and previous tokens are both content tokens, then merge the tokens into a
          // single content token. For example: Given ["export ", "declare class"], these tokens
          // might be merged into "export declare class".
          prevToken.kind === ExcerptTokenKind.Content &&
          prevToken.kind === currentToken.kind &&
          !startOrEndIndices.has(currentIndex)
        ) {
          prevToken.text += currentToken.text;
          mergeCount = 1;
        } else {
          // Otherwise, no merging can occur here. Continue to the next index.
          break;
        }

        // Remove the now redundant excerpt token(s), as they were merged into a previous token.
        excerptTokens.splice(currentIndex, mergeCount);

        // Update the start and end indices for all token ranges based upon how many excerpt
        // tokens were merged and in what positions.
        for (const tokenRange of tokenRanges) {
          if (tokenRange.startIndex > currentIndex) {
            tokenRange.startIndex -= mergeCount;
          }

          if (tokenRange.endIndex > currentIndex) {
            tokenRange.endIndex -= mergeCount;
          }
        }

        // Clear and repopulate our set with the updated indices.
        startOrEndIndices.clear();
        for (const tokenRange of tokenRanges) {
          startOrEndIndices.add(tokenRange.startIndex);
          startOrEndIndices.add(tokenRange.endIndex);
        }
      }
    }
  }

  /**
   * Returns true if `name` is the identifier being declared by its parent node
   * (e.g. the `X` in `class X`), rather than a reference to some other declaration.
   */
  private static _isDeclarationName(name: ts.Identifier): boolean {
    return ExcerptBuilder._isDeclaration(name.parent) && name.parent.name === name;
  }

  /**
   * Returns true if `node` is one of the declaration kinds whose `name` property identifies
   * the declared symbol.
   */
  private static _isDeclaration(node: ts.Node): node is ts.NamedDeclaration {
    switch (node.kind) {
      case ts.SyntaxKind.FunctionDeclaration:
      case ts.SyntaxKind.FunctionExpression:
      case ts.SyntaxKind.VariableDeclaration:
      case ts.SyntaxKind.Parameter:
      case ts.SyntaxKind.EnumDeclaration:
      case ts.SyntaxKind.ClassDeclaration:
      case ts.SyntaxKind.ClassExpression:
      case ts.SyntaxKind.ModuleDeclaration:
      case ts.SyntaxKind.MethodDeclaration:
      case ts.SyntaxKind.MethodSignature:
      case ts.SyntaxKind.PropertyDeclaration:
      case ts.SyntaxKind.PropertySignature:
      case ts.SyntaxKind.GetAccessor:
      case ts.SyntaxKind.SetAccessor:
      case ts.SyntaxKind.InterfaceDeclaration:
      case ts.SyntaxKind.TypeAliasDeclaration:
      case ts.SyntaxKind.TypeParameter:
      case ts.SyntaxKind.EnumMember:
      case ts.SyntaxKind.BindingElement:
        return true;
      default:
        return false;
    }
  }
}
|
||||
281
packages/api-extractor/src/generators/IndentedWriter.ts
Normal file
281
packages/api-extractor/src/generators/IndentedWriter.ts
Normal file
@@ -0,0 +1,281 @@
|
||||
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
|
||||
// See LICENSE in the project root for license information.
|
||||
|
||||
import { StringBuilder, type IStringBuilder } from '@rushstack/node-core-library';
|
||||
|
||||
/**
|
||||
* A utility for writing indented text.
|
||||
*
|
||||
* @remarks
|
||||
*
|
||||
* Note that the indentation is inserted at the last possible opportunity.
|
||||
* For example, this code...
|
||||
*
|
||||
* ```ts
|
||||
* writer.write('begin\n');
|
||||
* writer.increaseIndent();
|
||||
* writer.write('one\ntwo\n');
|
||||
* writer.decreaseIndent();
|
||||
* writer.increaseIndent();
|
||||
* writer.decreaseIndent();
|
||||
* writer.write('end');
|
||||
* ```
|
||||
*
|
||||
* ...would produce this output:
|
||||
*
|
||||
* ```
|
||||
* begin
|
||||
* one
|
||||
* two
|
||||
* end
|
||||
* ```
|
||||
*/
|
||||
export class IndentedWriter {
|
||||
/**
|
||||
* The text characters used to create one level of indentation.
|
||||
* Two spaces by default.
|
||||
*/
|
||||
public defaultIndentPrefix: string = ' ';
|
||||
|
||||
/**
|
||||
* Whether to indent blank lines
|
||||
*/
|
||||
public indentBlankLines: boolean = false;
|
||||
|
||||
/**
|
||||
* Trims leading spaces from the input text before applying the indent.
|
||||
*
|
||||
* @remarks
|
||||
* Consider the following example:
|
||||
*
|
||||
* ```ts
|
||||
* indentedWriter.increaseIndent(' '); // four spaces
|
||||
* indentedWriter.write(' a\n b c\n');
|
||||
* indentedWriter.decreaseIndent();
|
||||
* ```
|
||||
*
|
||||
* Normally the output would be indented by 6 spaces: 4 from `increaseIndent()`, plus the 2 spaces
|
||||
* from `write()`:
|
||||
* ```
|
||||
* a
|
||||
* b c
|
||||
* ```
|
||||
*
|
||||
* Setting `trimLeadingSpaces=true` will trim the leading spaces, so that the lines are indented
|
||||
* by 4 spaces only:
|
||||
* ```
|
||||
* a
|
||||
* b c
|
||||
* ```
|
||||
*/
|
||||
public trimLeadingSpaces: boolean = false;
|
||||
|
||||
private readonly _builder: IStringBuilder;
|
||||
|
||||
private _latestChunk: string | undefined;
|
||||
|
||||
private _previousChunk: string | undefined;
|
||||
|
||||
private _atStartOfLine: boolean;
|
||||
|
||||
private readonly _indentStack: string[];
|
||||
|
||||
private _indentText: string;
|
||||
|
||||
private _previousLineIsBlank: boolean;
|
||||
|
||||
private _currentLineIsBlank: boolean;
|
||||
|
||||
public constructor(builder?: IStringBuilder) {
|
||||
this._builder = builder ?? new StringBuilder();
|
||||
this._latestChunk = undefined;
|
||||
this._previousChunk = undefined;
|
||||
this._atStartOfLine = true;
|
||||
this._previousLineIsBlank = true;
|
||||
this._currentLineIsBlank = true;
|
||||
|
||||
this._indentStack = [];
|
||||
this._indentText = '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the output that was built so far.
|
||||
*/
|
||||
public getText(): string {
|
||||
return this._builder.toString();
|
||||
}
|
||||
|
||||
public toString(): string {
|
||||
return this.getText();
|
||||
}
|
||||
|
||||
/**
|
||||
* Increases the indentation. Normally the indentation is two spaces,
|
||||
* however an arbitrary prefix can optional be specified. (For example,
|
||||
* the prefix could be "// " to indent and comment simultaneously.)
|
||||
* Each call to IndentedWriter.increaseIndent() must be followed by a
|
||||
* corresponding call to IndentedWriter.decreaseIndent().
|
||||
*/
|
||||
public increaseIndent(indentPrefix?: string): void {
|
||||
this._indentStack.push(indentPrefix ?? this.defaultIndentPrefix);
|
||||
this._updateIndentText();
|
||||
}
|
||||
|
||||
/**
|
||||
* Decreases the indentation, reverting the effect of the corresponding call
|
||||
* to IndentedWriter.increaseIndent().
|
||||
*/
|
||||
public decreaseIndent(): void {
|
||||
this._indentStack.pop();
|
||||
this._updateIndentText();
|
||||
}
|
||||
|
||||
/**
|
||||
* A shorthand for ensuring that increaseIndent()/decreaseIndent() occur
|
||||
* in pairs.
|
||||
*/
|
||||
public indentScope(scope: () => void, indentPrefix?: string): void {
|
||||
this.increaseIndent(indentPrefix);
|
||||
scope();
|
||||
this.decreaseIndent();
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a newline if the file pointer is not already at the start of the line (or start of the stream).
|
||||
*/
|
||||
public ensureNewLine(): void {
|
||||
const lastCharacter: string = this.peekLastCharacter();
|
||||
if (lastCharacter !== '\n' && lastCharacter !== '') {
|
||||
this._writeNewLine();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds up to two newlines to ensure that there is a blank line above the current position.
|
||||
* The start of the stream is considered to be a blank line, so `ensureSkippedLine()` has no effect
|
||||
* unless some text has been written.
|
||||
*/
|
||||
public ensureSkippedLine(): void {
|
||||
this.ensureNewLine();
|
||||
if (!this._previousLineIsBlank) {
|
||||
this._writeNewLine();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the last character that was written, or an empty string if no characters have been written yet.
|
||||
*/
|
||||
public peekLastCharacter(): string {
|
||||
if (this._latestChunk !== undefined) {
|
||||
return this._latestChunk.slice(-1, -1 + 1);
|
||||
}
|
||||
|
||||
return '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the second to last character that was written, or an empty string if less than one characters
|
||||
* have been written yet.
|
||||
*/
|
||||
public peekSecondLastCharacter(): string {
|
||||
if (this._latestChunk !== undefined) {
|
||||
if (this._latestChunk.length > 1) {
|
||||
return this._latestChunk.slice(-2, -2 + 1);
|
||||
}
|
||||
|
||||
if (this._previousChunk !== undefined) {
|
||||
return this._previousChunk.slice(-1, -1 + 1);
|
||||
}
|
||||
}
|
||||
|
||||
return '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes some text to the internal string buffer, applying indentation according
|
||||
* to the current indentation level. If the string contains multiple newlines,
|
||||
* each line will be indented separately.
|
||||
*/
|
||||
public write(message: string): void {
|
||||
if (message.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// If there are no newline characters, then append the string verbatim
|
||||
if (!/[\n\r]/.test(message)) {
|
||||
this._writeLinePart(message);
|
||||
return;
|
||||
}
|
||||
|
||||
// Otherwise split the lines and write each one individually
|
||||
let first = true;
|
||||
for (const linePart of message.split('\n')) {
|
||||
if (first) {
|
||||
first = false;
|
||||
} else {
|
||||
this._writeNewLine();
|
||||
}
|
||||
|
||||
if (linePart) {
|
||||
this._writeLinePart(linePart.replaceAll('\r', ''));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A shorthand for writing an optional message, followed by a newline.
|
||||
* Indentation is applied following the semantics of IndentedWriter.write().
|
||||
*/
|
||||
public writeLine(message: string = ''): void {
|
||||
if (message.length > 0) {
|
||||
this.write(message);
|
||||
}
|
||||
|
||||
this._writeNewLine();
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes a string that does not contain any newline characters.
|
||||
*/
|
||||
private _writeLinePart(message: string): void {
|
||||
let trimmedMessage: string = message;
|
||||
|
||||
if (this.trimLeadingSpaces && this._atStartOfLine) {
|
||||
trimmedMessage = message.replace(/^ +/, '');
|
||||
}
|
||||
|
||||
if (trimmedMessage.length > 0) {
|
||||
if (this._atStartOfLine && this._indentText.length > 0) {
|
||||
this._write(this._indentText);
|
||||
}
|
||||
|
||||
this._write(trimmedMessage);
|
||||
if (this._currentLineIsBlank && /\S/.test(trimmedMessage)) {
|
||||
this._currentLineIsBlank = false;
|
||||
}
|
||||
|
||||
this._atStartOfLine = false;
|
||||
}
|
||||
}
|
||||
|
||||
private _writeNewLine(): void {
|
||||
if (this.indentBlankLines && this._atStartOfLine && this._indentText.length > 0) {
|
||||
this._write(this._indentText);
|
||||
}
|
||||
|
||||
this._previousLineIsBlank = this._currentLineIsBlank;
|
||||
this._write('\n');
|
||||
this._currentLineIsBlank = true;
|
||||
this._atStartOfLine = true;
|
||||
}
|
||||
|
||||
private _write(str: string): void {
|
||||
this._previousChunk = this._latestChunk;
|
||||
this._latestChunk = str;
|
||||
this._builder.append(str);
|
||||
}
|
||||
|
||||
private _updateIndentText(): void {
|
||||
this._indentText = this._indentStack.join('');
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,119 @@
|
||||
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
|
||||
// See LICENSE in the project root for license information.
|
||||
|
||||
import { IndentedWriter } from '../IndentedWriter.js';
|
||||
|
||||
// Snapshot tests for IndentedWriter. Each test writes a scripted sequence and compares the
// accumulated output against the stored jest snapshot.
// NOTE(review): the last test reuses the "06" number prefix ('06 trimLeadingSpaces=true');
// renaming it would invalidate the existing snapshot keys, so it is left as-is.

// Mirrors the example sequence from the IndentedWriter class doc comment.
test('01 Demo from docs', () => {
  const indentedWriter: IndentedWriter = new IndentedWriter();
  indentedWriter.write('begin\n');
  indentedWriter.increaseIndent();
  indentedWriter.write('one\ntwo\n');
  indentedWriter.decreaseIndent();
  indentedWriter.increaseIndent();
  indentedWriter.decreaseIndent();
  indentedWriter.write('end');

  expect(indentedWriter.toString()).toMatchSnapshot();
});

// Default behavior: blank lines written inside an indent scope are NOT prefixed.
test('02 Indent something', () => {
  const indentedWriter: IndentedWriter = new IndentedWriter();
  indentedWriter.write('a');
  indentedWriter.write('b');
  indentedWriter.increaseIndent();
  indentedWriter.writeLine('c');
  indentedWriter.writeLine('d');
  indentedWriter.decreaseIndent();
  indentedWriter.writeLine('e');

  indentedWriter.increaseIndent('>>> ');
  indentedWriter.writeLine();
  indentedWriter.writeLine();
  indentedWriter.writeLine('g');
  indentedWriter.decreaseIndent();

  expect(indentedWriter.toString()).toMatchSnapshot();
});

// Same sequence as test 02, but with indentBlankLines=true the blank lines receive the prefix.
test('03 Indent something with indentBlankLines=true', () => {
  const indentedWriter: IndentedWriter = new IndentedWriter();
  indentedWriter.indentBlankLines = true;

  indentedWriter.write('a');
  indentedWriter.write('b');
  indentedWriter.increaseIndent();
  indentedWriter.writeLine('c');
  indentedWriter.writeLine('d');
  indentedWriter.decreaseIndent();
  indentedWriter.writeLine('e');

  indentedWriter.increaseIndent('>>> ');
  indentedWriter.writeLine();
  indentedWriter.writeLine();
  indentedWriter.writeLine('g');
  indentedWriter.decreaseIndent();

  expect(indentedWriter.toString()).toMatchSnapshot();
});

// Nested indentScope() calls combine the default prefix with a custom '> ' prefix.
test('04 Two kinds of indents', () => {
  const indentedWriter: IndentedWriter = new IndentedWriter();

  indentedWriter.writeLine('---');
  indentedWriter.indentScope(() => {
    indentedWriter.write('a\nb');
    indentedWriter.indentScope(() => {
      indentedWriter.write('c\nd\n');
    });
    indentedWriter.write('e\n');
  }, '> ');
  indentedWriter.writeLine('---');

  expect(indentedWriter.toString()).toMatchSnapshot();
});

// ensureNewLine() is a no-op at the start of the stream, but inserts '\n' after prior text.
test('05 Edge cases for ensureNewLine()', () => {
  let indentedWriter: IndentedWriter = new IndentedWriter();
  indentedWriter.ensureNewLine();
  indentedWriter.write('line');
  expect(indentedWriter.toString()).toMatchSnapshot();

  indentedWriter = new IndentedWriter();
  indentedWriter.write('previous');
  indentedWriter.ensureNewLine();
  indentedWriter.write('line');
  expect(indentedWriter.toString()).toMatchSnapshot();
});

// ensureSkippedLine() is a no-op at the start of the stream, but guarantees a blank line
// above the current position after text has been written.
test('06 Edge cases for ensureSkippedLine()', () => {
  let indentedWriter: IndentedWriter = new IndentedWriter();
  indentedWriter.ensureSkippedLine();
  indentedWriter.write('line');
  expect(indentedWriter.toString()).toMatchSnapshot();

  indentedWriter = new IndentedWriter();
  indentedWriter.write('previous');
  indentedWriter.ensureSkippedLine();
  indentedWriter.write('line');
  indentedWriter.ensureSkippedLine();
  expect(indentedWriter.toString()).toMatchSnapshot();
});

// With trimLeadingSpaces=true, leading spaces (but not tabs) are stripped before indenting.
test('06 trimLeadingSpaces=true', () => {
  const indentedWriter: IndentedWriter = new IndentedWriter();
  indentedWriter.trimLeadingSpaces = true;

  // Example from doc comment
  indentedWriter.increaseIndent('    ');
  indentedWriter.write('  a\n  b  c\n');
  indentedWriter.decreaseIndent();
  indentedWriter.ensureSkippedLine();
  indentedWriter.increaseIndent('>>');
  indentedWriter.write(' ');
  indentedWriter.write(' ');
  indentedWriter.write(' a');
  indentedWriter.writeLine(' b');
  indentedWriter.writeLine('\ttab'); // does not get indented
  indentedWriter.writeLine('c ');
  expect(indentedWriter.toString()).toMatchSnapshot();
});
|
||||
@@ -0,0 +1,65 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`01 Demo from docs 1`] = `
|
||||
"begin
|
||||
one
|
||||
two
|
||||
end"
|
||||
`;
|
||||
|
||||
exports[`02 Indent something 1`] = `
|
||||
"abc
|
||||
d
|
||||
e
|
||||
|
||||
|
||||
>>> g
|
||||
"
|
||||
`;
|
||||
|
||||
exports[`03 Indent something with indentBlankLines=true 1`] = `
|
||||
"abc
|
||||
d
|
||||
e
|
||||
>>>
|
||||
>>>
|
||||
>>> g
|
||||
"
|
||||
`;
|
||||
|
||||
exports[`04 Two kinds of indents 1`] = `
|
||||
"---
|
||||
> a
|
||||
> bc
|
||||
> d
|
||||
> e
|
||||
---
|
||||
"
|
||||
`;
|
||||
|
||||
exports[`05 Edge cases for ensureNewLine() 1`] = `"line"`;
|
||||
|
||||
exports[`05 Edge cases for ensureNewLine() 2`] = `
|
||||
"previous
|
||||
line"
|
||||
`;
|
||||
|
||||
exports[`06 Edge cases for ensureSkippedLine() 1`] = `"line"`;
|
||||
|
||||
exports[`06 Edge cases for ensureSkippedLine() 2`] = `
|
||||
"previous
|
||||
|
||||
line
|
||||
|
||||
"
|
||||
`;
|
||||
|
||||
exports[`06 trimLeadingSpaces=true 1`] = `
|
||||
" a
|
||||
b c
|
||||
|
||||
>>a b
|
||||
>> tab
|
||||
>>c
|
||||
"
|
||||
`;
|
||||
Reference in New Issue
Block a user