commit c1af19d441
2024-02-07 01:33:07 -05:00
4088 changed files with 1260170 additions and 0 deletions


@@ -0,0 +1,39 @@
import { ApiModel, ApiPackage } from '@microsoft/api-extractor-model';
import type { Collector } from '../collector/Collector';
export declare class ApiModelGenerator {
private readonly _collector;
private readonly _apiModel;
private readonly _referenceGenerator;
constructor(collector: Collector);
get apiModel(): ApiModel;
buildApiPackage(): ApiPackage;
private _processAstEntity;
private _processAstNamespaceImport;
private _processDeclaration;
private _processChildDeclarations;
private _processApiCallSignature;
private _processApiConstructor;
private _processApiClass;
private _processApiConstructSignature;
private _processApiEnum;
private _processApiEnumMember;
private _processApiFunction;
private _processApiIndexSignature;
private _processApiInterface;
private _processApiMethod;
private _processApiMethodSignature;
private _processApiNamespace;
private _processApiProperty;
private _processApiPropertySignature;
private _processApiTypeAlias;
private _processApiVariable;
/**
* @param nodesToCapture - A list of child nodes whose token ranges we want to capture
*/
private _buildExcerptTokens;
private _captureTypeParameters;
private _captureParameters;
private _isReadonly;
private _getFileUrlPath;
}
//# sourceMappingURL=ApiModelGenerator.d.ts.map
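
ApiModelGenerator is an internal class of API Extractor; consumers normally reach it indirectly through the tool's public entry point, which builds the doc model (and the API report handled further below) according to api-extractor.json. A minimal sketch of that invocation, assuming a standard config file path and illustrative option values:

import { Extractor, ExtractorConfig, ExtractorResult } from '@microsoft/api-extractor';

// The api-extractor.json path here is an assumption for this sketch.
const extractorConfig: ExtractorConfig = ExtractorConfig.loadFileAndPrepare('./api-extractor.json');

const extractorResult: ExtractorResult = Extractor.invoke(extractorConfig, {
  localBuild: true,
  showVerboseMessages: true
});

if (extractorResult.succeeded) {
  console.log('API Extractor completed successfully');
} else {
  console.error(`API Extractor finished with ${extractorResult.errorCount} errors` +
    ` and ${extractorResult.warningCount} warnings`);
  process.exitCode = 1;
}

Whether the doc model (.api.json) and the API report (.api.md) are emitted is controlled by the docModel and apiReport sections of that config file rather than by code.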


@@ -0,0 +1 @@
{"version":3,"file":"ApiModelGenerator.d.ts","sourceRoot":"","sources":["../../src/generators/ApiModelGenerator.ts"],"names":[],"mappings":"AAQA,OAAO,EACL,QAAQ,EAER,UAAU,EAwBX,MAAM,gCAAgC,CAAC;AAGxC,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,wBAAwB,CAAC;AAmBxD,qBAAa,iBAAiB;IAC5B,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAY;IACvC,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAW;IACrC,OAAO,CAAC,QAAQ,CAAC,mBAAmB,CAAgC;gBAEjD,SAAS,EAAE,SAAS;IAMvC,IAAW,QAAQ,IAAI,QAAQ,CAE9B;IAEM,eAAe,IAAI,UAAU;IA8BpC,OAAO,CAAC,iBAAiB;IAiCzB,OAAO,CAAC,0BAA0B;IAoClC,OAAO,CAAC,mBAAmB;IAyF3B,OAAO,CAAC,yBAAyB;IASjC,OAAO,CAAC,wBAAwB;IAiDhC,OAAO,CAAC,sBAAsB;IAyC9B,OAAO,CAAC,gBAAgB;IAgExB,OAAO,CAAC,6BAA6B;IAoDrC,OAAO,CAAC,eAAe;IAiCvB,OAAO,CAAC,qBAAqB;IAsC7B,OAAO,CAAC,mBAAmB;IAkD3B,OAAO,CAAC,yBAAyB;IA6CjC,OAAO,CAAC,oBAAoB;IAyD5B,OAAO,CAAC,iBAAiB;IA2DzB,OAAO,CAAC,0BAA0B;IAuDlC,OAAO,CAAC,oBAAoB;IAgC5B,OAAO,CAAC,mBAAmB;IA+D3B,OAAO,CAAC,4BAA4B;IA8CpC,OAAO,CAAC,oBAAoB;IA4C5B,OAAO,CAAC,mBAAmB;IA6C3B;;OAEG;IACH,OAAO,CAAC,mBAAmB;IAyB3B,OAAO,CAAC,sBAAsB;IAuB9B,OAAO,CAAC,kBAAkB;IAiB1B,OAAO,CAAC,WAAW;IA+BnB,OAAO,CAAC,eAAe;CAcxB"}


@@ -0,0 +1,814 @@
"use strict";
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
// See LICENSE in the project root for license information.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ApiModelGenerator = void 0;
/* eslint-disable no-bitwise */
const path = __importStar(require("path"));
const ts = __importStar(require("typescript"));
const api_extractor_model_1 = require("@microsoft/api-extractor-model");
const node_core_library_1 = require("@rushstack/node-core-library");
const ExcerptBuilder_1 = require("./ExcerptBuilder");
const AstSymbol_1 = require("../analyzer/AstSymbol");
const DeclarationReferenceGenerator_1 = require("./DeclarationReferenceGenerator");
const AstNamespaceImport_1 = require("../analyzer/AstNamespaceImport");
const TypeScriptInternals_1 = require("../analyzer/TypeScriptInternals");
class ApiModelGenerator {
constructor(collector) {
this._collector = collector;
this._apiModel = new api_extractor_model_1.ApiModel();
this._referenceGenerator = new DeclarationReferenceGenerator_1.DeclarationReferenceGenerator(collector);
}
get apiModel() {
return this._apiModel;
}
buildApiPackage() {
const packageDocComment = this._collector.workingPackage.tsdocComment;
const apiPackage = new api_extractor_model_1.ApiPackage({
name: this._collector.workingPackage.name,
docComment: packageDocComment,
tsdocConfiguration: this._collector.extractorConfig.tsdocConfiguration,
projectFolderUrl: this._collector.extractorConfig.projectFolderUrl
});
this._apiModel.addMember(apiPackage);
const apiEntryPoint = new api_extractor_model_1.ApiEntryPoint({ name: '' });
apiPackage.addMember(apiEntryPoint);
for (const entity of this._collector.entities) {
// Only process entities that are exported from the entry point. Entities that are exported from
// `AstNamespaceImport` entities will be processed by `_processAstNamespaceImport`. However, if
// we are including forgotten exports, then process everything.
if (entity.exportedFromEntryPoint || this._collector.extractorConfig.docModelIncludeForgottenExports) {
this._processAstEntity(entity.astEntity, {
name: entity.nameForEmit,
isExported: entity.exportedFromEntryPoint,
parentApiItem: apiEntryPoint
});
}
}
return apiPackage;
}
_processAstEntity(astEntity, context) {
if (astEntity instanceof AstSymbol_1.AstSymbol) {
// Skip ancillary declarations; we will process them with the main declaration
for (const astDeclaration of this._collector.getNonAncillaryDeclarations(astEntity)) {
this._processDeclaration(astDeclaration, context);
}
return;
}
if (astEntity instanceof AstNamespaceImport_1.AstNamespaceImport) {
// Note that a single API item can belong to two different AstNamespaceImport namespaces. For example:
//
// // file.ts defines "thing()"
// import * as example1 from "./file";
// import * as example2 from "./file";
//
// // ...so here we end up with example1.thing() and example2.thing()
// export { example1, example2 }
//
// The current logic does not try to associate "thing()" with a specific parent. Instead
// the API documentation will show duplicated entries for example1.thing() and example2.thing().
//
// This could be improved in the future, but it requires a stable mechanism for choosing an associated parent.
// For thoughts about this: https://github.com/microsoft/rushstack/issues/1308
this._processAstNamespaceImport(astEntity, context);
return;
}
// TODO: Figure out how to represent reexported AstImport objects. Basically we need to introduce a new
// ApiItem subclass for "export alias", similar to a type alias, but representing declarations of the
// form "export { X } from 'external-package'". We can also use this to solve GitHub issue #950.
}
_processAstNamespaceImport(astNamespaceImport, context) {
const astModule = astNamespaceImport.astModule;
const { name, isExported, parentApiItem } = context;
const containerKey = api_extractor_model_1.ApiNamespace.getContainerKey(name);
const fileUrlPath = this._getFileUrlPath(astNamespaceImport.declaration);
let apiNamespace = parentApiItem.tryGetMemberByKey(containerKey);
if (apiNamespace === undefined) {
apiNamespace = new api_extractor_model_1.ApiNamespace({
name,
docComment: undefined,
releaseTag: api_extractor_model_1.ReleaseTag.None,
excerptTokens: [],
isExported,
fileUrlPath
});
parentApiItem.addMember(apiNamespace);
}
astModule.astModuleExportInfo.exportedLocalEntities.forEach((exportedEntity, exportedName) => {
this._processAstEntity(exportedEntity, {
name: exportedName,
isExported: true,
parentApiItem: apiNamespace
});
});
}
_processDeclaration(astDeclaration, context) {
if ((astDeclaration.modifierFlags & ts.ModifierFlags.Private) !== 0) {
return; // trim out private declarations
}
const apiItemMetadata = this._collector.fetchApiItemMetadata(astDeclaration);
const releaseTag = apiItemMetadata.effectiveReleaseTag;
if (releaseTag === api_extractor_model_1.ReleaseTag.Internal) {
return; // trim out items marked as "@internal"
}
switch (astDeclaration.declaration.kind) {
case ts.SyntaxKind.CallSignature:
this._processApiCallSignature(astDeclaration, context);
break;
case ts.SyntaxKind.Constructor:
this._processApiConstructor(astDeclaration, context);
break;
case ts.SyntaxKind.ConstructSignature:
this._processApiConstructSignature(astDeclaration, context);
break;
case ts.SyntaxKind.ClassDeclaration:
this._processApiClass(astDeclaration, context);
break;
case ts.SyntaxKind.EnumDeclaration:
this._processApiEnum(astDeclaration, context);
break;
case ts.SyntaxKind.EnumMember:
this._processApiEnumMember(astDeclaration, context);
break;
case ts.SyntaxKind.FunctionDeclaration:
this._processApiFunction(astDeclaration, context);
break;
case ts.SyntaxKind.GetAccessor:
this._processApiProperty(astDeclaration, context);
break;
case ts.SyntaxKind.SetAccessor:
this._processApiProperty(astDeclaration, context);
break;
case ts.SyntaxKind.IndexSignature:
this._processApiIndexSignature(astDeclaration, context);
break;
case ts.SyntaxKind.InterfaceDeclaration:
this._processApiInterface(astDeclaration, context);
break;
case ts.SyntaxKind.MethodDeclaration:
this._processApiMethod(astDeclaration, context);
break;
case ts.SyntaxKind.MethodSignature:
this._processApiMethodSignature(astDeclaration, context);
break;
case ts.SyntaxKind.ModuleDeclaration:
this._processApiNamespace(astDeclaration, context);
break;
case ts.SyntaxKind.PropertyDeclaration:
this._processApiProperty(astDeclaration, context);
break;
case ts.SyntaxKind.PropertySignature:
this._processApiPropertySignature(astDeclaration, context);
break;
case ts.SyntaxKind.TypeAliasDeclaration:
this._processApiTypeAlias(astDeclaration, context);
break;
case ts.SyntaxKind.VariableDeclaration:
this._processApiVariable(astDeclaration, context);
break;
default:
// ignore unknown types
}
}
_processChildDeclarations(astDeclaration, context) {
for (const childDeclaration of astDeclaration.children) {
this._processDeclaration(childDeclaration, Object.assign(Object.assign({}, context), { name: childDeclaration.astSymbol.localName }));
}
}
_processApiCallSignature(astDeclaration, context) {
const { parentApiItem } = context;
const overloadIndex = this._collector.getOverloadIndex(astDeclaration);
const containerKey = api_extractor_model_1.ApiCallSignature.getContainerKey(overloadIndex);
let apiCallSignature = parentApiItem.tryGetMemberByKey(containerKey);
if (apiCallSignature === undefined) {
const callSignature = astDeclaration.declaration;
const nodesToCapture = [];
const returnTypeTokenRange = ExcerptBuilder_1.ExcerptBuilder.createEmptyTokenRange();
nodesToCapture.push({ node: callSignature.type, tokenRange: returnTypeTokenRange });
const typeParameters = this._captureTypeParameters(nodesToCapture, callSignature.typeParameters);
const parameters = this._captureParameters(nodesToCapture, callSignature.parameters);
const excerptTokens = this._buildExcerptTokens(astDeclaration, nodesToCapture);
const apiItemMetadata = this._collector.fetchApiItemMetadata(astDeclaration);
const docComment = apiItemMetadata.tsdocComment;
const releaseTag = apiItemMetadata.effectiveReleaseTag;
const fileUrlPath = this._getFileUrlPath(callSignature);
apiCallSignature = new api_extractor_model_1.ApiCallSignature({
docComment,
releaseTag,
typeParameters,
parameters,
overloadIndex,
excerptTokens,
returnTypeTokenRange,
fileUrlPath
});
parentApiItem.addMember(apiCallSignature);
}
}
_processApiConstructor(astDeclaration, context) {
const { parentApiItem } = context;
const overloadIndex = this._collector.getOverloadIndex(astDeclaration);
const containerKey = api_extractor_model_1.ApiConstructor.getContainerKey(overloadIndex);
let apiConstructor = parentApiItem.tryGetMemberByKey(containerKey);
if (apiConstructor === undefined) {
const constructorDeclaration = astDeclaration.declaration;
const nodesToCapture = [];
const parameters = this._captureParameters(nodesToCapture, constructorDeclaration.parameters);
const excerptTokens = this._buildExcerptTokens(astDeclaration, nodesToCapture);
const apiItemMetadata = this._collector.fetchApiItemMetadata(astDeclaration);
const docComment = apiItemMetadata.tsdocComment;
const releaseTag = apiItemMetadata.effectiveReleaseTag;
const isProtected = (astDeclaration.modifierFlags & ts.ModifierFlags.Protected) !== 0;
const fileUrlPath = this._getFileUrlPath(constructorDeclaration);
apiConstructor = new api_extractor_model_1.ApiConstructor({
docComment,
releaseTag,
isProtected,
parameters,
overloadIndex,
excerptTokens,
fileUrlPath
});
parentApiItem.addMember(apiConstructor);
}
}
_processApiClass(astDeclaration, context) {
const { name, isExported, parentApiItem } = context;
const containerKey = api_extractor_model_1.ApiClass.getContainerKey(name);
let apiClass = parentApiItem.tryGetMemberByKey(containerKey);
if (apiClass === undefined) {
const classDeclaration = astDeclaration.declaration;
const nodesToCapture = [];
const typeParameters = this._captureTypeParameters(nodesToCapture, classDeclaration.typeParameters);
let extendsTokenRange = undefined;
const implementsTokenRanges = [];
for (const heritageClause of classDeclaration.heritageClauses || []) {
if (heritageClause.token === ts.SyntaxKind.ExtendsKeyword) {
extendsTokenRange = ExcerptBuilder_1.ExcerptBuilder.createEmptyTokenRange();
if (heritageClause.types.length > 0) {
nodesToCapture.push({ node: heritageClause.types[0], tokenRange: extendsTokenRange });
}
}
else if (heritageClause.token === ts.SyntaxKind.ImplementsKeyword) {
for (const heritageType of heritageClause.types) {
const implementsTokenRange = ExcerptBuilder_1.ExcerptBuilder.createEmptyTokenRange();
implementsTokenRanges.push(implementsTokenRange);
nodesToCapture.push({ node: heritageType, tokenRange: implementsTokenRange });
}
}
}
const excerptTokens = this._buildExcerptTokens(astDeclaration, nodesToCapture);
const apiItemMetadata = this._collector.fetchApiItemMetadata(astDeclaration);
const docComment = apiItemMetadata.tsdocComment;
const releaseTag = apiItemMetadata.effectiveReleaseTag;
const isAbstract = (ts.getCombinedModifierFlags(classDeclaration) & ts.ModifierFlags.Abstract) !== 0;
const fileUrlPath = this._getFileUrlPath(classDeclaration);
apiClass = new api_extractor_model_1.ApiClass({
name,
isAbstract,
docComment,
releaseTag,
excerptTokens,
typeParameters,
extendsTokenRange,
implementsTokenRanges,
isExported,
fileUrlPath
});
parentApiItem.addMember(apiClass);
}
this._processChildDeclarations(astDeclaration, Object.assign(Object.assign({}, context), { parentApiItem: apiClass }));
}
_processApiConstructSignature(astDeclaration, context) {
const { parentApiItem } = context;
const overloadIndex = this._collector.getOverloadIndex(astDeclaration);
const containerKey = api_extractor_model_1.ApiConstructSignature.getContainerKey(overloadIndex);
let apiConstructSignature = parentApiItem.tryGetMemberByKey(containerKey);
if (apiConstructSignature === undefined) {
const constructSignature = astDeclaration.declaration;
const nodesToCapture = [];
const returnTypeTokenRange = ExcerptBuilder_1.ExcerptBuilder.createEmptyTokenRange();
nodesToCapture.push({ node: constructSignature.type, tokenRange: returnTypeTokenRange });
const typeParameters = this._captureTypeParameters(nodesToCapture, constructSignature.typeParameters);
const parameters = this._captureParameters(nodesToCapture, constructSignature.parameters);
const excerptTokens = this._buildExcerptTokens(astDeclaration, nodesToCapture);
const apiItemMetadata = this._collector.fetchApiItemMetadata(astDeclaration);
const docComment = apiItemMetadata.tsdocComment;
const releaseTag = apiItemMetadata.effectiveReleaseTag;
const fileUrlPath = this._getFileUrlPath(constructSignature);
apiConstructSignature = new api_extractor_model_1.ApiConstructSignature({
docComment,
releaseTag,
typeParameters,
parameters,
overloadIndex,
excerptTokens,
returnTypeTokenRange,
fileUrlPath
});
parentApiItem.addMember(apiConstructSignature);
}
}
_processApiEnum(astDeclaration, context) {
const { name, isExported, parentApiItem } = context;
const containerKey = api_extractor_model_1.ApiEnum.getContainerKey(name);
let apiEnum = parentApiItem.tryGetMemberByKey(containerKey);
if (apiEnum === undefined) {
const excerptTokens = this._buildExcerptTokens(astDeclaration, []);
const apiItemMetadata = this._collector.fetchApiItemMetadata(astDeclaration);
const docComment = apiItemMetadata.tsdocComment;
const releaseTag = apiItemMetadata.effectiveReleaseTag;
const preserveMemberOrder = this._collector.extractorConfig.enumMemberOrder === api_extractor_model_1.EnumMemberOrder.Preserve;
const fileUrlPath = this._getFileUrlPath(astDeclaration.declaration);
apiEnum = new api_extractor_model_1.ApiEnum({
name,
docComment,
releaseTag,
excerptTokens,
preserveMemberOrder,
isExported,
fileUrlPath
});
parentApiItem.addMember(apiEnum);
}
this._processChildDeclarations(astDeclaration, Object.assign(Object.assign({}, context), { parentApiItem: apiEnum }));
}
_processApiEnumMember(astDeclaration, context) {
const { name, parentApiItem } = context;
const containerKey = api_extractor_model_1.ApiEnumMember.getContainerKey(name);
let apiEnumMember = parentApiItem.tryGetMemberByKey(containerKey);
if (apiEnumMember === undefined) {
const enumMember = astDeclaration.declaration;
const nodesToCapture = [];
let initializerTokenRange = undefined;
if (enumMember.initializer) {
initializerTokenRange = ExcerptBuilder_1.ExcerptBuilder.createEmptyTokenRange();
nodesToCapture.push({ node: enumMember.initializer, tokenRange: initializerTokenRange });
}
const excerptTokens = this._buildExcerptTokens(astDeclaration, nodesToCapture);
const apiItemMetadata = this._collector.fetchApiItemMetadata(astDeclaration);
const docComment = apiItemMetadata.tsdocComment;
const releaseTag = apiItemMetadata.effectiveReleaseTag;
const fileUrlPath = this._getFileUrlPath(enumMember);
apiEnumMember = new api_extractor_model_1.ApiEnumMember({
name,
docComment,
releaseTag,
excerptTokens,
initializerTokenRange,
fileUrlPath
});
parentApiItem.addMember(apiEnumMember);
}
}
_processApiFunction(astDeclaration, context) {
const { name, isExported, parentApiItem } = context;
const overloadIndex = this._collector.getOverloadIndex(astDeclaration);
const containerKey = api_extractor_model_1.ApiFunction.getContainerKey(name, overloadIndex);
let apiFunction = parentApiItem.tryGetMemberByKey(containerKey);
if (apiFunction === undefined) {
const functionDeclaration = astDeclaration.declaration;
const nodesToCapture = [];
const returnTypeTokenRange = ExcerptBuilder_1.ExcerptBuilder.createEmptyTokenRange();
nodesToCapture.push({ node: functionDeclaration.type, tokenRange: returnTypeTokenRange });
const typeParameters = this._captureTypeParameters(nodesToCapture, functionDeclaration.typeParameters);
const parameters = this._captureParameters(nodesToCapture, functionDeclaration.parameters);
const excerptTokens = this._buildExcerptTokens(astDeclaration, nodesToCapture);
const apiItemMetadata = this._collector.fetchApiItemMetadata(astDeclaration);
const docComment = apiItemMetadata.tsdocComment;
const releaseTag = apiItemMetadata.effectiveReleaseTag;
const fileUrlPath = this._getFileUrlPath(functionDeclaration);
apiFunction = new api_extractor_model_1.ApiFunction({
name,
docComment,
releaseTag,
typeParameters,
parameters,
overloadIndex,
excerptTokens,
returnTypeTokenRange,
isExported,
fileUrlPath
});
parentApiItem.addMember(apiFunction);
}
}
_processApiIndexSignature(astDeclaration, context) {
const { parentApiItem } = context;
const overloadIndex = this._collector.getOverloadIndex(astDeclaration);
const containerKey = api_extractor_model_1.ApiIndexSignature.getContainerKey(overloadIndex);
let apiIndexSignature = parentApiItem.tryGetMemberByKey(containerKey);
if (apiIndexSignature === undefined) {
const indexSignature = astDeclaration.declaration;
const nodesToCapture = [];
const returnTypeTokenRange = ExcerptBuilder_1.ExcerptBuilder.createEmptyTokenRange();
nodesToCapture.push({ node: indexSignature.type, tokenRange: returnTypeTokenRange });
const parameters = this._captureParameters(nodesToCapture, indexSignature.parameters);
const excerptTokens = this._buildExcerptTokens(astDeclaration, nodesToCapture);
const apiItemMetadata = this._collector.fetchApiItemMetadata(astDeclaration);
const docComment = apiItemMetadata.tsdocComment;
const releaseTag = apiItemMetadata.effectiveReleaseTag;
const isReadonly = this._isReadonly(astDeclaration);
const fileUrlPath = this._getFileUrlPath(indexSignature);
apiIndexSignature = new api_extractor_model_1.ApiIndexSignature({
docComment,
releaseTag,
parameters,
overloadIndex,
excerptTokens,
returnTypeTokenRange,
isReadonly,
fileUrlPath
});
parentApiItem.addMember(apiIndexSignature);
}
}
_processApiInterface(astDeclaration, context) {
const { name, isExported, parentApiItem } = context;
const containerKey = api_extractor_model_1.ApiInterface.getContainerKey(name);
let apiInterface = parentApiItem.tryGetMemberByKey(containerKey);
if (apiInterface === undefined) {
const interfaceDeclaration = astDeclaration.declaration;
const nodesToCapture = [];
const typeParameters = this._captureTypeParameters(nodesToCapture, interfaceDeclaration.typeParameters);
const extendsTokenRanges = [];
for (const heritageClause of interfaceDeclaration.heritageClauses || []) {
if (heritageClause.token === ts.SyntaxKind.ExtendsKeyword) {
for (const heritageType of heritageClause.types) {
const extendsTokenRange = ExcerptBuilder_1.ExcerptBuilder.createEmptyTokenRange();
extendsTokenRanges.push(extendsTokenRange);
nodesToCapture.push({ node: heritageType, tokenRange: extendsTokenRange });
}
}
}
const excerptTokens = this._buildExcerptTokens(astDeclaration, nodesToCapture);
const apiItemMetadata = this._collector.fetchApiItemMetadata(astDeclaration);
const docComment = apiItemMetadata.tsdocComment;
const releaseTag = apiItemMetadata.effectiveReleaseTag;
const fileUrlPath = this._getFileUrlPath(interfaceDeclaration);
apiInterface = new api_extractor_model_1.ApiInterface({
name,
docComment,
releaseTag,
excerptTokens,
typeParameters,
extendsTokenRanges,
isExported,
fileUrlPath
});
parentApiItem.addMember(apiInterface);
}
this._processChildDeclarations(astDeclaration, Object.assign(Object.assign({}, context), { parentApiItem: apiInterface }));
}
_processApiMethod(astDeclaration, context) {
const { name, parentApiItem } = context;
const isStatic = (astDeclaration.modifierFlags & ts.ModifierFlags.Static) !== 0;
const overloadIndex = this._collector.getOverloadIndex(astDeclaration);
const containerKey = api_extractor_model_1.ApiMethod.getContainerKey(name, isStatic, overloadIndex);
let apiMethod = parentApiItem.tryGetMemberByKey(containerKey);
if (apiMethod === undefined) {
const methodDeclaration = astDeclaration.declaration;
const nodesToCapture = [];
const returnTypeTokenRange = ExcerptBuilder_1.ExcerptBuilder.createEmptyTokenRange();
nodesToCapture.push({ node: methodDeclaration.type, tokenRange: returnTypeTokenRange });
const typeParameters = this._captureTypeParameters(nodesToCapture, methodDeclaration.typeParameters);
const parameters = this._captureParameters(nodesToCapture, methodDeclaration.parameters);
const excerptTokens = this._buildExcerptTokens(astDeclaration, nodesToCapture);
const apiItemMetadata = this._collector.fetchApiItemMetadata(astDeclaration);
const docComment = apiItemMetadata.tsdocComment;
const releaseTag = apiItemMetadata.effectiveReleaseTag;
if (releaseTag === api_extractor_model_1.ReleaseTag.Internal || releaseTag === api_extractor_model_1.ReleaseTag.Alpha) {
return; // trim out items marked as "@internal" or "@alpha"
}
const isOptional = (astDeclaration.astSymbol.followedSymbol.flags & ts.SymbolFlags.Optional) !== 0;
const isProtected = (astDeclaration.modifierFlags & ts.ModifierFlags.Protected) !== 0;
const isAbstract = (astDeclaration.modifierFlags & ts.ModifierFlags.Abstract) !== 0;
const fileUrlPath = this._getFileUrlPath(methodDeclaration);
apiMethod = new api_extractor_model_1.ApiMethod({
name,
isAbstract,
docComment,
releaseTag,
isProtected,
isStatic,
isOptional,
typeParameters,
parameters,
overloadIndex,
excerptTokens,
returnTypeTokenRange,
fileUrlPath
});
parentApiItem.addMember(apiMethod);
}
}
_processApiMethodSignature(astDeclaration, context) {
const { name, parentApiItem } = context;
const overloadIndex = this._collector.getOverloadIndex(astDeclaration);
const containerKey = api_extractor_model_1.ApiMethodSignature.getContainerKey(name, overloadIndex);
let apiMethodSignature = parentApiItem.tryGetMemberByKey(containerKey);
if (apiMethodSignature === undefined) {
const methodSignature = astDeclaration.declaration;
const nodesToCapture = [];
const returnTypeTokenRange = ExcerptBuilder_1.ExcerptBuilder.createEmptyTokenRange();
nodesToCapture.push({ node: methodSignature.type, tokenRange: returnTypeTokenRange });
const typeParameters = this._captureTypeParameters(nodesToCapture, methodSignature.typeParameters);
const parameters = this._captureParameters(nodesToCapture, methodSignature.parameters);
const excerptTokens = this._buildExcerptTokens(astDeclaration, nodesToCapture);
const apiItemMetadata = this._collector.fetchApiItemMetadata(astDeclaration);
const docComment = apiItemMetadata.tsdocComment;
const releaseTag = apiItemMetadata.effectiveReleaseTag;
const isOptional = (astDeclaration.astSymbol.followedSymbol.flags & ts.SymbolFlags.Optional) !== 0;
const fileUrlPath = this._getFileUrlPath(methodSignature);
apiMethodSignature = new api_extractor_model_1.ApiMethodSignature({
name,
docComment,
releaseTag,
isOptional,
typeParameters,
parameters,
overloadIndex,
excerptTokens,
returnTypeTokenRange,
fileUrlPath
});
parentApiItem.addMember(apiMethodSignature);
}
}
_processApiNamespace(astDeclaration, context) {
const { name, isExported, parentApiItem } = context;
const containerKey = api_extractor_model_1.ApiNamespace.getContainerKey(name);
let apiNamespace = parentApiItem.tryGetMemberByKey(containerKey);
if (apiNamespace === undefined) {
const excerptTokens = this._buildExcerptTokens(astDeclaration, []);
const apiItemMetadata = this._collector.fetchApiItemMetadata(astDeclaration);
const docComment = apiItemMetadata.tsdocComment;
const releaseTag = apiItemMetadata.effectiveReleaseTag;
const fileUrlPath = this._getFileUrlPath(astDeclaration.declaration);
apiNamespace = new api_extractor_model_1.ApiNamespace({
name,
docComment,
releaseTag,
excerptTokens,
isExported,
fileUrlPath
});
parentApiItem.addMember(apiNamespace);
}
this._processChildDeclarations(astDeclaration, Object.assign(Object.assign({}, context), { parentApiItem: apiNamespace }));
}
_processApiProperty(astDeclaration, context) {
const { name, parentApiItem } = context;
const isStatic = (astDeclaration.modifierFlags & ts.ModifierFlags.Static) !== 0;
const containerKey = api_extractor_model_1.ApiProperty.getContainerKey(name, isStatic);
let apiProperty = parentApiItem.tryGetMemberByKey(containerKey);
if (apiProperty === undefined) {
const declaration = astDeclaration.declaration;
const nodesToCapture = [];
const propertyTypeTokenRange = ExcerptBuilder_1.ExcerptBuilder.createEmptyTokenRange();
let propertyTypeNode;
if (ts.isPropertyDeclaration(declaration) || ts.isGetAccessorDeclaration(declaration)) {
propertyTypeNode = declaration.type;
}
if (ts.isSetAccessorDeclaration(declaration)) {
// Note that TypeScript always reports an error if a setter does not have exactly one parameter.
propertyTypeNode = declaration.parameters[0].type;
}
nodesToCapture.push({ node: propertyTypeNode, tokenRange: propertyTypeTokenRange });
let initializerTokenRange = undefined;
if (ts.isPropertyDeclaration(declaration) && declaration.initializer) {
initializerTokenRange = ExcerptBuilder_1.ExcerptBuilder.createEmptyTokenRange();
nodesToCapture.push({ node: declaration.initializer, tokenRange: initializerTokenRange });
}
const excerptTokens = this._buildExcerptTokens(astDeclaration, nodesToCapture);
const apiItemMetadata = this._collector.fetchApiItemMetadata(astDeclaration);
const docComment = apiItemMetadata.tsdocComment;
const releaseTag = apiItemMetadata.effectiveReleaseTag;
const isOptional = (astDeclaration.astSymbol.followedSymbol.flags & ts.SymbolFlags.Optional) !== 0;
const isProtected = (astDeclaration.modifierFlags & ts.ModifierFlags.Protected) !== 0;
const isAbstract = (astDeclaration.modifierFlags & ts.ModifierFlags.Abstract) !== 0;
const isReadonly = this._isReadonly(astDeclaration);
const fileUrlPath = this._getFileUrlPath(declaration);
apiProperty = new api_extractor_model_1.ApiProperty({
name,
docComment,
releaseTag,
isAbstract,
isProtected,
isStatic,
isOptional,
isReadonly,
excerptTokens,
propertyTypeTokenRange,
initializerTokenRange,
fileUrlPath
});
parentApiItem.addMember(apiProperty);
}
else {
// If the property was already declared before (via a merged interface declaration),
// we assume its signature is identical, because the language requires that.
}
}
_processApiPropertySignature(astDeclaration, context) {
const { name, parentApiItem } = context;
const containerKey = api_extractor_model_1.ApiPropertySignature.getContainerKey(name);
let apiPropertySignature = parentApiItem.tryGetMemberByKey(containerKey);
if (apiPropertySignature === undefined) {
const propertySignature = astDeclaration.declaration;
const nodesToCapture = [];
const propertyTypeTokenRange = ExcerptBuilder_1.ExcerptBuilder.createEmptyTokenRange();
nodesToCapture.push({ node: propertySignature.type, tokenRange: propertyTypeTokenRange });
const excerptTokens = this._buildExcerptTokens(astDeclaration, nodesToCapture);
const apiItemMetadata = this._collector.fetchApiItemMetadata(astDeclaration);
const docComment = apiItemMetadata.tsdocComment;
const releaseTag = apiItemMetadata.effectiveReleaseTag;
const isOptional = (astDeclaration.astSymbol.followedSymbol.flags & ts.SymbolFlags.Optional) !== 0;
const isReadonly = this._isReadonly(astDeclaration);
const fileUrlPath = this._getFileUrlPath(propertySignature);
apiPropertySignature = new api_extractor_model_1.ApiPropertySignature({
name,
docComment,
releaseTag,
isOptional,
excerptTokens,
propertyTypeTokenRange,
isReadonly,
fileUrlPath
});
parentApiItem.addMember(apiPropertySignature);
}
else {
// If the property was already declared before (via a merged interface declaration),
// we assume its signature is identical, because the language requires that.
}
}
_processApiTypeAlias(astDeclaration, context) {
const { name, isExported, parentApiItem } = context;
const containerKey = api_extractor_model_1.ApiTypeAlias.getContainerKey(name);
let apiTypeAlias = parentApiItem.tryGetMemberByKey(containerKey);
if (apiTypeAlias === undefined) {
const typeAliasDeclaration = astDeclaration.declaration;
const nodesToCapture = [];
const typeParameters = this._captureTypeParameters(nodesToCapture, typeAliasDeclaration.typeParameters);
const typeTokenRange = ExcerptBuilder_1.ExcerptBuilder.createEmptyTokenRange();
nodesToCapture.push({ node: typeAliasDeclaration.type, tokenRange: typeTokenRange });
const excerptTokens = this._buildExcerptTokens(astDeclaration, nodesToCapture);
const apiItemMetadata = this._collector.fetchApiItemMetadata(astDeclaration);
const docComment = apiItemMetadata.tsdocComment;
const releaseTag = apiItemMetadata.effectiveReleaseTag;
const fileUrlPath = this._getFileUrlPath(typeAliasDeclaration);
apiTypeAlias = new api_extractor_model_1.ApiTypeAlias({
name,
docComment,
typeParameters,
releaseTag,
excerptTokens,
typeTokenRange,
isExported,
fileUrlPath
});
parentApiItem.addMember(apiTypeAlias);
}
}
_processApiVariable(astDeclaration, context) {
const { name, isExported, parentApiItem } = context;
const containerKey = api_extractor_model_1.ApiVariable.getContainerKey(name);
let apiVariable = parentApiItem.tryGetMemberByKey(containerKey);
if (apiVariable === undefined) {
const variableDeclaration = astDeclaration.declaration;
const nodesToCapture = [];
const variableTypeTokenRange = ExcerptBuilder_1.ExcerptBuilder.createEmptyTokenRange();
nodesToCapture.push({ node: variableDeclaration.type, tokenRange: variableTypeTokenRange });
let initializerTokenRange = undefined;
if (variableDeclaration.initializer) {
initializerTokenRange = ExcerptBuilder_1.ExcerptBuilder.createEmptyTokenRange();
nodesToCapture.push({ node: variableDeclaration.initializer, tokenRange: initializerTokenRange });
}
const excerptTokens = this._buildExcerptTokens(astDeclaration, nodesToCapture);
const apiItemMetadata = this._collector.fetchApiItemMetadata(astDeclaration);
const docComment = apiItemMetadata.tsdocComment;
const releaseTag = apiItemMetadata.effectiveReleaseTag;
const isReadonly = this._isReadonly(astDeclaration);
const fileUrlPath = this._getFileUrlPath(variableDeclaration);
apiVariable = new api_extractor_model_1.ApiVariable({
name,
docComment,
releaseTag,
excerptTokens,
variableTypeTokenRange,
initializerTokenRange,
isReadonly,
isExported,
fileUrlPath
});
parentApiItem.addMember(apiVariable);
}
}
/**
* @param nodesToCapture - A list of child nodes whose token ranges we want to capture
*/
_buildExcerptTokens(astDeclaration, nodesToCapture) {
const excerptTokens = [];
// Build the main declaration
ExcerptBuilder_1.ExcerptBuilder.addDeclaration(excerptTokens, astDeclaration, nodesToCapture, this._referenceGenerator);
const declarationMetadata = this._collector.fetchDeclarationMetadata(astDeclaration);
// Add any ancillary declarations
for (const ancillaryDeclaration of declarationMetadata.ancillaryDeclarations) {
ExcerptBuilder_1.ExcerptBuilder.addBlankLine(excerptTokens);
ExcerptBuilder_1.ExcerptBuilder.addDeclaration(excerptTokens, ancillaryDeclaration, nodesToCapture, this._referenceGenerator);
}
return excerptTokens;
}
_captureTypeParameters(nodesToCapture, typeParameterNodes) {
const typeParameters = [];
if (typeParameterNodes) {
for (const typeParameter of typeParameterNodes) {
const constraintTokenRange = ExcerptBuilder_1.ExcerptBuilder.createEmptyTokenRange();
nodesToCapture.push({ node: typeParameter.constraint, tokenRange: constraintTokenRange });
const defaultTypeTokenRange = ExcerptBuilder_1.ExcerptBuilder.createEmptyTokenRange();
nodesToCapture.push({ node: typeParameter.default, tokenRange: defaultTypeTokenRange });
typeParameters.push({
typeParameterName: typeParameter.name.getText().trim(),
constraintTokenRange,
defaultTypeTokenRange
});
}
}
return typeParameters;
}
_captureParameters(nodesToCapture, parameterNodes) {
const parameters = [];
for (const parameter of parameterNodes) {
const parameterTypeTokenRange = ExcerptBuilder_1.ExcerptBuilder.createEmptyTokenRange();
nodesToCapture.push({ node: parameter.type, tokenRange: parameterTypeTokenRange });
parameters.push({
parameterName: parameter.name.getText().trim(),
parameterTypeTokenRange,
isOptional: this._collector.typeChecker.isOptionalParameter(parameter)
});
}
return parameters;
}
_isReadonly(astDeclaration) {
var _a;
switch (astDeclaration.declaration.kind) {
case ts.SyntaxKind.GetAccessor:
case ts.SyntaxKind.IndexSignature:
case ts.SyntaxKind.PropertyDeclaration:
case ts.SyntaxKind.PropertySignature:
case ts.SyntaxKind.SetAccessor:
case ts.SyntaxKind.VariableDeclaration: {
const apiItemMetadata = this._collector.fetchApiItemMetadata(astDeclaration);
const docComment = apiItemMetadata.tsdocComment;
const declarationMetadata = this._collector.fetchDeclarationMetadata(astDeclaration);
const hasReadonlyModifier = (astDeclaration.modifierFlags & ts.ModifierFlags.Readonly) !== 0;
const hasReadonlyDocTag = !!((_a = docComment === null || docComment === void 0 ? void 0 : docComment.modifierTagSet) === null || _a === void 0 ? void 0 : _a.hasTagName('@readonly'));
const isGetterWithNoSetter = ts.isGetAccessorDeclaration(astDeclaration.declaration) &&
declarationMetadata.ancillaryDeclarations.length === 0;
const isVarConst = ts.isVariableDeclaration(astDeclaration.declaration) &&
TypeScriptInternals_1.TypeScriptInternals.isVarConst(astDeclaration.declaration);
return hasReadonlyModifier || hasReadonlyDocTag || isGetterWithNoSetter || isVarConst;
}
default: {
// Readonly-ness does not make sense for any other declaration kind.
return false;
}
}
}
_getFileUrlPath(declaration) {
const sourceFile = declaration.getSourceFile();
const sourceLocation = this._collector.sourceMapper.getSourceLocation({
sourceFile,
pos: declaration.pos
});
let result = path.relative(this._collector.extractorConfig.projectFolder, sourceLocation.sourceFilePath);
result = node_core_library_1.Path.convertToSlashes(result);
return result;
}
}
exports.ApiModelGenerator = ApiModelGenerator;
//# sourceMappingURL=ApiModelGenerator.js.map
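
buildApiPackage() produces the ApiPackage tree that API Extractor serializes to a <package-name>.api.json file. That output is consumed through the public @microsoft/api-extractor-model API; a minimal sketch, with a hypothetical file path and package name:

import { ApiModel, ApiPackage, ApiItem } from '@microsoft/api-extractor-model';

const apiModel: ApiModel = new ApiModel();
// The path below is illustrative; api-extractor writes the file wherever
// the docModel settings in api-extractor.json point.
const apiPackage: ApiPackage = apiModel.loadPackage('./temp/my-library.api.json');

// Walk the members that ApiModelGenerator emitted (classes, interfaces,
// enums, functions, ...) and print each item's kind and name.
function walk(apiItem: ApiItem, indent: string = ''): void {
  console.log(`${indent}${apiItem.kind}: ${apiItem.displayName}`);
  for (const member of apiItem.members) {
    walk(member, indent + '  ');
  }
}

walk(apiPackage);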

File diff suppressed because one or more lines are too long


@@ -0,0 +1,30 @@
import { Collector } from '../collector/Collector';
export declare class ApiReportGenerator {
private static _trimSpacesRegExp;
/**
* Compares the contents of two API files that were created using ApiFileGenerator,
* and returns true if they are equivalent. Note that these files are not normally edited
* by a human; the "equivalence" comparison here is intended to ignore spurious changes that
* might be introduced by a tool, e.g. Git newline normalization or an editor that strips
* whitespace when saving.
*/
static areEquivalentApiFileContents(actualFileContent: string, expectedFileContent: string): boolean;
static generateReviewFileContent(collector: Collector): string;
/**
* Before writing out a declaration, _modifySpan() applies various fixups to make it nice.
*/
private static _modifySpan;
private static _shouldIncludeInReport;
/**
* For declarations marked as `@preapproved`, this is used instead of _modifySpan().
*/
private static _modifySpanForPreapproved;
/**
* Writes a synopsis of the AEDoc comments, which indicates the release tag,
* whether the item has been documented, and any warnings that were detected
* by the analysis.
*/
private static _getAedocSynopsis;
private static _writeLineAsComments;
}
//# sourceMappingURL=ApiReportGenerator.d.ts.map
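
The areEquivalentApiFileContents() contract described above treats two report files as equal whenever they differ only by whitespace, so newline normalization or indentation churn does not register as an API change. A tiny sketch of the same normalization, mirroring the regex used in the implementation further below:

// Collapse every run of whitespace (including \r and \n) to a single space,
// then compare the normalized strings.
function areEquivalent(actualFileContent: string, expectedFileContent: string): boolean {
  const normalize = (s: string): string => s.replace(/\s+/g, ' ');
  return normalize(actualFileContent) === normalize(expectedFileContent);
}

// CRLF line endings and different indentation are not treated as a difference:
areEquivalent('class Foo {\r\n  bar(): void;\r\n}', 'class Foo {\n bar(): void;\n}'); // true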


@@ -0,0 +1 @@
{"version":3,"file":"ApiReportGenerator.d.ts","sourceRoot":"","sources":["../../src/generators/ApiReportGenerator.ts"],"names":[],"mappings":"AAOA,OAAO,EAAE,SAAS,EAAE,MAAM,wBAAwB,CAAC;AAiBnD,qBAAa,kBAAkB;IAC7B,OAAO,CAAC,MAAM,CAAC,iBAAiB,CAAmB;IAEnD;;;;;;OAMG;WACW,4BAA4B,CACxC,iBAAiB,EAAE,MAAM,EACzB,mBAAmB,EAAE,MAAM,GAC1B,OAAO;WAOI,yBAAyB,CAAC,SAAS,EAAE,SAAS,GAAG,MAAM;IA6MrE;;OAEG;IACH,OAAO,CAAC,MAAM,CAAC,WAAW;IAqL1B,OAAO,CAAC,MAAM,CAAC,sBAAsB;IAMrC;;OAEG;IACH,OAAO,CAAC,MAAM,CAAC,yBAAyB;IA8CxC;;;;OAIG;IACH,OAAO,CAAC,MAAM,CAAC,iBAAiB;IAgEhC,OAAO,CAAC,MAAM,CAAC,oBAAoB;CAQpC"}


@@ -0,0 +1,455 @@
"use strict";
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
// See LICENSE in the project root for license information.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ApiReportGenerator = void 0;
const ts = __importStar(require("typescript"));
const node_core_library_1 = require("@rushstack/node-core-library");
const api_extractor_model_1 = require("@microsoft/api-extractor-model");
const Collector_1 = require("../collector/Collector");
const TypeScriptHelpers_1 = require("../analyzer/TypeScriptHelpers");
const Span_1 = require("../analyzer/Span");
const AstDeclaration_1 = require("../analyzer/AstDeclaration");
const AstImport_1 = require("../analyzer/AstImport");
const AstSymbol_1 = require("../analyzer/AstSymbol");
const IndentedWriter_1 = require("./IndentedWriter");
const DtsEmitHelpers_1 = require("./DtsEmitHelpers");
const AstNamespaceImport_1 = require("../analyzer/AstNamespaceImport");
const SourceFileLocationFormatter_1 = require("../analyzer/SourceFileLocationFormatter");
class ApiReportGenerator {
/**
* Compares the contents of two API files that were created using ApiFileGenerator,
* and returns true if they are equivalent. Note that these files are not normally edited
* by a human; the "equivalence" comparison here is intended to ignore spurious changes that
* might be introduced by a tool, e.g. Git newline normalization or an editor that strips
* whitespace when saving.
*/
static areEquivalentApiFileContents(actualFileContent, expectedFileContent) {
// NOTE: "\s" also matches "\r" and "\n"
const normalizedActual = actualFileContent.replace(/[\s]+/g, ' ');
const normalizedExpected = expectedFileContent.replace(/[\s]+/g, ' ');
return normalizedActual === normalizedExpected;
}
static generateReviewFileContent(collector) {
const writer = new IndentedWriter_1.IndentedWriter();
writer.trimLeadingSpaces = true;
writer.writeLine([
`## API Report File for "${collector.workingPackage.name}"`,
``,
`> Do not edit this file. It is a report generated by [API Extractor](https://api-extractor.com/).`,
``
].join('\n'));
// Write the opening delimiter for the Markdown code fence
writer.writeLine('```ts\n');
// Emit the triple slash directives
for (const typeDirectiveReference of Array.from(collector.dtsTypeReferenceDirectives).sort()) {
// https://github.com/microsoft/TypeScript/blob/611ebc7aadd7a44a4c0447698bfda9222a78cb66/src/compiler/declarationEmitter.ts#L162
writer.writeLine(`/// <reference types="${typeDirectiveReference}" />`);
}
for (const libDirectiveReference of Array.from(collector.dtsLibReferenceDirectives).sort()) {
writer.writeLine(`/// <reference lib="${libDirectiveReference}" />`);
}
writer.ensureSkippedLine();
// Emit the imports
for (const entity of collector.entities) {
if (entity.astEntity instanceof AstImport_1.AstImport) {
DtsEmitHelpers_1.DtsEmitHelpers.emitImport(writer, entity, entity.astEntity);
}
}
writer.ensureSkippedLine();
// Emit the regular declarations
for (const entity of collector.entities) {
const astEntity = entity.astEntity;
if (entity.consumable || collector.extractorConfig.apiReportIncludeForgottenExports) {
const exportsToEmit = new Map();
for (const exportName of entity.exportNames) {
if (!entity.shouldInlineExport) {
exportsToEmit.set(exportName, { exportName, associatedMessages: [] });
}
}
if (astEntity instanceof AstSymbol_1.AstSymbol) {
// Emit all the declarations for this entity
for (const astDeclaration of astEntity.astDeclarations || []) {
// Get the messages associated with this declaration
const fetchedMessages = collector.messageRouter.fetchAssociatedMessagesForReviewFile(astDeclaration);
// Peel off the messages associated with an export statement and store them
// in IExportToEmit.associatedMessages (to be processed later). The remaining messages will
// be added to messagesToReport, to be emitted next to the declaration instead of the export statement.
const messagesToReport = [];
for (const message of fetchedMessages) {
if (message.properties.exportName) {
const exportToEmit = exportsToEmit.get(message.properties.exportName);
if (exportToEmit) {
exportToEmit.associatedMessages.push(message);
continue;
}
}
messagesToReport.push(message);
}
writer.ensureSkippedLine();
writer.write(ApiReportGenerator._getAedocSynopsis(collector, astDeclaration, messagesToReport));
const span = new Span_1.Span(astDeclaration.declaration);
const apiItemMetadata = collector.fetchApiItemMetadata(astDeclaration);
if (apiItemMetadata.isPreapproved) {
ApiReportGenerator._modifySpanForPreapproved(span);
}
else {
ApiReportGenerator._modifySpan(collector, span, entity, astDeclaration, false);
}
span.writeModifiedText(writer);
writer.ensureNewLine();
}
}
if (astEntity instanceof AstNamespaceImport_1.AstNamespaceImport) {
const astModuleExportInfo = astEntity.fetchAstModuleExportInfo(collector);
if (entity.nameForEmit === undefined) {
// This should never happen
throw new node_core_library_1.InternalError('referencedEntry.nameForEmit is undefined');
}
if (astModuleExportInfo.starExportedExternalModules.size > 0) {
// We could support this, but we would need to find a way to safely represent it.
throw new Error(`The ${entity.nameForEmit} namespace import includes a star export, which is not supported:\n` +
SourceFileLocationFormatter_1.SourceFileLocationFormatter.formatDeclaration(astEntity.declaration));
}
// Emit a synthetic declaration for the namespace. It will look like this:
//
// declare namespace example {
// export {
// f1,
// f2
// }
// }
//
// Note that we do not try to relocate f1()/f2() to be inside the namespace because other type
// signatures may reference them directly (without using the namespace qualifier).
writer.ensureSkippedLine();
writer.writeLine(`declare namespace ${entity.nameForEmit} {`);
// all local exports of local imported module are just references to top-level declarations
writer.increaseIndent();
writer.writeLine('export {');
writer.increaseIndent();
const exportClauses = [];
for (const [exportedName, exportedEntity] of astModuleExportInfo.exportedLocalEntities) {
const collectorEntity = collector.tryGetCollectorEntity(exportedEntity);
if (collectorEntity === undefined) {
// This should never happen
// top-level exports of local imported module should be added as collector entities before
throw new node_core_library_1.InternalError(`Cannot find collector entity for ${entity.nameForEmit}.${exportedEntity.localName}`);
}
if (collectorEntity.nameForEmit === exportedName) {
exportClauses.push(collectorEntity.nameForEmit);
}
else {
exportClauses.push(`${collectorEntity.nameForEmit} as ${exportedName}`);
}
}
writer.writeLine(exportClauses.join(',\n'));
writer.decreaseIndent();
writer.writeLine('}'); // end of "export { ... }"
writer.decreaseIndent();
writer.writeLine('}'); // end of "declare namespace { ... }"
}
// Now emit the export statements for this entity.
for (const exportToEmit of exportsToEmit.values()) {
// Write any associated messages
if (exportToEmit.associatedMessages.length > 0) {
writer.ensureSkippedLine();
for (const message of exportToEmit.associatedMessages) {
ApiReportGenerator._writeLineAsComments(writer, 'Warning: ' + message.formatMessageWithoutLocation());
}
}
DtsEmitHelpers_1.DtsEmitHelpers.emitNamedExport(writer, exportToEmit.exportName, entity);
}
writer.ensureSkippedLine();
}
}
DtsEmitHelpers_1.DtsEmitHelpers.emitStarExports(writer, collector);
// Write the unassociated warnings at the bottom of the file
const unassociatedMessages = collector.messageRouter.fetchUnassociatedMessagesForReviewFile();
if (unassociatedMessages.length > 0) {
writer.ensureSkippedLine();
ApiReportGenerator._writeLineAsComments(writer, 'Warnings were encountered during analysis:');
ApiReportGenerator._writeLineAsComments(writer, '');
for (const unassociatedMessage of unassociatedMessages) {
ApiReportGenerator._writeLineAsComments(writer, unassociatedMessage.formatMessageWithLocation(collector.workingPackage.packageFolder));
}
}
if (collector.workingPackage.tsdocComment === undefined) {
writer.ensureSkippedLine();
ApiReportGenerator._writeLineAsComments(writer, '(No @packageDocumentation comment for this package)');
}
// Write the closing delimiter for the Markdown code fence
writer.ensureSkippedLine();
writer.writeLine('```');
// Remove any trailing spaces
return writer.toString().replace(ApiReportGenerator._trimSpacesRegExp, '');
}
/**
* Before writing out a declaration, _modifySpan() applies various fixups to make it nice.
*/
static _modifySpan(collector, span, entity, astDeclaration, insideTypeLiteral) {
// Should we process this declaration at all?
// eslint-disable-next-line no-bitwise
if (!ApiReportGenerator._shouldIncludeInReport(astDeclaration)) {
span.modification.skipAll();
return;
}
const previousSpan = span.previousSibling;
let recurseChildren = true;
let sortChildren = false;
switch (span.kind) {
case ts.SyntaxKind.JSDocComment:
span.modification.skipAll();
// For now, we don't transform JSDoc comment nodes at all
recurseChildren = false;
break;
case ts.SyntaxKind.ExportKeyword:
case ts.SyntaxKind.DefaultKeyword:
case ts.SyntaxKind.DeclareKeyword:
// Delete any explicit "export" or "declare" keywords -- we will re-add them below
span.modification.skipAll();
break;
case ts.SyntaxKind.InterfaceKeyword:
case ts.SyntaxKind.ClassKeyword:
case ts.SyntaxKind.EnumKeyword:
case ts.SyntaxKind.NamespaceKeyword:
case ts.SyntaxKind.ModuleKeyword:
case ts.SyntaxKind.TypeKeyword:
case ts.SyntaxKind.FunctionKeyword:
// Replace the stuff we possibly deleted above
let replacedModifiers = '';
if (entity.shouldInlineExport) {
replacedModifiers = 'export ' + replacedModifiers;
}
if (previousSpan && previousSpan.kind === ts.SyntaxKind.SyntaxList) {
// If there is a previous span of type SyntaxList, then apply it before any other modifiers
// (e.g. "abstract") that appear there.
previousSpan.modification.prefix = replacedModifiers + previousSpan.modification.prefix;
}
else {
// Otherwise just stick it in front of this span
span.modification.prefix = replacedModifiers + span.modification.prefix;
}
break;
case ts.SyntaxKind.SyntaxList:
if (span.parent) {
if (AstDeclaration_1.AstDeclaration.isSupportedSyntaxKind(span.parent.kind)) {
// If the immediate parent is an API declaration, and the immediate children are API declarations,
// then sort the children alphabetically
sortChildren = true;
}
else if (span.parent.kind === ts.SyntaxKind.ModuleBlock) {
// Namespaces are special because their chain goes ModuleDeclaration -> ModuleBlock -> SyntaxList
sortChildren = true;
}
}
break;
case ts.SyntaxKind.VariableDeclaration:
if (!span.parent) {
// The VariableDeclaration node is part of a VariableDeclarationList, however
// the Entry.followedSymbol points to the VariableDeclaration part because
// multiple definitions might share the same VariableDeclarationList.
//
// Since we are emitting a separate declaration for each one, we need to look upwards
// in the ts.Node tree and write a copy of the enclosing VariableDeclarationList
// content (e.g. "var" from "var x=1, y=2").
const list = TypeScriptHelpers_1.TypeScriptHelpers.matchAncestor(span.node, [
ts.SyntaxKind.VariableDeclarationList,
ts.SyntaxKind.VariableDeclaration
]);
if (!list) {
// This should not happen unless the compiler API changes somehow
throw new node_core_library_1.InternalError('Unsupported variable declaration');
}
const listPrefix = list
.getSourceFile()
.text.substring(list.getStart(), list.declarations[0].getStart());
span.modification.prefix = listPrefix + span.modification.prefix;
span.modification.suffix = ';';
if (entity.shouldInlineExport) {
span.modification.prefix = 'export ' + span.modification.prefix;
}
}
break;
case ts.SyntaxKind.Identifier:
const referencedEntity = collector.tryGetEntityForNode(span.node);
if (referencedEntity) {
if (!referencedEntity.nameForEmit) {
// This should never happen
throw new node_core_library_1.InternalError('referencedEntry.nameForEmit is undefined');
}
span.modification.prefix = referencedEntity.nameForEmit;
// For debugging:
// span.modification.prefix += '/*R=FIX*/';
}
else {
// For debugging:
// span.modification.prefix += '/*R=KEEP*/';
}
break;
case ts.SyntaxKind.TypeLiteral:
insideTypeLiteral = true;
break;
case ts.SyntaxKind.ImportType:
DtsEmitHelpers_1.DtsEmitHelpers.modifyImportTypeSpan(collector, span, astDeclaration, (childSpan, childAstDeclaration) => {
ApiReportGenerator._modifySpan(collector, childSpan, entity, childAstDeclaration, insideTypeLiteral);
});
break;
}
if (recurseChildren) {
for (const child of span.children) {
let childAstDeclaration = astDeclaration;
if (AstDeclaration_1.AstDeclaration.isSupportedSyntaxKind(child.kind)) {
childAstDeclaration = collector.astSymbolTable.getChildAstDeclarationByNode(child.node, astDeclaration);
if (ApiReportGenerator._shouldIncludeInReport(childAstDeclaration)) {
if (sortChildren) {
span.modification.sortChildren = true;
child.modification.sortKey = Collector_1.Collector.getSortKeyIgnoringUnderscore(childAstDeclaration.astSymbol.localName);
}
if (!insideTypeLiteral) {
const messagesToReport = collector.messageRouter.fetchAssociatedMessagesForReviewFile(childAstDeclaration);
// NOTE: This generates ae-undocumented messages as a side effect
const aedocSynopsis = ApiReportGenerator._getAedocSynopsis(collector, childAstDeclaration, messagesToReport);
child.modification.prefix = aedocSynopsis + child.modification.prefix;
}
}
}
ApiReportGenerator._modifySpan(collector, child, entity, childAstDeclaration, insideTypeLiteral);
}
}
}
static _shouldIncludeInReport(astDeclaration) {
// Private declarations are not included in the API report
// eslint-disable-next-line no-bitwise
return (astDeclaration.modifierFlags & ts.ModifierFlags.Private) === 0;
}
/**
* For declarations marked as `@preapproved`, this is used instead of _modifySpan().
*/
static _modifySpanForPreapproved(span) {
// Match something like this:
//
// ClassDeclaration:
// SyntaxList:
// ExportKeyword: pre=[export] sep=[ ]
// DeclareKeyword: pre=[declare] sep=[ ]
// ClassKeyword: pre=[class] sep=[ ]
// Identifier: pre=[_PreapprovedClass] sep=[ ]
// FirstPunctuation: pre=[{] sep=[\n\n ]
// SyntaxList:
// ...
// CloseBraceToken: pre=[}]
//
// or this:
// ModuleDeclaration:
// SyntaxList:
// ExportKeyword: pre=[export] sep=[ ]
// DeclareKeyword: pre=[declare] sep=[ ]
// NamespaceKeyword: pre=[namespace] sep=[ ]
// Identifier: pre=[_PreapprovedNamespace] sep=[ ]
// ModuleBlock:
// FirstPunctuation: pre=[{] sep=[\n\n ]
// SyntaxList:
// ...
// CloseBraceToken: pre=[}]
//
// And reduce it to something like this:
//
// // @internal (undocumented)
// class _PreapprovedClass { /* (preapproved) */ }
//
let skipRest = false;
for (const child of span.children) {
if (skipRest || child.kind === ts.SyntaxKind.SyntaxList || child.kind === ts.SyntaxKind.JSDocComment) {
child.modification.skipAll();
}
if (child.kind === ts.SyntaxKind.Identifier) {
skipRest = true;
child.modification.omitSeparatorAfter = true;
child.modification.suffix = ' { /* (preapproved) */ }';
}
}
}
/**
* Writes a synopsis of the AEDoc comments, which indicates the release tag,
* whether the item has been documented, and any warnings that were detected
* by the analysis.
*/
static _getAedocSynopsis(collector, astDeclaration, messagesToReport) {
const writer = new IndentedWriter_1.IndentedWriter();
for (const message of messagesToReport) {
ApiReportGenerator._writeLineAsComments(writer, 'Warning: ' + message.formatMessageWithoutLocation());
}
if (!collector.isAncillaryDeclaration(astDeclaration)) {
const footerParts = [];
const apiItemMetadata = collector.fetchApiItemMetadata(astDeclaration);
if (!apiItemMetadata.releaseTagSameAsParent) {
if (apiItemMetadata.effectiveReleaseTag !== api_extractor_model_1.ReleaseTag.None) {
footerParts.push(api_extractor_model_1.ReleaseTag.getTagName(apiItemMetadata.effectiveReleaseTag));
}
}
if (apiItemMetadata.isSealed) {
footerParts.push('@sealed');
}
if (apiItemMetadata.isVirtual) {
footerParts.push('@virtual');
}
if (apiItemMetadata.isOverride) {
footerParts.push('@override');
}
if (apiItemMetadata.isEventProperty) {
footerParts.push('@eventProperty');
}
if (apiItemMetadata.tsdocComment) {
if (apiItemMetadata.tsdocComment.deprecatedBlock) {
footerParts.push('@deprecated');
}
}
if (apiItemMetadata.undocumented) {
footerParts.push('(undocumented)');
collector.messageRouter.addAnalyzerIssue("ae-undocumented" /* ExtractorMessageId.Undocumented */, `Missing documentation for "${astDeclaration.astSymbol.localName}".`, astDeclaration);
}
if (footerParts.length > 0) {
if (messagesToReport.length > 0) {
ApiReportGenerator._writeLineAsComments(writer, ''); // skip a line after the warnings
}
ApiReportGenerator._writeLineAsComments(writer, footerParts.join(' '));
}
}
return writer.toString();
}
static _writeLineAsComments(writer, line) {
const lines = node_core_library_1.Text.convertToLf(line).split('\n');
for (const realLine of lines) {
writer.write('// ');
writer.write(realLine);
writer.writeLine();
}
}
}
ApiReportGenerator._trimSpacesRegExp = / +$/gm;
exports.ApiReportGenerator = ApiReportGenerator;
//# sourceMappingURL=ApiReportGenerator.js.map

File diff suppressed because one or more lines are too long

@ -0,0 +1,26 @@
import * as ts from 'typescript';
import { DeclarationReference } from '@microsoft/tsdoc/lib-commonjs/beta/DeclarationReference';
import type { Collector } from '../collector/Collector';
export declare class DeclarationReferenceGenerator {
static readonly unknownReference: string;
private _collector;
constructor(collector: Collector);
/**
* Gets the UID for a TypeScript Identifier that references a type.
*/
getDeclarationReferenceForIdentifier(node: ts.Identifier): DeclarationReference | undefined;
/**
* Gets the DeclarationReference for a TypeScript Symbol for a given meaning.
*/
getDeclarationReferenceForSymbol(symbol: ts.Symbol, meaning: ts.SymbolFlags): DeclarationReference | undefined;
private static _isInExpressionContext;
private static _isExternalModuleSymbol;
private static _isSameSymbol;
private _getNavigationToSymbol;
private static _getMeaningOfSymbol;
private _symbolToDeclarationReference;
private _getParentReference;
private _getPackageName;
private _sourceFileToModuleSource;
}
//# sourceMappingURL=DeclarationReferenceGenerator.d.ts.map

@ -0,0 +1 @@
{"version":3,"file":"DeclarationReferenceGenerator.d.ts","sourceRoot":"","sources":["../../src/generators/DeclarationReferenceGenerator.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAE,MAAM,YAAY,CAAC;AACjC,OAAO,EACL,oBAAoB,EAKrB,MAAM,yDAAyD,CAAC;AAIjE,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,wBAAwB,CAAC;AAIxD,qBAAa,6BAA6B;IACxC,gBAAuB,gBAAgB,EAAE,MAAM,CAAO;IAEtD,OAAO,CAAC,UAAU,CAAY;gBAEX,SAAS,EAAE,SAAS;IAIvC;;OAEG;IACI,oCAAoC,CAAC,IAAI,EAAE,EAAE,CAAC,UAAU,GAAG,oBAAoB,GAAG,SAAS;IAkBlG;;OAEG;IACI,gCAAgC,CACrC,MAAM,EAAE,EAAE,CAAC,MAAM,EACjB,OAAO,EAAE,EAAE,CAAC,WAAW,GACtB,oBAAoB,GAAG,SAAS;IAInC,OAAO,CAAC,MAAM,CAAC,sBAAsB;IAYrC,OAAO,CAAC,MAAM,CAAC,uBAAuB;IAQtC,OAAO,CAAC,MAAM,CAAC,aAAa;IAY5B,OAAO,CAAC,sBAAsB;IAoD9B,OAAO,CAAC,MAAM,CAAC,mBAAmB;IAiDlC,OAAO,CAAC,6BAA6B;IAiFrC,OAAO,CAAC,mBAAmB;IA8E3B,OAAO,CAAC,eAAe;IAavB,OAAO,CAAC,yBAAyB;CAelC"}

@ -0,0 +1,319 @@
"use strict";
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
// See LICENSE in the project root for license information.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.DeclarationReferenceGenerator = void 0;
/* eslint-disable no-bitwise */
const ts = __importStar(require("typescript"));
const DeclarationReference_1 = require("@microsoft/tsdoc/lib-commonjs/beta/DeclarationReference");
const node_core_library_1 = require("@rushstack/node-core-library");
const TypeScriptHelpers_1 = require("../analyzer/TypeScriptHelpers");
const TypeScriptInternals_1 = require("../analyzer/TypeScriptInternals");
const AstNamespaceImport_1 = require("../analyzer/AstNamespaceImport");
class DeclarationReferenceGenerator {
constructor(collector) {
this._collector = collector;
}
/**
* Gets the UID for a TypeScript Identifier that references a type.
*/
getDeclarationReferenceForIdentifier(node) {
const symbol = this._collector.typeChecker.getSymbolAtLocation(node);
if (symbol !== undefined) {
const isExpression = DeclarationReferenceGenerator._isInExpressionContext(node);
return (this.getDeclarationReferenceForSymbol(symbol, isExpression ? ts.SymbolFlags.Value : ts.SymbolFlags.Type) ||
this.getDeclarationReferenceForSymbol(symbol, isExpression ? ts.SymbolFlags.Type : ts.SymbolFlags.Value) ||
this.getDeclarationReferenceForSymbol(symbol, ts.SymbolFlags.Namespace));
}
}
/**
* Gets the DeclarationReference for a TypeScript Symbol for a given meaning.
*/
getDeclarationReferenceForSymbol(symbol, meaning) {
return this._symbolToDeclarationReference(symbol, meaning, /*includeModuleSymbols*/ false);
}
static _isInExpressionContext(node) {
switch (node.parent.kind) {
case ts.SyntaxKind.TypeQuery:
case ts.SyntaxKind.ComputedPropertyName:
return true;
case ts.SyntaxKind.QualifiedName:
return DeclarationReferenceGenerator._isInExpressionContext(node.parent);
default:
return false;
}
}
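    // An "external module symbol" is a ValueModule symbol whose value declaration is a source file.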
static _isExternalModuleSymbol(symbol) {
return (!!(symbol.flags & ts.SymbolFlags.ValueModule) &&
symbol.valueDeclaration !== undefined &&
ts.isSourceFile(symbol.valueDeclaration));
}
static _isSameSymbol(left, right) {
return (left === right ||
!!(left &&
left.valueDeclaration &&
right.valueDeclaration &&
left.valueDeclaration === right.valueDeclaration));
}
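    // Chooses the navigation step used to reach the symbol from its parent:
    // Exports ("."), Members ("#"), or Locals ("~").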
_getNavigationToSymbol(symbol) {
const declaration = TypeScriptHelpers_1.TypeScriptHelpers.tryGetADeclaration(symbol);
const sourceFile = declaration === null || declaration === void 0 ? void 0 : declaration.getSourceFile();
const parent = TypeScriptInternals_1.TypeScriptInternals.getSymbolParent(symbol);
// If it's global or from an external library, then use either Members or Exports. It's not possible for
// global symbols or external library symbols to be Locals.
const isGlobal = !!sourceFile && !ts.isExternalModule(sourceFile);
const isFromExternalLibrary = !!sourceFile && this._collector.program.isSourceFileFromExternalLibrary(sourceFile);
if (isGlobal || isFromExternalLibrary) {
if (parent &&
parent.members &&
DeclarationReferenceGenerator._isSameSymbol(parent.members.get(symbol.escapedName), symbol)) {
return "#" /* Navigation.Members */;
}
return "." /* Navigation.Exports */;
}
// Otherwise, this symbol is from the current package. If we've found an associated consumable
// `CollectorEntity`, then use Exports. We use `consumable` here instead of `exported` because
// if the symbol is exported from a non-consumable `AstNamespaceImport`, we don't want to use
// Exports. We should use Locals instead.
const entity = this._collector.tryGetEntityForSymbol(symbol);
if (entity === null || entity === void 0 ? void 0 : entity.consumable) {
return "." /* Navigation.Exports */;
}
// If its parent symbol is not a source file, then use either Exports or Members. If the parent symbol
// is a source file, but it wasn't exported from the package entry point (in the check above), then the
// symbol is a local, so fall through below.
if (parent && !DeclarationReferenceGenerator._isExternalModuleSymbol(parent)) {
if (parent.members &&
DeclarationReferenceGenerator._isSameSymbol(parent.members.get(symbol.escapedName), symbol)) {
return "#" /* Navigation.Members */;
}
return "." /* Navigation.Exports */;
}
// Otherwise, we have a local symbol, so use a Locals navigation. These are either:
//
// 1. Symbols that are exported from a file module but not the package entry point.
// 2. Symbols that are not exported from their parent module.
return "~" /* Navigation.Locals */;
}
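    // Maps the symbol's flags (restricted to the requested meaning) onto a TSDoc declaration reference meaning.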
static _getMeaningOfSymbol(symbol, meaning) {
if (symbol.flags & meaning & ts.SymbolFlags.Class) {
return "class" /* Meaning.Class */;
}
if (symbol.flags & meaning & ts.SymbolFlags.Enum) {
return "enum" /* Meaning.Enum */;
}
if (symbol.flags & meaning & ts.SymbolFlags.Interface) {
return "interface" /* Meaning.Interface */;
}
if (symbol.flags & meaning & ts.SymbolFlags.TypeAlias) {
return "type" /* Meaning.TypeAlias */;
}
if (symbol.flags & meaning & ts.SymbolFlags.Function) {
return "function" /* Meaning.Function */;
}
if (symbol.flags & meaning & ts.SymbolFlags.Variable) {
return "var" /* Meaning.Variable */;
}
if (symbol.flags & meaning & ts.SymbolFlags.Module) {
return "namespace" /* Meaning.Namespace */;
}
if (symbol.flags & meaning & ts.SymbolFlags.ClassMember) {
return "member" /* Meaning.Member */;
}
if (symbol.flags & meaning & ts.SymbolFlags.Constructor) {
return "constructor" /* Meaning.Constructor */;
}
if (symbol.flags & meaning & ts.SymbolFlags.EnumMember) {
return "member" /* Meaning.Member */;
}
if (symbol.flags & meaning & ts.SymbolFlags.Signature) {
if (symbol.escapedName === ts.InternalSymbolName.Call) {
return "call" /* Meaning.CallSignature */;
}
if (symbol.escapedName === ts.InternalSymbolName.New) {
return "new" /* Meaning.ConstructSignature */;
}
if (symbol.escapedName === ts.InternalSymbolName.Index) {
return "index" /* Meaning.IndexSignature */;
}
}
if (symbol.flags & meaning & ts.SymbolFlags.TypeParameter) {
            // This should have already been handled in `getDeclarationReferenceForSymbol`.
throw new node_core_library_1.InternalError('Not supported.');
}
return undefined;
}
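    // Builds the complete DeclarationReference for a symbol: follows export/alias symbols,
    // resolves the parent reference, chooses the emitted local name, and appends a navigation
    // step tagged with the resolved meaning.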
_symbolToDeclarationReference(symbol, meaning, includeModuleSymbols) {
const declaration = TypeScriptHelpers_1.TypeScriptHelpers.tryGetADeclaration(symbol);
const sourceFile = declaration === null || declaration === void 0 ? void 0 : declaration.getSourceFile();
let followedSymbol = symbol;
if (followedSymbol.flags & ts.SymbolFlags.ExportValue) {
followedSymbol = this._collector.typeChecker.getExportSymbolOfSymbol(followedSymbol);
}
if (followedSymbol.flags & ts.SymbolFlags.Alias) {
followedSymbol = this._collector.typeChecker.getAliasedSymbol(followedSymbol);
// Without this logic, we end up following the symbol `ns` in `import * as ns from './file'` to
// the actual file `file.ts`. We don't want to do this, so revert to the original symbol.
if (followedSymbol.flags & ts.SymbolFlags.ValueModule) {
followedSymbol = symbol;
}
}
if (DeclarationReferenceGenerator._isExternalModuleSymbol(followedSymbol)) {
if (!includeModuleSymbols) {
return undefined;
}
return new DeclarationReference_1.DeclarationReference(this._sourceFileToModuleSource(sourceFile));
}
// Do not generate a declaration reference for a type parameter.
if (followedSymbol.flags & ts.SymbolFlags.TypeParameter) {
return undefined;
}
let parentRef = this._getParentReference(followedSymbol);
if (!parentRef) {
return undefined;
}
let localName = followedSymbol.name;
const entity = this._collector.tryGetEntityForSymbol(followedSymbol);
if (entity === null || entity === void 0 ? void 0 : entity.nameForEmit) {
localName = entity.nameForEmit;
}
if (followedSymbol.escapedName === ts.InternalSymbolName.Constructor) {
localName = 'constructor';
}
else {
const wellKnownName = TypeScriptHelpers_1.TypeScriptHelpers.tryDecodeWellKnownSymbolName(followedSymbol.escapedName);
if (wellKnownName) {
// TypeScript binds well-known ECMAScript symbols like 'Symbol.iterator' as '__@iterator'.
// This converts a string like '__@iterator' into the property name '[Symbol.iterator]'.
localName = wellKnownName;
}
else if (TypeScriptHelpers_1.TypeScriptHelpers.isUniqueSymbolName(followedSymbol.escapedName)) {
for (const decl of followedSymbol.declarations || []) {
const declName = ts.getNameOfDeclaration(decl);
if (declName && ts.isComputedPropertyName(declName)) {
const lateName = TypeScriptHelpers_1.TypeScriptHelpers.tryGetLateBoundName(declName);
if (lateName !== undefined) {
localName = lateName;
break;
}
}
}
}
}
const navigation = this._getNavigationToSymbol(followedSymbol);
// If the symbol is a global, ensure the source is global.
if (sourceFile && !ts.isExternalModule(sourceFile) && parentRef.source !== DeclarationReference_1.GlobalSource.instance) {
parentRef = new DeclarationReference_1.DeclarationReference(DeclarationReference_1.GlobalSource.instance);
}
return parentRef
.addNavigationStep(navigation, localName)
.withMeaning(DeclarationReferenceGenerator._getMeaningOfSymbol(followedSymbol, meaning));
}
_getParentReference(symbol) {
var _a;
const declaration = TypeScriptHelpers_1.TypeScriptHelpers.tryGetADeclaration(symbol);
const sourceFile = declaration === null || declaration === void 0 ? void 0 : declaration.getSourceFile();
// Note that it's possible for a symbol to be exported from an entry point as well as one or more
// namespaces. In that case, it's not clear what to choose as its parent. Today's logic is neither
// perfect nor particularly stable to API items being renamed and shuffled around.
const entity = this._collector.tryGetEntityForSymbol(symbol);
if (entity) {
if (entity.exportedFromEntryPoint) {
return new DeclarationReference_1.DeclarationReference(this._sourceFileToModuleSource(sourceFile));
}
const firstExportingConsumableParent = entity.getFirstExportingConsumableParent();
if (firstExportingConsumableParent &&
firstExportingConsumableParent.astEntity instanceof AstNamespaceImport_1.AstNamespaceImport) {
const parentSymbol = TypeScriptInternals_1.TypeScriptInternals.tryGetSymbolForDeclaration(firstExportingConsumableParent.astEntity.declaration, this._collector.typeChecker);
if (parentSymbol) {
return this._symbolToDeclarationReference(parentSymbol, parentSymbol.flags,
/*includeModuleSymbols*/ true);
}
}
}
// Next, try to find a parent symbol via the symbol tree.
const parentSymbol = TypeScriptInternals_1.TypeScriptInternals.getSymbolParent(symbol);
if (parentSymbol) {
return this._symbolToDeclarationReference(parentSymbol, parentSymbol.flags,
/*includeModuleSymbols*/ true);
}
// If that doesn't work, try to find a parent symbol via the node tree. As far as we can tell,
// this logic is only needed for local symbols within namespaces. For example:
//
// ```
// export namespace n {
// type SomeType = number;
// export function someFunction(): SomeType { return 5; }
// }
// ```
//
// In the example above, `SomeType` doesn't have a parent symbol per the TS internal API above,
// but its reference still needs to be qualified with the parent reference for `n`.
const grandParent = (_a = declaration === null || declaration === void 0 ? void 0 : declaration.parent) === null || _a === void 0 ? void 0 : _a.parent;
if (grandParent && ts.isModuleDeclaration(grandParent)) {
const grandParentSymbol = TypeScriptInternals_1.TypeScriptInternals.tryGetSymbolForDeclaration(grandParent, this._collector.typeChecker);
if (grandParentSymbol) {
return this._symbolToDeclarationReference(grandParentSymbol, grandParentSymbol.flags,
/*includeModuleSymbols*/ true);
}
}
// At this point, we have a local symbol in a module.
if (sourceFile && ts.isExternalModule(sourceFile)) {
return new DeclarationReference_1.DeclarationReference(this._sourceFileToModuleSource(sourceFile));
}
else {
return new DeclarationReference_1.DeclarationReference(DeclarationReference_1.GlobalSource.instance);
}
}
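    // Resolves the NPM package name that owns the source file; external files without a readable
    // package.json name fall back to the "?" unknown reference, and local files use the working package.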
_getPackageName(sourceFile) {
if (this._collector.program.isSourceFileFromExternalLibrary(sourceFile)) {
const packageJson = this._collector.packageJsonLookup.tryLoadNodePackageJsonFor(sourceFile.fileName);
if (packageJson && packageJson.name) {
return packageJson.name;
}
return DeclarationReferenceGenerator.unknownReference;
}
return this._collector.workingPackage.name;
}
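    // Maps a source file to a ModuleSource, substituting the working package name for
    // "bundledPackages" imports; non-module (global) files map to GlobalSource.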
_sourceFileToModuleSource(sourceFile) {
if (sourceFile && ts.isExternalModule(sourceFile)) {
const packageName = this._getPackageName(sourceFile);
if (this._collector.bundledPackageNames.has(packageName)) {
// The api-extractor.json config file has a "bundledPackages" setting, which causes imports from
// certain NPM packages to be treated as part of the working project. In this case, we need to
// substitute the working package name.
return new DeclarationReference_1.ModuleSource(this._collector.workingPackage.name);
}
else {
return new DeclarationReference_1.ModuleSource(packageName);
}
}
return DeclarationReference_1.GlobalSource.instance;
}
}
DeclarationReferenceGenerator.unknownReference = '?';
exports.DeclarationReferenceGenerator = DeclarationReferenceGenerator;
//# sourceMappingURL=DeclarationReferenceGenerator.js.map

File diff suppressed because one or more lines are too long

@ -0,0 +1,16 @@
import type { CollectorEntity } from '../collector/CollectorEntity';
import { AstImport } from '../analyzer/AstImport';
import { AstDeclaration } from '../analyzer/AstDeclaration';
import type { Collector } from '../collector/Collector';
import type { Span } from '../analyzer/Span';
import type { IndentedWriter } from './IndentedWriter';
/**
* Some common code shared between DtsRollupGenerator and ApiReportGenerator.
*/
export declare class DtsEmitHelpers {
static emitImport(writer: IndentedWriter, collectorEntity: CollectorEntity, astImport: AstImport): void;
static emitNamedExport(writer: IndentedWriter, exportName: string, collectorEntity: CollectorEntity): void;
static emitStarExports(writer: IndentedWriter, collector: Collector): void;
static modifyImportTypeSpan(collector: Collector, span: Span, astDeclaration: AstDeclaration, modifyNestedSpan: (childSpan: Span, childAstDeclaration: AstDeclaration) => void): void;
}
//# sourceMappingURL=DtsEmitHelpers.d.ts.map

@ -0,0 +1 @@
{"version":3,"file":"DtsEmitHelpers.d.ts","sourceRoot":"","sources":["../../src/generators/DtsEmitHelpers.ts"],"names":[],"mappings":"AAMA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,8BAA8B,CAAC;AACpE,OAAO,EAAE,SAAS,EAAiB,MAAM,uBAAuB,CAAC;AACjE,OAAO,EAAE,cAAc,EAAE,MAAM,4BAA4B,CAAC;AAC5D,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,wBAAwB,CAAC;AACxD,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,kBAAkB,CAAC;AAC7C,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,kBAAkB,CAAC;AAGvD;;GAEG;AACH,qBAAa,cAAc;WACX,UAAU,CACtB,MAAM,EAAE,cAAc,EACtB,eAAe,EAAE,eAAe,EAChC,SAAS,EAAE,SAAS,GACnB,IAAI;WAkDO,eAAe,CAC3B,MAAM,EAAE,cAAc,EACtB,UAAU,EAAE,MAAM,EAClB,eAAe,EAAE,eAAe,GAC/B,IAAI;WAUO,eAAe,CAAC,MAAM,EAAE,cAAc,EAAE,SAAS,EAAE,SAAS,GAAG,IAAI;WASnE,oBAAoB,CAChC,SAAS,EAAE,SAAS,EACpB,IAAI,EAAE,IAAI,EACV,cAAc,EAAE,cAAc,EAC9B,gBAAgB,EAAE,CAAC,SAAS,EAAE,IAAI,EAAE,mBAAmB,EAAE,cAAc,KAAK,IAAI,GAC/E,IAAI;CAwER"}

@ -0,0 +1,155 @@
"use strict";
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
// See LICENSE in the project root for license information.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.DtsEmitHelpers = void 0;
const ts = __importStar(require("typescript"));
const node_core_library_1 = require("@rushstack/node-core-library");
const AstImport_1 = require("../analyzer/AstImport");
const AstDeclaration_1 = require("../analyzer/AstDeclaration");
const SourceFileLocationFormatter_1 = require("../analyzer/SourceFileLocationFormatter");
/**
* Some common code shared between DtsRollupGenerator and ApiReportGenerator.
*/
class DtsEmitHelpers {
static emitImport(writer, collectorEntity, astImport) {
const importPrefix = astImport.isTypeOnlyEverywhere ? 'import type' : 'import';
switch (astImport.importKind) {
case AstImport_1.AstImportKind.DefaultImport:
if (collectorEntity.nameForEmit !== astImport.exportName) {
writer.write(`${importPrefix} { default as ${collectorEntity.nameForEmit} }`);
}
else {
writer.write(`${importPrefix} ${astImport.exportName}`);
}
writer.writeLine(` from '${astImport.modulePath}';`);
break;
case AstImport_1.AstImportKind.NamedImport:
if (collectorEntity.nameForEmit === astImport.exportName) {
writer.write(`${importPrefix} { ${astImport.exportName} }`);
}
else {
writer.write(`${importPrefix} { ${astImport.exportName} as ${collectorEntity.nameForEmit} }`);
}
writer.writeLine(` from '${astImport.modulePath}';`);
break;
case AstImport_1.AstImportKind.StarImport:
writer.writeLine(`${importPrefix} * as ${collectorEntity.nameForEmit} from '${astImport.modulePath}';`);
break;
case AstImport_1.AstImportKind.EqualsImport:
writer.writeLine(`${importPrefix} ${collectorEntity.nameForEmit} = require('${astImport.modulePath}');`);
break;
case AstImport_1.AstImportKind.ImportType:
if (!astImport.exportName) {
writer.writeLine(`${importPrefix} * as ${collectorEntity.nameForEmit} from '${astImport.modulePath}';`);
}
else {
const topExportName = astImport.exportName.split('.')[0];
if (collectorEntity.nameForEmit === topExportName) {
writer.write(`${importPrefix} { ${topExportName} }`);
}
else {
writer.write(`${importPrefix} { ${topExportName} as ${collectorEntity.nameForEmit} }`);
}
writer.writeLine(` from '${astImport.modulePath}';`);
}
break;
default:
throw new node_core_library_1.InternalError('Unimplemented AstImportKind');
}
}
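    // Emits a named export clause; for example "export default Foo;", "export { Foo as Bar }",
    // or "export { Foo }" depending on how the entity was exported.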
static emitNamedExport(writer, exportName, collectorEntity) {
if (exportName === ts.InternalSymbolName.Default) {
writer.writeLine(`export default ${collectorEntity.nameForEmit};`);
}
else if (collectorEntity.nameForEmit !== exportName) {
writer.writeLine(`export { ${collectorEntity.nameForEmit} as ${exportName} }`);
}
else {
writer.writeLine(`export { ${exportName} }`);
}
}
static emitStarExports(writer, collector) {
if (collector.starExportedExternalModulePaths.length > 0) {
writer.writeLine();
for (const starExportedExternalModulePath of collector.starExportedExternalModulePaths) {
writer.writeLine(`export * from "${starExportedExternalModulePath}";`);
}
}
}
static modifyImportTypeSpan(collector, span, astDeclaration, modifyNestedSpan) {
var _a, _b, _c, _d;
const node = span.node;
const referencedEntity = collector.tryGetEntityForNode(node);
if (referencedEntity) {
if (!referencedEntity.nameForEmit) {
// This should never happen
throw new node_core_library_1.InternalError('referencedEntry.nameForEmit is undefined');
}
let typeArgumentsText = '';
if (node.typeArguments && node.typeArguments.length > 0) {
// Type arguments have to be processed and written to the document
const lessThanTokenPos = span.children.findIndex((childSpan) => childSpan.node.kind === ts.SyntaxKind.LessThanToken);
const greaterThanTokenPos = span.children.findIndex((childSpan) => childSpan.node.kind === ts.SyntaxKind.GreaterThanToken);
if (lessThanTokenPos < 0 || greaterThanTokenPos <= lessThanTokenPos) {
throw new node_core_library_1.InternalError(`Invalid type arguments: ${node.getText()}\n` +
SourceFileLocationFormatter_1.SourceFileLocationFormatter.formatDeclaration(node));
}
const typeArgumentsSpans = span.children.slice(lessThanTokenPos + 1, greaterThanTokenPos);
// Apply modifications to Span elements of typeArguments
typeArgumentsSpans.forEach((childSpan) => {
const childAstDeclaration = AstDeclaration_1.AstDeclaration.isSupportedSyntaxKind(childSpan.kind)
? collector.astSymbolTable.getChildAstDeclarationByNode(childSpan.node, astDeclaration)
: astDeclaration;
modifyNestedSpan(childSpan, childAstDeclaration);
});
const typeArgumentsStrings = typeArgumentsSpans.map((childSpan) => childSpan.getModifiedText());
typeArgumentsText = `<${typeArgumentsStrings.join(', ')}>`;
}
const separatorAfter = (_b = (_a = /(\s*)$/.exec(span.getText())) === null || _a === void 0 ? void 0 : _a[1]) !== null && _b !== void 0 ? _b : '';
if (referencedEntity.astEntity instanceof AstImport_1.AstImport &&
referencedEntity.astEntity.importKind === AstImport_1.AstImportKind.ImportType &&
referencedEntity.astEntity.exportName) {
// For an ImportType with a namespace chain, only the top namespace is imported.
// Must add the original nested qualifiers to the rolled up import.
const qualifiersText = (_d = (_c = node.qualifier) === null || _c === void 0 ? void 0 : _c.getText()) !== null && _d !== void 0 ? _d : '';
const nestedQualifiersStart = qualifiersText.indexOf('.');
// Including the leading "."
const nestedQualifiersText = nestedQualifiersStart >= 0 ? qualifiersText.substring(nestedQualifiersStart) : '';
const replacement = `${referencedEntity.nameForEmit}${nestedQualifiersText}${typeArgumentsText}${separatorAfter}`;
span.modification.skipAll();
span.modification.prefix = replacement;
}
else {
// Replace with internal symbol or AstImport
span.modification.skipAll();
span.modification.prefix = `${referencedEntity.nameForEmit}${typeArgumentsText}${separatorAfter}`;
}
}
}
}
exports.DtsEmitHelpers = DtsEmitHelpers;
//# sourceMappingURL=DtsEmitHelpers.js.map

File diff suppressed because one or more lines are too long

@ -0,0 +1,45 @@
import { type NewlineKind } from '@rushstack/node-core-library';
import type { Collector } from '../collector/Collector';
/**
* Used with DtsRollupGenerator.writeTypingsFile()
*/
export declare enum DtsRollupKind {
/**
* Generate a *.d.ts file for an internal release, or for the trimming=false mode.
* This output file will contain all definitions that are reachable from the entry point.
*/
InternalRelease = 0,
/**
* Generate a *.d.ts file for a preview release.
* This output file will contain all definitions that are reachable from the entry point,
* except definitions marked as \@internal.
*/
AlphaRelease = 1,
/**
* Generate a *.d.ts file for a preview release.
* This output file will contain all definitions that are reachable from the entry point,
* except definitions marked as \@alpha or \@internal.
*/
BetaRelease = 2,
/**
* Generate a *.d.ts file for a public release.
* This output file will contain all definitions that are reachable from the entry point,
* except definitions marked as \@beta, \@alpha, or \@internal.
*/
PublicRelease = 3
}
export declare class DtsRollupGenerator {
/**
* Generates the typings file and writes it to disk.
*
* @param dtsFilename - The *.d.ts output filename
*/
static writeTypingsFile(collector: Collector, dtsFilename: string, dtsKind: DtsRollupKind, newlineKind: NewlineKind): void;
private static _generateTypingsFileContent;
/**
* Before writing out a declaration, _modifySpan() applies various fixups to make it nice.
*/
private static _modifySpan;
private static _shouldIncludeReleaseTag;
}
//# sourceMappingURL=DtsRollupGenerator.d.ts.map

@ -0,0 +1 @@
{"version":3,"file":"DtsRollupGenerator.d.ts","sourceRoot":"","sources":["../../src/generators/DtsRollupGenerator.ts"],"names":[],"mappings":"AAMA,OAAO,EAAc,KAAK,WAAW,EAAiB,MAAM,8BAA8B,CAAC;AAG3F,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,wBAAwB,CAAC;AAiBxD;;GAEG;AACH,oBAAY,aAAa;IACvB;;;OAGG;IACH,eAAe,IAAA;IAEf;;;;OAIG;IACH,YAAY,IAAA;IAEZ;;;;OAIG;IACH,WAAW,IAAA;IAEX;;;;OAIG;IACH,aAAa,IAAA;CACd;AAED,qBAAa,kBAAkB;IAC7B;;;;OAIG;WACW,gBAAgB,CAC5B,SAAS,EAAE,SAAS,EACpB,WAAW,EAAE,MAAM,EACnB,OAAO,EAAE,aAAa,EACtB,WAAW,EAAE,WAAW,GACvB,IAAI;IAYP,OAAO,CAAC,MAAM,CAAC,2BAA2B;IA4K1C;;OAEG;IACH,OAAO,CAAC,MAAM,CAAC,WAAW;IA6M1B,OAAO,CAAC,MAAM,CAAC,wBAAwB;CAyBxC"}

@ -0,0 +1,416 @@
"use strict";
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
// See LICENSE in the project root for license information.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.DtsRollupGenerator = exports.DtsRollupKind = void 0;
/* eslint-disable no-bitwise */
const ts = __importStar(require("typescript"));
const node_core_library_1 = require("@rushstack/node-core-library");
const api_extractor_model_1 = require("@microsoft/api-extractor-model");
const TypeScriptHelpers_1 = require("../analyzer/TypeScriptHelpers");
const Span_1 = require("../analyzer/Span");
const AstImport_1 = require("../analyzer/AstImport");
const AstDeclaration_1 = require("../analyzer/AstDeclaration");
const AstSymbol_1 = require("../analyzer/AstSymbol");
const IndentedWriter_1 = require("./IndentedWriter");
const DtsEmitHelpers_1 = require("./DtsEmitHelpers");
const AstNamespaceImport_1 = require("../analyzer/AstNamespaceImport");
const SourceFileLocationFormatter_1 = require("../analyzer/SourceFileLocationFormatter");
/**
* Used with DtsRollupGenerator.writeTypingsFile()
*/
var DtsRollupKind;
(function (DtsRollupKind) {
/**
* Generate a *.d.ts file for an internal release, or for the trimming=false mode.
* This output file will contain all definitions that are reachable from the entry point.
*/
DtsRollupKind[DtsRollupKind["InternalRelease"] = 0] = "InternalRelease";
/**
* Generate a *.d.ts file for a preview release.
* This output file will contain all definitions that are reachable from the entry point,
* except definitions marked as \@internal.
*/
DtsRollupKind[DtsRollupKind["AlphaRelease"] = 1] = "AlphaRelease";
/**
* Generate a *.d.ts file for a preview release.
* This output file will contain all definitions that are reachable from the entry point,
* except definitions marked as \@alpha or \@internal.
*/
DtsRollupKind[DtsRollupKind["BetaRelease"] = 2] = "BetaRelease";
/**
* Generate a *.d.ts file for a public release.
* This output file will contain all definitions that are reachable from the entry point,
* except definitions marked as \@beta, \@alpha, or \@internal.
*/
DtsRollupKind[DtsRollupKind["PublicRelease"] = 3] = "PublicRelease";
})(DtsRollupKind = exports.DtsRollupKind || (exports.DtsRollupKind = {}));
class DtsRollupGenerator {
/**
* Generates the typings file and writes it to disk.
*
* @param dtsFilename - The *.d.ts output filename
*/
static writeTypingsFile(collector, dtsFilename, dtsKind, newlineKind) {
const writer = new IndentedWriter_1.IndentedWriter();
writer.trimLeadingSpaces = true;
DtsRollupGenerator._generateTypingsFileContent(collector, writer, dtsKind);
node_core_library_1.FileSystem.writeFile(dtsFilename, writer.toString(), {
convertLineEndings: newlineKind,
ensureFolderExists: true
});
}
static _generateTypingsFileContent(collector, writer, dtsKind) {
// Emit the @packageDocumentation comment at the top of the file
if (collector.workingPackage.tsdocParserContext) {
writer.trimLeadingSpaces = false;
writer.writeLine(collector.workingPackage.tsdocParserContext.sourceRange.toString());
writer.trimLeadingSpaces = true;
writer.ensureSkippedLine();
}
// Emit the triple slash directives
for (const typeDirectiveReference of collector.dtsTypeReferenceDirectives) {
// https://github.com/microsoft/TypeScript/blob/611ebc7aadd7a44a4c0447698bfda9222a78cb66/src/compiler/declarationEmitter.ts#L162
writer.writeLine(`/// <reference types="${typeDirectiveReference}" />`);
}
for (const libDirectiveReference of collector.dtsLibReferenceDirectives) {
writer.writeLine(`/// <reference lib="${libDirectiveReference}" />`);
}
writer.ensureSkippedLine();
// Emit the imports
for (const entity of collector.entities) {
if (entity.astEntity instanceof AstImport_1.AstImport) {
const astImport = entity.astEntity;
// For example, if the imported API comes from an external package that supports AEDoc,
// and it was marked as `@internal`, then don't emit it.
const symbolMetadata = collector.tryFetchMetadataForAstEntity(astImport);
const maxEffectiveReleaseTag = symbolMetadata
? symbolMetadata.maxEffectiveReleaseTag
: api_extractor_model_1.ReleaseTag.None;
if (this._shouldIncludeReleaseTag(maxEffectiveReleaseTag, dtsKind)) {
DtsEmitHelpers_1.DtsEmitHelpers.emitImport(writer, entity, astImport);
}
}
}
writer.ensureSkippedLine();
// Emit the regular declarations
for (const entity of collector.entities) {
const astEntity = entity.astEntity;
const symbolMetadata = collector.tryFetchMetadataForAstEntity(astEntity);
const maxEffectiveReleaseTag = symbolMetadata
? symbolMetadata.maxEffectiveReleaseTag
: api_extractor_model_1.ReleaseTag.None;
if (!this._shouldIncludeReleaseTag(maxEffectiveReleaseTag, dtsKind)) {
if (!collector.extractorConfig.omitTrimmingComments) {
writer.ensureSkippedLine();
writer.writeLine(`/* Excluded from this release type: ${entity.nameForEmit} */`);
}
continue;
}
if (astEntity instanceof AstSymbol_1.AstSymbol) {
// Emit all the declarations for this entry
for (const astDeclaration of astEntity.astDeclarations || []) {
const apiItemMetadata = collector.fetchApiItemMetadata(astDeclaration);
if (!this._shouldIncludeReleaseTag(apiItemMetadata.effectiveReleaseTag, dtsKind)) {
if (!collector.extractorConfig.omitTrimmingComments) {
writer.ensureSkippedLine();
writer.writeLine(`/* Excluded declaration from this release type: ${entity.nameForEmit} */`);
}
continue;
}
else {
const span = new Span_1.Span(astDeclaration.declaration);
DtsRollupGenerator._modifySpan(collector, span, entity, astDeclaration, dtsKind);
writer.ensureSkippedLine();
span.writeModifiedText(writer);
writer.ensureNewLine();
}
}
}
if (astEntity instanceof AstNamespaceImport_1.AstNamespaceImport) {
const astModuleExportInfo = astEntity.fetchAstModuleExportInfo(collector);
if (entity.nameForEmit === undefined) {
// This should never happen
throw new node_core_library_1.InternalError('referencedEntry.nameForEmit is undefined');
}
if (astModuleExportInfo.starExportedExternalModules.size > 0) {
// We could support this, but we would need to find a way to safely represent it.
                    throw new Error(`The ${entity.nameForEmit} namespace import includes a star export, which is not supported:\n` +
SourceFileLocationFormatter_1.SourceFileLocationFormatter.formatDeclaration(astEntity.declaration));
}
// Emit a synthetic declaration for the namespace. It will look like this:
//
// declare namespace example {
// export {
// f1,
// f2
// }
// }
//
// Note that we do not try to relocate f1()/f2() to be inside the namespace because other type
// signatures may reference them directly (without using the namespace qualifier).
writer.ensureSkippedLine();
if (entity.shouldInlineExport) {
writer.write('export ');
}
writer.writeLine(`declare namespace ${entity.nameForEmit} {`);
                // All local exports of the locally imported module are just references to top-level declarations.
writer.increaseIndent();
writer.writeLine('export {');
writer.increaseIndent();
const exportClauses = [];
for (const [exportedName, exportedEntity] of astModuleExportInfo.exportedLocalEntities) {
const collectorEntity = collector.tryGetCollectorEntity(exportedEntity);
if (collectorEntity === undefined) {
// This should never happen
                        // Top-level exports of the locally imported module should already have been added as collector entities.
throw new node_core_library_1.InternalError(`Cannot find collector entity for ${entity.nameForEmit}.${exportedEntity.localName}`);
}
// If the entity's declaration won't be included, then neither should the namespace export it
// This fixes the issue encountered here: https://github.com/microsoft/rushstack/issues/2791
const exportedSymbolMetadata = collector.tryFetchMetadataForAstEntity(exportedEntity);
const exportedMaxEffectiveReleaseTag = exportedSymbolMetadata
? exportedSymbolMetadata.maxEffectiveReleaseTag
: api_extractor_model_1.ReleaseTag.None;
if (!this._shouldIncludeReleaseTag(exportedMaxEffectiveReleaseTag, dtsKind)) {
continue;
}
if (collectorEntity.nameForEmit === exportedName) {
exportClauses.push(collectorEntity.nameForEmit);
}
else {
exportClauses.push(`${collectorEntity.nameForEmit} as ${exportedName}`);
}
}
writer.writeLine(exportClauses.join(',\n'));
writer.decreaseIndent();
writer.writeLine('}'); // end of "export { ... }"
writer.decreaseIndent();
writer.writeLine('}'); // end of "declare namespace { ... }"
}
if (!entity.shouldInlineExport) {
for (const exportName of entity.exportNames) {
DtsEmitHelpers_1.DtsEmitHelpers.emitNamedExport(writer, exportName, entity);
}
}
writer.ensureSkippedLine();
}
DtsEmitHelpers_1.DtsEmitHelpers.emitStarExports(writer, collector);
// Emit "export { }" which is a special directive that prevents consumers from importing declarations
// that don't have an explicit "export" modifier.
writer.ensureSkippedLine();
writer.writeLine('export { }');
}
/**
* Before writing out a declaration, _modifySpan() applies various fixups to make it nice.
*/
static _modifySpan(collector, span, entity, astDeclaration, dtsKind) {
const previousSpan = span.previousSibling;
let recurseChildren = true;
switch (span.kind) {
case ts.SyntaxKind.JSDocComment:
// If the @packageDocumentation comment seems to be attached to one of the regular API items,
                // omit it. It gets explicitly emitted at the top of the file.
if (span.node.getText().match(/(?:\s|\*)@packageDocumentation(?:\s|\*)/gi)) {
span.modification.skipAll();
}
// For now, we don't transform JSDoc comment nodes at all
recurseChildren = false;
break;
case ts.SyntaxKind.ExportKeyword:
case ts.SyntaxKind.DefaultKeyword:
case ts.SyntaxKind.DeclareKeyword:
// Delete any explicit "export" or "declare" keywords -- we will re-add them below
span.modification.skipAll();
break;
case ts.SyntaxKind.InterfaceKeyword:
case ts.SyntaxKind.ClassKeyword:
case ts.SyntaxKind.EnumKeyword:
case ts.SyntaxKind.NamespaceKeyword:
case ts.SyntaxKind.ModuleKeyword:
case ts.SyntaxKind.TypeKeyword:
case ts.SyntaxKind.FunctionKeyword:
// Replace the stuff we possibly deleted above
let replacedModifiers = '';
// Add a declare statement for root declarations (but not for nested declarations)
if (!astDeclaration.parent) {
replacedModifiers += 'declare ';
}
if (entity.shouldInlineExport) {
replacedModifiers = 'export ' + replacedModifiers;
}
if (previousSpan && previousSpan.kind === ts.SyntaxKind.SyntaxList) {
// If there is a previous span of type SyntaxList, then apply it before any other modifiers
// (e.g. "abstract") that appear there.
previousSpan.modification.prefix = replacedModifiers + previousSpan.modification.prefix;
}
else {
// Otherwise just stick it in front of this span
span.modification.prefix = replacedModifiers + span.modification.prefix;
}
break;
case ts.SyntaxKind.VariableDeclaration:
// Is this a top-level variable declaration?
// (The logic below does not apply to variable declarations that are part of an explicit "namespace" block,
// since the compiler prefers not to emit "declare" or "export" keywords for those declarations.)
if (!span.parent) {
// The VariableDeclaration node is part of a VariableDeclarationList, however
// the Entry.followedSymbol points to the VariableDeclaration part because
// multiple definitions might share the same VariableDeclarationList.
//
// Since we are emitting a separate declaration for each one, we need to look upwards
// in the ts.Node tree and write a copy of the enclosing VariableDeclarationList
// content (e.g. "var" from "var x=1, y=2").
const list = TypeScriptHelpers_1.TypeScriptHelpers.matchAncestor(span.node, [
ts.SyntaxKind.VariableDeclarationList,
ts.SyntaxKind.VariableDeclaration
]);
if (!list) {
// This should not happen unless the compiler API changes somehow
throw new node_core_library_1.InternalError('Unsupported variable declaration');
}
const listPrefix = list
.getSourceFile()
.text.substring(list.getStart(), list.declarations[0].getStart());
span.modification.prefix = 'declare ' + listPrefix + span.modification.prefix;
span.modification.suffix = ';';
if (entity.shouldInlineExport) {
span.modification.prefix = 'export ' + span.modification.prefix;
}
const declarationMetadata = collector.fetchDeclarationMetadata(astDeclaration);
if (declarationMetadata.tsdocParserContext) {
// Typically the comment for a variable declaration is attached to the outer variable statement
// (which may possibly contain multiple variable declarations), so it's not part of the Span.
// Instead we need to manually inject it.
let originalComment = declarationMetadata.tsdocParserContext.sourceRange.toString();
if (!/\r?\n\s*$/.test(originalComment)) {
originalComment += '\n';
}
span.modification.indentDocComment = Span_1.IndentDocCommentScope.PrefixOnly;
span.modification.prefix = originalComment + span.modification.prefix;
}
}
break;
case ts.SyntaxKind.Identifier:
{
const referencedEntity = collector.tryGetEntityForNode(span.node);
if (referencedEntity) {
if (!referencedEntity.nameForEmit) {
// This should never happen
throw new node_core_library_1.InternalError('referencedEntry.nameForEmit is undefined');
}
span.modification.prefix = referencedEntity.nameForEmit;
// For debugging:
// span.modification.prefix += '/*R=FIX*/';
}
else {
// For debugging:
// span.modification.prefix += '/*R=KEEP*/';
}
}
break;
case ts.SyntaxKind.ImportType:
DtsEmitHelpers_1.DtsEmitHelpers.modifyImportTypeSpan(collector, span, astDeclaration, (childSpan, childAstDeclaration) => {
DtsRollupGenerator._modifySpan(collector, childSpan, entity, childAstDeclaration, dtsKind);
});
break;
}
if (recurseChildren) {
for (const child of span.children) {
let childAstDeclaration = astDeclaration;
// Should we trim this node?
let trimmed = false;
if (AstDeclaration_1.AstDeclaration.isSupportedSyntaxKind(child.kind)) {
childAstDeclaration = collector.astSymbolTable.getChildAstDeclarationByNode(child.node, astDeclaration);
const releaseTag = collector.fetchApiItemMetadata(childAstDeclaration).effectiveReleaseTag;
if (!this._shouldIncludeReleaseTag(releaseTag, dtsKind)) {
let nodeToTrim = child;
// If we are trimming a variable statement, then we need to trim the outer VariableDeclarationList
// as well.
if (child.kind === ts.SyntaxKind.VariableDeclaration) {
const variableStatement = child.findFirstParent(ts.SyntaxKind.VariableStatement);
if (variableStatement !== undefined) {
nodeToTrim = variableStatement;
}
}
const modification = nodeToTrim.modification;
// Yes, trim it and stop here
const name = childAstDeclaration.astSymbol.localName;
modification.omitChildren = true;
if (!collector.extractorConfig.omitTrimmingComments) {
modification.prefix = `/* Excluded from this release type: ${name} */`;
}
else {
modification.prefix = '';
}
modification.suffix = '';
if (nodeToTrim.children.length > 0) {
// If there are grandchildren, then keep the last grandchild's separator,
// since it often has useful whitespace
modification.suffix = nodeToTrim.children[nodeToTrim.children.length - 1].separator;
}
if (nodeToTrim.nextSibling) {
// If the thing we are trimming is followed by a comma, then trim the comma also.
// An example would be an enum member.
if (nodeToTrim.nextSibling.kind === ts.SyntaxKind.CommaToken) {
// Keep its separator since it often has useful whitespace
modification.suffix += nodeToTrim.nextSibling.separator;
nodeToTrim.nextSibling.modification.skipAll();
}
}
trimmed = true;
}
}
if (!trimmed) {
DtsRollupGenerator._modifySpan(collector, child, entity, childAstDeclaration, dtsKind);
}
}
}
}
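    // Each rollup kind keeps progressively fewer release tags; untagged (None) declarations are
    // always kept because there is not enough information to trim them.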
static _shouldIncludeReleaseTag(releaseTag, dtsKind) {
switch (dtsKind) {
case DtsRollupKind.InternalRelease:
return true;
case DtsRollupKind.AlphaRelease:
return (releaseTag === api_extractor_model_1.ReleaseTag.Alpha ||
releaseTag === api_extractor_model_1.ReleaseTag.Beta ||
releaseTag === api_extractor_model_1.ReleaseTag.Public ||
// NOTE: If the release tag is "None", then we don't have enough information to trim it
releaseTag === api_extractor_model_1.ReleaseTag.None);
case DtsRollupKind.BetaRelease:
return (releaseTag === api_extractor_model_1.ReleaseTag.Beta ||
releaseTag === api_extractor_model_1.ReleaseTag.Public ||
// NOTE: If the release tag is "None", then we don't have enough information to trim it
releaseTag === api_extractor_model_1.ReleaseTag.None);
case DtsRollupKind.PublicRelease:
return releaseTag === api_extractor_model_1.ReleaseTag.Public || releaseTag === api_extractor_model_1.ReleaseTag.None;
default:
throw new Error(`${DtsRollupKind[dtsKind]} is not implemented`);
}
}
}
exports.DtsRollupGenerator = DtsRollupGenerator;
//# sourceMappingURL=DtsRollupGenerator.js.map

File diff suppressed because one or more lines are too long

@ -0,0 +1,48 @@
import * as ts from 'typescript';
import { type IExcerptToken, type IExcerptTokenRange } from '@microsoft/api-extractor-model';
import type { DeclarationReferenceGenerator } from './DeclarationReferenceGenerator';
import type { AstDeclaration } from '../analyzer/AstDeclaration';
/**
* Used to provide ExcerptBuilder with a list of nodes whose token range we want to capture.
*/
export interface IExcerptBuilderNodeToCapture {
/**
* The node to capture
*/
node: ts.Node | undefined;
/**
* The token range whose startIndex/endIndex will be overwritten with the indexes for the
* tokens corresponding to IExcerptBuilderNodeToCapture.node
*/
tokenRange: IExcerptTokenRange;
}
export declare class ExcerptBuilder {
/**
* Appends a blank line to the `excerptTokens` list.
* @param excerptTokens - The target token list to append to
*/
static addBlankLine(excerptTokens: IExcerptToken[]): void;
/**
* Appends the signature for the specified `AstDeclaration` to the `excerptTokens` list.
* @param excerptTokens - The target token list to append to
* @param nodesToCapture - A list of child nodes whose token ranges we want to capture
*/
static addDeclaration(excerptTokens: IExcerptToken[], astDeclaration: AstDeclaration, nodesToCapture: IExcerptBuilderNodeToCapture[], referenceGenerator: DeclarationReferenceGenerator): void;
static createEmptyTokenRange(): IExcerptTokenRange;
private static _buildSpan;
private static _appendToken;
/**
* Condenses the provided excerpt tokens by merging tokens where possible. Updates the provided token ranges to
* remain accurate after token merging.
*
* @remarks
* For example, suppose we have excerpt tokens ["A", "B", "C"] and a token range [0, 2]. If the excerpt tokens
* are condensed to ["AB", "C"], then the token range would be updated to [0, 1]. Note that merges are only
* performed if they are compatible with the provided token ranges. In the example above, if our token range was
* originally [0, 1], we would not be able to merge tokens "A" and "B".
*/
private static _condenseTokens;
private static _isDeclarationName;
private static _isDeclaration;
}
//# sourceMappingURL=ExcerptBuilder.d.ts.map

@ -0,0 +1 @@
{"version":3,"file":"ExcerptBuilder.d.ts","sourceRoot":"","sources":["../../src/generators/ExcerptBuilder.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,MAAM,YAAY,CAAC;AAEjC,OAAO,EAEL,KAAK,aAAa,EAClB,KAAK,kBAAkB,EACxB,MAAM,gCAAgC,CAAC;AAGxC,OAAO,KAAK,EAAE,6BAA6B,EAAE,MAAM,iCAAiC,CAAC;AACrF,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,4BAA4B,CAAC;AAEjE;;GAEG;AACH,MAAM,WAAW,4BAA4B;IAC3C;;OAEG;IACH,IAAI,EAAE,EAAE,CAAC,IAAI,GAAG,SAAS,CAAC;IAC1B;;;OAGG;IACH,UAAU,EAAE,kBAAkB,CAAC;CAChC;AAiCD,qBAAa,cAAc;IACzB;;;OAGG;WACW,YAAY,CAAC,aAAa,EAAE,aAAa,EAAE,GAAG,IAAI;IAYhE;;;;OAIG;WACW,cAAc,CAC1B,aAAa,EAAE,aAAa,EAAE,EAC9B,cAAc,EAAE,cAAc,EAC9B,cAAc,EAAE,4BAA4B,EAAE,EAC9C,kBAAkB,EAAE,6BAA6B,GAChD,IAAI;WAmCO,qBAAqB,IAAI,kBAAkB;IAIzD,OAAO,CAAC,MAAM,CAAC,UAAU;IA8EzB,OAAO,CAAC,MAAM,CAAC,YAAY;IAiB3B;;;;;;;;;OASG;IACH,OAAO,CAAC,MAAM,CAAC,eAAe;IA8E9B,OAAO,CAAC,MAAM,CAAC,kBAAkB;IAIjC,OAAO,CAAC,MAAM,CAAC,cAAc;CA0B9B"}

@ -0,0 +1,267 @@
"use strict";
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
// See LICENSE in the project root for license information.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ExcerptBuilder = void 0;
const ts = __importStar(require("typescript"));
const api_extractor_model_1 = require("@microsoft/api-extractor-model");
const Span_1 = require("../analyzer/Span");
class ExcerptBuilder {
/**
* Appends a blank line to the `excerptTokens` list.
* @param excerptTokens - The target token list to append to
*/
static addBlankLine(excerptTokens) {
let newlines = '\n\n';
// If the existing text already ended with a newline, then only append one newline
if (excerptTokens.length > 0) {
const previousText = excerptTokens[excerptTokens.length - 1].text;
if (/\n$/.test(previousText)) {
newlines = '\n';
}
}
excerptTokens.push({ kind: api_extractor_model_1.ExcerptTokenKind.Content, text: newlines });
}
/**
* Appends the signature for the specified `AstDeclaration` to the `excerptTokens` list.
* @param excerptTokens - The target token list to append to
* @param nodesToCapture - A list of child nodes whose token ranges we want to capture
*/
static addDeclaration(excerptTokens, astDeclaration, nodesToCapture, referenceGenerator) {
let stopBeforeChildKind = undefined;
switch (astDeclaration.declaration.kind) {
case ts.SyntaxKind.ClassDeclaration:
case ts.SyntaxKind.EnumDeclaration:
case ts.SyntaxKind.InterfaceDeclaration:
// FirstPunctuation = "{"
stopBeforeChildKind = ts.SyntaxKind.FirstPunctuation;
break;
case ts.SyntaxKind.ModuleDeclaration:
// ModuleBlock = the "{ ... }" block
stopBeforeChildKind = ts.SyntaxKind.ModuleBlock;
break;
}
const span = new Span_1.Span(astDeclaration.declaration);
const tokenRangesByNode = new Map();
for (const excerpt of nodesToCapture || []) {
if (excerpt.node) {
tokenRangesByNode.set(excerpt.node, excerpt.tokenRange);
}
}
ExcerptBuilder._buildSpan(excerptTokens, span, {
referenceGenerator: referenceGenerator,
startingNode: span.node,
stopBeforeChildKind,
tokenRangesByNode,
lastAppendedTokenIsSeparator: false
});
ExcerptBuilder._condenseTokens(excerptTokens, [...tokenRangesByNode.values()]);
}
static createEmptyTokenRange() {
return { startIndex: 0, endIndex: 0 };
}
static _buildSpan(excerptTokens, span, state) {
if (span.kind === ts.SyntaxKind.JSDocComment) {
// Discard any comments
return true;
}
        // Can this node start an excerpt?
const capturedTokenRange = state.tokenRangesByNode.get(span.node);
let excerptStartIndex = 0;
if (capturedTokenRange) {
// We will assign capturedTokenRange.startIndex to be the index of the next token to be appended
excerptStartIndex = excerptTokens.length;
}
if (span.prefix) {
let canonicalReference = undefined;
if (span.kind === ts.SyntaxKind.Identifier) {
const name = span.node;
if (!ExcerptBuilder._isDeclarationName(name)) {
canonicalReference = state.referenceGenerator.getDeclarationReferenceForIdentifier(name);
}
}
if (canonicalReference) {
ExcerptBuilder._appendToken(excerptTokens, api_extractor_model_1.ExcerptTokenKind.Reference, span.prefix, canonicalReference);
}
else {
ExcerptBuilder._appendToken(excerptTokens, api_extractor_model_1.ExcerptTokenKind.Content, span.prefix);
}
state.lastAppendedTokenIsSeparator = false;
}
for (const child of span.children) {
if (span.node === state.startingNode) {
if (state.stopBeforeChildKind && child.kind === state.stopBeforeChildKind) {
// We reached a child whose kind is stopBeforeChildKind, so stop traversing
return false;
}
}
if (!this._buildSpan(excerptTokens, child, state)) {
return false;
}
}
if (span.suffix) {
ExcerptBuilder._appendToken(excerptTokens, api_extractor_model_1.ExcerptTokenKind.Content, span.suffix);
state.lastAppendedTokenIsSeparator = false;
}
if (span.separator) {
ExcerptBuilder._appendToken(excerptTokens, api_extractor_model_1.ExcerptTokenKind.Content, span.separator);
state.lastAppendedTokenIsSeparator = true;
}
        // Are we building an excerpt? If so, set its range
if (capturedTokenRange) {
capturedTokenRange.startIndex = excerptStartIndex;
            // We will assign capturedTokenRange.endIndex to be the index after the last token
// that was appended so far. However, if the last appended token was a separator, omit
// it from the range.
let excerptEndIndex = excerptTokens.length;
if (state.lastAppendedTokenIsSeparator) {
excerptEndIndex--;
}
capturedTokenRange.endIndex = excerptEndIndex;
}
return true;
}
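    // Appends a token unless its text is empty, recording the canonical reference (as a string) when one is provided.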
static _appendToken(excerptTokens, excerptTokenKind, text, canonicalReference) {
if (text.length === 0) {
return;
}
const excerptToken = { kind: excerptTokenKind, text: text };
if (canonicalReference !== undefined) {
excerptToken.canonicalReference = canonicalReference.toString();
}
excerptTokens.push(excerptToken);
}
/**
* Condenses the provided excerpt tokens by merging tokens where possible. Updates the provided token ranges to
* remain accurate after token merging.
*
* @remarks
* For example, suppose we have excerpt tokens ["A", "B", "C"] and a token range [0, 2]. If the excerpt tokens
* are condensed to ["AB", "C"], then the token range would be updated to [0, 1]. Note that merges are only
* performed if they are compatible with the provided token ranges. In the example above, if our token range was
* originally [0, 1], we would not be able to merge tokens "A" and "B".
*/
static _condenseTokens(excerptTokens, tokenRanges) {
// This set is used to quickly lookup a start or end index.
const startOrEndIndices = new Set();
for (const tokenRange of tokenRanges) {
startOrEndIndices.add(tokenRange.startIndex);
startOrEndIndices.add(tokenRange.endIndex);
}
for (let currentIndex = 1; currentIndex < excerptTokens.length; ++currentIndex) {
while (currentIndex < excerptTokens.length) {
const prevPrevToken = excerptTokens[currentIndex - 2]; // May be undefined
const prevToken = excerptTokens[currentIndex - 1];
const currentToken = excerptTokens[currentIndex];
// The number of excerpt tokens that are merged in this iteration. We need this to determine
// how to update the start and end indices of our token ranges.
let mergeCount;
// There are two types of merges that can occur. We only perform these merges if they are
// compatible with all of our token ranges.
if (prevPrevToken &&
prevPrevToken.kind === api_extractor_model_1.ExcerptTokenKind.Reference &&
prevToken.kind === api_extractor_model_1.ExcerptTokenKind.Content &&
prevToken.text.trim() === '.' &&
currentToken.kind === api_extractor_model_1.ExcerptTokenKind.Reference &&
!startOrEndIndices.has(currentIndex) &&
!startOrEndIndices.has(currentIndex - 1)) {
// If the current token is a reference token, the previous token is a ".", and the previous-
// previous token is a reference token, then merge all three tokens into a reference token.
//
// For example: Given ["MyNamespace" (R), ".", "MyClass" (R)], tokens "." and "MyClass" might
// be merged into "MyNamespace". The condensed token would be ["MyNamespace.MyClass" (R)].
prevPrevToken.text += prevToken.text + currentToken.text;
prevPrevToken.canonicalReference = currentToken.canonicalReference;
mergeCount = 2;
currentIndex--;
}
else if (
// If the current and previous tokens are both content tokens, then merge the tokens into a
// single content token. For example: Given ["export ", "declare class"], these tokens
// might be merged into "export declare class".
prevToken.kind === api_extractor_model_1.ExcerptTokenKind.Content &&
prevToken.kind === currentToken.kind &&
!startOrEndIndices.has(currentIndex)) {
prevToken.text += currentToken.text;
mergeCount = 1;
}
else {
// Otherwise, no merging can occur here. Continue to the next index.
break;
}
// Remove the now redundant excerpt token(s), as they were merged into a previous token.
excerptTokens.splice(currentIndex, mergeCount);
// Update the start and end indices for all token ranges based upon how many excerpt
// tokens were merged and in what positions.
for (const tokenRange of tokenRanges) {
if (tokenRange.startIndex > currentIndex) {
tokenRange.startIndex -= mergeCount;
}
if (tokenRange.endIndex > currentIndex) {
tokenRange.endIndex -= mergeCount;
}
}
// Clear and repopulate our set with the updated indices.
startOrEndIndices.clear();
for (const tokenRange of tokenRanges) {
startOrEndIndices.add(tokenRange.startIndex);
startOrEndIndices.add(tokenRange.endIndex);
}
}
}
}
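    // Returns true if the specified identifier is the "name" of its parent declaration,
    // as opposed to some other use of that identifier.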
static _isDeclarationName(name) {
return ExcerptBuilder._isDeclaration(name.parent) && name.parent.name === name;
}
static _isDeclaration(node) {
switch (node.kind) {
case ts.SyntaxKind.FunctionDeclaration:
case ts.SyntaxKind.FunctionExpression:
case ts.SyntaxKind.VariableDeclaration:
case ts.SyntaxKind.Parameter:
case ts.SyntaxKind.EnumDeclaration:
case ts.SyntaxKind.ClassDeclaration:
case ts.SyntaxKind.ClassExpression:
case ts.SyntaxKind.ModuleDeclaration:
case ts.SyntaxKind.MethodDeclaration:
case ts.SyntaxKind.MethodSignature:
case ts.SyntaxKind.PropertyDeclaration:
case ts.SyntaxKind.PropertySignature:
case ts.SyntaxKind.GetAccessor:
case ts.SyntaxKind.SetAccessor:
case ts.SyntaxKind.InterfaceDeclaration:
case ts.SyntaxKind.TypeAliasDeclaration:
case ts.SyntaxKind.TypeParameter:
case ts.SyntaxKind.EnumMember:
case ts.SyntaxKind.BindingElement:
return true;
default:
return false;
}
}
}
exports.ExcerptBuilder = ExcerptBuilder;
//# sourceMappingURL=ExcerptBuilder.js.map
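
A standalone sketch of the adjacent-content-token merge that the `_condenseTokens` doc comment above describes. The `SketchToken`, `SketchRange`, and `condenseContentTokens` names are invented for this illustration only; they are not part of api-extractor's model, and the real implementation also handles the three-token reference merge and operates on `ExcerptTokenKind` values.

```ts
// Illustrative sketch only: simplified local types, not api-extractor's ExcerptToken classes.
interface SketchToken {
  kind: 'Content' | 'Reference';
  text: string;
}
interface SketchRange {
  startIndex: number;
  endIndex: number;
}

function condenseContentTokens(tokens: SketchToken[], ranges: SketchRange[]): void {
  // Record every range boundary so we never merge across one.
  const boundaries: Set<number> = new Set();
  for (const r of ranges) {
    boundaries.add(r.startIndex);
    boundaries.add(r.endIndex);
  }

  for (let i = 1; i < tokens.length; ++i) {
    while (i < tokens.length) {
      const prev: SketchToken = tokens[i - 1];
      const curr: SketchToken = tokens[i];
      if (prev.kind === 'Content' && curr.kind === 'Content' && !boundaries.has(i)) {
        // Merge the current token into the previous one and drop it.
        prev.text += curr.text;
        tokens.splice(i, 1);
        // Shift boundaries that sit past the removed token, then rebuild the lookup set.
        for (const r of ranges) {
          if (r.startIndex > i) r.startIndex -= 1;
          if (r.endIndex > i) r.endIndex -= 1;
        }
        boundaries.clear();
        for (const r of ranges) {
          boundaries.add(r.startIndex);
          boundaries.add(r.endIndex);
        }
      } else {
        break;
      }
    }
  }
}

// Mirrors the doc comment's example: ["export ", "declare class"] with range [0, 2]
// condenses to ["export declare class"] with range [0, 1].
const tokens: SketchToken[] = [
  { kind: 'Content', text: 'export ' },
  { kind: 'Content', text: 'declare class' }
];
const ranges: SketchRange[] = [{ startIndex: 0, endIndex: 2 }];
condenseContentTokens(tokens, ranges);
console.log(tokens, ranges);
```

Refusing to merge across a recorded start or end index is what keeps the captured ranges pointing at the same stretch of text after tokens are removed.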

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,136 @@
import { type IStringBuilder } from '@rushstack/node-core-library';
/**
* A utility for writing indented text.
*
* @remarks
*
* Note that the indentation is inserted at the last possible opportunity.
* For example, this code...
*
* ```ts
* writer.write('begin\n');
* writer.increaseIndent();
* writer.write('one\ntwo\n');
* writer.decreaseIndent();
* writer.increaseIndent();
* writer.decreaseIndent();
* writer.write('end');
* ```
*
* ...would produce this output:
*
* ```
* begin
 *   one
 *   two
* end
* ```
*/
export declare class IndentedWriter {
/**
* The text characters used to create one level of indentation.
* Two spaces by default.
*/
defaultIndentPrefix: string;
/**
* Whether to indent blank lines
*/
indentBlankLines: boolean;
/**
* Trims leading spaces from the input text before applying the indent.
*
* @remarks
* Consider the following example:
*
* ```ts
 * indentedWriter.increaseIndent('    '); // four spaces
 * indentedWriter.write('  a\n  b c\n');
* indentedWriter.decreaseIndent();
* ```
*
* Normally the output would be indented by 6 spaces: 4 from `increaseIndent()`, plus the 2 spaces
* from `write()`:
* ```
 *       a
 *       b c
* ```
*
* Setting `trimLeadingSpaces=true` will trim the leading spaces, so that the lines are indented
* by 4 spaces only:
* ```
 *     a
 *     b c
* ```
*/
trimLeadingSpaces: boolean;
private readonly _builder;
private _latestChunk;
private _previousChunk;
private _atStartOfLine;
private readonly _indentStack;
private _indentText;
private _previousLineIsBlank;
private _currentLineIsBlank;
constructor(builder?: IStringBuilder);
/**
* Retrieves the output that was built so far.
*/
getText(): string;
toString(): string;
/**
* Increases the indentation. Normally the indentation is two spaces,
 * however an arbitrary prefix can optionally be specified. (For example,
* the prefix could be "// " to indent and comment simultaneously.)
* Each call to IndentedWriter.increaseIndent() must be followed by a
* corresponding call to IndentedWriter.decreaseIndent().
*/
increaseIndent(indentPrefix?: string): void;
/**
* Decreases the indentation, reverting the effect of the corresponding call
* to IndentedWriter.increaseIndent().
*/
decreaseIndent(): void;
/**
* A shorthand for ensuring that increaseIndent()/decreaseIndent() occur
* in pairs.
*/
indentScope(scope: () => void, indentPrefix?: string): void;
/**
* Adds a newline if the file pointer is not already at the start of the line (or start of the stream).
*/
ensureNewLine(): void;
/**
* Adds up to two newlines to ensure that there is a blank line above the current position.
* The start of the stream is considered to be a blank line, so `ensureSkippedLine()` has no effect
* unless some text has been written.
*/
ensureSkippedLine(): void;
/**
* Returns the last character that was written, or an empty string if no characters have been written yet.
*/
peekLastCharacter(): string;
/**
 * Returns the second-to-last character that was written, or an empty string if fewer than two
 * characters have been written yet.
*/
peekSecondLastCharacter(): string;
/**
* Writes some text to the internal string buffer, applying indentation according
* to the current indentation level. If the string contains multiple newlines,
* each line will be indented separately.
*/
write(message: string): void;
/**
* A shorthand for writing an optional message, followed by a newline.
* Indentation is applied following the semantics of IndentedWriter.write().
*/
writeLine(message?: string): void;
/**
* Writes a string that does not contain any newline characters.
*/
private _writeLinePart;
private _writeNewLine;
private _write;
private _updateIndentText;
}
//# sourceMappingURL=IndentedWriter.d.ts.map
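
A minimal usage sketch of the `IndentedWriter` API declared above, reproducing the begin/one/two/end example from the class documentation. The relative import path is an assumption for illustration; this diff does not show how the module is actually consumed.

```ts
// Usage sketch only; the import path is assumed.
import { IndentedWriter } from './IndentedWriter';

const writer: IndentedWriter = new IndentedWriter();
writer.write('begin\n');
writer.indentScope(() => {
  // Each line of a multi-line write() is indented separately.
  writer.write('one\ntwo\n');
});
writer.write('end');

// Per the class documentation, getText() returns:
//   begin
//     one
//     two
//   end
console.log(writer.getText());
```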

View File

@ -0,0 +1 @@
{"version":3,"file":"IndentedWriter.d.ts","sourceRoot":"","sources":["../../src/generators/IndentedWriter.ts"],"names":[],"mappings":"AAGA,OAAO,EAAiB,KAAK,cAAc,EAAE,MAAM,8BAA8B,CAAC;AAElF;;;;;;;;;;;;;;;;;;;;;;;;;;GA0BG;AACH,qBAAa,cAAc;IACzB;;;OAGG;IACI,mBAAmB,EAAE,MAAM,CAAU;IAE5C;;OAEG;IACI,gBAAgB,EAAE,OAAO,CAAS;IAEzC;;;;;;;;;;;;;;;;;;;;;;;;;OAyBG;IACI,iBAAiB,EAAE,OAAO,CAAS;IAE1C,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAiB;IAE1C,OAAO,CAAC,YAAY,CAAqB;IACzC,OAAO,CAAC,cAAc,CAAqB;IAC3C,OAAO,CAAC,cAAc,CAAU;IAEhC,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAW;IACxC,OAAO,CAAC,WAAW,CAAS;IAE5B,OAAO,CAAC,oBAAoB,CAAU;IACtC,OAAO,CAAC,mBAAmB,CAAU;gBAElB,OAAO,CAAC,EAAE,cAAc;IAY3C;;OAEG;IACI,OAAO,IAAI,MAAM;IAIjB,QAAQ,IAAI,MAAM;IAIzB;;;;;;OAMG;IACI,cAAc,CAAC,YAAY,CAAC,EAAE,MAAM,GAAG,IAAI;IAKlD;;;OAGG;IACI,cAAc,IAAI,IAAI;IAK7B;;;OAGG;IACI,WAAW,CAAC,KAAK,EAAE,MAAM,IAAI,EAAE,YAAY,CAAC,EAAE,MAAM,GAAG,IAAI;IAMlE;;OAEG;IACI,aAAa,IAAI,IAAI;IAO5B;;;;OAIG;IACI,iBAAiB,IAAI,IAAI;IAOhC;;OAEG;IACI,iBAAiB,IAAI,MAAM;IAOlC;;;OAGG;IACI,uBAAuB,IAAI,MAAM;IAYxC;;;;OAIG;IACI,KAAK,CAAC,OAAO,EAAE,MAAM,GAAG,IAAI;IAyBnC;;;OAGG;IACI,SAAS,CAAC,OAAO,GAAE,MAAW,GAAG,IAAI;IAO5C;;OAEG;IACH,OAAO,CAAC,cAAc;IAqBtB,OAAO,CAAC,aAAa;IAarB,OAAO,CAAC,MAAM;IAMd,OAAO,CAAC,iBAAiB;CAG1B"}

View File

@ -0,0 +1,242 @@
"use strict";
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
// See LICENSE in the project root for license information.
Object.defineProperty(exports, "__esModule", { value: true });
exports.IndentedWriter = void 0;
const node_core_library_1 = require("@rushstack/node-core-library");
/**
* A utility for writing indented text.
*
* @remarks
*
* Note that the indentation is inserted at the last possible opportunity.
* For example, this code...
*
* ```ts
* writer.write('begin\n');
* writer.increaseIndent();
* writer.write('one\ntwo\n');
* writer.decreaseIndent();
* writer.increaseIndent();
* writer.decreaseIndent();
* writer.write('end');
* ```
*
* ...would produce this output:
*
* ```
* begin
 *   one
 *   two
* end
* ```
*/
class IndentedWriter {
constructor(builder) {
/**
* The text characters used to create one level of indentation.
* Two spaces by default.
*/
        this.defaultIndentPrefix = '  ';
/**
* Whether to indent blank lines
*/
this.indentBlankLines = false;
/**
* Trims leading spaces from the input text before applying the indent.
*
* @remarks
* Consider the following example:
*
* ```ts
         * indentedWriter.increaseIndent('    '); // four spaces
         * indentedWriter.write('  a\n  b c\n');
* indentedWriter.decreaseIndent();
* ```
*
* Normally the output would be indented by 6 spaces: 4 from `increaseIndent()`, plus the 2 spaces
* from `write()`:
* ```
         *       a
         *       b c
* ```
*
* Setting `trimLeadingSpaces=true` will trim the leading spaces, so that the lines are indented
* by 4 spaces only:
* ```
         *     a
         *     b c
* ```
*/
this.trimLeadingSpaces = false;
this._builder = builder === undefined ? new node_core_library_1.StringBuilder() : builder;
this._latestChunk = undefined;
this._previousChunk = undefined;
this._atStartOfLine = true;
this._previousLineIsBlank = true;
this._currentLineIsBlank = true;
this._indentStack = [];
this._indentText = '';
}
/**
* Retrieves the output that was built so far.
*/
getText() {
return this._builder.toString();
}
toString() {
return this.getText();
}
/**
* Increases the indentation. Normally the indentation is two spaces,
     * however an arbitrary prefix can optionally be specified. (For example,
* the prefix could be "// " to indent and comment simultaneously.)
* Each call to IndentedWriter.increaseIndent() must be followed by a
* corresponding call to IndentedWriter.decreaseIndent().
*/
increaseIndent(indentPrefix) {
this._indentStack.push(indentPrefix !== undefined ? indentPrefix : this.defaultIndentPrefix);
this._updateIndentText();
}
/**
* Decreases the indentation, reverting the effect of the corresponding call
* to IndentedWriter.increaseIndent().
*/
decreaseIndent() {
this._indentStack.pop();
this._updateIndentText();
}
/**
* A shorthand for ensuring that increaseIndent()/decreaseIndent() occur
* in pairs.
*/
indentScope(scope, indentPrefix) {
this.increaseIndent(indentPrefix);
scope();
this.decreaseIndent();
}
/**
* Adds a newline if the file pointer is not already at the start of the line (or start of the stream).
*/
ensureNewLine() {
const lastCharacter = this.peekLastCharacter();
if (lastCharacter !== '\n' && lastCharacter !== '') {
this._writeNewLine();
}
}
/**
* Adds up to two newlines to ensure that there is a blank line above the current position.
* The start of the stream is considered to be a blank line, so `ensureSkippedLine()` has no effect
* unless some text has been written.
*/
ensureSkippedLine() {
this.ensureNewLine();
if (!this._previousLineIsBlank) {
this._writeNewLine();
}
}
/**
* Returns the last character that was written, or an empty string if no characters have been written yet.
*/
peekLastCharacter() {
if (this._latestChunk !== undefined) {
return this._latestChunk.substr(-1, 1);
}
return '';
}
/**
     * Returns the second-to-last character that was written, or an empty string if fewer than two
     * characters have been written yet.
*/
peekSecondLastCharacter() {
if (this._latestChunk !== undefined) {
if (this._latestChunk.length > 1) {
return this._latestChunk.substr(-2, 1);
}
if (this._previousChunk !== undefined) {
return this._previousChunk.substr(-1, 1);
}
}
return '';
}
/**
* Writes some text to the internal string buffer, applying indentation according
* to the current indentation level. If the string contains multiple newlines,
* each line will be indented separately.
*/
write(message) {
if (message.length === 0) {
return;
}
// If there are no newline characters, then append the string verbatim
if (!/[\r\n]/.test(message)) {
this._writeLinePart(message);
return;
}
// Otherwise split the lines and write each one individually
let first = true;
for (const linePart of message.split('\n')) {
if (!first) {
this._writeNewLine();
}
else {
first = false;
}
if (linePart) {
this._writeLinePart(linePart.replace(/[\r]/g, ''));
}
}
}
/**
* A shorthand for writing an optional message, followed by a newline.
* Indentation is applied following the semantics of IndentedWriter.write().
*/
writeLine(message = '') {
if (message.length > 0) {
this.write(message);
}
this._writeNewLine();
}
/**
* Writes a string that does not contain any newline characters.
*/
_writeLinePart(message) {
let trimmedMessage = message;
if (this.trimLeadingSpaces && this._atStartOfLine) {
trimmedMessage = message.replace(/^ +/, '');
}
if (trimmedMessage.length > 0) {
if (this._atStartOfLine && this._indentText.length > 0) {
this._write(this._indentText);
}
this._write(trimmedMessage);
if (this._currentLineIsBlank) {
if (/\S/.test(trimmedMessage)) {
this._currentLineIsBlank = false;
}
}
this._atStartOfLine = false;
}
}
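    // Emits a newline; writes the indentation first when indentBlankLines is enabled and the
    // current line is still empty, and updates the blank-line tracking used by ensureSkippedLine().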
_writeNewLine() {
if (this.indentBlankLines) {
if (this._atStartOfLine && this._indentText.length > 0) {
this._write(this._indentText);
}
}
this._previousLineIsBlank = this._currentLineIsBlank;
this._write('\n');
this._currentLineIsBlank = true;
this._atStartOfLine = true;
}
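    // Appends raw text to the underlying builder, remembering the last two chunks so that
    // peekLastCharacter() and peekSecondLastCharacter() can answer without re-reading the output.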
_write(s) {
this._previousChunk = this._latestChunk;
this._latestChunk = s;
this._builder.append(s);
}
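    // Rebuilds the cached indentation prefix from the current indent stack.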
_updateIndentText() {
this._indentText = this._indentStack.join('');
}
}
exports.IndentedWriter = IndentedWriter;
//# sourceMappingURL=IndentedWriter.js.map
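
A small sketch exercising the `trimLeadingSpaces` behavior described in the remarks above; as before, the import path is assumed for illustration.

```ts
// Sketch of trimLeadingSpaces; import path is assumed.
import { IndentedWriter } from './IndentedWriter';

const indentedWriter: IndentedWriter = new IndentedWriter();
indentedWriter.trimLeadingSpaces = true;

indentedWriter.increaseIndent('    '); // four spaces
indentedWriter.write('  a\n  b c\n');
indentedWriter.decreaseIndent();

// With trimLeadingSpaces enabled, the two leading spaces from write() are dropped,
// so each line is indented by the four spaces from increaseIndent() only:
//     a
//     b c
console.log(indentedWriter.getText());
```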

File diff suppressed because one or more lines are too long