author    | Fabio Berger <me@fabioberger.com> | 2018-07-30 03:58:39 +0800
committer | Fabio Berger <me@fabioberger.com> | 2018-07-30 03:58:39 +0800
commit    | f4a2e227e1a7224fbbe9c99d9aa033d176a9c4de (patch)
tree      | d8246a826b3aaffb9fbe27020c5297772f322f21 /packages/monorepo-scripts
parent    | 48e538f5c7dc208775ec71635e4d4866b5907228 (diff)
Remove all in-package monorepo-scripts by adding doc gen/upload and aggregate release note publishing to publish script
Diffstat (limited to 'packages/monorepo-scripts')
-rw-r--r-- | packages/monorepo-scripts/package.json                   |  10
-rw-r--r-- | packages/monorepo-scripts/src/doc_generate_and_upload.ts | 215
-rw-r--r-- | packages/monorepo-scripts/src/index.ts                   |   1
-rw-r--r-- | packages/monorepo-scripts/src/postpublish_utils.ts       | 202
-rw-r--r-- | packages/monorepo-scripts/src/publish.ts                 |  98
5 files changed, 318 insertions, 208 deletions
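
For orientation before reading the diff: the new doc_generate_and_upload.ts both exports generateAndUploadDocsAsync for use from publish.ts and is registered as a package.json script (script:doc_generate_and_upload). The sketch below is illustrative only and is not part of the commit — the stageDocsForPackageAsync helper is hypothetical, and the CLI line assumes yarn forwards the extra flags to the underlying node invocation.

    // CLI entry point (from packages/monorepo-scripts), mirroring the yargs example in the diff:
    //   yarn script:doc_generate_and_upload --package '0x.js' --isStaging true
    //
    // Programmatic entry point, as publish.ts now uses it:
    import { generateAndUploadDocsAsync } from './doc_generate_and_upload';

    // Hypothetical helper: generate the DocJSON for one package and upload it to the
    // staging bucket (s3://staging-doc-jsons/<package>/) rather than production.
    async function stageDocsForPackageAsync(packageName: string): Promise<void> {
        const isStaging = true;
        await generateAndUploadDocsAsync(packageName, isStaging);
    }
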
diff --git a/packages/monorepo-scripts/package.json b/packages/monorepo-scripts/package.json
index 128bdcff5..24f1607c0 100644
--- a/packages/monorepo-scripts/package.json
+++ b/packages/monorepo-scripts/package.json
@@ -17,7 +17,8 @@
         "script:deps_versions": "node ./lib/deps_versions.js",
         "script:prepublish_checks": "node ./lib/prepublish_checks.js",
         "script:publish": "IS_DRY_RUN=true node ./lib/publish.js",
-        "script:find_unused_deps": "node ./lib/find_unused_dependencies.js"
+        "script:find_unused_deps": "node ./lib/find_unused_dependencies.js",
+        "script:doc_generate_and_upload": "node ./lib/doc_generate_and_upload.js"
     },
     "repository": {
         "type": "git",
@@ -38,8 +39,7 @@
         "make-promises-safe": "^1.1.0",
         "npm-run-all": "^4.1.2",
         "shx": "^0.2.2",
-        "tslint": "5.11.0",
-        "typescript": "2.7.1"
+        "tslint": "5.11.0"
     },
     "dependencies": {
         "@lerna/batch-packages": "^3.0.0-beta.18",
@@ -58,7 +58,9 @@
         "rimraf": "^2.6.2",
         "semver": "5.5.0",
         "semver-diff": "^2.1.0",
-        "semver-sort": "0.0.4"
+        "semver-sort": "0.0.4",
+        "typedoc": "0xProject/typedoc",
+        "typescript": "2.7.1"
     },
     "publishConfig": {
         "access": "public"
diff --git a/packages/monorepo-scripts/src/doc_generate_and_upload.ts b/packages/monorepo-scripts/src/doc_generate_and_upload.ts
new file mode 100644
index 000000000..b6a4801e3
--- /dev/null
+++ b/packages/monorepo-scripts/src/doc_generate_and_upload.ts
@@ -0,0 +1,215 @@
+import { readFileSync, writeFileSync } from 'fs';
+import * as _ from 'lodash';
+import * as path from 'path';
+import { exec as execAsync } from 'promisify-child-process';
+import * as ts from 'typescript';
+import * as yargs from 'yargs';
+
+import { constants } from './constants';
+import { utils } from './utils/utils';
+
+export interface ExportPathToExportedItems {
+    [pkgName: string]: string[];
+}
+
+const args = yargs
+    .option('package', {
+        describe: 'Monorepo sub-package for which to generate DocJSON',
+        type: 'string',
+        demandOption: true,
+    })
+    .option('isStaging', {
+        describe: 'Whether we with to publish docs to staging or production',
+        type: 'boolean',
+        demandOption: true,
+    })
+    .example("$0 --package '0x.js' --isStaging true", 'Full usage example').argv;
+
+(async () => {
+    const packageName = args.package;
+    const isStaging = args.isStaging;
+
+    await generateAndUploadDocsAsync(packageName, isStaging);
+})();
+
+export async function generateAndUploadDocsAsync(packageName: string, isStaging: boolean): Promise<void> {
+    const pathToPackage = `${constants.monorepoRootPath}/packages/${packageName}`;
+    const indexPath = `${pathToPackage}/src/index.ts`;
+    const exportPathToExportedItems = getExportPathToExportedItems(indexPath);
+
+    const monorepoPackages = utils.getPackages(constants.monorepoRootPath);
+    const pkg = _.find(monorepoPackages, monorepoPackage => {
+        return _.includes(monorepoPackage.packageJson.name, packageName);
+    });
+    if (_.isUndefined(pkg)) {
+        throw new Error(`Couldn't find a package.json for ${packageName}`);
+    }
+
+    const packageJson = pkg.packageJson;
+    const shouldPublishDocs = !!_.get(packageJson, 'config.postpublish.shouldPublishDocs');
+    if (!shouldPublishDocs) {
+        utils.log(
+            `GENERATE_UPLOAD_DOCS: ${
+                packageJson.name
+            } packageJson.config.postpublish.shouldPublishDocs is false. Skipping doc JSON generation.`,
+        );
+        return;
+    }
+
+    const pkgNameToPath: { [name: string]: string } = {};
+    _.each(monorepoPackages, pkg => {
+        pkgNameToPath[pkg.packageJson.name] = pkg.location;
+    });
+
+    // For each dep that is another one of our monorepo packages, we fetch it's index.ts
+    // and see which specific files we must pass to TypeDoc.
+    let typeDocExtraFileIncludes: string[] = [];
+    _.each(exportPathToExportedItems, (exportedItems, exportPath) => {
+        const isInternalToPkg = _.startsWith(exportPath, '.');
+        if (isInternalToPkg) {
+            const pathToInternalPkg = path.join(pathToPackage, 'src', `${exportPath}.ts`);
+            typeDocExtraFileIncludes.push(pathToInternalPkg);
+        }
+        const pathIfExists = pkgNameToPath[exportPath];
+        if (_.isUndefined(pathIfExists)) {
+            return; // It's an external package
+        }
+        const typeDocSourceIncludes = new Set();
+        const pathToIndex = `${pathIfExists}/src/index.ts`;
+        const innerExportPathToExportedItems = getExportPathToExportedItems(pathToIndex);
+        _.each(exportedItems, exportName => {
+            _.each(innerExportPathToExportedItems, (innerExportItems, innerExportPath) => {
+                if (!_.startsWith(innerExportPath, './')) {
+                    // noop. Not an internal export... but rather an external one. Should we follow it?
+                    return;
+                }
+                if (_.includes(innerExportItems, exportName)) {
+                    const absoluteSrcPath = path.join(pathIfExists, 'src', `${innerExportPath}.ts`);
+                    typeDocSourceIncludes.add(absoluteSrcPath);
+                }
+            });
+        });
+        // @0xproject/types & ethereum-types are examples of packages where their index.ts exports types
+        // directly, meaning no internal paths will exist to follow. This, we add the index file.
+        // TODO: Maybe we should add the index for all packages?
+        if (typeDocSourceIncludes.size === 0) {
+            typeDocSourceIncludes.add(pathToIndex);
+        }
+        typeDocExtraFileIncludes = [...typeDocExtraFileIncludes, ...Array.from(typeDocSourceIncludes)];
+    });
+
+    // Generate Typedoc JSON file
+    const jsonFilePath = path.join(pathToPackage, 'generated_docs', 'index.json');
+    const projectFiles = typeDocExtraFileIncludes.join(' ');
+    const cwd = path.join(constants.monorepoRootPath, 'packages/0x.js/');
+    // HACK: For some reason calling `typedoc` command directly from here, even with `cwd` set to the
+    // packages root dir, does not work. It only works when called via a `package.json` script located
+    // in the packages root.
+    await execAsync(`JSON_FILE_PATH=${jsonFilePath} PROJECT_FILES="${projectFiles}" yarn docs:json`, {
+        cwd,
+    });
+
+    // For each entry, see if it was exported in index.ts. If not, remove it.
+    const typedocOutputString = readFileSync(jsonFilePath).toString();
+    const typedocOutput = JSON.parse(typedocOutputString);
+    const finalTypeDocOutput = _.clone(typedocOutput);
+    _.each(typedocOutput.children, (file, i) => {
+        const exportItems = findExportItemsGivenTypedocName(exportPathToExportedItems, packageName, file.name);
+        // Map file "name" to exportPath... HOW?!
+        _.each(file.children, (child, j) => {
+            if (!_.includes(exportItems, child.name)) {
+                delete finalTypeDocOutput.children[i].children[j];
+            }
+        });
+        finalTypeDocOutput.children[i].children = _.compact(finalTypeDocOutput.children[i].children);
+    });
+    // Write modified TypeDoc JSON, without all the unexported stuff
+    writeFileSync(jsonFilePath, JSON.stringify(finalTypeDocOutput, null, 2));
+
+    const fileName = `v${packageJson.version}.json`;
+    utils.log(`GENERATE_UPLOAD_DOCS: Doc generation successful, uploading docs... as ${fileName}`);
+    const S3BucketPath = isStaging ? `s3://staging-doc-jsons/${packageName}/` : `s3://doc-jsons/${packageName}/`;
+    const s3Url = `${S3BucketPath}${fileName}`;
+    await execAsync(
+        `aws s3 cp ${jsonFilePath} ${s3Url} --profile 0xproject --grants read=uri=http://acs.amazonaws.com/groups/global/AllUsers --content-type application/json`,
+        {
+            cwd,
+        },
+    );
+    utils.log(`GENERATE_UPLOAD_DOCS: Docs uploaded to S3 bucket: ${S3BucketPath}`);
+    // Remove the generated docs directory
+    await execAsync(`rm -rf ${jsonFilePath}`, {
+        cwd,
+    });
+}
+
+function findExportItemsGivenTypedocName(
+    exportPathToExportedItems: ExportPathToExportedItems,
+    packageName: string,
+    typedocName: string,
+): string[] {
+    const typeDocNameWithoutQuotes = _.replace(typedocName, '"', '');
+    const sanitizedExportPathToExportPath: { [sanitizedName: string]: string } = {};
+    const exportPaths = _.keys(exportPathToExportedItems);
+    const sanitizedExportPaths = _.map(exportPaths, exportPath => {
+        if (_.startsWith(exportPath, './')) {
+            const sanitizedExportPath = path.join(packageName, 'src', exportPath);
+            sanitizedExportPathToExportPath[sanitizedExportPath] = exportPath;
+            return sanitizedExportPath;
+        }
+        const monorepoPrefix = '@0xproject/';
+        if (_.startsWith(exportPath, monorepoPrefix)) {
+            const sanitizedExportPath = exportPath.split(monorepoPrefix)[1];
+            sanitizedExportPathToExportPath[sanitizedExportPath] = exportPath;
+            return sanitizedExportPath;
+        }
+        sanitizedExportPathToExportPath[exportPath] = exportPath;
+        return exportPath;
+    });
+    const matchingSanitizedExportPathIfExists = _.find(sanitizedExportPaths, p => {
+        return _.startsWith(typeDocNameWithoutQuotes, p);
+    });
+    if (_.isUndefined(matchingSanitizedExportPathIfExists)) {
+        throw new Error(`Didn't find an exportPath for ${typeDocNameWithoutQuotes}`);
+    }
+    const matchingExportPath = sanitizedExportPathToExportPath[matchingSanitizedExportPathIfExists];
+    return exportPathToExportedItems[matchingExportPath];
+}
+
+function getExportPathToExportedItems(pkgPath: string): ExportPathToExportedItems {
+    const sourceFile = ts.createSourceFile(
+        'indexFile',
+        readFileSync(pkgPath).toString(),
+        ts.ScriptTarget.ES2017,
+        /*setParentNodes */ true,
+    );
+    const exportPathToExportedItems = _getExportPathToExportedItems(sourceFile);
+    return exportPathToExportedItems;
+}
+
+function _getExportPathToExportedItems(sf: ts.SourceFile): ExportPathToExportedItems {
+    const exportPathToExportedItems: ExportPathToExportedItems = {};
+    processNode(sf);
+
+    function processNode(node: ts.Node): void {
+        switch (node.kind) {
+            case ts.SyntaxKind.ExportDeclaration:
+                // console.log(node);
+                const exportClause = (node as any).exportClause;
+                const pkgName = exportClause.parent.moduleSpecifier.text;
+                _.each(exportClause.elements, element => {
+                    exportPathToExportedItems[pkgName] = _.isUndefined(exportPathToExportedItems[pkgName])
+                        ? [element.name.escapedText]
+                        : [...exportPathToExportedItems[pkgName], element.name.escapedText];
+                });
+                break;
+
+            default:
+                // noop
+                break;
+        }
+
+        ts.forEachChild(node, processNode);
+    }
+    return exportPathToExportedItems;
+}
diff --git a/packages/monorepo-scripts/src/index.ts b/packages/monorepo-scripts/src/index.ts
index 95c96ebe8..e69de29bb 100644
--- a/packages/monorepo-scripts/src/index.ts
+++ b/packages/monorepo-scripts/src/index.ts
@@ -1 +0,0 @@
-export { postpublishUtils } from './postpublish_utils';
diff --git a/packages/monorepo-scripts/src/postpublish_utils.ts b/packages/monorepo-scripts/src/postpublish_utils.ts
deleted file mode 100644
index 8e445a045..000000000
--- a/packages/monorepo-scripts/src/postpublish_utils.ts
+++ /dev/null
@@ -1,202 +0,0 @@
-import { execAsync } from 'async-child-process';
-import * as promisify from 'es6-promisify';
-import * as fs from 'fs';
-import * as _ from 'lodash';
-import * as path from 'path';
-import * as publishRelease from 'publish-release';
-
-import { constants } from './constants';
-import { configs } from './utils/configs';
-import { utils } from './utils/utils';
-
-const publishReleaseAsync = promisify(publishRelease);
-const generatedDocsDirectoryName = 'generated_docs';
-
-export interface PostpublishConfigs {
-    cwd: string;
-    packageName: string;
-    version: string;
-    assets: string[];
-    docPublishConfigs: DocPublishConfigs;
-}
-
-export interface DocPublishConfigs {
-    fileIncludes: string[];
-    s3BucketPath: string;
-    s3StagingBucketPath: string;
-}
-
-export const postpublishUtils = {
-    generateConfig(packageJSON: any, tsConfigJSON: any, cwd: string): PostpublishConfigs {
-        if (_.isUndefined(packageJSON.name)) {
-            throw new Error('name field required in package.json. Cannot publish release notes to Github.');
-        }
-        if (_.isUndefined(packageJSON.version)) {
-            throw new Error('version field required in package.json. Cannot publish release notes to Github.');
-        }
-        const postpublishConfig = _.get(packageJSON, 'config.postpublish', {});
-        const postpublishConfigs: PostpublishConfigs = {
-            cwd,
-            packageName: packageJSON.name,
-            version: packageJSON.version,
-            assets: _.get(postpublishConfig, 'assets', []),
-            docPublishConfigs: {
-                fileIncludes: [
-                    ...tsConfigJSON.include,
-                    ..._.get(postpublishConfig, 'docPublishConfigs.extraFileIncludes', []),
-                ],
-                s3BucketPath: _.get(postpublishConfig, 'docPublishConfigs.s3BucketPath'),
-                s3StagingBucketPath: _.get(postpublishConfig, 'docPublishConfigs.s3StagingBucketPath'),
-            },
-        };
-        return postpublishConfigs;
-    },
-    async runAsync(packageJSON: any, tsConfigJSON: any, cwd: string): Promise<void> {
-        if (configs.IS_LOCAL_PUBLISH) {
-            return;
-        }
-        const postpublishConfigs = postpublishUtils.generateConfig(packageJSON, tsConfigJSON, cwd);
-        await postpublishUtils.publishReleaseNotesAsync(
-            postpublishConfigs.packageName,
-            postpublishConfigs.version,
-            postpublishConfigs.assets,
-        );
-        if (
-            !_.isUndefined(postpublishConfigs.docPublishConfigs.s3BucketPath) ||
-            !_.isUndefined(postpublishConfigs.docPublishConfigs.s3StagingBucketPath)
-        ) {
-            utils.log('POSTPUBLISH: Release successful, generating docs...');
-            await postpublishUtils.generateAndUploadDocsAsync(
-                postpublishConfigs.cwd,
-                postpublishConfigs.docPublishConfigs.fileIncludes,
-                postpublishConfigs.version,
-                postpublishConfigs.docPublishConfigs.s3BucketPath,
-            );
-        } else {
-            utils.log(`POSTPUBLISH: No S3Bucket config found for ${packageJSON.name}. Skipping doc JSON generation.`);
-        }
-    },
-    async publishDocsToStagingAsync(packageJSON: any, tsConfigJSON: any, cwd: string): Promise<void> {
-        const postpublishConfigs = postpublishUtils.generateConfig(packageJSON, tsConfigJSON, cwd);
-        if (_.isUndefined(postpublishConfigs.docPublishConfigs.s3StagingBucketPath)) {
-            utils.log('config.postpublish.docPublishConfigs.s3StagingBucketPath entry in package.json not found!');
-            return;
-        }
-
-        utils.log('POSTPUBLISH: Generating docs...');
-        await postpublishUtils.generateAndUploadDocsAsync(
-            postpublishConfigs.cwd,
-            postpublishConfigs.docPublishConfigs.fileIncludes,
-            postpublishConfigs.version,
-            postpublishConfigs.docPublishConfigs.s3StagingBucketPath,
-        );
-    },
-    async publishReleaseNotesAsync(packageName: string, version: string, assets: string[]): Promise<void> {
-        const notes = postpublishUtils.getReleaseNotes(packageName, version);
-        const releaseName = postpublishUtils.getReleaseName(packageName, version);
-        const tag = postpublishUtils.getTag(packageName, version);
-        postpublishUtils.adjustAssetPaths(assets);
-        utils.log('POSTPUBLISH: Releasing ', releaseName, '...');
-        await publishReleaseAsync({
-            token: constants.githubPersonalAccessToken,
-            owner: '0xProject',
-            repo: '0x-monorepo',
-            tag,
-            name: releaseName,
-            notes,
-            draft: false,
-            prerelease: false,
-            reuseRelease: true,
-            reuseDraftOnly: false,
-            assets,
-        });
-    },
-    getReleaseNotes(packageName: string, version: string): string {
-        const packageNameWithNamespace = packageName.replace('@0xproject/', '');
-        const changelogJSONPath = path.join(
-            constants.monorepoRootPath,
-            'packages',
-            packageNameWithNamespace,
-            'CHANGELOG.json',
-        );
-        const changelogJSON = fs.readFileSync(changelogJSONPath, 'utf-8');
-        const changelogs = JSON.parse(changelogJSON);
-        const latestLog = changelogs[0];
-        // We sanity check that the version for the changelog notes we are about to publish to Github
-        // correspond to the new version of the package.
-        if (version !== latestLog.version) {
-            throw new Error('Expected CHANGELOG.json latest entry version to coincide with published version.');
-        }
-        let notes = '';
-        _.each(latestLog.changes, change => {
-            notes += `* ${change.note}`;
-            if (change.pr) {
-                notes += ` (#${change.pr})`;
-            }
-            notes += `\n`;
-        });
-        return notes;
-    },
-    getTag(packageName: string, version: string): string {
-        return `${packageName}@${version}`;
-    },
-    getReleaseName(subPackageName: string, version: string): string {
-        const releaseName = `${subPackageName} v${version}`;
-        return releaseName;
-    },
-    // Asset paths should described from the monorepo root. This method prefixes
-    // the supplied path with the absolute path to the monorepo root.
-    adjustAssetPaths(assets: string[]): string[] {
-        const finalAssets: string[] = [];
-        _.each(assets, (asset: string) => {
-            finalAssets.push(`${constants.monorepoRootPath}/${asset}`);
-        });
-        return finalAssets;
-    },
-    adjustFileIncludePaths(fileIncludes: string[], cwd: string): string[] {
-        const fileIncludesAdjusted = _.map(fileIncludes, fileInclude => {
-            let includePath = _.startsWith(fileInclude, './')
-                ? `${cwd}/${fileInclude.substr(2)}`
-                : `${cwd}/${fileInclude}`;
-
-            // HACK: tsconfig.json needs wildcard directory endings as `/**/*`
-            // but TypeDoc needs it as `/**` in order to pick up files at the root
-            if (_.endsWith(includePath, '/**/*')) {
-                // tslint:disable-next-line:custom-no-magic-numbers
-                includePath = includePath.slice(0, -2);
-            }
-            return includePath;
-        });
-        return fileIncludesAdjusted;
-    },
-    async generateAndUploadDocsAsync(
-        cwd: string,
-        fileIncludes: string[],
-        version: string,
-        S3BucketPath: string,
-    ): Promise<void> {
-        const fileIncludesAdjusted = postpublishUtils.adjustFileIncludePaths(fileIncludes, cwd);
-        const projectFiles = fileIncludesAdjusted.join(' ');
-        const jsonFilePath = `${cwd}/${generatedDocsDirectoryName}/index.json`;
-        const result = await execAsync(
-            `JSON_FILE_PATH=${jsonFilePath} PROJECT_FILES="${projectFiles}" yarn docs:json`,
-            {
-                cwd,
-            },
-        );
-        if (!_.isEmpty(result.stderr)) {
-            throw new Error(result.stderr);
-        }
-        const fileName = `v${version}.json`;
-        utils.log(`POSTPUBLISH: Doc generation successful, uploading docs... as ${fileName}`);
-        const s3Url = S3BucketPath + fileName;
-        await execAsync(`S3_URL=${s3Url} yarn upload_docs_json`, {
-            cwd,
-        });
-        // Remove the generated docs directory
-        await execAsync(`rm -rf ${generatedDocsDirectoryName}`, {
-            cwd,
-        });
-        utils.log(`POSTPUBLISH: Docs uploaded to S3 bucket: ${S3BucketPath}`);
-    },
-};
diff --git a/packages/monorepo-scripts/src/publish.ts b/packages/monorepo-scripts/src/publish.ts
index 5992131db..7444c64b1 100644
--- a/packages/monorepo-scripts/src/publish.ts
+++ b/packages/monorepo-scripts/src/publish.ts
@@ -8,12 +8,16 @@ import { exec as execAsync } from 'promisify-child-process';
 import * as prompt from 'prompt';
 import semver = require('semver');
 import semverSort = require('semver-sort');
+import * as publishRelease from 'publish-release';
+
+const publishReleaseAsync = promisify(publishRelease);

 import { constants } from './constants';
 import { Package, PackageToNextVersion, VersionChangelog } from './types';
 import { changelogUtils } from './utils/changelog_utils';
 import { configs } from './utils/configs';
 import { utils } from './utils/utils';
+import { generateAndUploadDocsAsync } from './doc_generate_and_upload';

 const DOC_GEN_COMMAND = 'docs:json';
 const NPM_NAMESPACE = '@0xproject/';
@@ -70,15 +74,27 @@ const packageNameToWebsitePath: { [name: string]: string } = {
     });
     utils.log(`Calling 'lerna publish'...`);
     await lernaPublishAsync(packageToNextVersion);
+    const isStaging = false;
+    await generateAndUploadDocJsonsAsync(updatedPublicPackages, isStaging);
+    await publishReleaseNotesAsync(updatedPublicPackages);
 })().catch(err => {
     utils.log(err);
     process.exit(1);
 });

+async function generateAndUploadDocJsonsAsync(updatedPublicPackages: Package[], isStaging: boolean) {
+    for (const pkg of updatedPublicPackages) {
+        const packageName = pkg.packageJson.name;
+        const nameWithoutPrefix = packageName.replace('@0xproject/', '');
+        await generateAndUploadDocsAsync(nameWithoutPrefix, isStaging);
+    }
+}
+
 async function confirmDocPagesRenderAsync(packages: Package[]): Promise<void> {
     // push docs to staging
     utils.log("Upload all docJson's to S3 staging...");
-    await execAsync(`yarn stage_docs`, { cwd: constants.monorepoRootPath });
+    const isStaging = true;
+    await generateAndUploadDocJsonsAsync(packages, isStaging);

     // deploy website to staging
     utils.log('Deploy website to staging...');
@@ -178,6 +194,86 @@ async function updateChangeLogsAsync(updatedPublicPackages: Package[]): Promise<
     return packageToNextVersion;
 }

+async function publishReleaseNotesAsync(updatedPublishPackages: Package[]): Promise<void> {
+    // Git push a tag representing this publish (publish-{commit-hash}) (truncate hash)
+    const result = await execAsync('git log -n 1 --pretty=format:"%H"', { cwd: constants.monorepoRootPath });
+    const latestGitCommit = result.stdout;
+    const shortenedGitCommit = latestGitCommit.slice(0, 7);
+    const tagName = `monorepo@${shortenedGitCommit}`;
+    // TODO: We might need to handle the case where the tag already exists locally
+    await execAsync('git tag ${tagName}');
+    await execAsync('git push origin ${tagName}');
+    const releaseName = `0x monorepo - ${shortenedGitCommit}`;
+
+    let assets: string[] = [];
+    let aggregateNotes = '';
+    _.each(updatedPublishPackages, pkg => {
+        const notes = getReleaseNotesForPackage(pkg.packageJson.name, pkg.packageJson.version);
+        aggregateNotes += `### ${pkg.packageJson.name}@${pkg.packageJson.version}\n${notes}\n\n`;
+
+        const packageAssets = _.get(pkg.packageJson, 'config.postpublish.assets');
+        if (!_.isUndefined(packageAssets)) {
+            assets = [...assets, ...packageAssets];
+        }
+    });
+    adjustAssetPaths(assets);
+
+    utils.log('PUBLISH: Releasing ', releaseName, '...');
+    // TODO: Currently publish-release doesn't let you specify the labels for each asset uploaded
+    // Ideally we would like to name the assets after the package they are from
+    // Source: https://github.com/remixz/publish-release/issues/39
+    await publishReleaseAsync({
+        token: constants.githubPersonalAccessToken,
+        owner: '0xProject',
+        tag: tagName,
+        repo: '0x-monorepo',
+        name: releaseName,
+        notes: aggregateNotes,
+        draft: false,
+        prerelease: false,
+        reuseRelease: true,
+        reuseDraftOnly: false,
+        assets,
+    });
+}
+
+// Asset paths should described from the monorepo root. This method prefixes
+// the supplied path with the absolute path to the monorepo root.
+function adjustAssetPaths(assets: string[]): string[] {
+    const finalAssets: string[] = [];
+    _.each(assets, (asset: string) => {
+        finalAssets.push(`${constants.monorepoRootPath}/${asset}`);
+    });
+    return finalAssets;
+}
+
+function getReleaseNotesForPackage(packageName: string, version: string): string {
+    const packageNameWithoutNamespace = packageName.replace('@0xproject/', '');
+    const changelogJSONPath = path.join(
+        constants.monorepoRootPath,
+        'packages',
+        packageNameWithoutNamespace,
+        'CHANGELOG.json',
+    );
+    const changelogJSON = fs.readFileSync(changelogJSONPath, 'utf-8');
+    const changelogs = JSON.parse(changelogJSON);
+    const latestLog = changelogs[0];
+    // We sanity check that the version for the changelog notes we are about to publish to Github
+    // correspond to the new version of the package.
+    if (version !== latestLog.version) {
+        throw new Error('Expected CHANGELOG.json latest entry version to coincide with published version.');
+    }
+    let notes = '';
+    _.each(latestLog.changes, change => {
+        notes += `* ${change.note}`;
+        if (change.pr) {
+            notes += ` (#${change.pr})`;
+        }
+        notes += `\n`;
+    });
+    return notes;
+}
+
 async function lernaPublishAsync(packageToNextVersion: { [name: string]: string }): Promise<void> {
     const packageVersionString = _.map(packageToNextVersion, (nextVersion: string, packageName: string) => {
         return `${packageName}@${nextVersion}`;