fix: use absolute urls for published sourcemap urls
Showing 3 changed files with 220 additions and 2 deletions.
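The commit rewrites the relative `sources` entries in published `.map` files into absolute URLs: jsDelivr for third-party dependencies and GitHub for Sanity monorepo files. A minimal before/after sketch of the intended effect (the dependency path, package name, and versions below are illustrative, not taken from an actual bundle):

// Illustrative only: `some-dep` and its version are made-up examples.
// Before: sources are relative to the bundled file inside `dist/`.
const before: string[] = [
  '../../../node_modules/.pnpm/some-dep@1.2.3/node_modules/some-dep/dist/index.js',
  '../src/core/schema/createSchema.ts',
]

// After: dependencies point at jsDelivr, monorepo files point at GitHub.
const after: string[] = [
  'https://cdn.jsdelivr.net/npm/some-dep@1.2.3/dist/index.js',
  'https://github.com/sanity-io/sanity/blob/v3.59.1/packages/sanity/src/core/schema/createSchema.ts',
]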
@@ -1,11 +1,17 @@
import {type Dirent, readdirSync, readFileSync} from 'node:fs'
import {readdir, readFile, stat, writeFile} from 'node:fs/promises'
import {type SourceMapPayload} from 'node:module'
import path from 'node:path'

// eslint-disable-next-line import/no-extraneous-dependencies
/* eslint-disable import/no-extraneous-dependencies */
import {Storage, type UploadOptions} from '@google-cloud/storage'
import {type NormalizedReadResult, readPackageUp} from 'read-package-up'

/* eslint-enable import/no-extraneous-dependencies */
import {readEnv} from './utils/envVars'

const BASE_PATH = path.resolve(path.dirname(new URL(import.meta.url).pathname), '..')

type KnownEnvVar = 'GOOGLE_PROJECT_ID' | 'GCLOUD_SERVICE_KEY' | 'GCLOUD_BUCKET'

const storage = new Storage({

@@ -15,6 +21,8 @@ const storage = new Storage({

const bucket = storage.bucket(readEnv<KnownEnvVar>('GCLOUD_BUCKET'))

const monoRepoPackageVersions: Record<string, string> = getMonorepoPackageVersions()

const corePkgs = ['sanity', '@sanity/vision'] as const

const appVersion = 'v1'

@@ -59,7 +67,7 @@ async function copyPackages() {
  for (const pkg of corePkgs) {
    console.log(`Copying files from ${pkg}`)

    const packageJson = JSON.parse(await readFile(`packages/${pkg}/package.json`, 'utf8'))
    const {packageJson} = await readPackageJson(`packages/${pkg}/package.json`)

    const {version} = packageJson
    packageVersions.set(pkg, version)

@@ -160,7 +168,111 @@ async function updateManifest(newVersions: Map<string, string>) {
  }
}

async function cleanupSourceMaps() {
  const packageVersions = new Map<string, string>()

  // First we iterate through each core package located in `packages/`
  for (const pkg of corePkgs) {
    const {packageJson} = await readPackageJson(`packages/${pkg}/package.json`)

    const {version} = packageJson
    packageVersions.set(pkg, version)

    for await (const filePath of getFiles(`packages/${pkg}/dist`)) {
      if (path.extname(filePath) !== '.map') {
        continue
      }

      try {
        const sourceMap = await readSourceMap(filePath)
        const newSources = await Promise.all(
          sourceMap.sources.map((source) => rewriteSource(source, filePath)),
        )
        sourceMap.sources = newSources
        await writeFile(filePath, JSON.stringify(sourceMap), 'utf-8')
      } catch (error) {
        throw new Error(`Failed to rewrite source map from ${pkg}`, {cause: error})
      }
    }

    console.log(`Completed source map rewriting for directory ${pkg}`)
  }
}

/**
 * Rewrite source paths to absolute URLs for CDN-published bundles.
 *
 * Technically speaking this is "incorrect", as we don't use the URLs given for actually building;
 * however, it is useful for debugging purposes, and leaving the original paths does not make sense
 * as we are not hosting the source files on the CDN anyway.
 *
 * The `sourcesContent` property does contain the actual contents we need for most debugging,
 * so this is mostly a way for us to jump to a file we can peek at, as well as telling us which
 * version of the file we are looking at. This is more helpful than, say,
 * `../../node_modules/someModule/index.js`.
 *
 * The URLs we rewrite to are as follows:
 * - For absolute URLs, leave them as-is.
 * - For dependencies (eg anything inside of `node_modules`), we use jsdelivr.net.
 * - For Sanity monorepo packages, we use GitHub URLs. Note that these link to the HTML interface,
 *   _NOT_ the raw file - so any tool that tries to actually fetch the file might fail. If this
 *   proves to be an issue, we could switch to `raw.githubusercontent.com` instead, but it is
 *   less user-friendly for us developers.
 *
 * @param source - The source path to rewrite
 * @param sourceMapPath - The path to the source map file that contained this source path
 * @returns The rewritten source path (URL)
 * @internal
 */
async function rewriteSource(source: string, sourceMapPath: string): Promise<string> {
  if (/^https?:\/\//.test(source)) {
    return source
  }

  const sourceMapDir = path.dirname(sourceMapPath)
  const sourcePath = path.resolve(sourceMapDir, source)
  if (sourcePath.includes('node_modules')) {
    const {
      packageJson: {name, version},
      packagePath,
    } = await readPackageJson(sourcePath)

    if (name === 'sanity-root') {
      throw new Error(`Found sanity-root instead of a package for ${sourcePath}`)
    }
    const pathFromPackage = path.relative(packagePath, sourcePath)
    // eg `../../../node_modules/.pnpm/@[email protected][email protected]/node_modules/@sanity/client/dist/index.browser.js`
    // => `https://cdn.jsdelivr.net/npm/@sanity/[email protected]/dist/index.browser.js`
    return `https://cdn.jsdelivr.net/npm/${name}@${version}/${pathFromPackage}`
  }

  // eg `../src/core/schema/createSchema.ts` =>
  // => `https://github.com/sanity-io/sanity/blob/v3.59.1/packages/sanity/src/core/schema/createSchema.ts`
  const relativePath = path.posix.relative(BASE_PATH, sourcePath)
  const pathParts = relativePath.split('/')

  if (pathParts.shift() !== 'packages') {
    console.warn('Failed to rewrite source path, unknown path type', {source, sourceMapPath})
    return source
  }

  const pkgName = pathParts[0].startsWith('@') ? `${pathParts[0]}/${pathParts[1]}` : pathParts[0]
  const pkgVersion = monoRepoPackageVersions[pkgName]
  if (!pkgVersion) {
    console.warn(`Failed to rewrite source path, could not find version for ${pkgName}`)
    return source
  }

  // Encode for GitHub URLs, eg `@sanity/vision` -> `%40sanity/vision`
  const cleanDir = encodeURIComponent(relativePath).replace(/%2F/g, '/')
  return `https://github.com/sanity-io/sanity/blob/v${pkgVersion}/${cleanDir}`
}

async function uploadBundles() {
  // Clean up source maps
  await cleanupSourceMaps()
  console.log('**Completed cleaning up source maps** ✅')

  // Copy all the bundles
  const pkgVersions = await copyPackages()
  console.log('**Completed copying all files** ✅')

@@ -170,6 +282,58 @@
  console.log('**Completed updating manifest** ✅')
}

async function readSourceMap(
  filePath: string,
): Promise<Omit<SourceMapPayload, 'sourceRoot'> & {sourceRoot?: string}> {
  const sourceMap = JSON.parse(await readFile(filePath, 'utf8'))
  if (typeof sourceMap !== 'object' || sourceMap === null || Array.isArray(sourceMap)) {
    throw new Error(`Invalid source map at ${filePath}`)
  }

  if (!('sources' in sourceMap)) {
    throw new Error(`Missing 'sources' in source map at ${filePath}`)
  }

  return sourceMap
}

async function readPackageJson(
  fromFilePath: string,
): Promise<NormalizedReadResult & {packagePath: string}> {
  const depPkg = await readPackageUp({cwd: fromFilePath})
  if (!depPkg || !depPkg.packageJson) {
    throw new Error(`No package.json found for ${fromFilePath}`)
  }

  return {...depPkg, packagePath: path.dirname(depPkg.path)}
}

function getMonorepoPackageVersions(): Record<string, string> {
  const isDir = (dirent: Dirent) => dirent.isDirectory() && !dirent.name.startsWith('.')
  const getFullPath = (dirent: Dirent) => path.join(dirent.parentPath, dirent.name)
  const listOpts = {withFileTypes: true} as const

  const scoped = readdirSync(path.join(BASE_PATH, 'packages', '@sanity'), listOpts)
    .filter(isDir)
    .map(getFullPath)

  const unscoped = readdirSync(path.join(BASE_PATH, 'packages'), listOpts)
    .filter((dirent) => isDir(dirent) && !dirent.name.startsWith('@'))
    .map(getFullPath)

  const versions: Record<string, string> = {}
  ;[...scoped, ...unscoped].forEach((pkgPath) => {
    try {
      const {name, version} = JSON.parse(readFileSync(path.join(pkgPath, 'package.json'), 'utf-8'))
      versions[name] = version
    } catch (err) {
      console.warn(`Failed to read package.json for ${pkgPath}`, err)
    }
  })

  return versions
}

uploadBundles().catch((err) => {
  console.error(err)
  process.exit(1)
})
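As a quick way to verify the behaviour documented on `rewriteSource`, one could read a published map back and check that every `sources` entry is now an absolute URL. The following is a standalone sketch, not part of the commit; the map path is a hypothetical example:

import {readFile} from 'node:fs/promises'

// Hypothetical path to one of the rewritten source maps.
const mapPath = 'packages/sanity/dist/index.mjs.map'

const map = JSON.parse(await readFile(mapPath, 'utf8')) as {sources: string[]}
const relative = map.sources.filter((source) => !/^https?:\/\//.test(source))

if (relative.length > 0) {
  // Any leftover relative paths mean a source was not matched by the rewrite rules.
  console.warn(`Found ${relative.length} non-absolute sources, e.g. ${relative[0]}`)
} else {
  console.log(`All ${map.sources.length} sources are absolute URLs`)
}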