From 5c4d1b2ac58c447c8904a82abd3dd335c1f58ad6 Mon Sep 17 00:00:00 2001 From: Roman Vyakhirev Date: Thu, 30 Oct 2025 12:18:00 +0100 Subject: [PATCH 1/2] chore: add oss clearance tools --- automation/utils/bin/rui-oss-clearance.ts | 315 ++++++++++++++++++++++ automation/utils/package.json | 1 + automation/utils/src/changelog.ts | 9 - automation/utils/src/github.ts | 170 +++++++++++- automation/utils/src/oss-clearance.ts | 90 +++++++ automation/utils/src/oss-readme.ts | 12 - automation/utils/src/steps.ts | 2 +- package.json | 1 + 8 files changed, 572 insertions(+), 28 deletions(-) create mode 100755 automation/utils/bin/rui-oss-clearance.ts create mode 100644 automation/utils/src/oss-clearance.ts delete mode 100644 automation/utils/src/oss-readme.ts diff --git a/automation/utils/bin/rui-oss-clearance.ts b/automation/utils/bin/rui-oss-clearance.ts new file mode 100755 index 0000000000..71cdae4cb8 --- /dev/null +++ b/automation/utils/bin/rui-oss-clearance.ts @@ -0,0 +1,315 @@ +#!/usr/bin/env ts-node-script + +import { gh, GitHubDraftRelease, GitHubReleaseAsset } from "../src/github"; +import { basename, join } from "path"; +import { prompt } from "enquirer"; +import chalk from "chalk"; +import { createReadStream } from "node:fs"; +import * as crypto from "crypto"; +import { pipeline } from "stream/promises"; +import { homedir } from "node:os"; +import { + createSBomGeneratorFolderStructure, + findAllReadmeOssLocally, + generateSBomArtifactsInFolder, + getRecommendedReadmeOss, + includeReadmeOssIntoMpk +} from "../src/oss-clearance"; + +// ============================================================================ +// Constants +// ============================================================================ + +const SBOM_GENERATOR_JAR = join(homedir(), "SBOM_Generator.jar"); + +// ============================================================================ +// Utility Functions +// ============================================================================ + +function 
printHeader(title: string): void { + console.log("\n" + chalk.bold.cyan("═".repeat(60))); + console.log(chalk.bold.cyan(` ${title}`)); + console.log(chalk.bold.cyan("═".repeat(60)) + "\n"); +} + +function printStep(step: number, total: number, message: string): void { + console.log(chalk.bold.blue(`\n[${step}/${total}]`) + chalk.white(` ${message}`)); +} + +function printSuccess(message: string): void { + console.log(chalk.green(`✅ ${message}`)); +} + +function printError(message: string): void { + console.log(chalk.red(`❌ ${message}`)); +} + +function printWarning(message: string): void { + console.log(chalk.yellow(`⚠️ ${message}`)); +} + +function printInfo(message: string): void { + console.log(chalk.cyan(`ℹ️ ${message}`)); +} + +function printProgress(message: string): void { + console.log(chalk.gray(` → ${message}`)); +} + +// ============================================================================ +// Core Functions +// ============================================================================ + +async function verifyGitHubAuth(): Promise { + printStep(1, 5, "Verifying GitHub authentication..."); + + try { + await gh.ensureAuth(); + printSuccess("GitHub authentication verified"); + } catch (error) { + printError(`GitHub authentication failed: ${(error as Error).message}`); + console.log(chalk.yellow("\n💡 Setup Instructions:\n")); + console.log(chalk.white("1. Install GitHub CLI:")); + console.log(chalk.cyan(" • Download: https://cli.github.com/")); + console.log(chalk.cyan(" • Or via brew: brew install gh\n")); + console.log(chalk.white("2. Authenticate (choose one option):")); + console.log(chalk.cyan(" • Option A: export GITHUB_TOKEN=your_token_here")); + console.log(chalk.cyan(" • Option B: export GH_PAT=your_token_here")); + console.log(chalk.cyan(" • Option C: gh auth login\n")); + console.log(chalk.white("3. 
For A and B get your token at:")); + console.log(chalk.cyan(" https://github.com/settings/tokens\n")); + throw new Error("GitHub authentication required"); + } +} + +async function selectRelease(): Promise { + printStep(2, 5, "Fetching draft releases..."); + + const releases = await gh.getDraftReleases(); + printSuccess(`Found ${releases.length} draft release${releases.length !== 1 ? "s" : ""}`); + + if (releases.length === 0) { + printWarning("No draft releases found"); + throw new Error("No releases available"); + } + + console.log(); // spacing + const { tag_name } = await prompt<{ tag_name: string }>({ + type: "select", + name: "tag_name", + message: "Select a release to process:", + choices: releases.map(r => ({ + name: r.tag_name, + message: `${r.name} ${chalk.gray(`(${r.tag_name})`)}` + })) + }); + + const release = releases.find(r => r.tag_name === tag_name); + if (!release) { + throw new Error(`Release not found: ${tag_name}`); + } + + printInfo(`Selected release: ${chalk.bold(release.name)}`); + return release; +} + +async function findAndValidateMpkAsset(release: GitHubDraftRelease): Promise { + printStep(3, 5, "Locating MPK asset..."); + + const mpkAsset = release.assets.find(asset => asset.name.endsWith(".mpk")); + + if (!mpkAsset) { + printError("No MPK asset found in release"); + printInfo(`Available assets: ${release.assets.map(a => a.name).join(", ")}`); + throw new Error("MPK asset not found"); + } + + printSuccess(`Found MPK asset: ${chalk.bold(mpkAsset.name)}`); + printInfo(`Asset ID: ${mpkAsset.id}`); + return mpkAsset; +} + +async function downloadAndVerifyAsset(mpkAsset: GitHubReleaseAsset, downloadPath: string): Promise { + printStep(4, 5, "Downloading and verifying MPK asset..."); + + printProgress(`Downloading to: ${downloadPath}`); + await gh.downloadReleaseAsset(mpkAsset.id, downloadPath); + printSuccess("Download completed"); + + printProgress("Computing SHA-256 hash..."); + const fileHash = await computeHash(downloadPath); + 
printInfo(`Computed hash: ${fileHash}`); + + const expectedDigest = mpkAsset.digest.replace("sha256:", ""); + if (fileHash !== expectedDigest) { + printError("Hash mismatch detected!"); + printInfo(`Expected: ${expectedDigest}`); + printInfo(`Got: ${fileHash}`); + throw new Error("Asset integrity verification failed"); + } + + printSuccess("Hash verification passed"); + return fileHash; +} + +async function runSbomGenerator(tmpFolder: string, releaseName: string, fileHash: string): Promise { + printStep(5, 5, "Running SBOM Generator..."); + + printProgress("Generating OSS Clearance artifacts..."); + + const finalName = `${releaseName} [${fileHash}].zip`; + const finalPath = join(homedir(), "Downloads", finalName); + + await generateSBomArtifactsInFolder(tmpFolder, SBOM_GENERATOR_JAR, releaseName, finalPath); + printSuccess("Completed."); + + return finalPath; +} + +async function computeHash(filepath: string): Promise { + const input = createReadStream(filepath); + const hash = crypto.createHash("sha256"); + await pipeline(input, hash); + return hash.digest("hex"); +} + +// ============================================================================ +// Command Handlers +// ============================================================================ + +async function handlePrepareCommand(): Promise { + printHeader("OSS Clearance Artifacts Preparation"); + + try { + // Step 1: Verify authentication + await verifyGitHubAuth(); + + // Step 2: Select release + const release = await selectRelease(); + + // Step 3: Find MPK asset + const mpkAsset = await findAndValidateMpkAsset(release); + + // Prepare folder structure + const [tmpFolder, downloadPath] = await createSBomGeneratorFolderStructure(release.name); + printInfo(`Working directory: ${tmpFolder}`); + + // Step 4: Download and verify + const fileHash = await downloadAndVerifyAsset(mpkAsset, downloadPath); + + // Step 5: Run SBOM Generator + const finalPath = await runSbomGenerator(tmpFolder, release.name, 
fileHash); + + console.log(chalk.bold.green(`\n🎉 Success! Output file:`)); + console.log(chalk.cyan(` ${finalPath}\n`)); + } catch (error) { + console.log("\n" + chalk.bold.red("═".repeat(60))); + printError(`Process failed: ${(error as Error).message}`); + console.log(chalk.bold.red("═".repeat(60)) + "\n"); + process.exit(1); + } +} + +async function handleIncludeCommand(): Promise { + printHeader("OSS Clearance Readme Include"); + + try { + // TODO: Implement include command logic + // Step 1: Verify authentication + await verifyGitHubAuth(); + + // Step 2: Select release + const release = await selectRelease(); + + // Step 3: Find MPK asset + const mpkAsset = await findAndValidateMpkAsset(release); + + // Step 4: Find and select OSS Readme + const readmes = findAllReadmeOssLocally(); + const recommendedReadmeOss = getRecommendedReadmeOss( + release.name.split(" ")[0], + release.name.split(" ")[1], + readmes + ); + + let readmeToInclude: string; + + if (!recommendedReadmeOss) { + const { selectedReadme } = await prompt<{ selectedReadme: string }>({ + type: "select", + name: "selectedReadme", + message: "Select a READMEOSS file to include:", + choices: readmes.map(r => ({ + name: r, + message: basename(r) + })) + }); + + readmeToInclude = selectedReadme; + } else { + readmeToInclude = recommendedReadmeOss; + } + + printInfo(`Readme to include: ${readmeToInclude}`); + + // Prepare folder structure + const [tmpFolder, downloadPath] = await createSBomGeneratorFolderStructure(release.name); + printInfo(`Working directory: ${tmpFolder}`); + + // Step 5: Download and verify + await downloadAndVerifyAsset(mpkAsset, downloadPath); + + // Step 6: Include readmeToInclude into the mpk + await includeReadmeOssIntoMpk(readmeToInclude, downloadPath); + + // Step 7: Upload updated asses to the draft release + const newAsset = await gh.updateReleaseAsset(release.id, mpkAsset, downloadPath); + console.log(`Successfully uploaded asset ${newAsset.name} (ID: ${newAsset.id})`); + 
console.log(release.id); + } catch (error) { + console.log("\n" + chalk.bold.red("═".repeat(60))); + printError(`Process failed: ${(error as Error).message}`); + console.log(chalk.bold.red("═".repeat(60)) + "\n"); + process.exit(1); + } +} + +// ============================================================================ +// Main Function +// ============================================================================ + +async function main(): Promise { + const command = process.argv[2]; + + switch (command) { + case "prepare": + await handlePrepareCommand(); + break; + case "include": + await handleIncludeCommand(); + break; + default: + printError(command ? `Unknown command: ${command}` : "No command specified"); + console.log(chalk.white("\nUsage:")); + console.log( + chalk.cyan(" rui-oss-clearance.ts prepare ") + + chalk.gray("- Prepare OSS clearance artifact from draft release") + ); + console.log( + chalk.cyan(" rui-oss-clearance.ts include ") + + chalk.gray("- Include OSS Readme file into a draft release") + ); + console.log(); + process.exit(1); + } +} + +// ============================================================================ +// Entry Point +// ============================================================================ + +main().catch(e => { + console.error(chalk.red("\n💥 Unexpected error:"), e); + process.exit(1); +}); diff --git a/automation/utils/package.json b/automation/utils/package.json index 483509d4d9..480fe2dabe 100644 --- a/automation/utils/package.json +++ b/automation/utils/package.json @@ -30,6 +30,7 @@ "compile:parser:widget": "peggy -o ./src/changelog-parser/parser/module/module.js ./src/changelog-parser/parser/module/module.pegjs", "format": "prettier --write .", "lint": "eslint --ext .jsx,.js,.ts,.tsx src/", + "oss-clearance": "ts-node bin/rui-oss-clearance.ts", "prepare": "pnpm run compile:parser:widget && pnpm run compile:parser:module && tsc", "prepare-release": "ts-node bin/rui-prepare-release.ts", "start": "tsc --watch", 
diff --git a/automation/utils/src/changelog.ts b/automation/utils/src/changelog.ts index 93abe42eab..3f323b48d7 100644 --- a/automation/utils/src/changelog.ts +++ b/automation/utils/src/changelog.ts @@ -1,8 +1,6 @@ import { gh } from "./github"; import { PublishedInfo } from "./package-info"; import { exec, popd, pushd } from "./shell"; -import { findOssReadme } from "./oss-readme"; -import { join } from "path"; export async function updateChangelogsAndCreatePR( info: PublishedInfo, @@ -53,13 +51,6 @@ export async function updateChangelogsAndCreatePR( pushd(root.trim()); await exec(`git add '*/CHANGELOG.md'`); - const path = process.cwd(); - const readmeossFile = findOssReadme(path, info.mxpackage.name, info.version.format()); - if (readmeossFile) { - console.log(`Removing OSS clearance readme file '${readmeossFile}'...`); - await exec(`git rm '${readmeossFile}'`); - } - await exec(`git commit -m "chore(${info.name}): update changelog"`); await exec(`git push ${remoteName} ${releaseBranchName}`); popd(); diff --git a/automation/utils/src/github.ts b/automation/utils/src/github.ts index ef5f7c92d0..02fe89a8b6 100644 --- a/automation/utils/src/github.ts +++ b/automation/utils/src/github.ts @@ -1,8 +1,30 @@ -import { mkdtemp, writeFile } from "fs/promises"; +import { mkdtemp, readFile, writeFile } from "fs/promises"; +import { createWriteStream } from "fs"; import { join } from "path"; +import { pipeline } from "stream/promises"; +import nodefetch from "node-fetch"; import { fetch } from "./fetch"; import { exec } from "./shell"; +export interface GitHubReleaseAsset { + id: string; + name: string; + browser_download_url: string; + size: number; + content_type: string; + digest: string; +} + +export interface GitHubDraftRelease { + id: string; + tag_name: string; + name: string; + draft: boolean; + created_at: string; + published_at: string | null; + assets: GitHubReleaseAsset[]; +} + interface GitHubReleaseInfo { title: string; tag: string; @@ -29,12 +51,11 @@ 
interface GitHubPRInfo { export class GitHub { authSet = false; tmpPrefix = "gh-"; + authToken: string = ""; async ensureAuth(): Promise { if (!this.authSet) { - if (process.env.GITHUB_TOKEN) { - // when using GITHUB_TOKEN, gh will automatically use it - } else if (process.env.GH_PAT) { + if (process.env.GH_PAT) { await exec(`echo "${process.env.GH_PAT}" | gh auth login --with-token`); } else { // No environment variables set, check if already authenticated @@ -53,8 +74,10 @@ export class GitHub { try { // Try to run 'gh auth status' to check if authenticated await exec("gh auth status", { stdio: "pipe" }); + const { stdout: token } = await exec(`gh auth token`, { stdio: "pipe" }); + this.authToken = token.trim(); return true; - } catch (error) { + } catch (_error: unknown) { // If the command fails, the user is not authenticated return false; } @@ -107,7 +130,7 @@ export class GitHub { get ghAPIHeaders(): Record { return { "X-GitHub-Api-Version": "2022-11-28", - Authorization: `Bearer ${process.env.GH_PAT}` + Authorization: `Bearer ${this.authToken || process.env.GH_PAT}` }; } @@ -165,6 +188,58 @@ export class GitHub { return downloadUrl; } + async getDraftReleases(owner = "mendix", repo = "web-widgets"): Promise { + const releases = await fetch( + "GET", + `https://api.github.com/repos/${owner}/${repo}/releases`, + undefined, + { + ...this.ghAPIHeaders + } + ); + + // Filter only draft releases + return releases.filter(release => release.draft); + } + + async downloadReleaseAsset( + assetId: string, + destinationPath: string, + owner = "mendix", + repo = "web-widgets" + ): Promise { + await this.ensureAuth(); + + const url = `https://api.github.com/repos/${owner}/${repo}/releases/assets/${assetId}`; + + try { + const response = await nodefetch(url, { + method: "GET", + headers: { + Accept: "application/octet-stream", + ...this.ghAPIHeaders + }, + redirect: "follow" + }); + + if (!response.ok) { + throw new Error(`Failed to download asset ${assetId}: 
${response.status} ${response.statusText}`); + } + + if (!response.body) { + throw new Error(`No response body received for asset ${assetId}`); + } + + // Stream the response body to the file + const fileStream = createWriteStream(destinationPath); + await pipeline(response.body, fileStream); + } catch (error) { + throw new Error( + `Failed to download release asset ${assetId}: ${error instanceof Error ? error.message : String(error)}` + ); + } + } + async createReleaseNotesFile(releaseNotesText: string): Promise { const filePath = await this.createTempFile(); await writeFile(filePath, releaseNotesText); @@ -211,6 +286,89 @@ export class GitHub { } }); } + + /** + * Delete a release asset by ID + */ + private async deleteReleaseAsset(assetId: string, owner = "mendix", repo = "web-widgets"): Promise { + await this.ensureAuth(); + + const response = await nodefetch(`https://api.github.com/repos/${owner}/${repo}/releases/assets/${assetId}`, { + method: "DELETE", + headers: this.ghAPIHeaders + }); + + if (!response.ok) { + throw new Error(`Failed to delete asset ${assetId}: ${response.status} ${response.statusText}`); + } + } + + /** + * Upload a new asset to a release + */ + private async uploadReleaseAsset( + releaseId: string, + filePath: string, + assetName: string, + owner = "mendix", + repo = "web-widgets" + ): Promise { + await this.ensureAuth(); + + // Get release info to get upload URL + const release = await fetch<{ upload_url: string }>( + "GET", + `https://api.github.com/repos/${owner}/${repo}/releases/${releaseId}`, + undefined, + this.ghAPIHeaders + ); + + // The upload_url comes with {?name,label} template, we need to replace it + const uploadUrl = release.upload_url.replace(/\{[^}]+}/g, "") + `?name=${encodeURIComponent(assetName)}`; + + // Read the file + const fileBuffer = await readFile(filePath); + + // Upload the file + const response = await nodefetch(uploadUrl, { + method: "POST", + headers: { + ...this.ghAPIHeaders, + "Content-Type": 
"application/octet-stream", + "Content-Length": fileBuffer.length.toString() + }, + body: fileBuffer + }); + + if (!response.ok) { + const errorText = await response.text(); + throw new Error( + `Failed to upload asset ${assetName}: ${response.status} ${response.statusText} - ${errorText}` + ); + } + + const asset = (await response.json()) as GitHubReleaseAsset; + + return asset; + } + + /** + * Update a release asset by replacing it with a new file. + */ + async updateReleaseAsset( + releaseId: string, + existingAsset: GitHubReleaseAsset, + localFilePath: string, + owner = "mendix", + repo = "web-widgets" + ): Promise { + await this.ensureAuth(); + + await this.deleteReleaseAsset(existingAsset.id, owner, repo); + + // Upload the new asset + return this.uploadReleaseAsset(releaseId, localFilePath, existingAsset.name, owner, repo); + } } export const gh = new GitHub(); diff --git a/automation/utils/src/oss-clearance.ts b/automation/utils/src/oss-clearance.ts new file mode 100644 index 0000000000..f2cfb25b87 --- /dev/null +++ b/automation/utils/src/oss-clearance.ts @@ -0,0 +1,90 @@ +import { globSync } from "glob"; +import { basename, join, parse } from "path"; +import { homedir, tmpdir } from "node:os"; +import { mkdtemp, stat } from "node:fs/promises"; +import { chmod, cp, exec, mkdir, mv, rm, unzip, zip } from "./shell"; +import chalk from "chalk"; + +export function findOssReadme(packageRoot: string, widgetName: string, version: string): string | undefined { + const readmeossPattern = `**/*${widgetName}__${version}__READMEOSS_*.html`; + + console.info(`Looking for READMEOSS file matching pattern: ${readmeossPattern}`); + + // Use glob to find files matching the pattern in package root + const matchingFiles = globSync(readmeossPattern, { cwd: packageRoot, absolute: true, ignore: "**/dist/**" }); + + return matchingFiles[0]; +} + +export function findAllReadmeOssLocally(): string[] { + const readmeossPattern = join("**", `*__*__READMEOSS_*.html`); + const path1 = 
join(homedir(), "Downloads"); + const path2 = join(homedir(), "Documents"); + + const matchingFiles1 = globSync(readmeossPattern, { cwd: path1, absolute: true, ignore: "**/.*/**" }); + const matchingFiles2 = globSync(readmeossPattern, { cwd: path2, absolute: true, ignore: "**/.*/**" }); + + return matchingFiles1.concat(matchingFiles2); +} + +export function getRecommendedReadmeOss(name: string, version: string, availableReadmes: string[]): string | undefined { + const fileNames = availableReadmes.map(r => basename(r)); + + return fileNames.find(r => r.includes(name) && r.includes(version)); +} + +export async function createSBomGeneratorFolderStructure( + assetNameAndVersion: string +): Promise<[folder: string, assetPath: string]> { + const tmpFolder = await mkdtemp(join(tmpdir(), "tmp_OSS_Clearance_Artifacts_")); + const artifactsFolder = join(tmpFolder, "SBOM_GENERATOR", assetNameAndVersion); + await mkdir("-p", artifactsFolder); + return [tmpFolder, join(artifactsFolder, `${assetNameAndVersion}.mpk`)]; +} + +export async function generateSBomArtifactsInFolder( + tmpFolder: string, + generatorBinaryPath: string, + expectedName: string, + finalPath: string +): Promise { + // run generator + await exec(`java -jar ${generatorBinaryPath} SBOM_GENERATOR unzip`, { cwd: tmpFolder }); + await exec(`java -jar ${generatorBinaryPath} SBOM_GENERATOR scan`, { cwd: tmpFolder }); + + // check results + const resultsFolder = join(tmpFolder, "CCA_JSON"); + const assetsFolder = join(resultsFolder, expectedName); + const assets = await stat(assetsFolder); + if (!assets.isDirectory()) { + throw new Error("Can't find assets folder"); + } + + // archive results + const archiveName = `${expectedName}.zip`; + await zip(resultsFolder, archiveName); + const ossArtifactZip = join(resultsFolder, archiveName); + + // move to final destination + await mv(ossArtifactZip, finalPath); + + // removing tmp folder + await rm("-rf", tmpFolder); +} + +export async function 
includeReadmeOssIntoMpk(readmeOssPath: string, mpkPath: string): Promise { + const mpkEntry = parse(mpkPath); + const unzipTarget = join(mpkEntry.dir, "tmp"); + + // unzip + rm("-rf", unzipTarget); + await unzip(mpkPath, unzipTarget); + chmod("-R", "a+rw", unzipTarget); + + // Copy the READMEOSS file to the target directory + cp(readmeOssPath, unzipTarget); + + // zip it back + await zip(unzipTarget, mpkPath); + rm("-rf", unzipTarget); +} diff --git a/automation/utils/src/oss-readme.ts b/automation/utils/src/oss-readme.ts deleted file mode 100644 index 1fb73035eb..0000000000 --- a/automation/utils/src/oss-readme.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { globSync } from "glob"; - -export function findOssReadme(packageRoot: string, widgetName: string, version: string): string | undefined { - const readmeossPattern = `**/*${widgetName}__${version}__READMEOSS_*.html`; - - console.info(`Looking for READMEOSS file matching pattern: ${readmeossPattern}`); - - // Use glob to find files matching the pattern in package root - const matchingFiles = globSync(readmeossPattern, { cwd: packageRoot, absolute: true, ignore: "**/dist/**" }); - - return matchingFiles[0]; -} diff --git a/automation/utils/src/steps.ts b/automation/utils/src/steps.ts index 4522396755..baf9dfba6a 100644 --- a/automation/utils/src/steps.ts +++ b/automation/utils/src/steps.ts @@ -14,7 +14,7 @@ import { ModuleInfo, PackageInfo, WidgetInfo } from "./package-info"; import { addFilesToPackageXml, PackageType } from "./package-xml"; import { chmod, cp, ensureFileExists, exec, find, mkdir, popd, pushd, rm, unzip, zip } from "./shell"; import chalk from "chalk"; -import { findOssReadme } from "./oss-readme"; +import { findOssReadme } from "./oss-clearance"; type Step = (params: { info: Info; config: Config }) => Promise; diff --git a/package.json b/package.json index b79f4793f0..efccd1e5fc 100644 --- a/package.json +++ b/package.json @@ -9,6 +9,7 @@ "scripts": { "build": "turbo run build", "changelog": "pnpm 
--filter @mendix/automation-utils run changelog", + "oss-clearance": "pnpm --filter @mendix/automation-utils run oss-clearance", "create-gh-release": "turbo run create-gh-release --concurrency 1", "create-translation": "turbo run create-translation", "postinstall": "turbo run agent-rules", From 673597e239023e6ae2b52a5b30e2793e635a7a52 Mon Sep 17 00:00:00 2001 From: Roman Vyakhirev Date: Thu, 6 Nov 2025 11:19:36 +0100 Subject: [PATCH 2/2] chore: don't repackage mpk, just include readme --- automation/utils/bin/rui-oss-clearance.ts | 18 ++---- automation/utils/bin/rui-prepare-release.ts | 6 +- automation/utils/src/github.ts | 64 +++++++-------------- 3 files changed, 27 insertions(+), 61 deletions(-) diff --git a/automation/utils/bin/rui-oss-clearance.ts b/automation/utils/bin/rui-oss-clearance.ts index 71cdae4cb8..834c814741 100755 --- a/automation/utils/bin/rui-oss-clearance.ts +++ b/automation/utils/bin/rui-oss-clearance.ts @@ -89,8 +89,10 @@ async function selectRelease(): Promise { printSuccess(`Found ${releases.length} draft release${releases.length !== 1 ? "s" : ""}`); if (releases.length === 0) { - printWarning("No draft releases found"); - throw new Error("No releases available"); + printWarning( + "No draft releases found. 
Please create a draft release before trying again using `prepare-release` tool" + ); + throw new Error("No draft releases found"); } console.log(); // spacing @@ -252,18 +254,8 @@ async function handleIncludeCommand(): Promise { printInfo(`Readme to include: ${readmeToInclude}`); - // Prepare folder structure - const [tmpFolder, downloadPath] = await createSBomGeneratorFolderStructure(release.name); - printInfo(`Working directory: ${tmpFolder}`); - - // Step 5: Download and verify - await downloadAndVerifyAsset(mpkAsset, downloadPath); - - // Step 6: Include readmeToInclude into the mpk - await includeReadmeOssIntoMpk(readmeToInclude, downloadPath); - // Step 7: Upload updated asses to the draft release - const newAsset = await gh.updateReleaseAsset(release.id, mpkAsset, downloadPath); + const newAsset = await gh.uploadReleaseAsset(release.id, readmeToInclude, basename(readmeToInclude)); console.log(`Successfully uploaded asset ${newAsset.name} (ID: ${newAsset.id})`); console.log(release.id); diff --git a/automation/utils/bin/rui-prepare-release.ts b/automation/utils/bin/rui-prepare-release.ts index 253fe6a1a2..12a00b4ec6 100755 --- a/automation/utils/bin/rui-prepare-release.ts +++ b/automation/utils/bin/rui-prepare-release.ts @@ -353,15 +353,13 @@ async function createReleaseBranch(packageName: string, version: string): Promis } async function initializeJiraClient(): Promise { - const projectKey = process.env.JIRA_PROJECT_KEY; - const baseUrl = process.env.JIRA_BASE_URL; + const projectKey = process.env.JIRA_PROJECT_KEY ?? "WC"; + const baseUrl = process.env.JIRA_BASE_URL ?? 
"https://mendix.atlassian.net"; const apiToken = process.env.JIRA_API_TOKEN; if (!projectKey || !baseUrl || !apiToken) { console.error(chalk.red("❌ Missing Jira environment variables")); console.log(chalk.dim(" Required variables:")); - console.log(chalk.dim(" export JIRA_PROJECT_KEY=WEB")); - console.log(chalk.dim(" export JIRA_BASE_URL=https://your-company.atlassian.net")); console.log(chalk.dim(" export JIRA_API_TOKEN=username@your-company.com:ATATT3xFfGF0...")); console.log(chalk.dim(" Get your API token at: https://id.atlassian.com/manage-profile/security/api-tokens")); throw new Error("Missing Jira environment variables"); diff --git a/automation/utils/src/github.ts b/automation/utils/src/github.ts index 02fe89a8b6..6ad65d1fad 100644 --- a/automation/utils/src/github.ts +++ b/automation/utils/src/github.ts @@ -52,6 +52,8 @@ export class GitHub { authSet = false; tmpPrefix = "gh-"; authToken: string = ""; + owner = "mendix"; + repo = "web-widgets"; async ensureAuth(): Promise { if (!this.authSet) { @@ -140,7 +142,7 @@ export class GitHub { const release = (await fetch<{ id: string }>( "GET", - `https://api.github.com/repos/mendix/web-widgets/releases/tags/${releaseTag}`, + `https://api.github.com/repos/${this.owner}/${this.repo}/releases/tags/${releaseTag}`, undefined, { ...this.ghAPIHeaders } )) ?? 
[]; @@ -171,7 +173,7 @@ export class GitHub { name: string; browser_download_url: string; }> - >("GET", `https://api.github.com/repos/mendix/web-widgets/releases/${releaseId}/assets`, undefined, { + >("GET", `https://api.github.com/repos/${this.owner}/${this.repo}/releases/${releaseId}/assets`, undefined, { ...this.ghAPIHeaders }); } @@ -188,10 +190,10 @@ export class GitHub { return downloadUrl; } - async getDraftReleases(owner = "mendix", repo = "web-widgets"): Promise { + async getDraftReleases(): Promise { const releases = await fetch( "GET", - `https://api.github.com/repos/${owner}/${repo}/releases`, + `https://api.github.com/repos/${this.owner}/${this.repo}/releases`, undefined, { ...this.ghAPIHeaders @@ -202,15 +204,10 @@ export class GitHub { return releases.filter(release => release.draft); } - async downloadReleaseAsset( - assetId: string, - destinationPath: string, - owner = "mendix", - repo = "web-widgets" - ): Promise { + async downloadReleaseAsset(assetId: string, destinationPath: string): Promise { await this.ensureAuth(); - const url = `https://api.github.com/repos/${owner}/${repo}/releases/assets/${assetId}`; + const url = `https://api.github.com/repos/${this.owner}/${this.repo}/releases/assets/${assetId}`; try { const response = await nodefetch(url, { @@ -256,14 +253,14 @@ export class GitHub { }): Promise { await this.ensureAuth(); - const { workflowId, ref, inputs, owner = "mendix", repo = "web-widgets" } = params; + const { workflowId, ref, inputs } = params; // Convert inputs object to CLI parameters const inputParams = Object.entries(inputs) .map(([key, value]) => `-f ${key}=${value}`) .join(" "); - const repoParam = `${owner}/${repo}`; + const repoParam = `${this.owner}/${this.repo}`; const command = [`gh workflow run`, `"${workflowId}"`, `--ref "${ref}"`, inputParams, `-R "${repoParam}"`] .filter(Boolean) @@ -290,13 +287,16 @@ export class GitHub { /** * Delete a release asset by ID */ - private async deleteReleaseAsset(assetId: string, 
owner = "mendix", repo = "web-widgets"): Promise { + async deleteReleaseAsset(assetId: string): Promise { await this.ensureAuth(); - const response = await nodefetch(`https://api.github.com/repos/${owner}/${repo}/releases/assets/${assetId}`, { - method: "DELETE", - headers: this.ghAPIHeaders - }); + const response = await nodefetch( + `https://api.github.com/repos/${this.owner}/${this.repo}/releases/assets/${assetId}`, + { + method: "DELETE", + headers: this.ghAPIHeaders + } + ); if (!response.ok) { throw new Error(`Failed to delete asset ${assetId}: ${response.status} ${response.statusText}`); @@ -306,19 +306,13 @@ export class GitHub { /** * Upload a new asset to a release */ - private async uploadReleaseAsset( - releaseId: string, - filePath: string, - assetName: string, - owner = "mendix", - repo = "web-widgets" - ): Promise { + async uploadReleaseAsset(releaseId: string, filePath: string, assetName: string): Promise { await this.ensureAuth(); // Get release info to get upload URL const release = await fetch<{ upload_url: string }>( "GET", - `https://api.github.com/repos/${owner}/${repo}/releases/${releaseId}`, + `https://api.github.com/repos/${this.owner}/${this.repo}/releases/${releaseId}`, undefined, this.ghAPIHeaders ); @@ -351,24 +345,6 @@ export class GitHub { return asset; } - - /** - * Update a release asset by replacing it with a new file. - */ - async updateReleaseAsset( - releaseId: string, - existingAsset: GitHubReleaseAsset, - localFilePath: string, - owner = "mendix", - repo = "web-widgets" - ): Promise { - await this.ensureAuth(); - - await this.deleteReleaseAsset(existingAsset.id, owner, repo); - - // Upload the new asset - return this.uploadReleaseAsset(releaseId, localFilePath, existingAsset.name, owner, repo); - } } export const gh = new GitHub();