Consolidate scripts and automate report management (#1540)

* removing old scripts

* consolidated folder

* Updating usage of scripts

* Adding script to generate an open PR report, rather than making AI gen it each time

* Adding step to close old quality report discussions
This commit is contained in:
Aaron Powell
2026-04-28 17:29:40 +10:00
committed by GitHub
parent f7a7ef7c28
commit 2f972ba80c
9 changed files with 409 additions and 321 deletions

80
eng/delete-gone-branches.sh Executable file
View File

@@ -0,0 +1,80 @@
#!/bin/bash
# Delete local branches whose upstream tracking branch no longer exists
# (shown as "[gone]" by git). Dry-run by default; pass --apply to delete.
set -euo pipefail

usage() {
cat <<'EOF'
Usage: bash eng/delete-gone-branches.sh [--apply]
Find local branches whose upstream is marked "[gone]" and delete them.
Options:
--apply Actually delete the branches with `git branch -D`
--help Show this help text
Without --apply, the script prints what it would delete.
EOF
}

apply=false
case "${1:-}" in
  "")
    ;;
  --apply)
    apply=true
    ;;
  --help|-h)
    usage
    exit 0
    ;;
  *)
    echo "Unknown option: $1" >&2
    usage >&2
    exit 1
    ;;
esac

# Refresh remote-tracking refs so "[gone]" markers are current.
git fetch --prune --quiet

# FIX: separate the ref name and tracking info with an explicit tab (%09) and
# split on it. The previous shortest-suffix strip (${line% *}) mis-parsed
# multi-word tracking info such as "[ahead 1, behind 2]".
mapfile -t gone_branches < <(
  git for-each-ref --format='%(refname:short)%09%(upstream:track)' refs/heads |
    while IFS=$'\t' read -r branch tracking; do
      if [[ "$tracking" == "[gone]" ]]; then
        printf '%s\n' "$branch"
      fi
    done
)

if [[ ${#gone_branches[@]} -eq 0 ]]; then
  echo "No local branches with gone upstreams found."
  exit 0
fi

current_branch="$(git branch --show-current)"
echo "Found ${#gone_branches[@]} branch(es) with gone upstreams:"
printf ' %s\n' "${gone_branches[@]}"

if [[ "$apply" != true ]]; then
  echo
  echo "Dry run only. Re-run with --apply to delete them."
  exit 0
fi

deleted_count=0
for branch in "${gone_branches[@]}"; do
  # Never delete the branch that is currently checked out.
  if [[ "$branch" == "$current_branch" ]]; then
    echo "Skipping current branch: $branch"
    continue
  fi
  git branch -D "$branch"
  deleted_count=$((deleted_count + 1))
done

echo
echo "Deleted $deleted_count branch(es)."

9
eng/fix-line-endings.sh Executable file
View File

@@ -0,0 +1,9 @@
#!/bin/bash
# Script to fix line endings in all markdown files
set -euo pipefail
echo "Normalizing line endings in markdown files..."
# Find all markdown files and convert CRLF to LF.
# FIX: prune .git and node_modules so repository internals and vendored files
# are never rewritten, and batch files per sed invocation with `-exec ... +`
# instead of spawning one process per file.
find . \( -path ./.git -o -path ./node_modules \) -prune -o \
  -name "*.md" -type f -exec sed -i 's/\r$//' {} +
echo "Done! All markdown files now have LF line endings."

291
eng/generate-open-pr-report.mjs Executable file
View File

@@ -0,0 +1,291 @@
#!/usr/bin/env node
import { execFileSync } from "node:child_process";
import fs from "node:fs";
import path from "node:path";
import { ROOT_FOLDER } from "./constants.mjs";
import { setupGracefulShutdown } from "./utils/graceful-shutdown.mjs";
// Default repository queried when --repo is not supplied.
const DEFAULT_REPO = "github/awesome-copilot";
// Default cap on the number of open PRs fetched from the GitHub CLI.
const DEFAULT_LIMIT = 500;
// Timeout (in milliseconds) applied to every external command invocation.
const DEFAULT_CMD_TIMEOUT = 30_000;
// Report buckets: each definition pairs a Markdown heading and a JSON file
// name with a predicate that selects matching (normalized) pull requests.
const REPORT_DEFINITIONS = [
  {
    heading: "PRs that target `main`",
    fileName: "prs-targeting-main.json",
    predicate: (pr) => pr.targetBranch === "main"
  },
  {
    heading: "PRs that target `staged` which are passing all checks and have less than 10 files",
    fileName: "prs-staged-passing-under-10-files.json",
    predicate: (pr) => pr.targetBranch === "staged" && pr.checksPass && pr.fileCount < 10
  },
  {
    heading: "PRs that target `staged` which have between 10 and 50 files",
    fileName: "prs-staged-10-to-50-files.json",
    predicate: (pr) => pr.targetBranch === "staged" && pr.fileCount >= 10 && pr.fileCount <= 50
  },
  {
    heading: "PRs that target `staged` with greater than 50 files",
    fileName: "prs-staged-over-50-files.json",
    predicate: (pr) => pr.targetBranch === "staged" && pr.fileCount > 50
  }
];
// Project helper; presumably installs SIGINT/SIGTERM cleanup handlers for
// this script — behavior not visible in this file, confirm in utils/.
setupGracefulShutdown("generate-open-pr-report");
/**
 * Print usage/help text for this script to stdout.
 * NOTE(review): the indentation inside this template literal may have been
 * lost in this rendering of the source — confirm against the original file.
 */
function printUsage() {
  console.log(`Usage: node eng/generate-open-pr-report.mjs [--repo owner/name] [--output-dir path] [--limit N]
Generate open PR reports for a GitHub repository.
Outputs:
- open-pr-report.md
- prs-targeting-main.json
- prs-staged-passing-under-10-files.json
- prs-staged-10-to-50-files.json
- prs-staged-over-50-files.json
Options:
--repo GitHub repository in owner/name format (default: ${DEFAULT_REPO})
--output-dir Directory for generated reports (default: <repo-root>/reports)
--limit Max number of open PRs to fetch (default: ${DEFAULT_LIMIT})
--help, -h Show this help text`);
}
/**
 * Parse command-line flags into an options object, applying defaults.
 * @param {string[]} argv - Arguments after the script path.
 * @returns {{repo: string, outputDir: string, limit: number}}
 * @throws {Error} On unknown flags or invalid option values.
 */
function parseArgs(argv) {
  const options = {
    repo: DEFAULT_REPO,
    outputDir: path.join(ROOT_FOLDER, "reports"),
    limit: DEFAULT_LIMIT
  };
  let index = 0;
  while (index < argv.length) {
    const flag = argv[index];
    switch (flag) {
      case "--help":
      case "-h":
        printUsage();
        process.exit(0);
        break;
      case "--repo":
        // A missing value becomes "" and is rejected by validation below.
        options.repo = argv[index + 1] ?? "";
        index += 2;
        break;
      case "--output-dir":
        options.outputDir = argv[index + 1] ?? "";
        index += 2;
        break;
      case "--limit":
        options.limit = Number.parseInt(argv[index + 1] ?? "", 10);
        index += 2;
        break;
      default:
        throw new Error(`Unknown option: ${flag}`);
    }
  }
  // Validate after parsing so the last occurrence of a repeated flag wins.
  if (!options.repo || !options.repo.includes("/")) {
    throw new Error("--repo must be in owner/name format.");
  }
  if (!Number.isInteger(options.limit) || options.limit < 1) {
    throw new Error("--limit must be a positive integer.");
  }
  if (!options.outputDir) {
    throw new Error("--output-dir is required.");
  }
  return options;
}
/**
 * Verify that an external command is runnable by probing `<command> --version`.
 * @param {string} command - Executable name to probe.
 * @throws {Error} When the command is missing, fails, or times out.
 */
function ensureCommandAvailable(command) {
  try {
    execFileSync(command, ["--version"], {
      stdio: "ignore",
      timeout: DEFAULT_CMD_TIMEOUT
    });
  } catch (error) {
    // FIX: preserve the underlying failure for debugging instead of
    // silently discarding it.
    throw new Error(`Missing required command: ${command}`, { cause: error });
  }
}
/**
 * Run the GitHub CLI with the given arguments and parse its stdout as JSON.
 * @param {string[]} args - Arguments passed to `gh`.
 * @returns {any} The parsed JSON payload.
 * @throws If `gh` exits non-zero, exceeds the timeout, or emits invalid JSON.
 */
function runGhJson(args) {
  const output = execFileSync("gh", args, {
    encoding: "utf8",
    // stdin ignored; stdout captured for parsing, stderr captured for errors.
    stdio: ["ignore", "pipe", "pipe"],
    timeout: DEFAULT_CMD_TIMEOUT
  });
  return JSON.parse(output);
}
/**
 * Collapse a PR's status-check rollup into one aggregate state:
 * "NONE" (no checks), "PENDING" (anything still running), "SUCCESS",
 * or "FAILURE".
 * @param {Array<{status: string, conclusion?: string}>|undefined} statusCheckRollup
 * @returns {"NONE"|"PENDING"|"SUCCESS"|"FAILURE"}
 */
function getCheckState(statusCheckRollup) {
  if (!Array.isArray(statusCheckRollup) || statusCheckRollup.length === 0) {
    return "NONE";
  }
  const stillRunning = statusCheckRollup.some(
    (check) => check.status !== "COMPLETED"
  );
  if (stillRunning) {
    return "PENDING";
  }
  // Missing conclusions are treated as "" so Set lookups stay well-defined.
  const conclusions = statusCheckRollup.map((check) => check.conclusion ?? "");
  // Any explicitly failing conclusion short-circuits to FAILURE.
  const failing = new Set([
    "FAILURE",
    "TIMED_OUT",
    "ACTION_REQUIRED",
    "CANCELLED",
    "STALE",
    "STARTUP_FAILURE"
  ]);
  if (conclusions.some((conclusion) => failing.has(conclusion))) {
    return "FAILURE";
  }
  // Anything not explicitly successful or benign also counts as FAILURE.
  const benign = new Set(["SUCCESS", "NEUTRAL", "SKIPPED"]);
  return conclusions.every((conclusion) => benign.has(conclusion))
    ? "SUCCESS"
    : "FAILURE";
}
/**
 * Flatten a raw `gh pr list` record into the shape the reports consume.
 * @param {object} pr - Raw pull-request record from the GitHub CLI.
 * @returns {object} Normalized pull-request summary.
 */
function normalizePullRequest(pr) {
  const { number, title, baseRefName, changedFiles, createdAt, updatedAt, url } = pr;
  const checkState = getCheckState(pr.statusCheckRollup);
  return {
    id: number,
    title,
    // Deleted accounts come back without an author login.
    author: pr.author?.login ?? "ghost",
    checksPass: checkState === "SUCCESS",
    checkState,
    targetBranch: baseRefName,
    fileCount: changedFiles,
    createdAt,
    updatedAt,
    createdAgeDays: getAgeInDays(createdAt),
    updatedAgeDays: getAgeInDays(updatedAt),
    url
  };
}
/**
 * Human-readable verdict for the "checks pass" report column.
 * @param {{checkState: string}} pr - Normalized pull request.
 * @returns {string} "Yes", "Pending", "No checks", or "No".
 */
function getCheckLabel(pr) {
  const labels = new Map([
    ["SUCCESS", "Yes"],
    ["PENDING", "Pending"],
    ["NONE", "No checks"]
  ]);
  // Everything else (i.e. FAILURE) reads as "No".
  return labels.get(pr.checkState) ?? "No";
}
/**
 * Escape pipe characters so a value cannot terminate a Markdown table cell.
 * @param {unknown} value - Cell content; coerced to a string.
 * @returns {string}
 */
function escapeMarkdownCell(value) {
  return String(value).split("|").join("\\|");
}
/**
 * Whole days elapsed since a timestamp, clamped to zero for future dates.
 * @param {string} timestamp - ISO-8601 timestamp.
 * @returns {number} Non-negative whole-day age.
 */
function getAgeInDays(timestamp) {
  const DAY_MS = 24 * 60 * 60 * 1000;
  const elapsed = Date.now() - new Date(timestamp).getTime();
  return elapsed <= 0 ? 0 : Math.floor(elapsed / DAY_MS);
}
/**
 * Render an ISO timestamp as its date portion plus a relative age,
 * e.g. "2026-04-28 (3d ago)".
 * @param {string} timestamp - ISO-8601 timestamp.
 * @returns {string}
 */
function formatTimestampWithAge(timestamp) {
  const datePart = timestamp.slice(0, 10);
  const ageDays = getAgeInDays(timestamp);
  return `${datePart} (${ageDays}d ago)`;
}
/**
 * Render a list of normalized PRs as a Markdown table; an empty list yields
 * a single "None" placeholder row.
 * @param {object[]} prs - Normalized pull requests.
 * @returns {string} Markdown table (lines joined with "\n", no trailing newline).
 */
function renderTable(prs) {
  const header = [
    "| PR title + ID | Author | Whether checks pass | Created | Updated | Link to PR |",
    "| --- | --- | --- | --- | --- | --- |"
  ];
  if (prs.length === 0) {
    return [...header, "| None | - | - | - | - | - |"].join("\n");
  }
  const rows = prs.map(
    (pr) =>
      `| ${escapeMarkdownCell(pr.title)} (#${pr.id}) | ${escapeMarkdownCell(pr.author)} | ${getCheckLabel(pr)} | ${formatTimestampWithAge(pr.createdAt)} | ${formatTimestampWithAge(pr.updatedAt)} | [Link](${pr.url}) |`
  );
  return [...header, ...rows].join("\n");
}
/**
 * Assemble the full Markdown report: a header followed by one section
 * (heading + table) per report bucket.
 * @param {string} repo - owner/name of the repository.
 * @param {string} generatedAt - ISO timestamp of report generation.
 * @param {Array<{heading: string, items: object[]}>} categorizedReports
 * @returns {string} Markdown document ending in a newline.
 */
function renderMarkdownReport(repo, generatedAt, categorizedReports) {
  // Trailing two spaces after the repository line force a Markdown line break.
  const parts = [
    "# Open PR report",
    "",
    `**Repository:** \`${repo}\` `,
    `**Generated:** \`${generatedAt}\``
  ];
  categorizedReports.forEach((report) => {
    parts.push("", `## ${report.heading}`, "", renderTable(report.items));
  });
  return parts.join("\n") + "\n";
}
/**
 * Write items to disk as pretty-printed JSON with a trailing newline so the
 * generated file diffs cleanly.
 * @param {string} filePath - Destination path.
 * @param {object[]} items - Serializable report items.
 */
function writeJsonReport(filePath, items) {
  const serialized = JSON.stringify(items, null, 2);
  fs.writeFileSync(filePath, serialized + "\n");
}
/**
 * Entry point: fetch open PRs via the GitHub CLI, bucket them according to
 * REPORT_DEFINITIONS, and write one JSON file per bucket plus a combined
 * Markdown report into the output directory.
 */
function generateOpenPrReport() {
  const options = parseArgs(process.argv.slice(2));
  // Fail fast when the GitHub CLI is not installed.
  ensureCommandAvailable("gh");
  console.log(`Fetching open PRs from ${options.repo}...`);
  // Request only the fields the report needs.
  const pullRequests = runGhJson([
    "pr",
    "list",
    "--repo",
    options.repo,
    "--state",
    "open",
    "--limit",
    String(options.limit),
    "--json",
    "number,title,url,author,baseRefName,changedFiles,createdAt,updatedAt,statusCheckRollup"
  ]);
  const normalizedPullRequests = pullRequests.map(normalizePullRequest);
  // Attach the matching PRs to each report definition.
  const categorizedReports = REPORT_DEFINITIONS.map((report) => ({
    ...report,
    items: normalizedPullRequests.filter(report.predicate)
  }));
  fs.mkdirSync(options.outputDir, { recursive: true });
  // One JSON file per category.
  for (const report of categorizedReports) {
    writeJsonReport(path.join(options.outputDir, report.fileName), report.items);
  }
  const markdownReport = renderMarkdownReport(
    options.repo,
    new Date().toISOString(),
    categorizedReports
  );
  const markdownFilePath = path.join(options.outputDir, "open-pr-report.md");
  fs.writeFileSync(markdownFilePath, markdownReport);
  // List everything that was produced.
  console.log(`Generated reports in ${options.outputDir}:`);
  console.log(" open-pr-report.md");
  for (const report of categorizedReports) {
    console.log(` ${report.fileName}`);
  }
}
generateOpenPrReport();

0
eng/generate-website-data.mjs Normal file → Executable file
View File

View File

@@ -1,137 +0,0 @@
#!/usr/bin/env node
import fs from "fs";
import path from "path";
import { ROOT_FOLDER, SKILLS_DIR } from "./constants.mjs";
import { parseFrontmatter } from "./yaml-parser.mjs";
// Source directory containing the `*.prompt.md` files to migrate.
const PROMPTS_DIR = path.join(ROOT_FOLDER, "prompts");
/**
 * Convert a prompt file to a skill folder
 * @param {string} promptFilePath - Full path to the prompt file
 * @returns {object} Result with success status and details
 */
function convertPromptToSkill(promptFilePath) {
  const filename = path.basename(promptFilePath);
  const baseName = filename.replace(".prompt.md", "");
  console.log(`\nConverting: ${baseName}`);
  // Parse the prompt file frontmatter
  const frontmatter = parseFrontmatter(promptFilePath);
  const content = fs.readFileSync(promptFilePath, "utf8");
  // Extract the content after the leading `--- ... ---` frontmatter block.
  const frontmatterEndMatch = content.match(/^---\n[\s\S]*?\n---\n/);
  const mainContent = frontmatterEndMatch
    ? content.substring(frontmatterEndMatch[0].length).trim()
    : content.trim();
  // Create skill folder; never overwrite an existing skill.
  const skillFolderPath = path.join(SKILLS_DIR, baseName);
  if (fs.existsSync(skillFolderPath)) {
    console.log(` ⚠️ Skill folder already exists: ${baseName}`);
    return { success: false, reason: "already-exists", name: baseName };
  }
  fs.mkdirSync(skillFolderPath, { recursive: true });
  // Build new frontmatter for SKILL.md.
  // FIX: the fallback description previously contained the literal text
  // "$(unknown)" (shell syntax, not JS interpolation); use the actual source
  // file name instead.
  const skillFrontmatter = {
    name: baseName,
    description: frontmatter?.description || `Skill converted from ${filename}`,
  };
  // Build SKILL.md content.
  // FIX: inside a YAML single-quoted scalar a quote is escaped by DOUBLING it
  // (''); the previous tripling ("'''") produced malformed YAML whenever the
  // description contained an apostrophe.
  const skillContent = `---
name: ${skillFrontmatter.name}
description: '${skillFrontmatter.description.replaceAll("'", "''")}'
---
${mainContent}
`;
  // Write SKILL.md
  const skillFilePath = path.join(skillFolderPath, "SKILL.md");
  fs.writeFileSync(skillFilePath, skillContent, "utf8");
  console.log(` ✓ Created skill: ${baseName}`);
  return { success: true, name: baseName, path: skillFolderPath };
}
/**
 * Migrate every prompt file into a skill folder and report the outcome.
 */
function main() {
  const banner = "=".repeat(60);
  console.log(banner);
  console.log("Starting Prompt to Skills Migration");
  console.log(banner);
  // Bail out early when there is nothing to migrate from.
  if (!fs.existsSync(PROMPTS_DIR)) {
    console.error(`Error: Prompts directory not found: ${PROMPTS_DIR}`);
    process.exit(1);
  }
  // Collect the full paths of every prompt file.
  const promptFiles = fs
    .readdirSync(PROMPTS_DIR)
    .filter((entry) => entry.endsWith(".prompt.md"))
    .map((entry) => path.join(PROMPTS_DIR, entry));
  console.log(`Found ${promptFiles.length} prompt files to convert\n`);
  const results = { success: [], alreadyExists: [], failed: [] };
  // Convert each prompt, bucketing the outcome for the summary below.
  for (const promptFile of promptFiles) {
    try {
      const outcome = convertPromptToSkill(promptFile);
      if (outcome.success) {
        results.success.push(outcome.name);
      } else if (outcome.reason === "already-exists") {
        results.alreadyExists.push(outcome.name);
      } else {
        results.failed.push(outcome.name);
      }
    } catch (error) {
      const baseName = path.basename(promptFile, ".prompt.md");
      console.error(` ✗ Error converting ${baseName}: ${error.message}`);
      results.failed.push(baseName);
    }
  }
  // Print summary
  console.log("\n" + banner);
  console.log("Migration Summary");
  console.log(banner);
  console.log(`✓ Successfully converted: ${results.success.length}`);
  console.log(`⚠ Already existed: ${results.alreadyExists.length}`);
  console.log(`✗ Failed: ${results.failed.length}`);
  console.log(`Total processed: ${promptFiles.length}`);
  if (results.failed.length > 0) {
    console.log("\nFailed conversions:");
    for (const name of results.failed) {
      console.log(` - ${name}`);
    }
  }
  if (results.alreadyExists.length > 0) {
    console.log("\nSkipped (already exist):");
    for (const name of results.alreadyExists) {
      console.log(` - ${name}`);
    }
  }
  console.log("\n✅ Migration complete!");
  console.log(
    "\nNext steps:\n" +
    "1. Run 'npm run skill:validate' to validate all new skills\n" +
    "2. Update plugin manifests to reference skills instead of commands\n" +
    "3. Remove prompts directory after testing\n"
  );
}
// Run migration
main();

View File

@@ -1,165 +0,0 @@
#!/usr/bin/env node
import fs from "fs";
import path from "path";
import { PLUGINS_DIR } from "./constants.mjs";
/**
 * Convert commands references to skills references in a plugin.json
 * @param {string} pluginJsonPath - Path to the plugin.json file
 * @returns {object} Result with success status and details; on success,
 *   `count` is the number of commands actually converted to skills.
 */
function updatePluginManifest(pluginJsonPath) {
  // plugin.json lives at <plugin>/.github/plugin/plugin.json, so the plugin
  // folder is three directory levels up.
  const pluginDir = path.dirname(path.dirname(path.dirname(pluginJsonPath)));
  const pluginName = path.basename(pluginDir);
  console.log(`\nProcessing plugin: ${pluginName}`);
  // Read and parse plugin.json
  let plugin;
  try {
    const content = fs.readFileSync(pluginJsonPath, "utf8");
    plugin = JSON.parse(content);
  } catch (error) {
    console.log(` ✗ Error reading/parsing: ${error.message}`);
    return { success: false, name: pluginName, reason: "parse-error" };
  }
  // Check if plugin has commands field
  if (!plugin.commands || !Array.isArray(plugin.commands)) {
    console.log(` No commands field found`);
    return { success: false, name: pluginName, reason: "no-commands" };
  }
  const commandCount = plugin.commands.length;
  console.log(` Found ${commandCount} command(s) to convert`);
  // Validate and convert commands to skills format
  // Commands: "./commands/foo.md" → Skills: "./skills/foo/"
  const validCommands = plugin.commands.filter((cmd) => {
    if (typeof cmd !== "string") {
      console.log(` ⚠ Skipping non-string command entry: ${JSON.stringify(cmd)}`);
      return false;
    }
    if (!cmd.startsWith("./commands/") || !cmd.endsWith(".md")) {
      console.log(` ⚠ Skipping command with unexpected format: ${cmd}`);
      return false;
    }
    return true;
  });
  const skills = validCommands.map((cmd) => {
    const basename = path.basename(cmd, ".md");
    return `./skills/${basename}/`;
  });
  // Initialize skills array if it doesn't exist or is not an array
  if (!Array.isArray(plugin.skills)) {
    plugin.skills = [];
  }
  // Add converted commands to skills array, de-duplicating entries
  const allSkills = new Set(plugin.skills);
  for (const skillPath of skills) {
    allSkills.add(skillPath);
  }
  plugin.skills = Array.from(allSkills);
  // Remove commands field
  delete plugin.commands;
  // FIX: report how many commands were actually converted — the raw command
  // count differs from this whenever malformed entries were skipped above.
  const convertedCount = validCommands.length;
  // Write updated plugin.json
  try {
    fs.writeFileSync(
      pluginJsonPath,
      JSON.stringify(plugin, null, 2) + "\n",
      "utf8"
    );
    console.log(` ✓ Converted ${convertedCount} command(s) to skills`);
    return { success: true, name: pluginName, count: convertedCount };
  } catch (error) {
    console.log(` ✗ Error writing file: ${error.message}`);
    return { success: false, name: pluginName, reason: "write-error" };
  }
}
/**
 * Walk every plugin directory, migrate each manifest from commands to
 * skills, and print a summary of the outcome.
 */
function main() {
  const banner = "=".repeat(60);
  console.log(banner);
  console.log("Updating Plugin Manifests: Commands → Skills");
  console.log(banner);
  // Nothing to do without a plugins directory.
  if (!fs.existsSync(PLUGINS_DIR)) {
    console.error(`Error: Plugins directory not found: ${PLUGINS_DIR}`);
    process.exit(1);
  }
  // Each immediate subdirectory of PLUGINS_DIR is a candidate plugin.
  const pluginDirs = fs
    .readdirSync(PLUGINS_DIR, { withFileTypes: true })
    .filter((entry) => entry.isDirectory())
    .map((entry) => entry.name);
  console.log(`Found ${pluginDirs.length} plugin directory(ies)\n`);
  const results = { updated: [], noCommands: [], failed: [] };
  // Process each plugin, bucketing the outcome for the summary below.
  for (const dirName of pluginDirs) {
    const pluginJsonPath = path.join(
      PLUGINS_DIR,
      dirName,
      ".github/plugin",
      "plugin.json"
    );
    if (!fs.existsSync(pluginJsonPath)) {
      console.log(`\nSkipping ${dirName}: no plugin.json found`);
      continue;
    }
    const outcome = updatePluginManifest(pluginJsonPath);
    if (outcome.success) {
      results.updated.push({ name: outcome.name, count: outcome.count });
    } else if (outcome.reason === "no-commands") {
      results.noCommands.push(outcome.name);
    } else {
      results.failed.push(outcome.name);
    }
  }
  // Print summary
  console.log("\n" + banner);
  console.log("Update Summary");
  console.log(banner);
  console.log(`✓ Updated plugins: ${results.updated.length}`);
  console.log(` No commands field: ${results.noCommands.length}`);
  console.log(`✗ Failed: ${results.failed.length}`);
  console.log(`Total processed: ${pluginDirs.length}`);
  if (results.updated.length > 0) {
    console.log("\nUpdated plugins:");
    for (const { name, count } of results.updated) {
      console.log(` - ${name} (${count} command(s) → skills)`);
    }
  }
  if (results.failed.length > 0) {
    console.log("\nFailed updates:");
    for (const name of results.failed) {
      console.log(` - ${name}`);
    }
  }
  console.log("\n✅ Plugin manifest updates complete!");
  console.log(
    "\nNext steps:\n" +
    "1. Run 'npm run plugin:validate' to validate all updated plugins\n" +
    "2. Test that plugins work correctly\n"
  );
}
// Run the update
main();