diff --git a/.github/workflows/presubmit.yml b/.github/workflows/presubmit.yml index d738d4bff..3c19d316a 100644 --- a/.github/workflows/presubmit.yml +++ b/.github/workflows/presubmit.yml @@ -51,15 +51,15 @@ jobs: - name: Install dependencies run: npm ci - - name: Generate documents - run: npm run docs + - name: Generate + run: npm run gen - - name: Check if autogenerated docs differ + - name: Check if autogenerated code and docs are out of date run: | diff_file=$(mktemp doc_diff_XXXXXX) git diff --color > $diff_file if [[ -s $diff_file ]]; then - echo "Please update the documentation by running 'npm run generate-docs'. The following was the diff" + echo "Please update the generated code and documentation by running 'npm run gen'. The following was the diff" cat $diff_file rm $diff_file exit 1 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9f64034ca..6919b2a58 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -86,7 +86,7 @@ You can use the `DEBUG` environment variable as usual to control categories that ### Updating documentation -When adding a new tool or updating a tool name or description, make sure to run `npm run docs` to generate the tool reference documentation. +When adding a new tool or updating a tool name or description, make sure to run `npm run gen` to regenerate the CLI definitions and the tool reference documentation. 
### Contributing to Evals diff --git a/docs/slim-tool-reference.md b/docs/slim-tool-reference.md index c417ef10c..b149cc0a5 100644 --- a/docs/slim-tool-reference.md +++ b/docs/slim-tool-reference.md @@ -1,4 +1,4 @@ - + # Chrome DevTools MCP Slim Tool Reference (~359 cl100k_base tokens) diff --git a/docs/tool-reference.md b/docs/tool-reference.md index 219b965c7..50b4c02c5 100644 --- a/docs/tool-reference.md +++ b/docs/tool-reference.md @@ -1,4 +1,4 @@ - + # Chrome DevTools MCP Tool Reference (~6940 cl100k_base tokens) diff --git a/package.json b/package.json index 46c73bbf6..00c24ce46 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,7 @@ "typecheck": "tsc --noEmit", "format": "eslint --cache --fix . && prettier --write --cache .", "check-format": "eslint --cache . && prettier --check --cache .;", - "docs": "npm run build && npm run docs:generate && npm run format", + "gen": "npm run build && npm run docs:generate && npm run cli:generate && npm run format", "docs:generate": "node --experimental-strip-types scripts/generate-docs.ts", "start": "npm run build && node build/src/index.js", "start-debug": "DEBUG=mcp:* DEBUG_COLORS=false npm run build && node build/src/index.js", diff --git a/scripts/generate-docs.ts b/scripts/generate-docs.ts index de67fcab5..c2e5da7ca 100644 --- a/scripts/generate-docs.ts +++ b/scripts/generate-docs.ts @@ -323,7 +323,7 @@ async function generateReference( console.log(`Found ${toolsWithAnnotations.length} tools`); // Generate markdown documentation - let markdown = ` + let markdown = ` # ${title} (~${(await measureServer(serverArgs)).tokenCount} cl100k_base tokens) diff --git a/src/bin/cliDefinitions.ts b/src/bin/cliDefinitions.ts index 82783bbde..6e317a333 100644 --- a/src/bin/cliDefinitions.ts +++ b/src/bin/cliDefinitions.ts @@ -287,7 +287,7 @@ export const commands: Commands = { }, lighthouse_audit: { description: - 'Get Lighthouse score and reports for accessibility, SEO and best practices.', + 'Get Lighthouse score and 
reports for accessibility, SEO and best practices. This excludes performance. For performance audits, run performance_start_trace', category: 'Debugging', args: { mode: { @@ -393,7 +393,8 @@ export const commands: Commands = { args: {}, }, navigate_page: { - description: 'Navigates the currently selected page to a URL.', + description: + 'Go to a URL, or back, forward, or reload. Use project URL if not specified otherwise.', category: 'Navigation automation', args: { type: { @@ -441,7 +442,8 @@ export const commands: Commands = { }, }, new_page: { - description: 'Creates a new page', + description: + 'Open a new tab and load a URL. Use project URL if not specified otherwise.', category: 'Navigation automation', args: { url: { @@ -496,7 +498,7 @@ export const commands: Commands = { }, performance_start_trace: { description: - 'Starts a performance trace recording on the selected page. This can be used to look for performance problems and insights to improve the performance of the page. It will also report Core Web Vital (CWV) scores for the page.', + 'Start a performance trace on the selected webpage. Use to find frontend performance issues, Core Web Vitals (LCP, INP, CLS), and improve page load speed.', category: 'Performance', args: { reload: { @@ -504,14 +506,16 @@ export const commands: Commands = { type: 'boolean', description: 'Determines if, once tracing has started, the current selected page should be automatically reloaded. 
Navigate the page to the right URL using the navigate_page tool BEFORE starting the trace if reload or autoStop is set to true.', - required: true, + required: false, + default: true, }, autoStop: { name: 'autoStop', type: 'boolean', description: 'Determines if the trace recording should be automatically stopped.', - required: true, + required: false, + default: true, }, filePath: { name: 'filePath', @@ -524,7 +528,7 @@ export const commands: Commands = { }, performance_stop_trace: { description: - 'Stops the active performance trace recording on the selected page.', + 'Stop the active performance trace recording on the selected webpage.', category: 'Performance', args: { filePath: {