From df04d106df25ae2861fc9212b9a0c04cad7e4fe0 Mon Sep 17 00:00:00 2001 From: Sarah Schneider Date: Thu, 30 Oct 2025 11:41:32 -0400 Subject: [PATCH 1/9] Batch linter updates (#58270) Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- .../contributing/content-linter-rules.md | 12 - src/content-linter/lib/helpers/rule-utils.ts | 22 + .../lib/helpers/should-include-result.ts | 67 -- .../linting-rules/british-english-quotes.ts | 106 ---- .../linting-rules/code-fence-line-length.ts | 34 -- .../linting-rules/frontmatter-validation.ts | 214 ------- src/content-linter/lib/linting-rules/index.ts | 12 - .../list-first-word-capitalization.ts | 70 --- .../multiple-emphasis-patterns.ts | 100 --- .../linting-rules/note-warning-formatting.ts | 236 ------- .../outdated-release-phase-terminology.ts | 27 + .../third-party-actions-reusable.ts | 38 +- src/content-linter/scripts/lint-content.ts | 30 +- src/content-linter/scripts/lint-report.ts | 39 +- src/content-linter/style/base.ts | 39 -- src/content-linter/style/github-docs.ts | 65 -- .../tests/unit/british-english-quotes.ts | 225 ------- .../tests/unit/code-fence-line-length.ts | 50 -- .../tests/unit/frontmatter-validation.ts | 576 ------------------ .../tests/unit/lint-report-exclusions.ts | 258 ++++---- .../unit/list-first-word-captitalization.ts | 100 --- .../tests/unit/multiple-emphasis-patterns.ts | 231 ------- .../tests/unit/note-warning-formatting.ts | 324 ---------- .../tests/unit/rule-filtering.ts | 8 +- .../scripts/render-content-markdown.ts | 4 - 25 files changed, 248 insertions(+), 2639 deletions(-) create mode 100644 src/content-linter/lib/helpers/rule-utils.ts delete mode 100644 src/content-linter/lib/helpers/should-include-result.ts delete mode 100644 src/content-linter/lib/linting-rules/british-english-quotes.ts delete mode 100644 src/content-linter/lib/linting-rules/code-fence-line-length.ts delete mode 100644 src/content-linter/lib/linting-rules/frontmatter-validation.ts delete mode 100644 src/content-linter/lib/linting-rules/list-first-word-capitalization.ts delete mode 100644 src/content-linter/lib/linting-rules/multiple-emphasis-patterns.ts delete mode 100644 src/content-linter/lib/linting-rules/note-warning-formatting.ts delete mode 100644 src/content-linter/tests/unit/british-english-quotes.ts delete mode 100644 src/content-linter/tests/unit/code-fence-line-length.ts delete mode 100644 src/content-linter/tests/unit/frontmatter-validation.ts delete mode 100644 src/content-linter/tests/unit/list-first-word-captitalization.ts delete mode 100644 src/content-linter/tests/unit/multiple-emphasis-patterns.ts delete mode 100644 src/content-linter/tests/unit/note-warning-formatting.ts diff --git a/data/reusables/contributing/content-linter-rules.md b/data/reusables/contributing/content-linter-rules.md index 878adcbe4ea1..144e24c7f19a 100644 --- a/data/reusables/contributing/content-linter-rules.md +++ b/data/reusables/contributing/content-linter-rules.md @@ -3,14 +3,10 @@ | Rule ID | Rule Name(s) | Description | Severity | Tags | | ------- | ------------ | ----------- | -------- | ---- | | [MD001](https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md001.md) | heading-increment | Heading levels should only increment by one level at a time | error | headings | -| [MD004](https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md004.md) | ul-style | Unordered list style | error | bullet, ul | -| [MD009](https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md009.md) | no-trailing-spaces | Trailing spaces | 
error | whitespace | | [MD011](https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md011.md) | no-reversed-links | Reversed link syntax | error | links | -| [MD012](https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md012.md) | no-multiple-blanks | Multiple consecutive blank lines | error | whitespace, blank_lines | | [MD014](https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md014.md) | commands-show-output | Dollar signs used before commands without showing output | error | code | | [MD018](https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md018.md) | no-missing-space-atx | No space after hash on atx style heading | error | headings, atx, spaces | | [MD019](https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md019.md) | no-multiple-space-atx | Multiple spaces after hash on atx style heading | error | headings, atx, spaces | -| [MD022](https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md022.md) | blanks-around-headings | Headings should be surrounded by blank lines | error | headings, blank_lines | | [MD023](https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md023.md) | heading-start-left | Headings must start at the beginning of the line | error | headings, spaces | | [MD027](https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md027.md) | no-multiple-space-blockquote | Multiple spaces after blockquote symbol | error | blockquote, whitespace, indentation | | [MD029](https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md029.md) | ol-prefix | Ordered list item prefix | error | ol | @@ -20,8 +16,6 @@ | [MD039](https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md039.md) | no-space-in-links | Spaces inside link text | error | whitespace, links | | [MD040](https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md040.md) | fenced-code-language | Fenced code blocks should have a language specified | error | code, language | | [MD042](https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md042.md) | no-empty-links | No empty links | error | links | -| [MD047](https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md047.md) | single-trailing-newline | Files should end with a single newline character | error | blank_lines | -| [MD049](https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md049.md) | emphasis-style | Emphasis style | error | emphasis | | [MD050](https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md050.md) | strong-style | Strong style | error | emphasis | | [GH001](https://github.com/github/markdownlint-github/blob/main/docs/rules/GH001-no-default-alt-text.md) | no-default-alt-text | Images should have meaningful alternative text (alt text) | error | accessibility, images | | [GH002](https://github.com/github/markdownlint-github/blob/main/docs/rules/GH002-no-generic-link-text.md) | no-generic-link-text | Avoid using generic link text like `Learn more` or `Click here` | error | accessibility, links | @@ -47,11 +41,9 @@ | GHD020 | liquid-ifversion-tags | Liquid `ifversion` tags should contain valid version names as arguments | error | liquid, versioning | | GHD021 | yaml-scheduled-jobs | YAML snippets that include scheduled workflows must not run on the hour and must be unique | error | feature, actions | | GHD022 | liquid-ifversion-versions | Liquid `ifversion`, `elsif`, and `else` tags should be valid and not contain unsupported versions. 
| error | liquid, versioning | -| GHD030 | code-fence-line-length | Code fence lines should not exceed a maximum length | warning | code, accessibility | | GHD031 | image-alt-text-exclude-words | Alternate text for images should not begin with words like "image" or "graphic" | error | accessibility, images | | GHD032 | image-alt-text-end-punctuation | Alternate text for images should end with punctuation | error | accessibility, images | | GHD033 | incorrect-alt-text-length | Images alternate text should be between 40-150 characters | warning | accessibility, images | -| GHD034 | list-first-word-capitalization | First word of list item should be capitalized | warning | ul, ol | | GHD035 | rai-reusable-usage | RAI articles and reusables can only reference reusable content in the data/reusables/rai directory | error | feature, rai | | GHD036 | image-no-gif | Image must not be a gif, styleguide reference: contributing/style-guide-and-content-model/style-guide.md#images | error | images | | GHD038 | expired-content | Expired content must be remediated. | warning | expired | @@ -64,13 +56,9 @@ | GHD045 | code-annotation-comment-spacing | Code comments in annotation blocks must have exactly one space after the comment character(s) | warning | code, comments, annotate, spacing | | GHD046 | outdated-release-phase-terminology | Outdated release phase terminology should be replaced with current GitHub terminology | warning | terminology, consistency, release-phases | | GHD047 | table-column-integrity | Tables must have consistent column counts across all rows | warning | tables, accessibility, formatting | -| GHD048 | british-english-quotes | Periods and commas should be placed inside quotation marks (American English style) | warning | punctuation, quotes, style, consistency | -| GHD049 | note-warning-formatting | Note and warning tags should be formatted according to style guide | warning | formatting, callouts, notes, warnings, style | -| GHD050 | multiple-emphasis-patterns | Do not use more than one emphasis/strong, italics, or uppercase for a string | warning | formatting, emphasis, style | | GHD051 | frontmatter-versions-whitespace | Versions frontmatter should not contain unnecessary whitespace | warning | frontmatter, versions | | GHD053 | header-content-requirement | Headers must have content between them, such as an introduction | warning | headers, structure, content | | GHD054 | third-party-actions-reusable | Code examples with third-party actions must include disclaimer reusable | warning | actions, reusable, third-party | -| GHD055 | frontmatter-validation | Frontmatter properties must meet character limits and required property requirements | warning | frontmatter, character-limits, required-properties | | GHD056 | frontmatter-landing-recommended | Only landing pages can have recommended articles, there should be no duplicate recommended articles, and all recommended articles must exist | error | frontmatter, landing, recommended | | GHD057 | ctas-schema | CTA URLs must conform to the schema | error | ctas, schema, urls | | GHD058 | journey-tracks-liquid | Journey track properties must use valid Liquid syntax | error | frontmatter, journey-tracks, liquid | diff --git a/src/content-linter/lib/helpers/rule-utils.ts b/src/content-linter/lib/helpers/rule-utils.ts new file mode 100644 index 000000000000..99d42c902d29 --- /dev/null +++ b/src/content-linter/lib/helpers/rule-utils.ts @@ -0,0 +1,22 @@ +interface LintFlaw { + severity: string + ruleNames: string[] + errorDetail?: string +} + 
+/** + * Gets all rule names from a flaw, including sub-rules from search-replace errors + */ +export function getAllRuleNames(flaw: LintFlaw): string[] { + const ruleNames = [...flaw.ruleNames] + + // Extract sub-rule name from search-replace error details + if (flaw.ruleNames.includes('search-replace') && flaw.errorDetail) { + const match = flaw.errorDetail.match(/^([^:]+):/) + if (match) { + ruleNames.push(match[1]) + } + } + + return ruleNames +} diff --git a/src/content-linter/lib/helpers/should-include-result.ts b/src/content-linter/lib/helpers/should-include-result.ts deleted file mode 100644 index afa77e5afd44..000000000000 --- a/src/content-linter/lib/helpers/should-include-result.ts +++ /dev/null @@ -1,67 +0,0 @@ -import nodePath from 'path' -import { reportingConfig } from '@/content-linter/style/github-docs' - -interface LintFlaw { - severity: string - ruleNames: string[] - errorDetail?: string -} - -/** - * Determines if a lint result should be included based on reporting configuration - * - * @param flaw - The lint flaw object containing rule names, severity, etc. - * @param filePath - The path of the file being linted - * @returns true if the flaw should be included, false if it should be excluded - */ -export function shouldIncludeResult(flaw: LintFlaw, filePath: string): boolean { - if (!flaw.ruleNames || !Array.isArray(flaw.ruleNames)) { - return true - } - - // Extract all possible rule names including sub-rules from search-replace - const allRuleNames = [...flaw.ruleNames] - - // For search-replace rules, extract the sub-rule name from errorDetail - if (flaw.ruleNames.includes('search-replace') && flaw.errorDetail) { - const match = flaw.errorDetail.match(/^([^:]+):/) - if (match) { - allRuleNames.push(match[1]) - } - } - - // Check if any rule name is in the exclude list - const hasExcludedRule = allRuleNames.some((ruleName: string) => - reportingConfig.excludeRules.includes(ruleName), - ) - if (hasExcludedRule) { - return false - } - - // Check if this specific file should be excluded for any of the rules - for (const ruleName of allRuleNames) { - const excludedFiles = - reportingConfig.excludeFilesFromRules?.[ - ruleName as keyof typeof reportingConfig.excludeFilesFromRules - ] - if ( - excludedFiles && - excludedFiles.some((excludedPath: string) => { - // Normalize paths for comparison - const normalizedFilePath = nodePath.normalize(filePath) - const normalizedExcludedPath = nodePath.normalize(excludedPath) - return ( - normalizedFilePath === normalizedExcludedPath || - normalizedFilePath.endsWith(normalizedExcludedPath) - ) - }) - ) { - return false - } - } - - // Default to true - include everything unless explicitly excluded - // This function only handles exclusions; reporting-specific inclusion logic - // (like severity/rule filtering) is handled separately in lint-report.ts - return true -} diff --git a/src/content-linter/lib/linting-rules/british-english-quotes.ts b/src/content-linter/lib/linting-rules/british-english-quotes.ts deleted file mode 100644 index eaa8cf28c7b0..000000000000 --- a/src/content-linter/lib/linting-rules/british-english-quotes.ts +++ /dev/null @@ -1,106 +0,0 @@ -// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations -import { addError } from 'markdownlint-rule-helpers' -import { getRange } from '../helpers/utils' -import frontmatter from '@/frame/lib/read-frontmatter' - -import type { RuleParams, RuleErrorCallback } from '@/content-linter/types' - -export const britishEnglishQuotes = { - names: ['GHD048', 
'british-english-quotes'], - description: - 'Periods and commas should be placed inside quotation marks (American English style)', - tags: ['punctuation', 'quotes', 'style', 'consistency'], - severity: 'warning', // Non-blocking as requested in the issue - function: (params: RuleParams, onError: RuleErrorCallback) => { - // Skip autogenerated files - const frontmatterString = params.frontMatterLines.join('\n') - const fm = frontmatter(frontmatterString).data - if (fm && fm.autogenerated) return - - // Check each line for British English quote patterns - for (let i = 0; i < params.lines.length; i++) { - const line = params.lines[i] - const lineNumber = i + 1 - - // Skip code blocks, code spans, and URLs - if (isInCodeContext(line, params.lines, i)) { - continue - } - - // Find British English quote patterns and report them - findAndReportBritishQuotes(line, lineNumber, onError) - } - }, -} - -/** - * Check if the current position is within a code context (code blocks, inline code, URLs) - */ -function isInCodeContext(line: string, allLines: string[], lineIndex: number): boolean { - // Skip if line contains code fences - if (line.includes('```') || line.includes('~~~')) { - return true - } - - // Check if we're inside a code block - let inCodeBlock = false - for (let i = 0; i < lineIndex; i++) { - if (allLines[i].includes('```') || allLines[i].includes('~~~')) { - inCodeBlock = !inCodeBlock - } - } - if (inCodeBlock) { - return true - } - - // Skip if line appears to be mostly code (has multiple backticks) - const backtickCount = (line.match(/`/g) || []).length - if (backtickCount >= 4) { - return true - } - - // Skip URLs and email addresses - if (line.includes('http://') || line.includes('https://') || line.includes('mailto:')) { - return true - } - - return false -} - -/** - * Find and report British English quote patterns in a line - */ -function findAndReportBritishQuotes( - line: string, - lineNumber: number, - onError: RuleErrorCallback, -): void { - // Pattern to find quote followed by punctuation outside - // Matches: "text". or 'text', or "text", etc. - const britishPattern = /(["'])([^"']*?)\1\s*([.,])/g - - let match: RegExpMatchArray | null - while ((match = britishPattern.exec(line)) !== null) { - const quoteChar = match[1] - const quotedText = match[2] - const punctuation = match[3] - const fullMatch = match[0] - const startIndex = match.index ?? 0 - - // Create the corrected version (punctuation inside quotes) - const correctedText = quoteChar + quotedText + punctuation + quoteChar - - const range = getRange(line, fullMatch) - const punctuationName = punctuation === '.' ? 
'period' : 'comma' - const errorMessage = `Use American English punctuation: place ${punctuationName} inside the quotation marks` - - // Provide auto-fix - const fixInfo = { - editColumn: startIndex + 1, - deleteCount: fullMatch.length, - insertText: correctedText, - } - - addError(onError, lineNumber, errorMessage, line, range, fixInfo) - } -} diff --git a/src/content-linter/lib/linting-rules/code-fence-line-length.ts b/src/content-linter/lib/linting-rules/code-fence-line-length.ts deleted file mode 100644 index 16b3ef9c2323..000000000000 --- a/src/content-linter/lib/linting-rules/code-fence-line-length.ts +++ /dev/null @@ -1,34 +0,0 @@ -// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations -import { addError, filterTokens, newLineRe } from 'markdownlint-rule-helpers' - -import type { RuleParams, RuleErrorCallback, MarkdownToken, Rule } from '@/content-linter/types' - -export const codeFenceLineLength: Rule = { - names: ['GHD030', 'code-fence-line-length'], - description: 'Code fence lines should not exceed a maximum length', - tags: ['code', 'accessibility'], - parser: 'markdownit', - function: (params: RuleParams, onError: RuleErrorCallback) => { - const MAX_LINE_LENGTH: number = params.config?.maxLength || 60 - filterTokens(params, 'fence', (token: MarkdownToken) => { - if (!token.content) return - const lines: string[] = token.content.split(newLineRe) - lines.forEach((line: string, index: number) => { - if (line.length > MAX_LINE_LENGTH) { - // The token line number is the line number of the first line of the - // code fence. We want to report the line number of the content within - // the code fence so we need to add 1 + the index. - const lineNumber: number = token.lineNumber + index + 1 - addError( - onError, - lineNumber, - `Code fence line exceeds ${MAX_LINE_LENGTH} characters.`, - line, - [1, line.length], - null, // No fix possible - ) - } - }) - }) - }, -} diff --git a/src/content-linter/lib/linting-rules/frontmatter-validation.ts b/src/content-linter/lib/linting-rules/frontmatter-validation.ts deleted file mode 100644 index ff91c4b24605..000000000000 --- a/src/content-linter/lib/linting-rules/frontmatter-validation.ts +++ /dev/null @@ -1,214 +0,0 @@ -// @ts-ignore - no types available for markdownlint-rule-helpers -import { addError } from 'markdownlint-rule-helpers' -import { getFrontmatter } from '@/content-linter/lib/helpers/utils' - -import type { RuleParams, RuleErrorCallback } from '@/content-linter/types' - -interface PropertyLimits { - max: number - recommended: number - required?: boolean -} - -interface ContentRules { - title: PropertyLimits - shortTitle: PropertyLimits - intro: PropertyLimits - requiredProperties: string[] -} - -type ContentType = 'category' | 'mapTopic' | 'article' | null - -// Strip liquid tags from text for character counting purposes -function stripLiquidTags(text: unknown): string { - if (typeof text !== 'string') return text as string - // Remove both {% %} and {{ }} liquid tags - return text.replace(/\{%.*?%\}/g, '').replace(/\{\{.*?\}\}/g, '') -} - -export const frontmatterValidation = { - names: ['GHD055', 'frontmatter-validation'], - description: - 'Frontmatter properties must meet character limits and required property requirements', - tags: ['frontmatter', 'character-limits', 'required-properties'], - function: (params: RuleParams, onError: RuleErrorCallback) => { - const fm = getFrontmatter(params.lines as string[]) - if (!fm) return - - // Detect content type based on frontmatter properties and file path - 
const contentType = detectContentType(fm, params.name) - - // Define character limits and requirements for different content types - const contentRules: Record = { - category: { - title: { max: 70, recommended: 67 }, - shortTitle: { max: 30, recommended: 27 }, - intro: { required: true, recommended: 280, max: 362 }, - requiredProperties: ['intro'], - }, - mapTopic: { - title: { max: 70, recommended: 63 }, - shortTitle: { max: 35, recommended: 30 }, - intro: { required: true, recommended: 280, max: 362 }, - requiredProperties: ['intro'], - }, - article: { - title: { max: 80, recommended: 60 }, - shortTitle: { max: 30, recommended: 25 }, - intro: { required: false, recommended: 251, max: 354 }, - requiredProperties: ['topics'], - }, - } - - const rules = contentType ? contentRules[contentType] : null - if (!rules) return - - // Check required properties - for (const property of rules.requiredProperties) { - if (!fm[property]) { - addError( - onError, - 1, - `Missing required property '${property}' for ${contentType} content type`, - null, - null, - null, - ) - } - } - - // Check title length - if (fm.title) { - validatePropertyLength( - onError, - params.lines as string[], - 'title', - fm.title, - rules.title, - 'Title', - ) - } - - // Check shortTitle length - if (fm.shortTitle) { - validatePropertyLength( - onError, - params.lines as string[], - 'shortTitle', - fm.shortTitle, - rules.shortTitle, - 'ShortTitle', - ) - } - - // Check intro length if it exists - if (fm.intro && rules.intro) { - validatePropertyLength( - onError, - params.lines as string[], - 'intro', - fm.intro, - rules.intro, - 'Intro', - ) - } - - // Cross-property validation: if title is longer than shortTitle limit, shortTitle must exist - const strippedTitle = stripLiquidTags(fm.title) - if (fm.title && (strippedTitle as string).length > rules.shortTitle.max && !fm.shortTitle) { - const titleLine = findPropertyLine(params.lines as string[], 'title') - addError( - onError, - titleLine, - `Title is ${(strippedTitle as string).length} characters, which exceeds the shortTitle limit of ${rules.shortTitle.max} characters. 
A shortTitle must be provided.`, - fm.title, - null, - null, - ) - } - - // Special validation for articles: should have at least one topic - if (contentType === 'article' && fm.topics) { - if (!Array.isArray(fm.topics)) { - const topicsLine = findPropertyLine(params.lines as string[], 'topics') - addError(onError, topicsLine, 'Topics must be an array', String(fm.topics), null, null) - } else if (fm.topics.length === 0) { - const topicsLine = findPropertyLine(params.lines as string[], 'topics') - addError( - onError, - topicsLine, - 'Articles should have at least one topic', - 'topics: []', - null, - null, - ) - } - } - }, -} - -function validatePropertyLength( - onError: RuleErrorCallback, - lines: string[], - propertyName: string, - propertyValue: string, - limits: PropertyLimits, - displayName: string, -): void { - const strippedValue = stripLiquidTags(propertyValue) - const propertyLength = (strippedValue as string).length - const propertyLine = findPropertyLine(lines, propertyName) - - // Only report the most severe error - maximum takes precedence over recommended - if (propertyLength > limits.max) { - addError( - onError, - propertyLine, - `${displayName} exceeds maximum length of ${limits.max} characters (current: ${propertyLength})`, - propertyValue, - null, - null, - ) - } else if (propertyLength > limits.recommended) { - addError( - onError, - propertyLine, - `${displayName} exceeds recommended length of ${limits.recommended} characters (current: ${propertyLength})`, - propertyValue, - null, - null, - ) - } -} - -// frontmatter object structure varies based on YAML content, using any for flexibility -function detectContentType(frontmatter: any, filePath: string): ContentType { - // Only apply validation to markdown files - if (!filePath || !filePath.endsWith('.md')) { - return null - } - - // Map topics have mapTopic: true - if (frontmatter.mapTopic === true) { - return 'mapTopic' - } - - // Categories are index.md files that contain children but no mapTopic - // Only check files that look like they're in the content directory structure - if ( - filePath.includes('/index.md') && - frontmatter.children && - Array.isArray(frontmatter.children) && - !frontmatter.mapTopic - ) { - return 'category' - } - - // Everything else is an article - return 'article' -} - -function findPropertyLine(lines: string[], property: string): number { - const line = lines.find((line) => line.trim().startsWith(`${property}:`)) - return line ? 
lines.indexOf(line) + 1 : 1 -} diff --git a/src/content-linter/lib/linting-rules/index.ts b/src/content-linter/lib/linting-rules/index.ts index 9f115734384b..a868ed9c6827 100644 --- a/src/content-linter/lib/linting-rules/index.ts +++ b/src/content-linter/lib/linting-rules/index.ts @@ -3,14 +3,12 @@ import searchReplace from 'markdownlint-rule-search-replace' // @ts-ignore - @github/markdownlint-github doesn't provide TypeScript declarations import markdownlintGitHub from '@github/markdownlint-github' -import { codeFenceLineLength } from '@/content-linter/lib/linting-rules/code-fence-line-length' import { imageAltTextEndPunctuation } from '@/content-linter/lib/linting-rules/image-alt-text-end-punctuation' import { imageFileKebabCase } from '@/content-linter/lib/linting-rules/image-file-kebab-case' import { incorrectAltTextLength } from '@/content-linter/lib/linting-rules/image-alt-text-length' import { internalLinksNoLang } from '@/content-linter/lib/linting-rules/internal-links-no-lang' import { internalLinksSlash } from '@/content-linter/lib/linting-rules/internal-links-slash' import { imageAltTextExcludeStartWords } from '@/content-linter/lib/linting-rules/image-alt-text-exclude-start-words' -import { listFirstWordCapitalization } from '@/content-linter/lib/linting-rules/list-first-word-capitalization' import { linkPunctuation } from '@/content-linter/lib/linting-rules/link-punctuation' import { earlyAccessReferences, @@ -49,11 +47,7 @@ import { linkQuotation } from '@/content-linter/lib/linting-rules/link-quotation import { octiconAriaLabels } from '@/content-linter/lib/linting-rules/octicon-aria-labels' import { liquidIfversionVersions } from '@/content-linter/lib/linting-rules/liquid-ifversion-versions' import { outdatedReleasePhaseTerminology } from '@/content-linter/lib/linting-rules/outdated-release-phase-terminology' -import { britishEnglishQuotes } from '@/content-linter/lib/linting-rules/british-english-quotes' -import { multipleEmphasisPatterns } from '@/content-linter/lib/linting-rules/multiple-emphasis-patterns' -import { noteWarningFormatting } from '@/content-linter/lib/linting-rules/note-warning-formatting' import { frontmatterVersionsWhitespace } from '@/content-linter/lib/linting-rules/frontmatter-versions-whitespace' -import { frontmatterValidation } from '@/content-linter/lib/linting-rules/frontmatter-validation' import { headerContentRequirement } from '@/content-linter/lib/linting-rules/header-content-requirement' import { thirdPartyActionsReusable } from '@/content-linter/lib/linting-rules/third-party-actions-reusable' import { frontmatterLandingRecommended } from '@/content-linter/lib/linting-rules/frontmatter-landing-recommended' @@ -103,11 +97,9 @@ export const gitHubDocsMarkdownlint = { liquidIfVersionTags, // GHD020 yamlScheduledJobs, // GHD021 liquidIfversionVersions, // GHD022 - codeFenceLineLength, // GHD030 imageAltTextExcludeStartWords, // GHD031 imageAltTextEndPunctuation, // GHD032 incorrectAltTextLength, // GHD033 - listFirstWordCapitalization, // GHD034 raiReusableUsage, // GHD035 imageNoGif, // GHD036 expiredContent, // GHD038 @@ -120,13 +112,9 @@ export const gitHubDocsMarkdownlint = { codeAnnotationCommentSpacing, // GHD045 outdatedReleasePhaseTerminology, // GHD046 tableColumnIntegrity, // GHD047 - britishEnglishQuotes, // GHD048 - noteWarningFormatting, // GHD049 - multipleEmphasisPatterns, // GHD050 frontmatterVersionsWhitespace, // GHD051 headerContentRequirement, // GHD053 thirdPartyActionsReusable, // GHD054 - frontmatterValidation, // GHD055 
frontmatterLandingRecommended, // GHD056 ctasSchema, // GHD057 journeyTracksLiquid, // GHD058 diff --git a/src/content-linter/lib/linting-rules/list-first-word-capitalization.ts b/src/content-linter/lib/linting-rules/list-first-word-capitalization.ts deleted file mode 100644 index 128f1e4f77ca..000000000000 --- a/src/content-linter/lib/linting-rules/list-first-word-capitalization.ts +++ /dev/null @@ -1,70 +0,0 @@ -import { addFixErrorDetail, getRange, filterTokensByOrder } from '../helpers/utils' -import type { RuleParams, RuleErrorCallback, MarkdownToken, Rule } from '../../types' - -export const listFirstWordCapitalization: Rule = { - names: ['GHD034', 'list-first-word-capitalization'], - description: 'First word of list item should be capitalized', - tags: ['ul', 'ol'], - function: (params: RuleParams, onError: RuleErrorCallback) => { - // Skip site-policy directory as these are legal documents with specific formatting requirements - if (params.name && params.name.includes('content/site-policy/')) return - - // We're going to look for a sequence of 3 tokens. If the markdown - // is a really small string, it might not even have that many tokens - // in it. Can bail early. - if (!params.tokens || params.tokens.length < 3) return - - const inlineListItems = filterTokensByOrder(params.tokens, [ - 'list_item_open', - 'paragraph_open', - 'inline', - ]).filter((token: MarkdownToken) => token.type === 'inline') - - inlineListItems.forEach((token: MarkdownToken) => { - // Only proceed if all of the token's children start with a text - // node that is not empty. - // This filters out cases where the list item is inline code, or - // a link, or an image, etc. - // This also avoids cases like `- **bold** text` where the first - // child is a text node string but the text node content is empty. - const firstWordTextNode = - token.children && - token.children.length > 0 && - token.children[0].type === 'text' && - token.children[0].content !== '' - if (!firstWordTextNode) return - - const content = (token.content || '').trim() - const firstWord = content.trim().split(' ')[0] - - // If the first character in the first word is not an alphanumeric, - // don't bother. For example `"ubunut-latest"` or `{% data ... %}`. 
- if (/^[^a-z]/i.test(firstWord)) return - // If the first letter is capitalized, it's not an error - // And any special characters (like @) that can't be capitalized - if (/[A-Z@]/.test(firstWord[0])) return - // There are items that start with a number or words that contain numbers - // e.g., x64 - if (/\d/.test(firstWord)) return - // Catches proper nouns like macOS or openSUSE - if (/[A-Z]/.test(firstWord.slice(1))) return - - const lineNumber = token.lineNumber - const range = getRange(token.line, firstWord) - if (!range) return - addFixErrorDetail( - onError, - lineNumber, - `${firstWord[0].toUpperCase()}${firstWord.slice(1)}`, - firstWord, - range, - { - lineNumber, - editColumn: range[0], - deleteCount: 1, - insertText: firstWord[0].toUpperCase(), - }, - ) - }) - }, -} diff --git a/src/content-linter/lib/linting-rules/multiple-emphasis-patterns.ts b/src/content-linter/lib/linting-rules/multiple-emphasis-patterns.ts deleted file mode 100644 index 3075ce51ff8d..000000000000 --- a/src/content-linter/lib/linting-rules/multiple-emphasis-patterns.ts +++ /dev/null @@ -1,100 +0,0 @@ -// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations -import { addError } from 'markdownlint-rule-helpers' -import { getRange } from '../helpers/utils' -import frontmatter from '@/frame/lib/read-frontmatter' -import type { RuleParams, RuleErrorCallback, Rule } from '@/content-linter/types' - -interface Frontmatter { - autogenerated?: boolean - [key: string]: any -} - -export const multipleEmphasisPatterns: Rule = { - names: ['GHD050', 'multiple-emphasis-patterns'], - description: 'Do not use more than one emphasis/strong, italics, or uppercase for a string', - tags: ['formatting', 'emphasis', 'style'], - severity: 'warning', - function: (params: RuleParams, onError: RuleErrorCallback) => { - // Skip autogenerated files - const frontmatterString = params.frontMatterLines.join('\n') - const fm = frontmatter(frontmatterString).data as Frontmatter - if (fm && fm.autogenerated) return - - const lines = params.lines - let inCodeBlock = false - - for (let i = 0; i < lines.length; i++) { - const line = lines[i] - const lineNumber = i + 1 - - // Track code block state - if (line.trim().startsWith('```')) { - inCodeBlock = !inCodeBlock - continue - } - - // Skip code blocks and indented code - if (inCodeBlock || line.trim().startsWith(' ')) continue - - // Check for multiple emphasis patterns - checkMultipleEmphasis(line, lineNumber, onError) - } - }, -} - -/** - * Check for multiple emphasis types in a single text segment - */ -function checkMultipleEmphasis(line: string, lineNumber: number, onError: RuleErrorCallback): void { - // Focus on the clearest violations of the style guide - const multipleEmphasisPatterns: Array<{ regex: RegExp; types: string[] }> = [ - // Bold + italic combinations (***text***) - { regex: /\*\*\*([^*]+)\*\*\*/g, types: ['bold', 'italic'] }, - { regex: /___([^_]+)___/g, types: ['bold', 'italic'] }, - - // Bold with code nested inside - { regex: /\*\*([^*]*`[^`]+`[^*]*)\*\*/g, types: ['bold', 'code'] }, - { regex: /__([^_]*`[^`]+`[^_]*)__/g, types: ['bold', 'code'] }, - - // Code with bold nested inside - { regex: /`([^`]*\*\*[^*]+\*\*[^`]*)`/g, types: ['code', 'bold'] }, - { regex: /`([^`]*__[^_]+__[^`]*)`/g, types: ['code', 'bold'] }, - ] - - for (const pattern of multipleEmphasisPatterns) { - let match - while ((match = pattern.regex.exec(line)) !== null) { - // Skip if this is likely intentional or very short - if (shouldSkipMatch(match[0], match[1])) continue 
- - const range = getRange(line, match[0]) - addError( - onError, - lineNumber, - `Do not use multiple emphasis types in a single string: ${pattern.types.join(' + ')}`, - line, - range, - null, // No auto-fix as this requires editorial judgment - ) - } - } -} - -/** - * Determine if a match should be skipped (likely intentional formatting) - */ -function shouldSkipMatch(fullMatch: string, content: string): boolean { - // Skip common false positives - if (!content) return true - - // Skip very short content (likely intentional single chars) - if (content.trim().length < 2) return true - - // Skip if it's mostly code-like content (constants, variables) - if (/^[A-Z_][A-Z0-9_]*$/.test(content.trim())) return true - - // Skip file extensions or URLs - if (/\.[a-z]{2,4}$/i.test(content.trim()) || /https?:\/\//.test(content)) return true - - return false -} diff --git a/src/content-linter/lib/linting-rules/note-warning-formatting.ts b/src/content-linter/lib/linting-rules/note-warning-formatting.ts deleted file mode 100644 index e55a57da2295..000000000000 --- a/src/content-linter/lib/linting-rules/note-warning-formatting.ts +++ /dev/null @@ -1,236 +0,0 @@ -// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations -import { addError } from 'markdownlint-rule-helpers' -import { getRange } from '../helpers/utils' -import frontmatter from '@/frame/lib/read-frontmatter' -import type { RuleParams, RuleErrorCallback } from '@/content-linter/types' - -interface NoteContentItem { - text: string - lineNumber: number -} - -export const noteWarningFormatting = { - names: ['GHD049', 'note-warning-formatting'], - description: 'Note and warning tags should be formatted according to style guide', - tags: ['formatting', 'callouts', 'notes', 'warnings', 'style'], - severity: 'warning', - function: (params: RuleParams, onError: RuleErrorCallback) => { - // Skip autogenerated files - const frontmatterString = params.frontMatterLines.join('\n') - const fm = frontmatter(frontmatterString).data - if (fm && fm.autogenerated) return - - const lines = params.lines - let inLegacyNote = false - let noteStartLine: number | null = null - let noteContent: NoteContentItem[] = [] - - for (let i = 0; i < lines.length; i++) { - const line = lines[i] - const lineNumber = i + 1 - - // Check for legacy {% note %} tags - if (line.trim() === '{% note %}') { - inLegacyNote = true - noteStartLine = lineNumber - noteContent = [] - - // Check for missing line break before {% note %} - const prevLine = i > 0 ? lines[i - 1] : '' - if (prevLine.trim() !== '') { - const range = getRange(line, '{% note %}') - addError(onError, lineNumber, 'Add a blank line before {% note %} tag', line, range, { - editColumn: 1, - deleteCount: 0, - insertText: '\n', - }) - } - continue - } - - // Check for end of legacy note - if (line.trim() === '{% endnote %}') { - if (inLegacyNote) { - inLegacyNote = false - - // Check for missing line break after {% endnote %} - const nextLine = i < lines.length - 1 ? 
lines[i + 1] : '' - if (nextLine.trim() !== '') { - const range = getRange(line, '{% endnote %}') - addError(onError, lineNumber, 'Add a blank line after {% endnote %} tag', line, range, { - editColumn: line.length + 1, - deleteCount: 0, - insertText: '\n', - }) - } - - // Check note content formatting - validateNoteContent(noteContent, noteStartLine, onError) - } - continue - } - - // Collect content inside legacy notes - if (inLegacyNote) { - noteContent.push({ text: line, lineNumber }) - continue - } - - // Check for new-style callouts > [!NOTE], > [!WARNING], > [!DANGER] - const calloutMatch = line.match(/^>\s*\[!(NOTE|WARNING|DANGER)\]\s*$/) - if (calloutMatch) { - const calloutType = calloutMatch[1] - - // Check for missing line break before callout - const prevLine = i > 0 ? lines[i - 1] : '' - if (prevLine.trim() !== '') { - const range = getRange(line, line.trim()) - addError( - onError, - lineNumber, - `Add a blank line before > [!${calloutType}] callout`, - line, - range, - { - editColumn: 1, - deleteCount: 0, - insertText: '\n', - }, - ) - } - - // Find the end of this callout block and validate content - const calloutContent = [] - let j = i + 1 - while (j < lines.length && lines[j].startsWith('>')) { - if (lines[j].trim() !== '>') { - calloutContent.push({ text: lines[j], lineNumber: j + 1 }) - } - j++ - } - - // Check for missing line break after callout - if (j < lines.length && lines[j].trim() !== '') { - const range = getRange(lines[j], lines[j].trim()) - addError( - onError, - j + 1, - `Add a blank line after > [!${calloutType}] callout block`, - lines[j], - range, - { - editColumn: 1, - deleteCount: 0, - insertText: '\n', - }, - ) - } - - validateCalloutContent(calloutContent, calloutType, lineNumber, onError) - i = j - 1 // Skip to end of callout block - continue - } - - // Check for orphaned **Note:**/**Warning:**/**Danger:** outside callouts - const orphanedPrefixMatch = line.match(/\*\*(Note|Warning|Danger):\*\*/) - if (orphanedPrefixMatch && !inLegacyNote && !line.startsWith('>')) { - const range = getRange(line, orphanedPrefixMatch[0]) - addError( - onError, - lineNumber, - `${orphanedPrefixMatch[1]} prefix should be inside a callout block`, - line, - range, - null, // No auto-fix as this requires human decision - ) - } - } - }, -} - -/** - * Validate content inside legacy {% note %} blocks - */ -function validateNoteContent( - noteContent: NoteContentItem[], - noteStartLine: number | null, - onError: RuleErrorCallback, -) { - if (noteContent.length === 0) return - - const contentLines = noteContent.filter((item) => item.text.trim() !== '') - if (contentLines.length === 0) return - - // Count bullet points - const bulletLines = contentLines.filter((item) => item.text.trim().match(/^[*\-+]\s/)) - if (bulletLines.length > 2) { - const range = getRange(bulletLines[2].text, bulletLines[2].text.trim()) - addError( - onError, - bulletLines[2].lineNumber, - 'Do not include more than 2 bullet points inside a callout', - bulletLines[2].text, - range, - null, // No auto-fix as this requires content restructuring - ) - } - - // Check for missing prefix (only if it looks like a traditional note) - const firstContentLine = contentLines[0] - const allContent = contentLines.map((line) => line.text).join(' ') - const hasButtons = - allContent.includes(' item.text.trim() !== '>') - if (contentLines.length === 0) return - - // Count bullet points - const bulletLines = contentLines.filter((item) => item.text.match(/^>\s*[*\-+]\s/)) - if (bulletLines.length > 2) { - const range = 
getRange(bulletLines[2].text, bulletLines[2].text.trim()) - addError( - onError, - bulletLines[2].lineNumber, - 'Do not include more than 2 bullet points inside a callout', - bulletLines[2].text, - range, - null, // No auto-fix as this requires content restructuring - ) - } - - // For new-style callouts, the prefix is handled by the [!NOTE] syntax itself - // so we don't need to check for manual **Note:** prefixes -} diff --git a/src/content-linter/lib/linting-rules/outdated-release-phase-terminology.ts b/src/content-linter/lib/linting-rules/outdated-release-phase-terminology.ts index 28ce1d3e2c8b..d2f457f528b7 100644 --- a/src/content-linter/lib/linting-rules/outdated-release-phase-terminology.ts +++ b/src/content-linter/lib/linting-rules/outdated-release-phase-terminology.ts @@ -26,6 +26,26 @@ const TERMINOLOGY_REPLACEMENTS: [string, string][] = [ ['sunset', 'retired'], ] +// Don't lint filepaths that have legitimate uses of these terms +const EXCLUDED_PATHS: string[] = [ + // Individual files + 'content/actions/reference/runners/github-hosted-runners.md', + 'content/actions/reference/workflows-and-actions/metadata-syntax.md', + 'content/admin/administering-your-instance/administering-your-instance-from-the-command-line/command-line-utilities.md', + 'content/authentication/managing-commit-signature-verification/checking-for-existing-gpg-keys.md', + 'content/codespaces/setting-your-user-preferences/choosing-the-stable-or-beta-host-image.md', + 'content/rest/using-the-rest-api/getting-started-with-the-rest-api.md', + 'data/reusables/actions/jobs/choosing-runner-github-hosted.md', + 'data/reusables/code-scanning/codeql-query-tables/cpp.md', + 'data/reusables/dependabot/dependabot-updates-supported-versioning-tags.md', + 'data/variables/release-phases.yml', + // Directories + 'content/site-policy/', + 'data/features/', + 'data/release-notes/enterprise-server/3-14/', + 'data/release-notes/enterprise-server/3-15/', +] + interface CompiledRegex { regex: RegExp outdatedTerm: string @@ -96,6 +116,13 @@ export const outdatedReleasePhaseTerminology = { tags: ['terminology', 'consistency', 'release-phases'], severity: 'error', function: (params: RuleParams, onError: RuleErrorCallback) => { + // Skip excluded files + for (const filepath of EXCLUDED_PATHS) { + if (params.name.startsWith(filepath)) { + return + } + } + // Skip autogenerated files const frontmatterString = params.frontMatterLines.join('\n') const fm = frontmatter(frontmatterString).data diff --git a/src/content-linter/lib/linting-rules/third-party-actions-reusable.ts b/src/content-linter/lib/linting-rules/third-party-actions-reusable.ts index 6b95c52fa9ab..907495a9a884 100644 --- a/src/content-linter/lib/linting-rules/third-party-actions-reusable.ts +++ b/src/content-linter/lib/linting-rules/third-party-actions-reusable.ts @@ -43,34 +43,46 @@ export const thirdPartyActionsReusable = { /** * Find third-party actions in YAML content - * Third-party actions are identified by the pattern: owner/action@version - * where owner is not 'actions' or 'github' + * Third-party actions are identified by actions that are not GitHub-owned or documentation examples */ function findThirdPartyActions(yamlContent: string): string[] { const thirdPartyActions: string[] = [] - - // Pattern to match 'uses: owner/action@version' where owner is not actions or github const actionPattern = /uses:\s+([^{\s]+\/[^@\s]+@[^\s]+)/g let match while ((match = actionPattern.exec(yamlContent)) !== null) { const actionRef = match[1] - // Extract owner from action reference 
- const parts = actionRef.split('/') - if (parts.length >= 2) { - const owner = parts[0] - - // Skip GitHub-owned actions (actions/* and github/*) - if (owner !== 'actions' && owner !== 'github') { - thirdPartyActions.push(actionRef) - } + if (!isExampleOrGitHubAction(actionRef)) { + thirdPartyActions.push(actionRef) } } return thirdPartyActions } +/** + * Check if an action should be skipped (GitHub-owned or documentation example) + */ +function isExampleOrGitHubAction(actionRef: string): boolean { + // List of patterns to exclude (GitHub-owned and documentation examples) + const excludePatterns = [ + // GitHub-owned + /^actions\//, + /^github\//, + // Example organizations + /^(octo-org|octocat|different-org|fakeaction|some|OWNER|my-org)\//, + // Example repos (any owner) + /\/example-repo[/@]/, + /\/octo-repo[/@]/, + /\/hello-world-composite-action[/@]/, + /\/monorepo[/@]/, + // Monorepo patterns + ] + + return excludePatterns.some((pattern) => pattern.test(actionRef)) +} + /** * Check if the disclaimer reusable is present before the given line number or inside the code block * Looks backward from the code block and also inside the code block content diff --git a/src/content-linter/scripts/lint-content.ts b/src/content-linter/scripts/lint-content.ts index aeca69d6ce07..b516d5f5917b 100755 --- a/src/content-linter/scripts/lint-content.ts +++ b/src/content-linter/scripts/lint-content.ts @@ -16,7 +16,14 @@ import { prettyPrintResults } from './pretty-print-results' import { getLintableYml } from '@/content-linter/lib/helpers/get-lintable-yml' import { printAnnotationResults } from '../lib/helpers/print-annotations' import languages from '@/languages/lib/languages-server' -import { shouldIncludeResult } from '../lib/helpers/should-include-result' + +/** + * Config that applies to all rules in all environments (CI, reports, precommit). + */ +export const globalConfig = { + // Do not ever lint these filepaths + excludePaths: ['content/contributing/'], +} program .description('Run GitHub Docs Markdownlint rules.') @@ -197,12 +204,7 @@ async function main() { if (printAnnotations) { printAnnotationResults(formattedResults, { - skippableRules: [ - // As of Feb 2024, this rule is quite noisy. It's present in - // many files and is not always a problem. And besides, when it - // does warn, it's usually a very long one. - 'code-fence-line-length', // a.k.a. GHD030 - ], + skippableRules: [], skippableFlawProperties: [ // As of Feb 2024, we don't support reporting flaws for lines // and columns numbers of YAML files. 
YAML files consist of one @@ -349,7 +351,14 @@ function getFilesToLint(paths) { (!filePath.endsWith('.md') && !filePath.endsWith('.yml')) ) continue + const relPath = path.relative(root, filePath) + + // Skip files that match any of the excluded paths + if (globalConfig.excludePaths.some((excludePath) => relPath.startsWith(excludePath))) { + continue + } + if (seen.has(relPath)) continue seen.add(relPath) clean.push(relPath) @@ -427,9 +436,7 @@ function getFormattedResults(allResults, isPrecommit) { if (verbose) { output[key] = [...results] } else { - const formattedResults = results - .map((flaw) => formatResult(flaw, isPrecommit)) - .filter((flaw) => shouldIncludeResult(flaw, key)) + const formattedResults = results.map((flaw) => formatResult(flaw, isPrecommit)) // Only add the file to output if there are results after filtering if (formattedResults.length > 0) { @@ -562,9 +569,6 @@ function getMarkdownLintConfig(errorsOnly, runRules) { // Check if the rule should be included based on user-specified rules if (runRules && !shouldIncludeRule(ruleName, runRules)) continue - // Skip british-english-quotes rule in CI/PRs (only run in pre-commit) - if (ruleName === 'british-english-quotes' && !isPrecommit) continue - // There are a subset of rules run on just the frontmatter in files if (githubDocsFrontmatterConfig[ruleName]) { config.frontMatter[ruleName] = ruleConfig diff --git a/src/content-linter/scripts/lint-report.ts b/src/content-linter/scripts/lint-report.ts index 7a5bf0c657f0..098588abec52 100644 --- a/src/content-linter/scripts/lint-report.ts +++ b/src/content-linter/scripts/lint-report.ts @@ -5,12 +5,21 @@ import coreLib from '@actions/core' import github from '@/workflows/github' import { getEnvInputs } from '@/workflows/get-env-inputs' import { createReportIssue, linkReports } from '@/workflows/issue-report' -import { shouldIncludeResult } from '@/content-linter/lib/helpers/should-include-result' -import { reportingConfig } from '@/content-linter/style/github-docs' +import { getAllRuleNames } from '@/content-linter/lib/helpers/rule-utils' // GitHub issue body size limit is ~65k characters, so we'll use 60k as a safe limit const MAX_ISSUE_BODY_SIZE = 60000 +/** + * Config that only applies to automated weekly reports. 
+ */ +export const reportingConfig = { + // Include only rules with these severities in reports + includeSeverities: ['error'], + // Include these rules regardless of severity in reports + includeRules: ['expired-content'], +} + interface LintFlaw { severity: string ruleNames: string[] @@ -19,34 +28,16 @@ interface LintFlaw { /** * Determines if a lint result should be included in the automated report - * Uses shared exclusion logic with additional reporting-specific filtering */ -function shouldIncludeInReport(flaw: LintFlaw, filePath: string): boolean { - if (!flaw.ruleNames || !Array.isArray(flaw.ruleNames)) { - return false - } - - // First check if it should be excluded (file-specific or rule-specific exclusions) - if (!shouldIncludeResult(flaw, filePath)) { - return false - } - - // Extract all possible rule names including sub-rules from search-replace - const allRuleNames = [...flaw.ruleNames] - if (flaw.ruleNames.includes('search-replace') && flaw.errorDetail) { - const match = flaw.errorDetail.match(/^([^:]+):/) - if (match) { - allRuleNames.push(match[1]) - } - } +function shouldIncludeInReport(flaw: LintFlaw): boolean { + const allRuleNames = getAllRuleNames(flaw) - // Apply reporting-specific filtering // Check if severity should be included if (reportingConfig.includeSeverities.includes(flaw.severity)) { return true } - // Check if any rule name is in the include list + // Check if any rule name is in the include list that overrides severity const hasIncludedRule = allRuleNames.some((ruleName: string) => reportingConfig.includeRules.includes(ruleName), ) @@ -101,7 +92,7 @@ async function main() { // Filter results based on reporting configuration const filteredResults: Record = {} for (const [file, flaws] of Object.entries(parsedResults)) { - const filteredFlaws = (flaws as LintFlaw[]).filter((flaw) => shouldIncludeInReport(flaw, file)) + const filteredFlaws = (flaws as LintFlaw[]).filter((flaw) => shouldIncludeInReport(flaw)) // Only include files that have remaining flaws after filtering if (filteredFlaws.length > 0) { diff --git a/src/content-linter/style/base.ts b/src/content-linter/style/base.ts index 9faabfeca6f6..600a65017754 100644 --- a/src/content-linter/style/base.ts +++ b/src/content-linter/style/base.ts @@ -33,32 +33,12 @@ export const baseConfig: BaseConfig = { 'partial-markdown-files': false, 'yml-files': false, }, - 'ul-style': { - // MD004 - severity: 'error', - style: 'asterisk', - 'partial-markdown-files': true, - 'yml-files': false, - context: `We use asterisks to format bulleted lists because this gives clearer, more accessible source code.`, - }, - 'no-trailing-spaces': { - // MD009 - severity: 'error', - 'partial-markdown-files': true, - 'yml-files': true, - }, 'no-reversed-links': { // MD011 severity: 'error', 'partial-markdown-files': true, 'yml-files': true, }, - 'no-multiple-blanks': { - // MD012 - severity: 'error', - 'partial-markdown-files': true, - 'yml-files': true, - }, 'commands-show-output': { // MD014 severity: 'error', @@ -77,12 +57,6 @@ export const baseConfig: BaseConfig = { 'partial-markdown-files': true, 'yml-files': true, }, - 'blanks-around-headings': { - // MD022 - severity: 'error', - 'partial-markdown-files': false, - 'yml-files': false, - }, 'heading-start-left': { // MD023 severity: 'error', @@ -140,19 +114,6 @@ export const baseConfig: BaseConfig = { 'partial-markdown-files': true, 'yml-files': true, }, - 'single-trailing-newline': { - // MD047 - severity: 'error', - 'partial-markdown-files': true, - 'yml-files': false, - }, - 
'emphasis-style': { - // MD049 - severity: 'error', - style: 'underscore', - 'partial-markdown-files': true, - 'yml-files': true, - }, 'strong-style': { // MD050 severity: 'error', diff --git a/src/content-linter/style/github-docs.ts b/src/content-linter/style/github-docs.ts index c20a684d9e86..edccc56973e0 100644 --- a/src/content-linter/style/github-docs.ts +++ b/src/content-linter/style/github-docs.ts @@ -1,31 +1,3 @@ -export const reportingConfig = { - // Always include all rules with these severities in automated weekly reports - includeSeverities: ['error'], - - // Specific rules to include regardless of severity - // Add rule names (short or long form) that should always be reported - includeRules: [ - 'GHD038', // expired-content - Content that has passed its expiration date - 'expired-content', - ], - - // Specific rules to exclude from CI and reports (overrides severity-based inclusion) - // Add rule names here if you want to suppress them from reports - excludeRules: [ - // Example: 'GHD030' // Uncomment to exclude code-fence-line-length warnings - 'british-english-quotes', // Exclude from reports but keep for pre-commit - ], - - // Files to exclude from specific rules in CI and reports - // Format: { 'rule-name': ['file/path/pattern1', 'file/path/pattern2'] } - excludeFilesFromRules: { - 'todocs-placeholder': [ - 'content/contributing/collaborating-on-github-docs/using-the-todocs-placeholder-to-leave-notes.md', - 'content/contributing/collaborating-on-github-docs/index.md', - ], - }, -} - const githubDocsConfig = { 'link-punctuation': { // GHD001 @@ -129,12 +101,6 @@ const githubDocsConfig = { 'partial-markdown-files': true, 'yml-files': true, }, - 'code-fence-line-length': { - // GHD030 - severity: 'warning', - 'partial-markdown-files': true, - 'yml-files': true, - }, 'image-alt-text-exclude-words': { // GHD031 severity: 'error', @@ -153,12 +119,6 @@ const githubDocsConfig = { 'partial-markdown-files': true, 'yml-files': true, }, - 'list-first-word-capitalization': { - // GHD034 - severity: 'warning', - 'partial-markdown-files': true, - 'yml-files': true, - }, 'rai-reusable-usage': { // GHD035 severity: 'error', @@ -226,25 +186,6 @@ const githubDocsConfig = { 'partial-markdown-files': true, 'yml-files': true, }, - 'british-english-quotes': { - // GHD048 - severity: 'warning', - precommitSeverity: 'warning', // Show warnings locally for writer awareness - 'partial-markdown-files': true, - 'yml-files': true, - }, - 'note-warning-formatting': { - // GHD049 - severity: 'warning', - 'partial-markdown-files': true, - 'yml-files': true, - }, - 'multiple-emphasis-patterns': { - // GHD050 - severity: 'warning', - 'partial-markdown-files': true, - 'yml-files': true, - }, 'header-content-requirement': { // GHD053 severity: 'warning', @@ -312,12 +253,6 @@ export const githubDocsFrontmatterConfig = { 'partial-markdown-files': false, 'yml-files': false, }, - 'frontmatter-validation': { - // GHD055 - severity: 'warning', - 'partial-markdown-files': false, - 'yml-files': false, - }, 'frontmatter-landing-recommended': { // GHD056 severity: 'error', diff --git a/src/content-linter/tests/unit/british-english-quotes.ts b/src/content-linter/tests/unit/british-english-quotes.ts deleted file mode 100644 index 6423d7960eda..000000000000 --- a/src/content-linter/tests/unit/british-english-quotes.ts +++ /dev/null @@ -1,225 +0,0 @@ -import { describe, expect, test } from 'vitest' - -import { runRule } from '../../lib/init-test' -import { britishEnglishQuotes } from 
'../../lib/linting-rules/british-english-quotes' - -describe(britishEnglishQuotes.names.join(' - '), () => { - test('Correct American English punctuation passes', async () => { - const markdown = [ - 'She said, "Hello, world."', - 'The guide mentions "Getting started."', - 'See "[AUTOTITLE]."', - 'Zara replied, "That sounds great!"', - 'The section titled "Prerequisites," explains the setup.', - ].join('\n') - const result = await runRule(britishEnglishQuotes, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - - test('British English quotes with AUTOTITLE are flagged', async () => { - const markdown = [ - 'For more information, see "[AUTOTITLE]".', - 'The article "[AUTOTITLE]", covers this topic.', - ].join('\n') - const result = await runRule(britishEnglishQuotes, { strings: { markdown } }) - // Markdownlint error objects include detail property not in base LintError type - const errors = result.markdown as any[] - expect(errors.length).toBe(2) - expect(errors[0].lineNumber).toBe(1) - if (errors[0].detail) { - expect(errors[0].detail).toContain('place period inside the quotation marks') - } - expect(errors[1].lineNumber).toBe(2) - if (errors[1].detail) { - expect(errors[1].detail).toContain('place comma inside the quotation marks') - } - }) - - test('General British English punctuation patterns are detected', async () => { - const markdown = [ - 'Priya said "Hello".', - 'The tutorial called "Advanced Git", is helpful.', - 'Marcus mentioned "DevOps best practices".', - 'See the guide titled "Getting Started", for details.', - ].join('\n') - const result = await runRule(britishEnglishQuotes, { strings: { markdown } }) - // Markdownlint error objects include detail property not in base LintError type - const errors = result.markdown as any[] - expect(errors.length).toBe(4) - if (errors[0].detail) { - expect(errors[0].detail).toContain('period inside') - } - if (errors[1].detail) { - expect(errors[1].detail).toContain('comma inside') - } - if (errors[2].detail) { - expect(errors[2].detail).toContain('period inside') - } - if (errors[3].detail) { - expect(errors[3].detail).toContain('comma inside') - } - }) - - test('Single quotes are also detected', async () => { - const markdown = [ - "Aisha said 'excellent work'.", - "The term 'API endpoint', refers to a specific URL.", - ].join('\n') - const result = await runRule(britishEnglishQuotes, { strings: { markdown } }) - // Markdownlint error objects include detail property not in base LintError type - const errors = result.markdown as any[] - expect(errors.length).toBe(2) - if (errors[0].detail) { - expect(errors[0].detail).toContain('period inside') - } - if (errors[1].detail) { - expect(errors[1].detail).toContain('comma inside') - } - }) - - test('Code blocks and inline code are ignored', async () => { - const markdown = [ - '```javascript', - 'console.log("Hello");', - 'const message = "World";', - '```', - '', - 'In code, use `console.log("Debug");` for logging.', - 'The command `git commit -m "Fix bug";` creates a commit.', - ].join('\n') - const result = await runRule(britishEnglishQuotes, { strings: { markdown } }) - // Markdownlint error objects include detail property not in base LintError type - const errors = result.markdown as any[] - expect(errors.length).toBe(0) - }) - - test('URLs and emails are ignored', async () => { - const markdown = [ - 'Visit https://example.com/api"docs" for more info.', - 'Email support@company.com"help" for assistance.', - 'The webhook URL 
http://api.service.com"endpoint" should work.', - ].join('\n') - const result = await runRule(britishEnglishQuotes, { strings: { markdown } }) - // Markdownlint error objects include detail property not in base LintError type - const errors = result.markdown as any[] - expect(errors.length).toBe(0) - }) - - test('Auto-fix suggestions work correctly', async () => { - const markdown = [ - 'See "[AUTOTITLE]".', - 'The guide "Setup Instructions", explains everything.', - ].join('\n') - const result = await runRule(britishEnglishQuotes, { strings: { markdown } }) - // Markdownlint error objects include detail and fixInfo properties not in base LintError type - const errors = result.markdown as any[] - expect(errors.length).toBe(2) - - // Check that fix info is provided - expect(errors[0].fixInfo).toBeDefined() - expect(errors[0].fixInfo.insertText).toContain('."') - expect(errors[1].fixInfo).toBeDefined() - expect(errors[1].fixInfo.insertText).toContain(',"') - }) - - test('Mixed punctuation scenarios', async () => { - const markdown = [ - 'Chen explained, "The process involves three steps". First, prepare the data.', - 'The error message "File not found", appears when the path is incorrect.', - 'As Fatima noted, "Testing is crucial"; quality depends on it.', - ].join('\n') - const result = await runRule(britishEnglishQuotes, { strings: { markdown } }) - // Markdownlint error objects include detail property not in base LintError type - const errors = result.markdown as any[] - expect(errors.length).toBe(2) - expect(errors[0].lineNumber).toBe(1) - expect(errors[1].lineNumber).toBe(2) - }) - - test('Nested quotes are handled appropriately', async () => { - const markdown = [ - 'She said, "The article \'Best Practices\', is recommended".', - 'The message "Error: \'Invalid input\'" appears sometimes.', - ].join('\n') - const result = await runRule(britishEnglishQuotes, { strings: { markdown } }) - // Markdownlint error objects include detail property not in base LintError type - const errors = result.markdown as any[] - expect(errors.length).toBe(1) - if (errors[0].detail) { - expect(errors[0].detail).toContain('period inside') - } - }) - - test('Edge cases with spacing', async () => { - const markdown = [ - 'The command "npm install" .', - 'See documentation "API Guide" , which covers authentication.', - 'Reference "[AUTOTITLE]" .', - ].join('\n') - const result = await runRule(britishEnglishQuotes, { strings: { markdown } }) - // Markdownlint error objects include detail property not in base LintError type - const errors = result.markdown as any[] - expect(errors.length).toBe(3) - if (errors[0].detail) { - expect(errors[0].detail).toContain('period inside') - } - if (errors[1].detail) { - expect(errors[1].detail).toContain('comma inside') - } - if (errors[2].detail) { - expect(errors[2].detail).toContain('period inside') - } - }) - - test('Autogenerated files are skipped', async () => { - const frontmatter = ['---', 'title: API Reference', 'autogenerated: rest', '---'].join('\n') - const markdown = ['The endpoint "GET /users", returns user data.', 'See "[AUTOTITLE]".'].join( - '\n', - ) - const result = await runRule(britishEnglishQuotes, { - strings: { - markdown: frontmatter + '\n' + markdown, - }, - }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - - test('Complex real-world examples', async () => { - const markdown = [ - '## Configuration Options', - '', - 'To enable the feature, set `enabled: true` in "config.yml".', - 'Aaliyah mentioned that the tutorial "Docker 
Basics", covers containers.', - 'The error "Permission denied", occurs when access is restricted.', - 'For troubleshooting, see "[AUTOTITLE]".', - '', - '```yaml', - 'name: "production"', - 'debug: false', - '```', - '', - 'Dmitri explained, "The workflow has multiple stages."', - ].join('\n') - const result = await runRule(britishEnglishQuotes, { strings: { markdown } }) - // Markdownlint error objects include detail property not in base LintError type - const errors = result.markdown as any[] - expect(errors.length).toBe(4) - expect(errors[0].lineNumber).toBe(3) // config.yml line - expect(errors[1].lineNumber).toBe(4) // Docker Basics line - expect(errors[2].lineNumber).toBe(5) // Permission denied line - expect(errors[3].lineNumber).toBe(6) // AUTOTITLE line - }) - - test('Warning severity is set correctly', () => { - expect(britishEnglishQuotes.severity).toBe('warning') - }) - - test('Rule has correct metadata', () => { - expect(britishEnglishQuotes.names).toEqual(['GHD048', 'british-english-quotes']) - expect(britishEnglishQuotes.description).toContain('American English style') - expect(britishEnglishQuotes.tags).toContain('punctuation') - expect(britishEnglishQuotes.tags).toContain('quotes') - }) -}) diff --git a/src/content-linter/tests/unit/code-fence-line-length.ts b/src/content-linter/tests/unit/code-fence-line-length.ts deleted file mode 100644 index 26961b8dea81..000000000000 --- a/src/content-linter/tests/unit/code-fence-line-length.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { describe, expect, test } from 'vitest' - -import { runRule } from '../../lib/init-test' -import { codeFenceLineLength } from '../../lib/linting-rules/code-fence-line-length' - -describe(codeFenceLineLength.names.join(' - '), () => { - test('line length of max + 1 fails', async () => { - const markdown = [ - '```shell', - '111', - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', - 'bbb', - '```', - ].join('\n') - const result = await runRule(codeFenceLineLength, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(1) - expect(errors[0].lineNumber).toBe(3) - expect(errors[0].errorRange).toEqual([1, 61]) - expect(errors[0].fixInfo).toBeNull() - }) - test('line length less than or equal to max length passes', async () => { - const markdown = [ - '```javascript', - '111', - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', - '```', - ].join('\n') - const result = await runRule(codeFenceLineLength, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - test('multiple lines in code block that exceed max length fail', async () => { - const markdown = [ - '```', - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaccc', - '1', - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbb', - '```', - ].join('\n') - const result = await runRule(codeFenceLineLength, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(2) - expect(errors[0].lineNumber).toBe(2) - expect(errors[1].lineNumber).toBe(4) - expect(errors[0].errorRange).toEqual([1, 61]) - expect(errors[1].errorRange).toEqual([1, 61]) - }) -}) diff --git a/src/content-linter/tests/unit/frontmatter-validation.ts b/src/content-linter/tests/unit/frontmatter-validation.ts deleted file mode 100644 index 30ce723e7a75..000000000000 --- a/src/content-linter/tests/unit/frontmatter-validation.ts +++ /dev/null @@ -1,576 +0,0 @@ -import { describe, 
expect, test } from 'vitest' - -import { runRule } from '@/content-linter/lib/init-test' -import { frontmatterValidation } from '@/content-linter/lib/linting-rules/frontmatter-validation' - -const ruleName = frontmatterValidation.names[1] - -// Configure the test fixture to not split frontmatter and content -const fmOptions = { markdownlintOptions: { frontMatter: null } } - -describe(ruleName, () => { - // Character limit tests - test('category title within limits passes', async () => { - const markdown = `--- -title: 'Short category title' -intro: 'Category introduction' -children: - - /path/to/child ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/index.md': markdown }, - ...fmOptions, - }) - expect(result['content/section/index.md']).toEqual([]) - }) - - test('category title exceeds recommended limit shows warning', async () => { - const markdown = `--- -title: 'This category title is exactly 68 characters long for testing purpos' -shortTitle: 'Short title' -intro: 'Category introduction' -children: - - /path/to/child ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/index.md': markdown }, - ...fmOptions, - }) - expect(result['content/section/index.md']).toHaveLength(1) - expect(result['content/section/index.md'][0].errorDetail).toContain( - 'exceeds recommended length of 67 characters', - ) - }) - - test('category title exceeds maximum limit shows error', async () => { - const markdown = `--- -title: 'This is exactly 71 characters long to exceed the maximum limit for catx' -shortTitle: 'Short title' -intro: 'Category introduction' -children: - - /path/to/child ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/index.md': markdown }, - ...fmOptions, - }) - expect(result['content/section/index.md']).toHaveLength(1) - expect(result['content/section/index.md'][0].errorDetail).toContain( - 'exceeds maximum length of 70 characters', - ) - }) - - test('category shortTitle exceeds limit shows error', async () => { - const markdown = `--- -title: 'Category title' -shortTitle: 'This short title is exactly 31x' -intro: 'Category introduction' -children: - - /path/to/child ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/index.md': markdown }, - ...fmOptions, - }) - expect(result['content/section/index.md']).toHaveLength(1) - expect(result['content/section/index.md'][0].errorDetail).toContain('ShortTitle exceeds') - }) - - test('mapTopic title within limits passes', async () => { - const markdown = `--- -title: 'Using workflows' -intro: 'Map topic introduction' -mapTopic: true ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/actions/using-workflows/index.md': markdown }, - ...fmOptions, - }) - expect(result['content/actions/using-workflows/index.md']).toEqual([]) - }) - - test('mapTopic title exceeds recommended limit shows warning', async () => { - const markdown = `--- -title: 'This map topic title is exactly 64 characters long for tests now' -shortTitle: 'Short title' -intro: 'Map topic introduction' -mapTopic: true ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/actions/using-workflows/index.md': markdown }, - ...fmOptions, - }) - expect(result['content/actions/using-workflows/index.md']).toHaveLength(1) - 
expect(result['content/actions/using-workflows/index.md'][0].errorDetail).toContain( - 'exceeds recommended length of 63 characters', - ) - }) - - test('article title within limits passes', async () => { - const markdown = `--- -title: 'GitHub Actions quickstart' -topics: - - Actions ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/actions/quickstart.md': markdown }, - ...fmOptions, - }) - expect(result['content/actions/quickstart.md']).toEqual([]) - }) - - test('article title exceeds recommended limit shows warning', async () => { - const markdown = `--- -title: 'This article title is exactly 61 characters long for test now' -shortTitle: 'Short title' -topics: - - Actions ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/actions/quickstart.md': markdown }, - ...fmOptions, - }) - expect(result['content/actions/quickstart.md']).toHaveLength(1) - expect(result['content/actions/quickstart.md'][0].errorDetail).toContain( - 'exceeds recommended length of 60 characters', - ) - }) - - test('article title exceeds maximum limit shows error', async () => { - const markdown = `--- -title: 'This article title is exactly 81 characters long to exceed the maximum limits now' -shortTitle: 'Short title' -topics: - - Actions ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/actions/quickstart.md': markdown }, - ...fmOptions, - }) - expect(result['content/actions/quickstart.md']).toHaveLength(1) - expect(result['content/actions/quickstart.md'][0].errorDetail).toContain( - 'exceeds maximum length of 80 characters', - ) - }) - - test('cross-property validation: long title without shortTitle shows error', async () => { - const markdown = `--- -title: 'This article title is exactly 50 characters long' -topics: - - Actions ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/actions/quickstart.md': markdown }, - ...fmOptions, - }) - expect(result['content/actions/quickstart.md']).toHaveLength(1) - expect(result['content/actions/quickstart.md'][0].errorDetail).toContain( - 'A shortTitle must be provided', - ) - }) - - test('cross-property validation: long title with shortTitle passes', async () => { - const markdown = `--- -title: 'This article title is exactly 50 characters long' -shortTitle: 'Actions quickstart' -topics: - - Actions ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/actions/quickstart.md': markdown }, - ...fmOptions, - }) - expect(result['content/actions/quickstart.md']).toEqual([]) - }) - - // Required properties tests - test('category with required intro passes', async () => { - const markdown = `--- -title: 'Category title' -intro: 'This is the category introduction.' 
-children: - - /path/to/child ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/index.md': markdown }, - ...fmOptions, - }) - expect(result['content/section/index.md']).toEqual([]) - }) - - test('category without required intro fails', async () => { - const markdown = `--- -title: 'Category title' -children: - - /path/to/child ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/index.md': markdown }, - ...fmOptions, - }) - expect(result['content/section/index.md']).toHaveLength(1) - expect(result['content/section/index.md'][0].errorDetail).toContain( - "Missing required property 'intro' for category content type", - ) - }) - - test('category with intro too long shows warning', async () => { - const longIntro = 'A'.repeat(400) // Exceeds 362 char limit - const markdown = `--- -title: 'Category title' -intro: '${longIntro}' -children: - - /path/to/child ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/index.md': markdown }, - ...fmOptions, - }) - expect(result['content/section/index.md']).toHaveLength(1) - expect(result['content/section/index.md'][0].errorDetail).toContain( - 'Intro exceeds maximum length of 362 characters', - ) - }) - - test('mapTopic with required intro passes', async () => { - const markdown = `--- -title: 'Map topic title' -intro: 'This is the map topic introduction.' -mapTopic: true ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/topic.md': markdown }, - ...fmOptions, - }) - expect(result['content/section/topic.md']).toEqual([]) - }) - - test('mapTopic without required intro fails', async () => { - const markdown = `--- -title: 'Map topic title' -mapTopic: true ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/topic.md': markdown }, - ...fmOptions, - }) - expect(result['content/section/topic.md']).toHaveLength(1) - expect(result['content/section/topic.md'][0].errorDetail).toContain( - "Missing required property 'intro' for mapTopic content type", - ) - }) - - test('mapTopic with intro too long shows warning', async () => { - const longIntro = 'A'.repeat(400) // Exceeds 362 char limit - const markdown = `--- -title: 'Map topic title' -intro: '${longIntro}' -mapTopic: true ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/topic.md': markdown }, - ...fmOptions, - }) - expect(result['content/section/topic.md']).toHaveLength(1) - expect(result['content/section/topic.md'][0].errorDetail).toContain( - 'Intro exceeds maximum length of 362 characters', - ) - }) - - test('article with required topics passes', async () => { - const markdown = `--- -title: 'Article title' -topics: - - Actions - - CI/CD ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/article.md': markdown }, - ...fmOptions, - }) - expect(result['content/section/article.md']).toEqual([]) - }) - - test('article without required topics fails', async () => { - const markdown = `--- -title: 'Article title' ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/article.md': markdown }, - ...fmOptions, - }) - expect(result['content/section/article.md']).toHaveLength(1) - expect(result['content/section/article.md'][0].errorDetail).toContain( - "Missing required property 'topics' for article 
content type", - ) - }) - - test('article with empty topics array fails', async () => { - const markdown = `--- -title: 'Article title' -topics: [] ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/article.md': markdown }, - ...fmOptions, - }) - expect(result['content/section/article.md']).toHaveLength(1) - expect(result['content/section/article.md'][0].errorDetail).toContain( - 'Articles should have at least one topic', - ) - }) - - test('article with topics as string fails', async () => { - const markdown = `--- -title: 'Article title' -topics: 'Actions' ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/article.md': markdown }, - ...fmOptions, - }) - expect(result['content/section/article.md']).toHaveLength(1) - expect(result['content/section/article.md'][0].errorDetail).toContain('Topics must be an array') - }) - - test('article with topics as number fails', async () => { - const markdown = `--- -title: 'Article title' -topics: 123 ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/article.md': markdown }, - ...fmOptions, - }) - expect(result['content/section/article.md']).toHaveLength(1) - expect(result['content/section/article.md'][0].errorDetail).toContain('Topics must be an array') - }) - - test('article with intro too long shows warning', async () => { - const longIntro = 'A'.repeat(400) // Exceeds 354 char limit for articles - const markdown = `--- -title: 'Article title' -intro: '${longIntro}' -topics: - - Actions ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/article.md': markdown }, - ...fmOptions, - }) - expect(result['content/section/article.md']).toHaveLength(1) - expect(result['content/section/article.md'][0].errorDetail).toContain( - 'Intro exceeds maximum length of 354 characters', - ) - }) - - test('article intro exceeds recommended but not maximum shows warning', async () => { - const mediumIntro = 'A'.repeat(300) // Exceeds 251 recommended but under 354 max - const markdown = `--- -title: 'Article title' -intro: '${mediumIntro}' -topics: - - Actions ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/article.md': markdown }, - ...fmOptions, - }) - expect(result['content/section/article.md']).toHaveLength(1) - expect(result['content/section/article.md'][0].errorDetail).toContain( - 'Intro exceeds recommended length of 251 characters', - ) - }) - - // Combined validation tests - test('multiple violations show multiple errors', async () => { - const longIntro = 'A'.repeat(400) - const markdown = `--- -title: 'This is exactly 71 characters long to exceed the maximum limit for catx' -intro: '${longIntro}' -shortTitle: 'Short title' -children: - - /path/to/child ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/index.md': markdown }, - ...fmOptions, - }) - expect(result['content/section/index.md']).toHaveLength(2) - expect(result['content/section/index.md'][0].errorDetail).toContain('Title exceeds') - expect(result['content/section/index.md'][1].errorDetail).toContain('Intro exceeds') - }) - - test('no frontmatter passes', async () => { - const markdown = `# Content without frontmatter` - const result = await runRule(frontmatterValidation, { strings: { markdown }, ...fmOptions }) - expect(result.markdown).toEqual([]) - }) - - test('content type 
detection works correctly', async () => { - // Test category detection - const categoryMarkdown = `--- -title: 'Category' -intro: 'Category intro' -children: - - /child ---- -# Content -` - const categoryResult = await runRule(frontmatterValidation, { - strings: { 'content/section/index.md': categoryMarkdown }, - ...fmOptions, - }) - expect(categoryResult['content/section/index.md']).toEqual([]) - - // Test mapTopic detection - const mapTopicMarkdown = `--- -title: 'Map Topic' -intro: 'Map topic intro' -mapTopic: true ---- -# Content -` - const mapTopicResult = await runRule(frontmatterValidation, { - strings: { 'content/section/topic.md': mapTopicMarkdown }, - ...fmOptions, - }) - expect(mapTopicResult['content/section/topic.md']).toEqual([]) - - // Test article detection - const articleMarkdown = `--- -title: 'Article' -topics: - - Topic ---- -# Content -` - const articleResult = await runRule(frontmatterValidation, { - strings: { 'content/section/article.md': articleMarkdown }, - ...fmOptions, - }) - expect(articleResult['content/section/article.md']).toEqual([]) - }) - - // Liquid variable handling tests - test('title with liquid variables counts characters correctly', async () => { - const markdown = `--- -title: 'Getting started with {% data variables.product.prodname_github %}' -topics: - - GitHub ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/article.md': markdown }, - ...fmOptions, - }) - // 'Getting started with ' (21 chars) + liquid tag (0 chars) = 21 chars, should pass - expect(result['content/section/article.md']).toEqual([]) - }) - - test('intro with liquid variables counts characters correctly', async () => { - const markdown = `--- -title: 'Article title' -intro: 'Learn how to use {% data variables.product.prodname_copilot %} for {{ something }}' -topics: - - GitHub ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/article.md': markdown }, - ...fmOptions, - }) - // 'Learn how to use for ' (21 chars) should pass - expect(result['content/section/article.md']).toEqual([]) - }) - - test('shortTitle with liquid variables counts characters correctly', async () => { - const markdown = `--- -title: 'This article title is exactly fifty characters!!!!' 
-shortTitle: '{% data variables.product.prodname_copilot_short %}' -topics: - - GitHub ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/article.md': markdown }, - ...fmOptions, - }) - // Liquid tag should count as 0 characters, should pass - expect(result['content/section/article.md']).toEqual([]) - }) - - test('long text with liquid variables still fails when limit exceeded', async () => { - const longText = 'A'.repeat(70) // 70 chars - const markdown = `--- -title: '${longText} {% data variables.product.prodname_github %} extra text' -shortTitle: 'Short title' -topics: - - GitHub ---- -# Content -` - const result = await runRule(frontmatterValidation, { - strings: { 'content/section/article.md': markdown }, - ...fmOptions, - }) - // 70 A's + 1 space + 0 (liquid tag) + 1 space + 10 ('extra text') = 82 chars, should exceed 80 char limit for articles - expect(result['content/section/article.md']).toHaveLength(1) - expect(result['content/section/article.md'][0].errorDetail).toContain( - 'exceeds maximum length of 80 characters', - ) - }) -}) diff --git a/src/content-linter/tests/unit/lint-report-exclusions.ts b/src/content-linter/tests/unit/lint-report-exclusions.ts index a1b5f084f6dd..7879f14a3987 100644 --- a/src/content-linter/tests/unit/lint-report-exclusions.ts +++ b/src/content-linter/tests/unit/lint-report-exclusions.ts @@ -1,6 +1,15 @@ import { describe, expect, test } from 'vitest' -import { shouldIncludeResult } from '../../lib/helpers/should-include-result' -import { reportingConfig } from '../../style/github-docs' +import { getAllRuleNames } from '../../lib/helpers/rule-utils' + +// Use static config objects for testing to avoid Commander.js conflicts +const globalConfig = { + excludePaths: ['content/contributing/'], +} + +const reportingConfig = { + includeSeverities: ['error'], + includeRules: ['expired-content'], +} interface LintFlaw { severity: string @@ -8,159 +17,168 @@ interface LintFlaw { errorDetail?: string } -describe('lint report exclusions', () => { - // Helper function to simulate the reporting logic from lint-report.ts - function shouldIncludeInReport(flaw: LintFlaw, filePath: string): boolean { - if (!flaw.ruleNames || !Array.isArray(flaw.ruleNames)) { - return false - } - - // First check exclusions using shared function - if (!shouldIncludeResult(flaw, filePath)) { - return false - } +describe('content linter configuration', () => { + describe('global path exclusions (lint-content.ts)', () => { + test('globalConfig.excludePaths is properly configured', () => { + expect(globalConfig.excludePaths).toBeDefined() + expect(Array.isArray(globalConfig.excludePaths)).toBe(true) + expect(globalConfig.excludePaths).toContain('content/contributing/') + }) - // Extract all possible rule names including sub-rules from search-replace - const allRuleNames = [...flaw.ruleNames] - if (flaw.ruleNames.includes('search-replace') && flaw.errorDetail) { - const match = flaw.errorDetail.match(/^([^:]+):/) - if (match) { - allRuleNames.push(match[1]) + test('simulates path exclusion logic', () => { + // Simulate the cleanPaths function logic from lint-content.ts + function isPathExcluded(filePath: string): boolean { + return globalConfig.excludePaths.some((excludePath) => filePath.startsWith(excludePath)) } - } - // Apply reporting-specific filtering - // Check if severity should be included - if (reportingConfig.includeSeverities.includes(flaw.severity)) { - return true - } + // Files in contributing directory should be excluded 
+ expect(isPathExcluded('content/contributing/README.md')).toBe(true) + expect(isPathExcluded('content/contributing/how-to-contribute.md')).toBe(true) + expect(isPathExcluded('content/contributing/collaborating-on-github-docs/file.md')).toBe(true) - // Check if any rule name is in the include list - const hasIncludedRule = allRuleNames.some((ruleName) => - reportingConfig.includeRules.includes(ruleName), - ) - if (hasIncludedRule) { - return true - } - - return false - } + // Files outside contributing directory should not be excluded + expect(isPathExcluded('content/actions/README.md')).toBe(false) + expect(isPathExcluded('content/copilot/getting-started.md')).toBe(false) + expect(isPathExcluded('data/variables/example.yml')).toBe(false) - test('TODOCS placeholder errors are excluded for documentation file', () => { - const flaw = { - severity: 'error', - ruleNames: ['search-replace'], - errorDetail: 'todocs-placeholder: Catch occurrences of TODOCS placeholder.', - } + // Edge case: partial matches should not be excluded + expect(isPathExcluded('content/contributing-guide.md')).toBe(false) + }) + }) - const excludedFilePath = - 'content/contributing/collaborating-on-github-docs/using-the-todocs-placeholder-to-leave-notes.md' - const regularFilePath = 'content/some-other-article.md' + describe('report filtering (lint-report.ts)', () => { + // Helper function that matches the actual logic in lint-report.ts + function shouldIncludeInReport(flaw: LintFlaw): boolean { + const allRuleNames = getAllRuleNames(flaw) - // Should be excluded for the specific documentation file - expect(shouldIncludeInReport(flaw, excludedFilePath)).toBe(false) + // Check if severity should be included + if (reportingConfig.includeSeverities.includes(flaw.severity)) { + return true + } - // Should still be included for other files - expect(shouldIncludeInReport(flaw, regularFilePath)).toBe(true) - }) + // Check if any rule name is in the include list that overrides severity + const hasIncludedRule = allRuleNames.some((ruleName: string) => + reportingConfig.includeRules.includes(ruleName), + ) + if (hasIncludedRule) { + return true + } - test('TODOCS placeholder errors are excluded with different path formats', () => { - const flaw = { - severity: 'error', - ruleNames: ['search-replace'], - errorDetail: 'todocs-placeholder: Catch occurrences of TODOCS placeholder.', + return false } - // Test various path formats that should match - const pathVariants = [ - 'content/contributing/collaborating-on-github-docs/using-the-todocs-placeholder-to-leave-notes.md', - './content/contributing/collaborating-on-github-docs/using-the-todocs-placeholder-to-leave-notes.md', - '/absolute/path/content/contributing/collaborating-on-github-docs/using-the-todocs-placeholder-to-leave-notes.md', - ] - - pathVariants.forEach((path) => { - expect(shouldIncludeInReport(flaw, path)).toBe(false) + test('reportingConfig is properly structured', () => { + expect(reportingConfig.includeSeverities).toBeDefined() + expect(Array.isArray(reportingConfig.includeSeverities)).toBe(true) + expect(reportingConfig.includeRules).toBeDefined() + expect(Array.isArray(reportingConfig.includeRules)).toBe(true) }) - }) - - test('other rules are not affected by TODOCS file exclusions', () => { - const flaw = { - severity: 'error', - ruleNames: ['docs-domain'], - } - const excludedFilePath = - 'content/contributing/collaborating-on-github-docs/using-the-todocs-placeholder-to-leave-notes.md' + test('includes errors by default (severity-based filtering)', () => { + 
const errorFlaw = { + severity: 'error', + ruleNames: ['some-rule'], + } - // Should still be included for other rules even in the excluded file - expect(shouldIncludeInReport(flaw, excludedFilePath)).toBe(true) - }) + expect(shouldIncludeInReport(errorFlaw)).toBe(true) + }) - test('multiple rule names with mixed exclusions', () => { - const flaw = { - severity: 'error', - ruleNames: ['search-replace', 'docs-domain'], - errorDetail: 'todocs-placeholder: Catch occurrences of TODOCS placeholder.', - } + test('excludes warnings by default (severity-based filtering)', () => { + const warningFlaw = { + severity: 'warning', + ruleNames: ['some-rule'], + } - const excludedFilePath = - 'content/contributing/collaborating-on-github-docs/using-the-todocs-placeholder-to-leave-notes.md' + expect(shouldIncludeInReport(warningFlaw)).toBe(false) + }) - // Should be excluded because one of the rules (todocs-placeholder) is excluded for this file - expect(shouldIncludeInReport(flaw, excludedFilePath)).toBe(false) - }) + test('includes specific rules regardless of severity', () => { + const expiredContentWarning = { + severity: 'warning', + ruleNames: ['expired-content'], + } - test('exclusion configuration exists and is properly structured', () => { - expect(reportingConfig.excludeFilesFromRules).toBeDefined() - expect(reportingConfig.excludeFilesFromRules['todocs-placeholder']).toBeDefined() - expect(Array.isArray(reportingConfig.excludeFilesFromRules['todocs-placeholder'])).toBe(true) - expect( - reportingConfig.excludeFilesFromRules['todocs-placeholder'].includes( - 'content/contributing/collaborating-on-github-docs/using-the-todocs-placeholder-to-leave-notes.md', - ), - ).toBe(true) - }) + // Should be included because expired-content is in includeRules + expect(shouldIncludeInReport(expiredContentWarning)).toBe(true) + }) - describe('shared shouldIncludeResult function', () => { - test('excludes TODOCS placeholder errors for specific file', () => { - const flaw = { - severity: 'error', + test('handles search-replace sub-rules correctly', () => { + const searchReplaceFlaw = { + severity: 'warning', ruleNames: ['search-replace'], errorDetail: 'todocs-placeholder: Catch occurrences of TODOCS placeholder.', } - const excludedFilePath = - 'content/contributing/collaborating-on-github-docs/using-the-todocs-placeholder-to-leave-notes.md' - const regularFilePath = 'content/some-other-article.md' + // Should extract 'todocs-placeholder' as a rule name and check against includeRules + // This will depend on your actual includeRules configuration + const result = shouldIncludeInReport(searchReplaceFlaw) + expect(typeof result).toBe('boolean') + }) - // Should be excluded for the specific documentation file - expect(shouldIncludeResult(flaw, excludedFilePath)).toBe(false) + test('handles missing errorDetail gracefully for search-replace', () => { + const searchReplaceFlawNoDetail = { + severity: 'warning', + ruleNames: ['search-replace'], + // no errorDetail + } - // Should be included for other files - expect(shouldIncludeResult(flaw, regularFilePath)).toBe(true) + // Should not throw an error and return false (warning not in includeSeverities) + expect(shouldIncludeInReport(searchReplaceFlawNoDetail)).toBe(false) }) - test('includes flaws by default when no exclusions apply', () => { - const flaw = { + test('rule extraction logic works correctly', () => { + const regularFlaw = { severity: 'error', - ruleNames: ['some-other-rule'], + ruleNames: ['docs-domain'], } + 
expect(getAllRuleNames(regularFlaw)).toEqual(['docs-domain']) - const filePath = 'content/some-article.md' - - expect(shouldIncludeResult(flaw, filePath)).toBe(true) - }) - - test('handles missing errorDetail gracefully', () => { - const flaw = { + const searchReplaceFlaw = { severity: 'error', ruleNames: ['search-replace'], - // no errorDetail + errorDetail: 'todocs-placeholder: Catch occurrences of TODOCS placeholder.', } + expect(getAllRuleNames(searchReplaceFlaw)).toEqual(['search-replace', 'todocs-placeholder']) + + const multipleRulesFlaw = { + severity: 'error', + ruleNames: ['search-replace', 'another-rule'], + errorDetail: 'docs-domain: Some error message.', + } + expect(getAllRuleNames(multipleRulesFlaw)).toEqual([ + 'search-replace', + 'another-rule', + 'docs-domain', + ]) + }) + }) + + describe('integration between systems', () => { + test('path exclusions happen before report filtering', () => { + // This is a conceptual test - in practice, files excluded by globalConfig.excludePaths + // never reach the reporting stage, so they never get filtered by reportingConfig + + // Files in excluded paths should never be linted at all + const isExcluded = (path: string) => + globalConfig.excludePaths.some((excludePath) => path.startsWith(excludePath)) + + expect(isExcluded('content/contributing/some-file.md')).toBe(true) + + // If a file is excluded at the path level, it doesn't matter what the reportingConfig says + // because the file will never be processed for linting in the first place + }) + + test('configurations are independent', () => { + // globalConfig handles what gets linted + expect(globalConfig.excludePaths).toBeDefined() - const filePath = 'content/some-article.md' + // reportingConfig handles what gets reported + expect(reportingConfig.includeSeverities).toBeDefined() + expect(reportingConfig.includeRules).toBeDefined() - expect(shouldIncludeResult(flaw, filePath)).toBe(true) + // They should not overlap or depend on each other + expect(globalConfig).not.toHaveProperty('includeSeverities') + expect(reportingConfig).not.toHaveProperty('excludePaths') }) }) }) diff --git a/src/content-linter/tests/unit/list-first-word-captitalization.ts b/src/content-linter/tests/unit/list-first-word-captitalization.ts deleted file mode 100644 index b3804634ed32..000000000000 --- a/src/content-linter/tests/unit/list-first-word-captitalization.ts +++ /dev/null @@ -1,100 +0,0 @@ -import { describe, expect, test } from 'vitest' - -import { runRule } from '../../lib/init-test' -import { listFirstWordCapitalization } from '../../lib/linting-rules/list-first-word-capitalization' - -describe(listFirstWordCapitalization.names.join(' - '), () => { - test('ensure multi-level lists catch incorrect capitalization errors', async () => { - const markdown = [ - '- List item', - ' - `list` item', - ' - list item', - '1. number item', - '1. 
Number 2 item', - '- `X` item', - '- always start `with code`', - '- remember to go to [foo](/bar)', - ].join('\n') - const result = await runRule(listFirstWordCapitalization, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(4) - expect(errors[0].errorRange).toEqual([7, 4]) - expect(errors[0].lineNumber).toBe(3) - expect(errors[0].fixInfo).toEqual({ - deleteCount: 1, - editColumn: 7, - insertText: 'L', - lineNumber: 3, - }) - expect(errors[1].errorRange).toEqual([4, 6]) - expect(errors[1].lineNumber).toBe(4) - expect(errors[1].fixInfo).toEqual({ - deleteCount: 1, - editColumn: 4, - insertText: 'N', - lineNumber: 4, - }) - }) - - test('list items that start with special characters pass', async () => { - const markdown = [ - '- `X-GitHub-Event`: Name of the event that triggered the delivery.', - '- **October 1, 2018**: GitHub discontinued allowing users to install services. We removed GitHub Services from the GitHub.com user interface.', - '- **boldness** is a cool thing', - '- Always start `with code`', - '- Remember to go to [foo](/bar)', - '- **{% data variables.product.prodname_oauth_apps %}**: Request either the `repo_hook` and/or `org_hook` scope(s) to manage the relevant events on behalf of users.', - '- "[AUTOTITLE](/billing/managing-billing-for-github-marketplace-apps)"', - "- '[AUTOTITLE](/billing/managing-billing-for-github-marketplace-apps)'", - '- [Viewing your sponsors and sponsorships](/sponsors/receiving-sponsorships-through-github-sponsors/viewing-your-sponsors-and-sponsorships)', - '- macOS', - '- [{% data variables.actions.test %}](/apple/test)', - '- {{ foo }} for example', - '- {% data variables.product.prodname_dotcom_the_website %} Services Continuity and Incident Management Plan', - '- {% data variables.product.prodname_dotcom_the_website %} Services Continuity and Incident Management Plan', - '- x64', - '- @mention your friends', - '- @hash tags', - '- 05:00', - '- "keyword" starts with a quotation sign', - ].join('\n') - const result = await runRule(listFirstWordCapitalization, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - - test("list items that aren't simple lists", async () => { - const markdown = ['- > Blockquote in a list', '- ### Heading in a list'].join('\n') - const result = await runRule(listFirstWordCapitalization, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - - test('works on markdown that has no lists at all, actually', async () => { - const markdown = '- \n' - const result = await runRule(listFirstWordCapitalization, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - - test('skips site-policy directory files', async () => { - const markdown = [ - '- list item should normally be flagged', - '- another uncapitalized item', - '- a. this is alphabetic numbering', - '- b. 
this is also alphabetic numbering', - ].join('\n') - - // Test normal behavior (should flag errors) - const normalResult = await runRule(listFirstWordCapitalization, { strings: { markdown } }) - expect(normalResult.markdown.length).toBeGreaterThan(0) - - // Test site-policy exclusion (should skip all errors) - const sitePolicyResult = await runRule(listFirstWordCapitalization, { - strings: { - 'content/site-policy/some-policy.md': markdown, - }, - }) - expect(sitePolicyResult['content/site-policy/some-policy.md'].length).toBe(0) - }) -}) diff --git a/src/content-linter/tests/unit/multiple-emphasis-patterns.ts b/src/content-linter/tests/unit/multiple-emphasis-patterns.ts deleted file mode 100644 index b8d642339f08..000000000000 --- a/src/content-linter/tests/unit/multiple-emphasis-patterns.ts +++ /dev/null @@ -1,231 +0,0 @@ -import { describe, expect, test } from 'vitest' - -import { runRule } from '../../lib/init-test' -import { multipleEmphasisPatterns } from '../../lib/linting-rules/multiple-emphasis-patterns' - -describe(multipleEmphasisPatterns.names.join(' - '), () => { - test('Single emphasis types pass', async () => { - const markdown = [ - 'This is **bold text** that is fine.', - 'This is *italic text* that is okay.', - 'This is `code text` that is acceptable.', - 'This is a SCREAMING_CASE_WORD that is allowed.', - 'This is __bold with underscores__ that works.', - 'This is _italic with underscores_ that works.', - ].join('\n') - const result = await runRule(multipleEmphasisPatterns, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - - test('Multiple emphasis types in same string are flagged', async () => { - const markdown = [ - 'This is **bold and `code`** in the same string.', - 'This is ***bold and italic*** combined.', - 'This is `code with **bold**` inside.', - 'This is ___bold and italic___ with underscores.', - ].join('\n') - const result = await runRule(multipleEmphasisPatterns, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(4) - expect(errors[0].lineNumber).toBe(1) - expect(errors[1].lineNumber).toBe(2) - expect(errors[2].lineNumber).toBe(3) - expect(errors[3].lineNumber).toBe(4) - }) - - test('Nested emphasis patterns are flagged', async () => { - const markdown = [ - 'This is **bold with `code` inside**.', - 'This is `code with **bold** nested`.', - ].join('\n') - const result = await runRule(multipleEmphasisPatterns, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(2) - expect(errors[0].lineNumber).toBe(1) - expect(errors[1].lineNumber).toBe(2) - }) - - test('Separate emphasis patterns on same line pass', async () => { - const markdown = [ - 'This is **bold** and this is *italic* but separate.', - 'Here is `code` and here is UPPERCASE but apart.', - 'First **bold**, then some text, then *italic*.', - ].join('\n') - const result = await runRule(multipleEmphasisPatterns, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - - test('Code blocks are ignored', async () => { - const markdown = [ - '```javascript', - 'const text = "**bold** and `code` mixed";', - 'const more = "***triple emphasis***";', - '```', - '', - ' // Indented code block', - ' const example = "**bold** with `code`";', - ].join('\n') - const result = await runRule(multipleEmphasisPatterns, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - - test('Inline code prevents other emphasis 
detection', async () => { - const markdown = [ - 'Use `**bold**` to make text bold.', - 'The `*italic*` syntax creates italic text.', - 'Type `__bold__` for bold formatting.', - ].join('\n') - const result = await runRule(multipleEmphasisPatterns, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(2) // Code with bold inside is detected - }) - - test('Complex mixed emphasis patterns', async () => { - const markdown = [ - 'This is **bold and `code`** mixed.', - 'Here is ***bold italic*** combined.', - 'Text with __bold and `code`__ together.', - ].join('\n') - const result = await runRule(multipleEmphasisPatterns, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(3) - expect(errors[0].lineNumber).toBe(1) - expect(errors[1].lineNumber).toBe(2) - expect(errors[2].lineNumber).toBe(3) - }) - - test('Edge case: adjacent emphasis without overlap passes', async () => { - const markdown = [ - 'This is **bold**_italic_ adjacent but not overlapping.', - 'Here is `code`**bold** touching but separate.', - 'Text with UPPERCASE**bold** next to each other.', - ].join('\n') - const result = await runRule(multipleEmphasisPatterns, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - - test('Triple asterisk bold+italic is flagged', async () => { - const markdown = [ - 'This is ***bold and italic*** combined.', - 'Here is ___bold and italic___ with underscores.', - ].join('\n') - const result = await runRule(multipleEmphasisPatterns, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(2) - expect(errors[0].lineNumber).toBe(1) - expect(errors[1].lineNumber).toBe(2) - }) - - test('Mixed adjacent emphasis types are allowed', async () => { - const markdown = [ - 'This has **bold** and normal text.', - 'This has **bold** and other text.', - 'The API key and **configuration** work.', - 'The API key and **setup** process.', - ].join('\n') - const result = await runRule(multipleEmphasisPatterns, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - - test('Autogenerated files are skipped', async () => { - const frontmatter = ['---', 'title: API Reference', 'autogenerated: rest', '---'].join('\n') - const markdown = [ - 'This is **bold and `code`** mixed.', - 'This is ***bold italic*** combined.', - ].join('\n') - const result = await runRule(multipleEmphasisPatterns, { - strings: { - markdown: frontmatter + '\n' + markdown, - }, - }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - - test('Links with emphasis are handled correctly', async () => { - const markdown = [ - 'See [**bold link**](http://example.com) for details.', - 'Check [*italic link*](http://example.com) here.', - 'Visit [`code link`](http://example.com) for info.', - 'Go to [**bold and `code`**](http://example.com) - should be flagged.', - ].join('\n') - const result = await runRule(multipleEmphasisPatterns, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(1) - expect(errors[0].lineNumber).toBe(4) - }) - - test('Headers with emphasis are checked', async () => { - const markdown = [ - '# This is **bold** header', - '## This is *italic* header', - '### This is **bold and `code`** header', - '#### This is normal header', - ].join('\n') - const result = await runRule(multipleEmphasisPatterns, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(1) - 
expect(errors[0].lineNumber).toBe(3) - }) - - test('List items with emphasis are checked', async () => { - const markdown = [ - '- This is **bold** item', - '- This is *italic* item', - '- This is **bold and `code`** item', - '1. This is numbered **bold** item', - '2. This is numbered ***bold italic*** item', - ].join('\n') - const result = await runRule(multipleEmphasisPatterns, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(2) - expect(errors[0].lineNumber).toBe(3) - expect(errors[1].lineNumber).toBe(5) - }) - - test('Escaped emphasis characters are ignored', async () => { - const markdown = [ - 'This has \\*\\*escaped\\*\\* asterisks.', - 'This has \\`escaped\\` backticks.', - 'This has \\_escaped\\_ underscores.', - ].join('\n') - const result = await runRule(multipleEmphasisPatterns, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - - test('Rule has correct metadata', () => { - expect(multipleEmphasisPatterns.names).toEqual(['GHD050', 'multiple-emphasis-patterns']) - expect(multipleEmphasisPatterns.description).toContain('emphasis') - expect(multipleEmphasisPatterns.tags).toContain('formatting') - expect(multipleEmphasisPatterns.tags).toContain('emphasis') - expect(multipleEmphasisPatterns.tags).toContain('style') - expect(multipleEmphasisPatterns.severity).toBe('warning') - }) - - test('Empty content does not cause errors', async () => { - const markdown = ['', ' ', '\t'].join('\n') - const result = await runRule(multipleEmphasisPatterns, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - - test('Single character emphasis is handled', async () => { - const markdown = [ - 'This is **a** single letter.', - 'This is *b* single letter.', - 'This is `c` single letter.', - 'This is **a** and *b* separate.', - 'This is **`x`** nested single chars.', - ].join('\n') - const result = await runRule(multipleEmphasisPatterns, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(1) // Nested single chars still flagged - expect(errors[0].lineNumber).toBe(5) - }) -}) diff --git a/src/content-linter/tests/unit/note-warning-formatting.ts b/src/content-linter/tests/unit/note-warning-formatting.ts deleted file mode 100644 index c2451553e0ca..000000000000 --- a/src/content-linter/tests/unit/note-warning-formatting.ts +++ /dev/null @@ -1,324 +0,0 @@ -import { describe, expect, test } from 'vitest' - -import { runRule } from '../../lib/init-test' -import { noteWarningFormatting } from '../../lib/linting-rules/note-warning-formatting' - -describe(noteWarningFormatting.names.join(' - '), () => { - test('Correctly formatted legacy notes pass', async () => { - const markdown = [ - 'This is a paragraph.', - '', - '{% note %}', - '', - '**Note:** This is a properly formatted note.', - '', - '{% endnote %}', - '', - 'Another paragraph follows.', - ].join('\n') - const result = await runRule(noteWarningFormatting, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - - test('Correctly formatted new-style callouts pass', async () => { - const markdown = [ - 'This is a paragraph.', - '', - '> [!NOTE]', - '> This is a properly formatted callout note.', - '', - 'Another paragraph follows.', - ].join('\n') - const result = await runRule(noteWarningFormatting, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - - test('Missing line break before legacy note 
is flagged', async () => { - const markdown = [ - 'This is a paragraph.', - '{% note %}', - '**Note:** This note needs a line break before it.', - '{% endnote %}', - ].join('\n') - const result = await runRule(noteWarningFormatting, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(1) - expect(errors[0].lineNumber).toBe(2) - if (errors[0].errorDetail) { - expect(errors[0].errorDetail).toContain('Add a blank line before {% note %}') - } - }) - - test('Missing line break after legacy note is flagged', async () => { - const markdown = [ - '', - '{% note %}', - '**Note:** This note needs a line break after it.', - '{% endnote %}', - 'This paragraph is too close.', - ].join('\n') - const result = await runRule(noteWarningFormatting, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(1) - expect(errors[0].lineNumber).toBe(4) - if (errors[0].errorDetail) { - expect(errors[0].errorDetail).toContain('Add a blank line after {% endnote %}') - } - }) - - test('Missing line break before new-style callout is flagged', async () => { - const markdown = [ - 'This is a paragraph.', - '> [!WARNING]', - '> This warning needs a line break before it.', - ].join('\n') - const result = await runRule(noteWarningFormatting, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(1) - expect(errors[0].lineNumber).toBe(2) - if (errors[0].errorDetail) { - expect(errors[0].errorDetail).toContain('Add a blank line before > [!WARNING]') - } - }) - - test('Missing line break after new-style callout is flagged', async () => { - const markdown = [ - '', - '> [!DANGER]', - '> This danger callout needs a line break after it.', - 'This paragraph is too close.', - ].join('\n') - const result = await runRule(noteWarningFormatting, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(1) - expect(errors[0].lineNumber).toBe(4) - if (errors[0].errorDetail) { - expect(errors[0].errorDetail).toContain('Add a blank line after > [!DANGER]') - } - }) - - test('Too many bullet points in legacy note is flagged', async () => { - const markdown = [ - '', - '{% note %}', - '', - '**Note:** This note has too many bullets:', - '', - '* First bullet point', - '* Second bullet point', - '* Third bullet point (this should be flagged)', - '', - '{% endnote %}', - '', - ].join('\n') - const result = await runRule(noteWarningFormatting, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(1) - expect(errors[0].lineNumber).toBe(8) - if (errors[0].errorDetail) { - expect(errors[0].errorDetail).toContain('Do not include more than 2 bullet points') - } - }) - - test('Too many bullet points in new-style callout is flagged', async () => { - const markdown = [ - '', - '> [!NOTE]', - '> This callout has too many bullets:', - '>', - '> * First bullet point', - '> * Second bullet point', - '> * Third bullet point (this should be flagged)', - '', - ].join('\n') - const result = await runRule(noteWarningFormatting, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(1) - expect(errors[0].lineNumber).toBe(7) - if (errors[0].errorDetail) { - expect(errors[0].errorDetail).toContain('Do not include more than 2 bullet points') - } - }) - - test('Missing prefix in legacy note is flagged and fixable', async () => { - const markdown = [ - '', - '{% note %}', - '', - 'This note is missing the proper prefix.', - '', - '{% endnote %}', - '', - ].join('\n') - 
const result = await runRule(noteWarningFormatting, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(1) - expect(errors[0].lineNumber).toBe(4) - if (errors[0].errorDetail) { - expect(errors[0].errorDetail).toContain('should start with **Note:**') - } - expect(errors[0].fixInfo).toBeDefined() - if (errors[0].fixInfo) { - expect(errors[0].fixInfo?.insertText).toBe('**Note:** ') - } - }) - - test('Orphaned note prefix outside callout is flagged', async () => { - const markdown = [ - 'This is a regular paragraph.', - '', - '**Note:** This note prefix should be inside a callout block.', - '', - 'Another paragraph.', - ].join('\n') - const result = await runRule(noteWarningFormatting, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(1) - expect(errors[0].lineNumber).toBe(3) - if (errors[0].errorDetail) { - expect(errors[0].errorDetail).toContain('should be inside a callout block') - } - }) - - test('Orphaned warning prefix outside callout is flagged', async () => { - const markdown = [ - 'Regular content here.', - '', - '**Warning:** This warning should be in a proper callout.', - '', - 'More content.', - ].join('\n') - const result = await runRule(noteWarningFormatting, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(1) - expect(errors[0].lineNumber).toBe(3) - if (errors[0].errorDetail) { - expect(errors[0].errorDetail).toContain('Warning prefix should be inside a callout block') - } - }) - - test('Feedback forms in legacy notes are not flagged for missing prefix', async () => { - const markdown = [ - '', - '{% note %}', - '', - 'Did you successfully complete this task?', - '', - 'Yes', - '', - '{% endnote %}', - '', - ].join('\n') - const result = await runRule(noteWarningFormatting, { strings: { markdown } }) - const errors = result.markdown - // Should only flag missing line breaks, not missing prefix for feedback forms - expect(errors.length).toBe(0) - }) - - test('Multiple formatting issues are all caught', async () => { - const markdown = [ - 'Paragraph without break.', - '{% note %}', - 'Missing prefix and has bullets:', - '* First bullet', - '* Second bullet', - '* Third bullet (too many)', - '{% endnote %}', - 'No break after note.', - '', - '**Danger:** Orphaned danger prefix.', - ].join('\n') - const result = await runRule(noteWarningFormatting, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(5) - - // Check we get all expected error types by line numbers and error count - const errorLines = errors.map((e) => e.lineNumber).sort((a, b) => a - b) - expect(errorLines).toEqual([2, 3, 6, 7, 10]) - - // Verify we have the expected number of different types of errors: - // 1. Missing line break before note (line 2) - // 2. Missing prefix in note content (line 3) - // 3. Too many bullet points (line 6) - // 4. Missing line break after note (line 7) - // 5. 
Orphaned danger prefix (line 10) - expect(errors.length).toBe(5) - }) - - test('Mixed legacy and new-style callouts work correctly', async () => { - const markdown = [ - 'Some content.', - '', - '{% note %}', - '**Note:** This is a legacy note.', - '{% endnote %}', - '', - 'More content.', - '', - '> [!WARNING]', - '> This is a new-style warning.', - '', - 'Final content.', - ].join('\n') - const result = await runRule(noteWarningFormatting, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - - test('Different callout types are handled correctly', async () => { - const markdown = [ - '', - '> [!NOTE]', - '> This is a note callout.', - '', - '> [!WARNING]', - '> This is a warning callout.', - '', - '> [!DANGER]', - '> This is a danger callout.', - '', - ].join('\n') - const result = await runRule(noteWarningFormatting, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - - test('Autogenerated files are skipped', async () => { - const frontmatter = ['---', 'title: API Reference', 'autogenerated: rest', '---'].join('\n') - const markdown = [ - 'Content.', - '{% note %}', - 'Badly formatted note.', - '{% endnote %}', - 'More content.', - ].join('\n') - const result = await runRule(noteWarningFormatting, { - strings: { - markdown: frontmatter + '\n' + markdown, - }, - }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - - test('Empty notes and callouts do not cause errors', async () => { - const markdown = ['', '{% note %}', '', '{% endnote %}', '', '> [!NOTE]', '>', ''].join('\n') - const result = await runRule(noteWarningFormatting, { strings: { markdown } }) - const errors = result.markdown - expect(errors.length).toBe(0) - }) - - test('Warning severity is set correctly', () => { - expect(noteWarningFormatting.severity).toBe('warning') - }) - - test('Rule has correct metadata', () => { - expect(noteWarningFormatting.names).toEqual(['GHD049', 'note-warning-formatting']) - expect(noteWarningFormatting.description).toContain('style guide') - expect(noteWarningFormatting.tags).toContain('callouts') - expect(noteWarningFormatting.tags).toContain('notes') - expect(noteWarningFormatting.tags).toContain('warnings') - }) -}) diff --git a/src/content-linter/tests/unit/rule-filtering.ts b/src/content-linter/tests/unit/rule-filtering.ts index 606ee95e7d19..22cebe280f97 100644 --- a/src/content-linter/tests/unit/rule-filtering.ts +++ b/src/content-linter/tests/unit/rule-filtering.ts @@ -19,8 +19,8 @@ vi.mock('../../lib/helpers/get-rules', () => ({ description: 'Headers must have content below them', }, { - names: ['GHD030', 'code-fence-line-length'], - description: 'Code fence content should not exceed line length limit', + names: ['GHD001', 'link-punctuation'], + description: 'Internal link titles must not contain punctuation', }, ], allConfig: {}, @@ -41,12 +41,12 @@ describe('shouldIncludeRule', () => { test('includes custom rule by short code', () => { expect(shouldIncludeRule('header-content-requirement', ['GHD053'])).toBe(true) - expect(shouldIncludeRule('code-fence-line-length', ['GHD030'])).toBe(true) + expect(shouldIncludeRule('link-punctuation', ['GHD001'])).toBe(true) }) test('excludes rule not in list', () => { expect(shouldIncludeRule('heading-increment', ['MD002'])).toBe(false) - expect(shouldIncludeRule('header-content-requirement', ['GHD030'])).toBe(false) + expect(shouldIncludeRule('header-content-requirement', ['GHD001'])).toBe(false) }) test('handles multiple rules', () => 
{ diff --git a/src/content-render/scripts/render-content-markdown.ts b/src/content-render/scripts/render-content-markdown.ts index a7d3d8cb296e..8e976014d324 100755 --- a/src/content-render/scripts/render-content-markdown.ts +++ b/src/content-render/scripts/render-content-markdown.ts @@ -1,6 +1,5 @@ import fs from 'fs' import path from 'path' -import { execSync } from 'child_process' import { renderLiquid } from '@/content-render/liquid/index' import shortVersionsMiddleware from '@/versions/middleware/short-versions' @@ -83,7 +82,4 @@ for (const page of pages) { console.log(err) } } -console.log('---\nWriting files done. Now linting content...\n') -// Content linter to remove any blank lines -execSync('npm run lint-content -- --paths content-copilot --rules no-multiple-blanks --fix') console.log(`Finished - content is available in: ${contentCopilotDir}`) From 8139ef18e5e989f072b93515ff2b172774a9a156 Mon Sep 17 00:00:00 2001 From: Kevin Heis Date: Thu, 30 Oct 2025 09:06:16 -0700 Subject: [PATCH 2/9] Fix bash command substitution bug in OpenAPI sync workflow (#58271) --- .github/workflows/sync-openapi.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/sync-openapi.yml b/.github/workflows/sync-openapi.yml index 0c6e5d838de5..941df7895c28 100644 --- a/.github/workflows/sync-openapi.yml +++ b/.github/workflows/sync-openapi.yml @@ -85,7 +85,7 @@ jobs: # If nothing to commit, exit now. It's fine. No orphans. changes=$(git diff --name-only | wc -l) if [[ $changes -eq 0 ]]; then - echo "There are no changes to commit after running `npm run sync-rest` Exiting..." + echo "There are no changes to commit after running 'npm run sync-rest'. Exiting..." exit 0 fi From 4f46d28722fbd07e2a6094d2960925f2734c947a Mon Sep 17 00:00:00 2001 From: Kevin Heis Date: Thu, 30 Oct 2025 09:13:18 -0700 Subject: [PATCH 3/9] Fix eslint no-undef violations (#58233) --- eslint.config.ts | 11 ++++++++++- src/app/client-layout.tsx | 3 ++- src/app/components/AppRouterLanguagesContext.tsx | 2 +- src/app/layout.tsx | 1 + src/app/lib/locale-context.tsx | 1 + src/app/types.ts | 1 + src/automated-pipelines/components/AutomatedPage.tsx | 1 + .../components/AutomatedPageContext.tsx | 1 + src/events/components/Survey.tsx | 2 +- src/frame/components/DefaultLayout.tsx | 1 + src/frame/components/article/ArticleTitle.tsx | 2 ++ src/frame/components/article/HeadingLink.tsx | 1 + src/frame/components/context/ArticleContext.tsx | 1 + src/frame/components/page-header/Header.tsx | 1 + .../components/page-header/HeaderSearchAndWidgets.tsx | 1 + src/frame/components/ui/Lead/Lead.tsx | 1 + .../components/ui/MarkdownContent/MarkdownContent.tsx | 1 + .../ui/MarkdownContent/UnrenderedMarkdownContent.tsx | 1 + src/frame/lib/fetch-utils.ts | 1 + src/graphql/components/GraphqlItem.tsx | 1 + src/graphql/components/GraphqlPage.tsx | 1 + src/landings/components/LandingHero.tsx | 2 +- src/landings/components/LandingSection.tsx | 1 + .../shared/LandingArticleGridWithFilter.tsx | 2 +- src/rest/components/RestCodeSamples.tsx | 2 +- src/search/components/input/AskAIResults.tsx | 2 +- src/search/components/input/SearchBarButton.tsx | 1 + .../components/input/SearchOverlayContainer.tsx | 1 + src/search/components/results/SearchResults.tsx | 2 +- src/tools/components/Fields.tsx | 2 +- 30 files changed, 41 insertions(+), 10 deletions(-) diff --git a/eslint.config.ts b/eslint.config.ts index 3668a5fde641..323197067e6d 100644 --- a/eslint.config.ts +++ b/eslint.config.ts @@ -25,6 +25,16 @@ export default [ ...globals.node, 
...globals.commonjs, ...globals.es2020, + // Fetch API types for TypeScript + RequestInit: 'readonly', + RequestInfo: 'readonly', + HeadersInit: 'readonly', + JSX: 'readonly', + // Node.js types for TypeScript + BufferEncoding: 'readonly', + NodeJS: 'readonly', + // cheerio namespace for TypeScript + cheerio: 'readonly', }, parserOptions: { requireConfigFile: false, @@ -87,7 +97,6 @@ export default [ // Disabled rules to review 'github/no-then': 'off', // 30+ '@typescript-eslint/ban-ts-comment': 'off', // 50+ - 'no-undef': 'off', // 50+ 'no-shadow': 'off', // 150+ 'prefer-template': 'off', // 150+ 'github/array-foreach': 'off', // 250+ diff --git a/src/app/client-layout.tsx b/src/app/client-layout.tsx index 4cb24a664696..aa657c4b62e2 100644 --- a/src/app/client-layout.tsx +++ b/src/app/client-layout.tsx @@ -1,7 +1,8 @@ 'use client' +import React, { useEffect, useMemo, useState } from 'react' +import type { JSX } from 'react' import { ThemeProvider } from '@primer/react' -import { useEffect, useMemo, useState } from 'react' import { LocaleProvider } from '@/app/lib/locale-context' import { useDetectLocale } from '@/app/lib/use-detect-locale' diff --git a/src/app/components/AppRouterLanguagesContext.tsx b/src/app/components/AppRouterLanguagesContext.tsx index 96aa54097d8d..e78a3e6c1e78 100644 --- a/src/app/components/AppRouterLanguagesContext.tsx +++ b/src/app/components/AppRouterLanguagesContext.tsx @@ -1,6 +1,6 @@ 'use client' -import { createContext, useContext } from 'react' +import React, { createContext, useContext } from 'react' import { languages, type LanguageCode } from '@/languages/lib/languages' export type AppRouterLanguageItem = { diff --git a/src/app/layout.tsx b/src/app/layout.tsx index 29fe4be37928..a14c6bd1aaa6 100644 --- a/src/app/layout.tsx +++ b/src/app/layout.tsx @@ -1,6 +1,7 @@ import '@/frame/stylesheets/index.scss' import type { Metadata, Viewport } from 'next' import { ReactNode } from 'react' +import type { JSX } from 'react' export const metadata: Metadata = { title: { diff --git a/src/app/lib/locale-context.tsx b/src/app/lib/locale-context.tsx index bad82ed43256..e06327f3c644 100644 --- a/src/app/lib/locale-context.tsx +++ b/src/app/lib/locale-context.tsx @@ -1,6 +1,7 @@ 'use client' import { createContext, useContext, ReactNode, useMemo } from 'react' +import type { JSX } from 'react' import { languages, languageKeys, type LanguageCode } from '@/languages/lib/languages' interface LocaleContextType { diff --git a/src/app/types.ts b/src/app/types.ts index bfa7ce13feac..8b7be385f009 100644 --- a/src/app/types.ts +++ b/src/app/types.ts @@ -2,6 +2,7 @@ * Enhanced type definitions for the app router with strict validation */ +import React from 'react' import type { LanguageCode } from '@/languages/lib/languages' // Core theme types with strict validation diff --git a/src/automated-pipelines/components/AutomatedPage.tsx b/src/automated-pipelines/components/AutomatedPage.tsx index b5635a6bc3fd..be7bcab209ea 100644 --- a/src/automated-pipelines/components/AutomatedPage.tsx +++ b/src/automated-pipelines/components/AutomatedPage.tsx @@ -1,3 +1,4 @@ +import React from 'react' import { DefaultLayout } from '@/frame/components/DefaultLayout' import { ArticleTitle } from '@/frame/components/article/ArticleTitle' import { MarkdownContent } from '@/frame/components/ui/MarkdownContent' diff --git a/src/automated-pipelines/components/AutomatedPageContext.tsx b/src/automated-pipelines/components/AutomatedPageContext.tsx index 39decb834628..50efe899b9f2 100644 --- 
a/src/automated-pipelines/components/AutomatedPageContext.tsx +++ b/src/automated-pipelines/components/AutomatedPageContext.tsx @@ -1,4 +1,5 @@ import { createContext, useContext } from 'react' +import type { JSX } from 'react' import type { MiniTocItem } from '@/frame/components/context/ArticleContext' export type AutomatedPageContextT = { diff --git a/src/events/components/Survey.tsx b/src/events/components/Survey.tsx index 9e5154ebe3bc..9beac8fe77fb 100644 --- a/src/events/components/Survey.tsx +++ b/src/events/components/Survey.tsx @@ -1,4 +1,4 @@ -import { useState, useRef, useEffect } from 'react' +import React, { useState, useRef, useEffect } from 'react' import cx from 'classnames' import { useRouter } from 'next/router' import { ThumbsdownIcon, ThumbsupIcon } from '@primer/octicons-react' diff --git a/src/frame/components/DefaultLayout.tsx b/src/frame/components/DefaultLayout.tsx index cf96acfe6490..411ba1a7c436 100644 --- a/src/frame/components/DefaultLayout.tsx +++ b/src/frame/components/DefaultLayout.tsx @@ -1,3 +1,4 @@ +import React from 'react' import Head from 'next/head' import { useRouter } from 'next/router' diff --git a/src/frame/components/article/ArticleTitle.tsx b/src/frame/components/article/ArticleTitle.tsx index a8c3a0185c27..e9ebc3ef6864 100644 --- a/src/frame/components/article/ArticleTitle.tsx +++ b/src/frame/components/article/ArticleTitle.tsx @@ -1,3 +1,5 @@ +import React from 'react' + type Props = { children: React.ReactNode } diff --git a/src/frame/components/article/HeadingLink.tsx b/src/frame/components/article/HeadingLink.tsx index 7e4c4e56e0c3..1b7280159e40 100644 --- a/src/frame/components/article/HeadingLink.tsx +++ b/src/frame/components/article/HeadingLink.tsx @@ -1,4 +1,5 @@ import GithubSlugger from 'github-slugger' +import type { JSX } from 'react' const slugger = new GithubSlugger() diff --git a/src/frame/components/context/ArticleContext.tsx b/src/frame/components/context/ArticleContext.tsx index 3f35eebd5430..222fcebd0bd2 100644 --- a/src/frame/components/context/ArticleContext.tsx +++ b/src/frame/components/context/ArticleContext.tsx @@ -1,5 +1,6 @@ import { SupportPortalVaIframeProps } from '@/frame/components/article/SupportPortalVaIframe' import { createContext, useContext } from 'react' +import type { JSX } from 'react' import type { JourneyContext } from '@/journeys/lib/journey-path-resolver' export type LearningTrack = { diff --git a/src/frame/components/page-header/Header.tsx b/src/frame/components/page-header/Header.tsx index 07e8409fcbb3..49508d32b802 100644 --- a/src/frame/components/page-header/Header.tsx +++ b/src/frame/components/page-header/Header.tsx @@ -1,4 +1,5 @@ import { useCallback, useEffect, useRef, useState } from 'react' +import type { JSX } from 'react' import cx from 'classnames' import { useRouter } from 'next/router' import { Dialog, IconButton } from '@primer/react' diff --git a/src/frame/components/page-header/HeaderSearchAndWidgets.tsx b/src/frame/components/page-header/HeaderSearchAndWidgets.tsx index c400fc6a3e9c..1826e92f2e23 100644 --- a/src/frame/components/page-header/HeaderSearchAndWidgets.tsx +++ b/src/frame/components/page-header/HeaderSearchAndWidgets.tsx @@ -1,4 +1,5 @@ import cx from 'classnames' +import type { JSX } from 'react' import { KebabHorizontalIcon, LinkExternalIcon } from '@primer/octicons-react' import { IconButton, ActionMenu, ActionList } from '@primer/react' diff --git a/src/frame/components/ui/Lead/Lead.tsx b/src/frame/components/ui/Lead/Lead.tsx index 443e1866da0d..96b735db3da1 
100644 --- a/src/frame/components/ui/Lead/Lead.tsx +++ b/src/frame/components/ui/Lead/Lead.tsx @@ -1,4 +1,5 @@ import { ReactNode } from 'react' +import type { JSX } from 'react' import cx from 'classnames' import styles from './Lead.module.scss' diff --git a/src/frame/components/ui/MarkdownContent/MarkdownContent.tsx b/src/frame/components/ui/MarkdownContent/MarkdownContent.tsx index 93b67dda731e..3f3790287234 100644 --- a/src/frame/components/ui/MarkdownContent/MarkdownContent.tsx +++ b/src/frame/components/ui/MarkdownContent/MarkdownContent.tsx @@ -1,4 +1,5 @@ import { ReactNode } from 'react' +import type { JSX } from 'react' import cx from 'classnames' import styles from './MarkdownContent.module.scss' diff --git a/src/frame/components/ui/MarkdownContent/UnrenderedMarkdownContent.tsx b/src/frame/components/ui/MarkdownContent/UnrenderedMarkdownContent.tsx index 98f02c43c773..c22dcfdabc52 100644 --- a/src/frame/components/ui/MarkdownContent/UnrenderedMarkdownContent.tsx +++ b/src/frame/components/ui/MarkdownContent/UnrenderedMarkdownContent.tsx @@ -1,6 +1,7 @@ import React from 'react' import ReactMarkdown from 'react-markdown' import type { Components } from 'react-markdown' +import type { JSX } from 'react' import remarkGfm from 'remark-gfm' import cx from 'classnames' import { IconButton } from '@primer/react' diff --git a/src/frame/lib/fetch-utils.ts b/src/frame/lib/fetch-utils.ts index 5b3c57fd9a91..12dc2723822c 100644 --- a/src/frame/lib/fetch-utils.ts +++ b/src/frame/lib/fetch-utils.ts @@ -2,6 +2,7 @@ * Utility functions for fetch with retry and timeout functionality * to replace got library functionality */ + export interface FetchWithRetryOptions { retries?: number retryDelay?: number diff --git a/src/graphql/components/GraphqlItem.tsx b/src/graphql/components/GraphqlItem.tsx index a37841bbba5e..4a8916111d76 100644 --- a/src/graphql/components/GraphqlItem.tsx +++ b/src/graphql/components/GraphqlItem.tsx @@ -1,3 +1,4 @@ +import React from 'react' import { HeadingLink } from '@/frame/components/article/HeadingLink' import type { GraphqlT } from './types' import { Notice } from './Notice' diff --git a/src/graphql/components/GraphqlPage.tsx b/src/graphql/components/GraphqlPage.tsx index 0557f1f5741d..c22f772630b1 100644 --- a/src/graphql/components/GraphqlPage.tsx +++ b/src/graphql/components/GraphqlPage.tsx @@ -1,4 +1,5 @@ import React from 'react' +import type { JSX } from 'react' import cx from 'classnames' import { Enum } from './Enum' diff --git a/src/landings/components/LandingHero.tsx b/src/landings/components/LandingHero.tsx index abee30152333..658899337f00 100644 --- a/src/landings/components/LandingHero.tsx +++ b/src/landings/components/LandingHero.tsx @@ -1,4 +1,4 @@ -import { useEffect, useState } from 'react' +import React, { useEffect, useState } from 'react' import cx from 'classnames' import { useRouter } from 'next/router' import { LinkExternalIcon, NoteIcon } from '@primer/octicons-react' diff --git a/src/landings/components/LandingSection.tsx b/src/landings/components/LandingSection.tsx index d318ce9a40f0..35902d4bda7b 100644 --- a/src/landings/components/LandingSection.tsx +++ b/src/landings/components/LandingSection.tsx @@ -1,3 +1,4 @@ +import React from 'react' import cx from 'classnames' import { HeadingLink } from '@/frame/components/article/HeadingLink' diff --git a/src/landings/components/shared/LandingArticleGridWithFilter.tsx b/src/landings/components/shared/LandingArticleGridWithFilter.tsx index 90ab78167ffb..f719b4cf428e 100644 --- 
a/src/landings/components/shared/LandingArticleGridWithFilter.tsx +++ b/src/landings/components/shared/LandingArticleGridWithFilter.tsx @@ -1,4 +1,4 @@ -import { useState, useRef, useEffect, useMemo } from 'react' +import React, { useState, useRef, useEffect, useMemo } from 'react' import { TextInput, ActionMenu, ActionList, Token, Pagination } from '@primer/react' import { SearchIcon } from '@primer/octicons-react' import cx from 'classnames' diff --git a/src/rest/components/RestCodeSamples.tsx b/src/rest/components/RestCodeSamples.tsx index b03338fbbd7f..f3568e6f1003 100644 --- a/src/rest/components/RestCodeSamples.tsx +++ b/src/rest/components/RestCodeSamples.tsx @@ -1,4 +1,4 @@ -import { useState, useEffect, useRef, FormEvent } from 'react' +import React, { useState, useEffect, useRef, FormEvent } from 'react' import { FormControl, IconButton, Select, SegmentedControl } from '@primer/react' import { CheckIcon, CopyIcon, InfoIcon } from '@primer/octicons-react' import { announce } from '@primer/live-region-element' diff --git a/src/search/components/input/AskAIResults.tsx b/src/search/components/input/AskAIResults.tsx index 5fce5019c0d6..dbaffc6355d7 100644 --- a/src/search/components/input/AskAIResults.tsx +++ b/src/search/components/input/AskAIResults.tsx @@ -1,4 +1,4 @@ -import { useEffect, useRef, useState } from 'react' +import React, { useEffect, useRef, useState } from 'react' import { uniqBy } from 'lodash-es' import { executeAISearch } from '../helpers/execute-search-actions' import { useRouter } from 'next/router' diff --git a/src/search/components/input/SearchBarButton.tsx b/src/search/components/input/SearchBarButton.tsx index 039b4851a139..0560fbfb8c6a 100644 --- a/src/search/components/input/SearchBarButton.tsx +++ b/src/search/components/input/SearchBarButton.tsx @@ -1,3 +1,4 @@ +import React from 'react' import cx from 'classnames' import { IconButton } from '@primer/react' import { CopilotIcon, SearchIcon } from '@primer/octicons-react' diff --git a/src/search/components/input/SearchOverlayContainer.tsx b/src/search/components/input/SearchOverlayContainer.tsx index 103ee815d8fb..cd504ca62b6f 100644 --- a/src/search/components/input/SearchOverlayContainer.tsx +++ b/src/search/components/input/SearchOverlayContainer.tsx @@ -1,3 +1,4 @@ +import React from 'react' import { SearchOverlay } from './SearchOverlay' import { QueryParams } from '../hooks/useMultiQueryParams' diff --git a/src/search/components/results/SearchResults.tsx b/src/search/components/results/SearchResults.tsx index 9cac096f17be..27c3a0d5b260 100644 --- a/src/search/components/results/SearchResults.tsx +++ b/src/search/components/results/SearchResults.tsx @@ -1,7 +1,7 @@ import { Box, Pagination, Text } from '@primer/react' import { SearchIcon } from '@primer/octicons-react' import { useRouter } from 'next/router' -import { useEffect, useRef, useState } from 'react' +import React, { useEffect, useRef, useState } from 'react' import cx from 'classnames' import { useTranslation } from '@/languages/components/useTranslation' diff --git a/src/tools/components/Fields.tsx b/src/tools/components/Fields.tsx index e6ae59f4e563..73c3dae6c54e 100644 --- a/src/tools/components/Fields.tsx +++ b/src/tools/components/Fields.tsx @@ -1,5 +1,5 @@ import { ActionList } from '@primer/react' -import { ReactNode } from 'react' +import React, { ReactNode } from 'react' import cx from 'classnames' import { PickerItem } from './Picker' From 7d1a2096607d4c8e6efb979b69f69127524fd0f0 Mon Sep 17 00:00:00 2001 From: Kevin Heis Date: 
Thu, 30 Oct 2025 09:24:12 -0700 Subject: [PATCH 4/9] Remove disabling of prefer-template eslint rule (#58230) --- eslint.config.ts | 3 +- src/app/lib/language-utils.ts | 2 +- .../archived-enterprise-versions.ts | 4 +- src/article-api/middleware/article.ts | 2 +- src/article-api/scripts/generate-api-docs.ts | 6 +- src/assets/tests/dynamic-assets.ts | 2 +- src/assets/tests/static-assets.ts | 2 +- src/codeql-cli/scripts/sync.ts | 2 +- src/codeql-cli/tests/test-circular-links.ts | 2 +- .../code-annotation-comment-spacing.ts | 2 +- .../image-alt-text-end-punctuation.ts | 2 +- .../lib/linting-rules/link-quotation.ts | 4 +- .../liquid-ifversion-versions.ts | 4 +- .../lib/linting-rules/liquid-syntax.ts | 4 +- .../linting-rules/liquid-tag-whitespace.ts | 7 +-- .../lib/linting-rules/octicon-aria-labels.ts | 2 +- .../scripts/find-unsed-variables.ts | 5 +- .../scripts/pretty-print-results.ts | 8 +-- .../outdated-release-phase-terminology.ts | 2 +- src/content-render/liquid/octicon.ts | 2 +- src/content-render/scripts/move-content.ts | 2 +- .../scripts/reusables-cli/find/unused.ts | 2 +- .../scripts/reusables-cli/find/used.ts | 2 +- .../scripts/test-moved-content.ts | 6 +- src/content-render/unified/alerts.ts | 2 +- .../tests/playwright-rendering.spec.ts | 4 +- src/frame/components/DefaultLayout.tsx | 2 +- src/frame/components/context/MainContext.tsx | 2 +- src/frame/lib/get-mini-toc-items.ts | 2 +- src/frame/lib/read-json-file.ts | 2 +- src/frame/middleware/find-page.ts | 2 +- src/frame/middleware/handle-next-data-path.ts | 2 +- src/frame/middleware/render-page.ts | 4 +- src/frame/tests/pages.ts | 18 +++--- src/frame/tests/toc-links.ts | 2 +- .../scripts/create-enterprise-issue.ts | 4 +- .../scripts/deprecate/update-content.ts | 6 +- src/github-apps/scripts/sync.ts | 8 +-- src/github-apps/tests/rendering.ts | 4 +- src/graphql/scripts/build-changelog.ts | 11 ++-- src/graphql/scripts/utils/schema-helpers.ts | 2 +- .../components/ProductLandingContext.tsx | 2 +- src/landings/components/SidebarProduct.tsx | 4 +- src/languages/components/LanguagePicker.tsx | 2 +- .../lib/correct-translation-content.ts | 4 +- src/languages/lib/render-with-fallback.ts | 6 +- src/languages/lib/translation-utils.ts | 3 +- src/languages/tests/api-search.ts | 2 +- .../scripts/rendered-content-link-checker.ts | 4 +- .../generate-new-json.ts | 2 +- src/metrics/scripts/docsaudit.ts | 2 +- src/observability/logger/lib/to-logfmt.ts | 4 +- .../get-automatic-request-logger.ts | 4 +- src/redirects/lib/get-redirect.ts | 47 +++++++-------- src/redirects/middleware/handle-redirects.ts | 2 +- src/rest/components/RestAuth.tsx | 4 +- src/rest/components/RestRedirect.tsx | 2 +- src/rest/docs.ts | 50 ++++++++-------- .../scripts/utils/create-rest-examples.ts | 6 +- src/rest/tests/api.ts | 10 ++-- src/rest/tests/create-rest-examples.ts | 2 +- src/rest/tests/rendering.ts | 11 ++-- src/search/components/input/SearchOverlay.tsx | 2 +- src/search/lib/ai-search-proxy.ts | 3 +- src/search/lib/helpers/strings.ts | 2 +- src/search/lib/helpers/time.ts | 2 +- .../scripts/scrape/lib/build-records.ts | 59 +++++++++---------- src/search/tests/api-search.ts | 56 +++++++++--------- src/webhooks/tests/api.ts | 8 +-- .../content-changes-table-comment.ts | 4 +- src/workflows/enable-automerge.ts | 7 ++- .../experimental/readability-report.ts | 12 ++-- 72 files changed, 234 insertions(+), 250 deletions(-) diff --git a/eslint.config.ts b/eslint.config.ts index 323197067e6d..9e40b282ae79 100644 --- a/eslint.config.ts +++ b/eslint.config.ts @@ -93,14 +93,13 @@ export 
default [ 'no-redeclare': 'off', // Allow function overloads in TypeScript 'i18n-text/no-en': 'off', // This rule causes eslint to not run at all 'filenames/match-regex': 'off', // This rule causes eslint to not run at all + camelcase: 'off', // Many gh apis use underscores, 600+ uses // Disabled rules to review 'github/no-then': 'off', // 30+ '@typescript-eslint/ban-ts-comment': 'off', // 50+ 'no-shadow': 'off', // 150+ - 'prefer-template': 'off', // 150+ 'github/array-foreach': 'off', // 250+ - camelcase: 'off', // 600+ 'no-console': 'off', // 800+ '@typescript-eslint/no-explicit-any': 'off', // 1000+ }, diff --git a/src/app/lib/language-utils.ts b/src/app/lib/language-utils.ts index 173f4e9a1f55..c3c88dea6212 100644 --- a/src/app/lib/language-utils.ts +++ b/src/app/lib/language-utils.ts @@ -34,7 +34,7 @@ export function hasLanguagePrefix(path: string): boolean { export function stripLanguagePrefix(path: string): string { if (hasLanguagePrefix(path)) { const pathSegments = path.split('/') - return '/' + pathSegments.slice(2).join('/') + return `/${pathSegments.slice(2).join('/')}` } return path } diff --git a/src/archives/middleware/archived-enterprise-versions.ts b/src/archives/middleware/archived-enterprise-versions.ts index 848a151f0e03..74dbf9caf2e1 100644 --- a/src/archives/middleware/archived-enterprise-versions.ts +++ b/src/archives/middleware/archived-enterprise-versions.ts @@ -353,14 +353,14 @@ function getProxyPath(reqPath: string, requestedVersion: string) { // Releases 2.18 and higher if (versionSatisfiesRange(requestedVersion, `>${lastVersionWithoutArchivedRedirectsFile}`)) { - const newReqPath = reqPath.includes('redirects.json') ? `/${reqPath}` : reqPath + '/index.html' + const newReqPath = reqPath.includes('redirects.json') ? `/${reqPath}` : `${reqPath}/index.html` return ENTERPRISE_GH_PAGES_URL_PREFIX + requestedVersion + newReqPath } // Releases 2.13 - 2.17 // redirect.json files don't exist for these versions if (versionSatisfiesRange(requestedVersion, `>=2.13`)) { - return ENTERPRISE_GH_PAGES_URL_PREFIX + requestedVersion + reqPath + '/index.html' + return `${ENTERPRISE_GH_PAGES_URL_PREFIX + requestedVersion + reqPath}/index.html` } // Releases 2.12 and lower diff --git a/src/article-api/middleware/article.ts b/src/article-api/middleware/article.ts index eca1e0a97f68..2d73067da152 100644 --- a/src/article-api/middleware/article.ts +++ b/src/article-api/middleware/article.ts @@ -167,7 +167,7 @@ function incrementArticleLookup( const source = req.get('X-Request-Source') || (req.get('Referer') - ? 'external-' + (new URL(req.get('Referer') || '').hostname || 'unknown') + ? 
`external-${new URL(req.get('Referer') || '').hostname || 'unknown'}` : 'external') const tags = [ diff --git a/src/article-api/scripts/generate-api-docs.ts b/src/article-api/scripts/generate-api-docs.ts index bddac9a9c10e..81b9cad96a1e 100644 --- a/src/article-api/scripts/generate-api-docs.ts +++ b/src/article-api/scripts/generate-api-docs.ts @@ -156,10 +156,10 @@ function updateReadme(readmePath: string, markdown: string): void { // Replace API documentation section, or append to end if (readme.includes(placeholderComment)) { - const pattern = new RegExp(placeholderComment + '[\\s\\S]*', 'g') - readme = readme.replace(pattern, placeholderComment + '\n' + markdown) + const pattern = new RegExp(`${placeholderComment}[\\s\\S]*`, 'g') + readme = readme.replace(pattern, `${placeholderComment}\n${markdown}`) } else { - readme += '\n' + markdown + readme += `\n${markdown}` } writeFileSync(readmePath, readme) diff --git a/src/assets/tests/dynamic-assets.ts b/src/assets/tests/dynamic-assets.ts index b889735b0067..9bfb457a6f67 100644 --- a/src/assets/tests/dynamic-assets.ts +++ b/src/assets/tests/dynamic-assets.ts @@ -103,7 +103,7 @@ describe('dynamic assets', () => { }) test.each(['key', 'key=value'])('any query string (%p) triggers a redirect', async (qs) => { - const res = await get('/assets/images/_fixtures/screenshot.webp?' + qs) + const res = await get(`/assets/images/_fixtures/screenshot.webp?${qs}`) expect(res.statusCode).toBe(302) expect(res.headers.location).toBe('/assets/images/_fixtures/screenshot.webp') expect(res.headers['cache-control']).toContain('public') diff --git a/src/assets/tests/static-assets.ts b/src/assets/tests/static-assets.ts index 479b6af44f96..272aa862017f 100644 --- a/src/assets/tests/static-assets.ts +++ b/src/assets/tests/static-assets.ts @@ -113,7 +113,7 @@ describe('static assets', () => { // This picks the first one found. We just need it to be anything // that actually resolves. 
const filePath = getNextStaticAsset('css') - const asURL = '/' + filePath.replace('.next', '_next').split(path.sep).join('/') + const asURL = `/${filePath.replace('.next', '_next').split(path.sep).join('/')}` const res = await get(asURL) expect(res.statusCode).toBe(200) checkCachingHeaders(res) diff --git a/src/codeql-cli/scripts/sync.ts b/src/codeql-cli/scripts/sync.ts index 407b96e0560f..5f360da6bf72 100755 --- a/src/codeql-cli/scripts/sync.ts +++ b/src/codeql-cli/scripts/sync.ts @@ -41,7 +41,7 @@ async function main() { const matchHeading = '## Options\n' const primaryHeadingSourceContent = sourceContent.replace( matchHeading, - matchHeading + '\n### Primary Options\n', + `${matchHeading}\n### Primary Options\n`, ) const currentFileName = path.basename(file) const { data, content } = await convertContentToDocs( diff --git a/src/codeql-cli/tests/test-circular-links.ts b/src/codeql-cli/tests/test-circular-links.ts index d8a2def89dfc..651d5ebc0dd9 100644 --- a/src/codeql-cli/tests/test-circular-links.ts +++ b/src/codeql-cli/tests/test-circular-links.ts @@ -56,7 +56,7 @@ async function testCircularLinkFix(): Promise { } console.log('\n--- Generated content preview ---') - console.log(result1.content.substring(0, 800) + '...') + console.log(`${result1.content.substring(0, 800)}...`) return !hasCircularLink && hasValidLink } catch (error) { diff --git a/src/content-linter/lib/linting-rules/code-annotation-comment-spacing.ts b/src/content-linter/lib/linting-rules/code-annotation-comment-spacing.ts index 26ac674bc60a..96c69dcb2d1e 100644 --- a/src/content-linter/lib/linting-rules/code-annotation-comment-spacing.ts +++ b/src/content-linter/lib/linting-rules/code-annotation-comment-spacing.ts @@ -81,7 +81,7 @@ export const codeAnnotationCommentSpacing = { // No space after comment character - this is an error const lineNumber: number = token.lineNumber + index + 1 const leadingWhitespace: string = line.match(/^\s*/)![0] - const fixedLine: string = leadingWhitespace + commentChar + ' ' + restOfLine + const fixedLine: string = `${leadingWhitespace + commentChar} ${restOfLine}` addError( onError, diff --git a/src/content-linter/lib/linting-rules/image-alt-text-end-punctuation.ts b/src/content-linter/lib/linting-rules/image-alt-text-end-punctuation.ts index c78c29e3501c..d00b8819969c 100644 --- a/src/content-linter/lib/linting-rules/image-alt-text-end-punctuation.ts +++ b/src/content-linter/lib/linting-rules/image-alt-text-end-punctuation.ts @@ -26,7 +26,7 @@ export const imageAltTextEndPunctuation: Rule = { const range = getRange(token.line, imageAltText) - addFixErrorDetail(onError, token.lineNumber, imageAltText + '.', imageAltText, range, { + addFixErrorDetail(onError, token.lineNumber, `${imageAltText}.`, imageAltText, range, { lineNumber: token.lineNumber, editColumn: isStringQuoted(imageAltText) ? 
token.line.indexOf(']') diff --git a/src/content-linter/lib/linting-rules/link-quotation.ts b/src/content-linter/lib/linting-rules/link-quotation.ts index 9d4eb089a170..609896ec5fe3 100644 --- a/src/content-linter/lib/linting-rules/link-quotation.ts +++ b/src/content-linter/lib/linting-rules/link-quotation.ts @@ -26,7 +26,7 @@ export const linkQuotation: Rule = { } else if (inLinkWithPrecedingQuotes && child.type === 'text') { content.push(escapeRegExp((child.content || '').trim())) } else if (inLinkWithPrecedingQuotes && child.type === 'code_inline') { - content.push('`' + escapeRegExp((child.content || '').trim()) + '`') + content.push(`\`${escapeRegExp((child.content || '').trim())}\``) } else if (child.type === 'link_close') { const title = content.join(' ') const regex = new RegExp(`"\\[${title}\\]\\(${linkUrl}\\)({%.*%})?(!|\\.|\\?|,)?"`) @@ -44,7 +44,7 @@ export const linkQuotation: Rule = { newLine = newLine.slice(0, -1) } if (newLine.endsWith('".')) { - newLine = newLine.slice(0, -2) + '.' + newLine = `${newLine.slice(0, -2)}.` } const lineNumber = child.lineNumber addError( diff --git a/src/content-linter/lib/linting-rules/liquid-ifversion-versions.ts b/src/content-linter/lib/linting-rules/liquid-ifversion-versions.ts index 97da99f41abe..8748bbca12b3 100644 --- a/src/content-linter/lib/linting-rules/liquid-ifversion-versions.ts +++ b/src/content-linter/lib/linting-rules/liquid-ifversion-versions.ts @@ -104,7 +104,7 @@ function setLiquidErrors(condTagItems: any[], onError: RuleErrorCallback, lines: for (let i = 0; i < condTagItems.length; i++) { const item = condTagItems[i] const tagNameNoCond = item.name === 'endif' || item.name === 'else' - const itemErrorName = tagNameNoCond ? item.name : item.name + ' ' + item.cond + const itemErrorName = tagNameNoCond ? 
item.name : `${item.name} ${item.cond}` if (item.action.type === 'delete') { // There is no next stack item, the endif tag is alway the @@ -438,7 +438,7 @@ function updateConditionals(condTagItems: any[]) { const newVersions = Object.entries(item.versionsObj).map(([key, value]) => { if (key === 'ghes') { if (value === '*') return key - return key + ' ' + value + return `${key} ${value}` } else return key }) item.action.cond = newVersions.join(' or ') diff --git a/src/content-linter/lib/linting-rules/liquid-syntax.ts b/src/content-linter/lib/linting-rules/liquid-syntax.ts index 1203b7e07367..efab53c928de 100644 --- a/src/content-linter/lib/linting-rules/liquid-syntax.ts +++ b/src/content-linter/lib/linting-rules/liquid-syntax.ts @@ -55,7 +55,7 @@ export const frontmatterLiquidSyntax = { addError( onError, lineNumber, - 'Liquid syntax error: ' + errorDescription, + `Liquid syntax error: ${errorDescription}`, value, range, null, // No fix possible @@ -92,7 +92,7 @@ export const liquidSyntax = { addError( onError, lineNumber, - 'Liquid syntax error: ' + errorDescription, + `Liquid syntax error: ${errorDescription}`, line, range, null, // No fix possible diff --git a/src/content-linter/lib/linting-rules/liquid-tag-whitespace.ts b/src/content-linter/lib/linting-rules/liquid-tag-whitespace.ts index 3b06630ac4ad..e0678607accf 100644 --- a/src/content-linter/lib/linting-rules/liquid-tag-whitespace.ts +++ b/src/content-linter/lib/linting-rules/liquid-tag-whitespace.ts @@ -46,14 +46,13 @@ export const liquidTagWhitespace: Rule = { const openTag = tag.slice(0, token.contentRange[0] - token.begin) const closeTag = tag.slice(-(token.end - token.contentRange[1])) - const isOpenTagOneSpace = openTag !== openTag.trim() + ' ' - const isCloseTagOneSpace = closeTag !== ' ' + closeTag.trim() + const isOpenTagOneSpace = openTag !== `${openTag.trim()} ` + const isCloseTagOneSpace = closeTag !== ` ${closeTag.trim()}` const moreThanOneSpace = /\s{2,}/ const isArgOneSpace = moreThanOneSpace.test(tag) - const fixedContent = - openTag.trim() + ' ' + token.content.replace(moreThanOneSpace, ' ') + ' ' + closeTag.trim() + const fixedContent = `${openTag.trim()} ${token.content.replace(moreThanOneSpace, ' ')} ${closeTag.trim()}` if (isOpenTagOneSpace || isCloseTagOneSpace || isArgOneSpace) { addFixErrorDetail( diff --git a/src/content-linter/lib/linting-rules/octicon-aria-labels.ts b/src/content-linter/lib/linting-rules/octicon-aria-labels.ts index a3eb531395ad..376c67185f8e 100644 --- a/src/content-linter/lib/linting-rules/octicon-aria-labels.ts +++ b/src/content-linter/lib/linting-rules/octicon-aria-labels.ts @@ -37,7 +37,7 @@ export const octiconAriaLabels: Rule = { const octiconNameMatch = token.args.match(/["']([^"']+)["']/) const octiconName = octiconNameMatch ? octiconNameMatch[1] : 'icon' const originalContent = token.content - const fixedContent = originalContent + ` aria-label="${octiconName}"` + const fixedContent = `${originalContent} aria-label="${octiconName}"` addFixErrorDetail( onError, diff --git a/src/content-linter/scripts/find-unsed-variables.ts b/src/content-linter/scripts/find-unsed-variables.ts index 2e6a5560b8fc..4fbd0ff085ea 100644 --- a/src/content-linter/scripts/find-unsed-variables.ts +++ b/src/content-linter/scripts/find-unsed-variables.ts @@ -104,11 +104,10 @@ async function main(options: Options) { function getVariables(): Map { const variables = new Map() for (const filePath of walkFiles('data/variables', '.yml')) { - const dottedPathBase = - 'variables.' 
+ filePath.replace('data/variables/', '').replace('.yml', '').replace(/\//g, '.') + const dottedPathBase = `variables.${filePath.replace('data/variables/', '').replace('.yml', '').replace(/\//g, '.')}` const data = yaml.load(fs.readFileSync(filePath, 'utf-8')) as Record for (const key of Object.keys(data)) { - const dottedPath = dottedPathBase + '.' + key + const dottedPath = `${dottedPathBase}.${key}` variables.set(dottedPath, filePath) } } diff --git a/src/content-linter/scripts/pretty-print-results.ts b/src/content-linter/scripts/pretty-print-results.ts index 615fb812ef89..d447b1170c01 100644 --- a/src/content-linter/scripts/pretty-print-results.ts +++ b/src/content-linter/scripts/pretty-print-results.ts @@ -170,14 +170,14 @@ function indentWrappedString(str: string, startingIndent: number): string { if ((currentLine + word).length > effectiveWidth) { if (isFirstLine) { - indentedString += currentLine.trim() + '\n' + indentedString += `${currentLine.trim()}\n` isFirstLine = false } else { - indentedString += NEW_LINE_PADDING + currentLine.trim() + '\n' + indentedString += `${NEW_LINE_PADDING + currentLine.trim()}\n` } - currentLine = word + ' ' + currentLine = `${word} ` } else { - currentLine += word + ' ' + currentLine += `${word} ` } } if (isFirstLine) { diff --git a/src/content-linter/tests/unit/outdated-release-phase-terminology.ts b/src/content-linter/tests/unit/outdated-release-phase-terminology.ts index 166a0126140c..d986c769b885 100644 --- a/src/content-linter/tests/unit/outdated-release-phase-terminology.ts +++ b/src/content-linter/tests/unit/outdated-release-phase-terminology.ts @@ -145,7 +145,7 @@ describe(outdatedReleasePhaseTerminology.names.join(' - '), () => { const markdown = ['This feature is in beta.'].join('\n') const result = await runRule(outdatedReleasePhaseTerminology, { strings: { - markdown: frontmatter + '\n' + markdown, + markdown: `${frontmatter}\n${markdown}`, }, }) const errors = result.markdown diff --git a/src/content-render/liquid/octicon.ts b/src/content-render/liquid/octicon.ts index 3a9e1c72c5eb..88bab333cf8d 100644 --- a/src/content-render/liquid/octicon.ts +++ b/src/content-render/liquid/octicon.ts @@ -18,7 +18,7 @@ interface OcticonsMatch { } const OptionsSyntax = /([a-zA-Z-]+)="([\w\s-]+)"*/g -const Syntax = new RegExp('"(?[a-zA-Z-]+)"(?(?:\\s' + OptionsSyntax.source + ')*)') +const Syntax = new RegExp(`"(?[a-zA-Z-]+)"(?(?:\\s${OptionsSyntax.source})*)`) const SyntaxHelp = 'Syntax Error in tag \'octicon\' - Valid syntax: octicon "" ' /** diff --git a/src/content-render/scripts/move-content.ts b/src/content-render/scripts/move-content.ts index 018b98b57dcf..2a699d7c00e1 100755 --- a/src/content-render/scripts/move-content.ts +++ b/src/content-render/scripts/move-content.ts @@ -268,7 +268,7 @@ function makeHref(root, filePath) { } else { nameSplit.push(nameSplit.pop().replace(/\.md$/, '')) } - return '/' + nameSplit.join('/') + return `/${nameSplit.join('/')}` } function moveFolder(oldPath, newPath, files, opts) { diff --git a/src/content-render/scripts/reusables-cli/find/unused.ts b/src/content-render/scripts/reusables-cli/find/unused.ts index e3d0e893d706..82feb590ab87 100644 --- a/src/content-render/scripts/reusables-cli/find/unused.ts +++ b/src/content-render/scripts/reusables-cli/find/unused.ts @@ -28,7 +28,7 @@ export function findUnused({ absolute }: { absolute: boolean }) { (name === 'data' || name === 'indented_data_reference') && args.startsWith('reusables.') ) { - const reusableName = path.join('data', ...args.split(' ')[0].split('.')) + 
'.md' + const reusableName = `${path.join('data', ...args.split(' ')[0].split('.'))}.md` // Special cases where we don't want them to count as reusables. It's an example in a how-to doc if ( reusableName.includes('foo/bar.md') || diff --git a/src/content-render/scripts/reusables-cli/find/used.ts b/src/content-render/scripts/reusables-cli/find/used.ts index c6815cf501d3..24e1851a1a68 100644 --- a/src/content-render/scripts/reusables-cli/find/used.ts +++ b/src/content-render/scripts/reusables-cli/find/used.ts @@ -65,7 +65,7 @@ export function findTopUsed(numberOfMostUsedToFind: number, { absolute }: { abso console.log(`\nTop ${numberOfMostUsedToFind} most used reusables:`) let i = 0 for (const [reusable, count] of sortedCounts.slice(0, numberOfMostUsedToFind)) { - let printReusablePath = path.join('data', ...reusable.split('.')) + '.md' + let printReusablePath = `${path.join('data', ...reusable.split('.'))}.md` if (absolute) { printReusablePath = path.resolve(printReusablePath) } diff --git a/src/content-render/scripts/test-moved-content.ts b/src/content-render/scripts/test-moved-content.ts index 3916b9158de4..7b5dcac10151 100644 --- a/src/content-render/scripts/test-moved-content.ts +++ b/src/content-render/scripts/test-moved-content.ts @@ -31,7 +31,7 @@ async function main(nameTuple: [string, string]) { const parentIndexMd = path.join(path.dirname(after), 'index.md') const fileContent = fs.readFileSync(parentIndexMd, 'utf-8') const { data } = readFrontmatter(fileContent) - const afterShortname = '/' + after.split('/').slice(-1)[0].replace(/\.md$/, '') + const afterShortname = `/${after.split('/').slice(-1)[0].replace(/\.md$/, '')}` if (data) assert(data.children.includes(afterShortname), `Child ${afterShortname} not found`) } } else { @@ -43,7 +43,7 @@ async function main(nameTuple: [string, string]) { const parentIndexMd = path.join(path.dirname(after), 'index.md') const fileContent = fs.readFileSync(parentIndexMd, 'utf-8') const { data } = readFrontmatter(fileContent) - const afterShortname = '/' + after.split('/').slice(-1) + const afterShortname = `/${after.split('/').slice(-1)}` if (data) assert(data.children.includes(afterShortname), `Child ${afterShortname} not found`) } } @@ -57,5 +57,5 @@ function makeHref(root: string, filePath: string) { const last = nameSplit.pop() if (last) nameSplit.push(last.replace(/\.md$/, '')) } - return '/' + nameSplit.join('/') + return `/${nameSplit.join('/')}` } diff --git a/src/content-render/unified/alerts.ts b/src/content-render/unified/alerts.ts index dd9e3ecc7898..5fe2436a7dad 100644 --- a/src/content-render/unified/alerts.ts +++ b/src/content-render/unified/alerts.ts @@ -43,7 +43,7 @@ export default function alerts({ alertTitles = {} }: { alertTitles?: Record { // Playwright will cache this redirect, so we need to add something // to "cache bust" the URL const cb = `?cb=${Math.random()}` - await page.goto('/get-started/start-your-journey/hello-world' + cb) - await expect(page).toHaveURL('/ja/get-started/start-your-journey/hello-world' + cb) + await page.goto(`/get-started/start-your-journey/hello-world${cb}`) + await expect(page).toHaveURL(`/ja/get-started/start-your-journey/hello-world${cb}`) // If you go, with the Japanese cookie, to the English page directly, // it will offer a link to the Japanese URL in a banner. 
diff --git a/src/frame/components/DefaultLayout.tsx b/src/frame/components/DefaultLayout.tsx index 411ba1a7c436..a23a377995c7 100644 --- a/src/frame/components/DefaultLayout.tsx +++ b/src/frame/components/DefaultLayout.tsx @@ -64,7 +64,7 @@ export const DefaultLayout = (props: Props) => { const metaDescription = page.introPlainText ? page.introPlainText : t('default_description') const SOCIAL_CATEGORIES = new Set(['code-security', 'actions', 'issues', 'copilot']) - const SOCIAL_CARD_IMG_BASE_URL = `${xHost ? 'https://' + xHost : ''}/assets/cb-345/images/social-cards` + const SOCIAL_CARD_IMG_BASE_URL = `${xHost ? `https://${xHost}` : ''}/assets/cb-345/images/social-cards` function getCategoryImageUrl(category: string): string { return `${SOCIAL_CARD_IMG_BASE_URL}/${category}.png` diff --git a/src/frame/components/context/MainContext.tsx b/src/frame/components/context/MainContext.tsx index 714dadb3a256..f90691cd4d8a 100644 --- a/src/frame/components/context/MainContext.tsx +++ b/src/frame/components/context/MainContext.tsx @@ -266,7 +266,7 @@ export const getMainContext = async (req: any, res: any): Promise enterpriseServerVersions: req.context.enterpriseServerVersions, error: req.context.error ? req.context.error.toString() : '', featureFlags: {}, - fullUrl: req.protocol + '://' + req.hostname + req.originalUrl, // does not include port for localhost + fullUrl: `${req.protocol}://${req.hostname}${req.originalUrl}`, // does not include port for localhost isHomepageVersion: req.context.page?.documentType === 'homepage', nonEnterpriseDefaultVersion: req.context.nonEnterpriseDefaultVersion, page: pageInfo, diff --git a/src/frame/lib/get-mini-toc-items.ts b/src/frame/lib/get-mini-toc-items.ts index e2d0c8cf8fd9..a701e6bb9e98 100644 --- a/src/frame/lib/get-mini-toc-items.ts +++ b/src/frame/lib/get-mini-toc-items.ts @@ -174,7 +174,7 @@ export async function getAutomatedPageMiniTocItems( for (let i = 0; i < depth; i++) { title += '#' } - return title + ` ${item}\n` + return `${title} ${item}\n` }) .join('') diff --git a/src/frame/lib/read-json-file.ts b/src/frame/lib/read-json-file.ts index 99824f21bedf..44b6758fd2ea 100644 --- a/src/frame/lib/read-json-file.ts +++ b/src/frame/lib/read-json-file.ts @@ -57,7 +57,7 @@ export function readCompressedJsonFileFallbackLazily(xpath: string): () => any { // err is any because fs errors can have various shapes with code property if (err.code === 'ENOENT') { try { - fs.accessSync(xpath + '.br') + fs.accessSync(`${xpath}.br`) } catch (err: any) { // err is any because fs errors can have various shapes with code property if (err.code === 'ENOENT') { diff --git a/src/frame/middleware/find-page.ts b/src/frame/middleware/find-page.ts index 00b0013da895..617b162d901d 100644 --- a/src/frame/middleware/find-page.ts +++ b/src/frame/middleware/find-page.ts @@ -110,7 +110,7 @@ async function rereadByPath( // but perhaps one day we can always and only do these kinds of lookups // at runtime. const possible = path.join(contentRoot, withoutVersion) - const filePath = existsSync(possible) ? path.join(possible, 'index.md') : possible + '.md' + const filePath = existsSync(possible) ? 
path.join(possible, 'index.md') : `${possible}.md` const relativePath = path.relative(contentRoot, filePath) const basePath = contentRoot diff --git a/src/frame/middleware/handle-next-data-path.ts b/src/frame/middleware/handle-next-data-path.ts index 8aff87bbdabf..882720748ce0 100644 --- a/src/frame/middleware/handle-next-data-path.ts +++ b/src/frame/middleware/handle-next-data-path.ts @@ -30,7 +30,7 @@ export default function handleNextDataPath( if (parts[1] === 'free-pro-team@latest') { parts.splice(1, 1) } - req.pagePath = '/' + parts.join('/').replace(/.json+$/, '') + req.pagePath = `/${parts.join('/').replace(/.json+$/, '')}` } else { req.pagePath = req.path } diff --git a/src/frame/middleware/render-page.ts b/src/frame/middleware/render-page.ts index f647718ef790..eeb2eb0bf929 100644 --- a/src/frame/middleware/render-page.ts +++ b/src/frame/middleware/render-page.ts @@ -106,7 +106,7 @@ export default async function renderPage(req: ExtendedRequest, res: Response) { req.context.currentVersion === 'free-pro-team@latest' || !allVersions[req.context.currentVersion!] ) { - page.fullTitle += ' - ' + context.site!.data.ui.header.github_docs + page.fullTitle += ` - ${context.site!.data.ui.header.github_docs}` } else { const { versionTitle } = allVersions[req.context.currentVersion!] page.fullTitle += ' - ' @@ -116,7 +116,7 @@ export default async function renderPage(req: ExtendedRequest, res: Response) { if (!versionTitle.includes('GitHub')) { page.fullTitle += 'GitHub ' } - page.fullTitle += versionTitle + ' Docs' + page.fullTitle += `${versionTitle} Docs` } } diff --git a/src/frame/tests/pages.ts b/src/frame/tests/pages.ts index af8c4efc412e..99d000e884e5 100644 --- a/src/frame/tests/pages.ts +++ b/src/frame/tests/pages.ts @@ -78,16 +78,14 @@ describe('pages module', () => { .map(([path]) => path) // Build a detailed message with sources for each duplicate - const message = - `Found ${duplicates.length} duplicate redirect_from path${duplicates.length === 1 ? '' : 's'}. + const message = `Found ${duplicates.length} duplicate redirect_from path${duplicates.length === 1 ? '' : 's'}. Ensure that you don't define the same path more than once in the redirect_from property in a single file and across all English files. 
- You may also receive this error if you have defined the same children property more than once.\n` + - duplicates + You may also receive this error if you have defined the same children property more than once.\n${duplicates .map((dup) => { const files = Array.from(redirectToFiles.get(dup) || []) return `${dup}\n Defined in:\n ${files.join('\n ')}` }) - .join('\n\n') + .join('\n\n')}` expect(duplicates.length, message).toBe(0) }) @@ -136,10 +134,12 @@ describe('pages module', () => { .flatten() .value() - const failureMessage = - JSON.stringify(frontmatterErrors, null, 2) + - '\n\n' + - chain(frontmatterErrors).map('filepath').join('\n').value() + const failureMessage = `${JSON.stringify(frontmatterErrors, null, 2)}\n\n${chain( + frontmatterErrors, + ) + .map('filepath') + .join('\n') + .value()}` expect(frontmatterErrors.length, failureMessage).toBe(0) }) diff --git a/src/frame/tests/toc-links.ts b/src/frame/tests/toc-links.ts index 3288fb21799b..16dc42c754c4 100644 --- a/src/frame/tests/toc-links.ts +++ b/src/frame/tests/toc-links.ts @@ -47,7 +47,7 @@ describe('toc links', () => { } } - const message = 'broken link in a TOC: ' + JSON.stringify(issues, null, 2) + const message = `broken link in a TOC: ${JSON.stringify(issues, null, 2)}` expect(issues.length, message).toBe(0) }) }) diff --git a/src/ghes-releases/scripts/create-enterprise-issue.ts b/src/ghes-releases/scripts/create-enterprise-issue.ts index e7fe92ee61f1..73eda3325789 100644 --- a/src/ghes-releases/scripts/create-enterprise-issue.ts +++ b/src/ghes-releases/scripts/create-enterprise-issue.ts @@ -359,10 +359,10 @@ async function isExistingIssue( let query = encodeURIComponent(`is:issue repo:${repo} `) if (searchQuery) { - query += '+' + searchQuery + query += `+${searchQuery}` } if (labelQuery) { - query += '+' + labelQuery + query += `+${labelQuery}` } const issues = await octokit.request(`GET /search/issues?q=${query}`) diff --git a/src/ghes-releases/scripts/deprecate/update-content.ts b/src/ghes-releases/scripts/deprecate/update-content.ts index 86cf54309fee..36d0e56ef801 100644 --- a/src/ghes-releases/scripts/deprecate/update-content.ts +++ b/src/ghes-releases/scripts/deprecate/update-content.ts @@ -28,7 +28,7 @@ export function updateContentFiles() { let featureData = undefined if (data.versions.feature) { - const featureFilePath = 'data/features/' + data.versions.feature + '.yml' + const featureFilePath = `data/features/${data.versions.feature}.yml` const featureContent = fs.readFileSync(featureFilePath, 'utf8') featureData = yaml.load(featureContent) as featureDataType if (!featureData || !featureData.versions) @@ -117,8 +117,8 @@ function removeFileUpdateParent(filePath: string) { if (!data) return // Children paths are relative to the index.md file's directory const childPath = filePath.endsWith('index.md') - ? '/' + path.basename(path.dirname(filePath)) - : '/' + path.basename(filePath, '.md') + ? `/${path.basename(path.dirname(filePath))}` + : `/${path.basename(filePath, '.md')}` // Remove the childPath from the parent index.md file's children frontmatter data.children = data.children.filter((child) => child !== childPath) diff --git a/src/github-apps/scripts/sync.ts b/src/github-apps/scripts/sync.ts index 000b510e0062..01a6cfa91443 100755 --- a/src/github-apps/scripts/sync.ts +++ b/src/github-apps/scripts/sync.ts @@ -448,11 +448,11 @@ function getDisplayTitle( const displayTitle = isRest ? !resourceGroup - ? sentenceCase(title) + ' permissions' - : `"${sentenceCase(title)}" ` + resourceGroup + ' permissions' + ? 
`${sentenceCase(title)} permissions` + : `"${sentenceCase(title)}" ${resourceGroup} permissions` : !resourceGroup - ? sentenceCase(title) + ' permissions' - : sentenceCase(resourceGroup) + ` permissions for "${title}"` + ? `${sentenceCase(title)} permissions` + : `${sentenceCase(resourceGroup)} permissions for "${title}"` return { title, displayTitle } } diff --git a/src/github-apps/tests/rendering.ts b/src/github-apps/tests/rendering.ts index 2a352c666dea..19ebb9ca7e78 100644 --- a/src/github-apps/tests/rendering.ts +++ b/src/github-apps/tests/rendering.ts @@ -76,7 +76,7 @@ describe('REST references docs', () => { ...value.map( (item: EnabledItem) => `/en/rest/${key}${ - categoriesWithoutSubcategories.includes(key) ? '' : '/' + item.subcategory + categoriesWithoutSubcategories.includes(key) ? '' : `/${item.subcategory}` }#${item.slug}`, ), ) @@ -110,7 +110,7 @@ describe('REST references docs', () => { ...value.permissions.map( (item: PermissionItem) => `/en/rest/${item.category}${ - categoriesWithoutSubcategories.includes(item.category) ? '' : '/' + item.subcategory + categoriesWithoutSubcategories.includes(item.category) ? '' : `/${item.subcategory}` }#${item.slug}`, ), ) diff --git a/src/graphql/scripts/build-changelog.ts b/src/graphql/scripts/build-changelog.ts index 1a5b248c3585..f02b51026146 100644 --- a/src/graphql/scripts/build-changelog.ts +++ b/src/graphql/scripts/build-changelog.ts @@ -175,12 +175,9 @@ export async function createChangelogEntry( }), ) const cleanTitle = cleanPreviewTitle(previewTitle) - const entryTitle = - 'The [' + - cleanTitle + - '](/graphql/overview/schema-previews#' + - previewAnchor(cleanTitle) + - ') includes these changes:' + const entryTitle = `The [${cleanTitle}](/graphql/overview/schema-previews#${previewAnchor( + cleanTitle, + )}) includes these changes:` changelogEntry.previewChanges.push({ title: entryTitle, changes: renderedPreviewChanges, @@ -220,7 +217,7 @@ export function cleanPreviewTitle(title: string): string { } else if (title === 'MergeInfoPreview') { title = 'Merge info preview' } else if (!title.endsWith('preview')) { - title = title + ' preview' + title = `${title} preview` } return title } diff --git a/src/graphql/scripts/utils/schema-helpers.ts b/src/graphql/scripts/utils/schema-helpers.ts index 1d1968276068..0bd516f1e257 100644 --- a/src/graphql/scripts/utils/schema-helpers.ts +++ b/src/graphql/scripts/utils/schema-helpers.ts @@ -59,7 +59,7 @@ const graphqlTypes: GraphQLTypeInfo[] = JSON.parse( const singleQuotesInsteadOfBackticks = / '(\S+?)' / function addPeriod(string: string): string { - return string.endsWith('.') ? string : string + '.' + return string.endsWith('.') ? string : `${string}.` } async function getArguments( diff --git a/src/landings/components/ProductLandingContext.tsx b/src/landings/components/ProductLandingContext.tsx index f732228e93da..edd65a6daa0c 100644 --- a/src/landings/components/ProductLandingContext.tsx +++ b/src/landings/components/ProductLandingContext.tsx @@ -146,7 +146,7 @@ export const getProductLandingContextFromRequest = async ( key, label: key === 'popular' || key === 'videos' - ? req.context.page.featuredLinks[key + 'Heading'] || req.context.site.data.ui.toc[key] + ? 
req.context.page.featuredLinks[`${key}Heading`] || req.context.site.data.ui.toc[key] : req.context.site.data.ui.toc[key], viewAllHref: key === 'startHere' && !req.context.currentCategory && hasGuidesPage diff --git a/src/landings/components/SidebarProduct.tsx b/src/landings/components/SidebarProduct.tsx index 89494c3b1e6f..c899fff64a78 100644 --- a/src/landings/components/SidebarProduct.tsx +++ b/src/landings/components/SidebarProduct.tsx @@ -135,10 +135,10 @@ function RestNavListItem({ category }: { category: ProductTreeNode }) { (entries) => { entries.forEach((entry) => { if (entry.target.id) { - const anchor = '#' + entry.target.id.split('--')[0] + const anchor = `#${entry.target.id.split('--')[0]}` if (entry.isIntersecting === true) setVisibleAnchor(anchor) } else if (asPath.includes('#')) { - setVisibleAnchor('#' + asPath.split('#')[1]) + setVisibleAnchor(`#${asPath.split('#')[1]}`) } else { setVisibleAnchor('') } diff --git a/src/languages/components/LanguagePicker.tsx b/src/languages/components/LanguagePicker.tsx index b2ed1b50b632..05c46694d8a9 100644 --- a/src/languages/components/LanguagePicker.tsx +++ b/src/languages/components/LanguagePicker.tsx @@ -84,7 +84,7 @@ export const LanguagePicker = ({ xs, mediumOrLower }: Props) => { className={`color-fg-default width-full ${styles.menuButton}`} aria-label={`Select language: current language is ${selectedLang.name}`} > - {t('language_picker_label') + '\n'} + {`${t('language_picker_label')}\n`} {selectedLang.name} diff --git a/src/languages/lib/correct-translation-content.ts b/src/languages/lib/correct-translation-content.ts index fda0b9af99ae..8916afaf9351 100644 --- a/src/languages/lib/correct-translation-content.ts +++ b/src/languages/lib/correct-translation-content.ts @@ -253,7 +253,7 @@ export function correctTranslatedContentStrings( return match } - const withLinebreak = match.slice(0, -1) + '\n' + const withLinebreak = `${match.slice(0, -1)}\n` if (englishContent.includes(withLinebreak) && !englishContent.includes(match)) { return withLinebreak } @@ -332,7 +332,7 @@ export function correctTranslatedContentStrings( const keyString = '5DE3 E050 9C47 EA3C F04A 42D3 4AEE 18F8 3AFD EB23' const translatedSentences = [ // ru - 'Полный отпечаток ключа\u00A0\u2014 `' + keyString + '`.', + `Полный отпечаток ключа\u00A0\u2014 \`${keyString}\`.`, // ko `키의 전체 지문은 \`${keyString}\`입니다.`, // es diff --git a/src/languages/lib/render-with-fallback.ts b/src/languages/lib/render-with-fallback.ts index 3a76c90da54c..30c3686e5439 100644 --- a/src/languages/lib/render-with-fallback.ts +++ b/src/languages/lib/render-with-fallback.ts @@ -70,7 +70,7 @@ export function createTranslationFallbackComment(error: Error, property: string) // Limit message length to keep comment manageable if (cleanMessage.length > 200) { - cleanMessage = cleanMessage.substring(0, 200) + '...' 
+ cleanMessage = `${cleanMessage.substring(0, 200)}...` } errorDetails.push(`msg="${cleanMessage.replace(/"/g, "'")}"`) @@ -141,7 +141,7 @@ export async function renderContentWithFallback( // Skip for textOnly rendering to avoid breaking plain text output if (context.currentLanguage !== 'en' && !options?.textOnly) { const errorComment = createTranslationFallbackComment(error as Error, property) - return errorComment + '\n' + fallbackContent + return `${errorComment}\n${fallbackContent}` } return fallbackContent @@ -181,7 +181,7 @@ export async function executeWithFallback( // Only for HTML content (detected by presence of HTML tags) if (typeof fallbackContent === 'string' && /<[^>]+>/.test(fallbackContent)) { const errorComment = createTranslationFallbackComment(error as Error, 'content') - return (errorComment + '\n' + fallbackContent) as T + return `${errorComment}\n${fallbackContent}` as T } return fallbackContent diff --git a/src/languages/lib/translation-utils.ts b/src/languages/lib/translation-utils.ts index 40acaa3b7369..e190e94b87d1 100644 --- a/src/languages/lib/translation-utils.ts +++ b/src/languages/lib/translation-utils.ts @@ -19,8 +19,7 @@ export function createTranslationFunctions(uiData: UIStrings, namespaces: string if (missingNamespaces.length > 0) { console.warn( `Missing namespaces [${missingNamespaces.join(', ')}] in UI data. ` + - 'Available namespaces: ' + - Object.keys(uiData).sort().join(', '), + `Available namespaces: ${Object.keys(uiData).sort().join(', ')}`, ) // For 404 pages, we can't afford to throw errors; create defensive fallbacks diff --git a/src/languages/tests/api-search.ts b/src/languages/tests/api-search.ts index ed5adb37e15d..1cd9f87c1dc8 100644 --- a/src/languages/tests/api-search.ts +++ b/src/languages/tests/api-search.ts @@ -14,7 +14,7 @@ describeIfElasticsearchURL('search v1 middleware in non-English', () => { // which clearly has a record with the title "Foo" sp.set('query', 'foo') sp.set('language', 'ja') - const res = await get('/api/search/v1?' + sp) + const res = await get(`/api/search/v1?${sp}`) expect(res.statusCode).toBe(200) const results = JSON.parse(res.body) diff --git a/src/links/scripts/rendered-content-link-checker.ts b/src/links/scripts/rendered-content-link-checker.ts index 3218a712d14a..fbc81c0eddb6 100755 --- a/src/links/scripts/rendered-content-link-checker.ts +++ b/src/links/scripts/rendered-content-link-checker.ts @@ -611,7 +611,7 @@ function flawIssueDisplay(flaws: LinkFlaw[], opts: Options, mentionExternalExclu // limit is 65536 if (output.length > 60000) { - output = output.slice(0, 60000) + '\n\n---\n\nOUTPUT TRUNCATED' + output = `${output.slice(0, 60000)}\n\n---\n\nOUTPUT TRUNCATED` } return output @@ -950,7 +950,7 @@ async function checkHrefLink( // 6. 
'https://example.com' (external link) const [pathFragment, hashFragment] = href.split('#') - const hash = '#' + hashFragment // the hash is the part that starts with `#` + const hash = `#${hashFragment}` // the hash is the part that starts with `#` // this conditional handles cases in which the link is to the current article (cases 1-3 above) if (checkAnchors && (!pathFragment || pathFragment === permalink.href)) { diff --git a/src/links/scripts/validate-github-github-docs-urls/generate-new-json.ts b/src/links/scripts/validate-github-github-docs-urls/generate-new-json.ts index 9c026d1ff234..20ad6e3ab5bb 100644 --- a/src/links/scripts/validate-github-github-docs-urls/generate-new-json.ts +++ b/src/links/scripts/validate-github-github-docs-urls/generate-new-json.ts @@ -38,7 +38,7 @@ export function generateNewJSON( const writeTo = options.output || destinationFilePath // It's important that this serializes exactly like the Ruby code // that is the CLI script `script/add-docs-url` in github/github. - const serialized = JSON.stringify(destination, null, 2) + '\n' + const serialized = `${JSON.stringify(destination, null, 2)}\n` fs.writeFileSync(writeTo, serialized, 'utf-8') console.log(`Wrote ${countChanges} change${countChanges === 1 ? '' : 's'} to ${writeTo}`) if (writeTo !== destinationFilePath) { diff --git a/src/metrics/scripts/docsaudit.ts b/src/metrics/scripts/docsaudit.ts index a49d6ccce9a4..75ef1d552ffe 100644 --- a/src/metrics/scripts/docsaudit.ts +++ b/src/metrics/scripts/docsaudit.ts @@ -77,7 +77,7 @@ async function main(): Promise { console.log(csvEntry) results.push(csvEntry) } - csvString += results.join('\n') + '\n' + csvString += `${results.join('\n')}\n` fs.writeFileSync(outputFile, csvString.trim(), 'utf8') console.log(`Done! Wrote ${outputFile}`) diff --git a/src/observability/logger/lib/to-logfmt.ts b/src/observability/logger/lib/to-logfmt.ts index fad0475869e4..0f59c7ee850c 100644 --- a/src/observability/logger/lib/to-logfmt.ts +++ b/src/observability/logger/lib/to-logfmt.ts @@ -40,13 +40,13 @@ function stringify(data: Record): string { stringValue = stringValue.replace(/["\\]/g, '\\$&') } if (needs_quoting || needs_escaping) { - stringValue = '"' + stringValue + '"' + stringValue = `"${stringValue}"` } if (stringValue === '' && !is_null) { stringValue = '""' } - line += key + '=' + stringValue + ' ' + line += `${key}=${stringValue} ` } // trim trailing space diff --git a/src/observability/logger/middleware/get-automatic-request-logger.ts b/src/observability/logger/middleware/get-automatic-request-logger.ts index 7e9d01508303..0ea1992178bb 100644 --- a/src/observability/logger/middleware/get-automatic-request-logger.ts +++ b/src/observability/logger/middleware/get-automatic-request-logger.ts @@ -47,7 +47,7 @@ export function getAutomaticRequestLogger() { toLogfmt({ ...loggerContext, status, - responseTime: responseTime + ' ms', + responseTime: `${responseTime} ms`, contentLength: String(contentLength), method, url, @@ -71,7 +71,7 @@ export function getAutomaticRequestLogger() { chalk.reset(method), chalk.reset(url), chalk[color](status), - chalk.reset(responseTime + ' ms'), + chalk.reset(`${responseTime} ms`), chalk.reset('-'), chalk.reset(String(contentLength)), ].join(' ') diff --git a/src/redirects/lib/get-redirect.ts b/src/redirects/lib/get-redirect.ts index 8cc5e1d0439d..99b1d7556453 100644 --- a/src/redirects/lib/get-redirect.ts +++ b/src/redirects/lib/get-redirect.ts @@ -73,11 +73,10 @@ export default function getRedirect(uri: string, context: Context): string | und 
if (withoutLanguage.startsWith(nonEnterpriseDefaultVersionPrefix)) { // E.g. '/free-pro-team@latest/foo/bar' or '/free-pro-team@latest' - basicCorrection = - `/${language}` + withoutLanguage.replace(nonEnterpriseDefaultVersionPrefix, '') + basicCorrection = `/${language}${withoutLanguage.replace(nonEnterpriseDefaultVersionPrefix, '')}` } else if (withoutLanguage.replace('/', '') in allVersions && !languagePrefixRegex.test(uri)) { // E.g. just '/github-ae@latest' or '/enterprise-cloud@latest' - basicCorrection = `/${language}` + withoutLanguage + basicCorrection = `/${language}${withoutLanguage}` return basicCorrection } @@ -86,18 +85,20 @@ export default function getRedirect(uri: string, context: Context): string | und withoutLanguage.startsWith('/enterprise-server/') ) { // E.g. '/enterprise-server' or '/enterprise-server/3.0/foo' - basicCorrection = - `/${language}` + - withoutLanguage.replace('/enterprise-server', `/enterprise-server@${latestStable}`) + basicCorrection = `/${language}${withoutLanguage.replace( + '/enterprise-server', + `/enterprise-server@${latestStable}`, + )}` // If it's now just the version, without anything after, exit here if (withoutLanguage === '/enterprise-server') { return basicCorrection } } else if (withoutLanguage.startsWith('/enterprise-server@latest')) { // E.g. '/enterprise-server@latest' or '/enterprise-server@latest/3.3/foo' - basicCorrection = - `/${language}` + - withoutLanguage.replace('/enterprise-server@latest', `/enterprise-server@${latestStable}`) + basicCorrection = `/${language}${withoutLanguage.replace( + '/enterprise-server@latest', + `/enterprise-server@${latestStable}`, + )}` // If it was *just* '/enterprise-server@latest' all that's needed is // the language but with 'latest' replaced with the value of `latest` if (withoutLanguage === '/enterprise-server@latest') { @@ -115,14 +116,16 @@ export default function getRedirect(uri: string, context: Context): string | und const version = withoutLanguage.split('/')[2] if (withoutLanguage === `/enterprise/${version}`) { // E.g. `/enterprise/3.0` - basicCorrection = - `/${language}` + - withoutLanguage.replace(`/enterprise/${version}`, `/enterprise-server@${version}`) + basicCorrection = `/${language}${withoutLanguage.replace( + `/enterprise/${version}`, + `/enterprise-server@${version}`, + )}` return basicCorrection } else { - basicCorrection = - `/${language}` + - withoutLanguage.replace(`/enterprise/${version}/`, `/enterprise-server@${version}/`) + basicCorrection = `/${language}${withoutLanguage.replace( + `/enterprise/${version}/`, + `/enterprise-server@${version}/`, + )}` } } else if (withoutLanguage === '/enterprise') { // E.g. `/enterprise` exactly @@ -136,11 +139,9 @@ export default function getRedirect(uri: string, context: Context): string | und // If the URL is without a language, and no redirect is necessary, // but it has as version prefix, the language has to be there // otherwise it will never be found in `req.context.pages` - basicCorrection = - `/${language}` + - withoutLanguage - .replace(`/enterprise/`, `/enterprise-server@${latest}/`) - .replace('/user/', '/') + basicCorrection = `/${language}${withoutLanguage + .replace(`/enterprise/`, `/enterprise-server@${latest}/`) + .replace('/user/', '/')}` } else if (withoutLanguage.startsWith('/insights')) { // E.g. 
'/insights/foo' basicCorrection = uri.replace('/insights', `${language}/enterprise-server@${latest}/insights`) @@ -171,7 +172,7 @@ export default function getRedirect(uri: string, context: Context): string | und if (supported.includes(version) || version === 'latest') { prefix = `/${majorVersion}@${version}` - suffix = '/' + split.slice(2).join('/') + suffix = `/${split.slice(2).join('/')}` if ( suffix.includes('/user') || @@ -183,7 +184,7 @@ export default function getRedirect(uri: string, context: Context): string | und } else { // If version is not supported, we still need to set these values prefix = `/${majorVersion}@${version}` - suffix = '/' + split.slice(2).join('/') + suffix = `/${split.slice(2).join('/')}` } const newURL = prefix + suffix @@ -319,7 +320,7 @@ function tryReplacements(prefix: string, suffix: string, context: Context): stri return false } const candidateAsRedirect = prefix + suffix - const candidateAsURL = '/en' + candidateAsRedirect + const candidateAsURL = `/en${candidateAsRedirect}` return candidateAsRedirect in redirects || candidateAsURL in pages } diff --git a/src/redirects/middleware/handle-redirects.ts b/src/redirects/middleware/handle-redirects.ts index 82d88882dece..23d806c53a59 100644 --- a/src/redirects/middleware/handle-redirects.ts +++ b/src/redirects/middleware/handle-redirects.ts @@ -76,7 +76,7 @@ export default function handleRedirects(req: ExtendedRequest, res: Response, nex // have to do this now because searchPath replacement changes the path as well as the query params if (queryParams) { - queryParams = '?' + queryParams + queryParams = `?${queryParams}` } // remove query params temporarily so we can find the path in the redirects object diff --git a/src/rest/components/RestAuth.tsx b/src/rest/components/RestAuth.tsx index aaed841f339a..f808dfec8ce0 100644 --- a/src/rest/components/RestAuth.tsx +++ b/src/rest/components/RestAuth.tsx @@ -106,8 +106,8 @@ function FineGrainedAccess({ progAccess }: FineGrainedProps) { numPermissionSets === 0 ? t('no_permission_sets') : numPermissionSets > 1 - ? t('permission_sets') + ':' - : t('permission_set') + ':' + ? `${t('permission_sets')}:` + : `${t('permission_set')}:` const publicAccessMsg = numPermissionSets === 0 ? t('allows_public_read_access_no_permissions') diff --git a/src/rest/components/RestRedirect.tsx b/src/rest/components/RestRedirect.tsx index cbb16ee87fcb..cb6829bd5507 100644 --- a/src/rest/components/RestRedirect.tsx +++ b/src/rest/components/RestRedirect.tsx @@ -33,7 +33,7 @@ export function RestRedirect() { const params = new URLSearchParams(asPathQuery) params.set('apiVersion', date) - const url = `/${router.locale}${asPathRoot}?${params}${hash ? '#' + hash : ''}` + const url = `/${router.locale}${asPathRoot}?${params}${hash ? 
`#${hash}` : ''}` router.replace(url) } }, [router.asPath, currentVersion]) diff --git a/src/rest/docs.ts b/src/rest/docs.ts index 86cf35d0d7be..756ca4ab6a52 100755 --- a/src/rest/docs.ts +++ b/src/rest/docs.ts @@ -29,48 +29,46 @@ log(chalk.white.bold(' npm run dev\n')) log(chalk.green.bold.underline('REST docs script examples\n')) log(chalk.green.bold(' Examples of ways you can build the REST docs locally:\n')) log( - chalk.cyan.bold(' - REST All versions:') + - ' ' + - chalk.magenta('npm run sync-rest && npm run dev'), + `${chalk.cyan.bold(' - REST All versions:')} ${chalk.magenta( + 'npm run sync-rest && npm run dev', + )}`, ) log( - chalk.cyan.bold(' - REST Dotcom only:') + - ' ' + - chalk.magenta('npm run sync-rest -- --versions api.github.com && npm run dev'), + `${chalk.cyan.bold(' - REST Dotcom only:')} ${chalk.magenta( + 'npm run sync-rest -- --versions api.github.com && npm run dev', + )}`, ) log( - chalk.cyan.bold(' - REST Two versions:') + - ' ' + - chalk.magenta('npm run sync-rest -- --versions ghes-3.7 ghes-3.8 && npm run dev'), + `${chalk.cyan.bold(' - REST Two versions:')} ${chalk.magenta( + 'npm run sync-rest -- --versions ghes-3.7 ghes-3.8 && npm run dev', + )}`, ) log( - chalk.cyan.bold(' - REST Dotcom and next calendar date version:') + - ' ' + - chalk.magenta('npm run sync-rest -- --next --versions api.github.com && npm run dev'), + `${chalk.cyan.bold(' - REST Dotcom and next calendar date version:')} ${chalk.magenta( + 'npm run sync-rest -- --next --versions api.github.com && npm run dev', + )}`, ) log( - chalk.cyan.bold(' - REST Dotcom only, including unpublished operations:') + - ' ' + - chalk.magenta( - 'npm run sync-rest -- --versions api.github.com --include-unpublished && npm run dev', - ), + `${chalk.cyan.bold(' - REST Dotcom only, including unpublished operations:')} ${chalk.magenta( + 'npm run sync-rest -- --versions api.github.com --include-unpublished && npm run dev', + )}`, ) log(chalk.green.bold.underline('\nWebhook docs script examples\n')) log(chalk.green.bold(' Examples of ways you can build the Webhook docs locally:\n')) log( - chalk.cyan.bold(' - Webhooks All versions:') + - ' ' + - chalk.magenta('npm run sync-webhooks && npm run dev'), + `${chalk.cyan.bold(' - Webhooks All versions:')} ${chalk.magenta( + 'npm run sync-webhooks && npm run dev', + )}`, ) log( - chalk.cyan.bold(' - Webhooks Dotcom only:') + - ' ' + - chalk.magenta('npm run sync-webhooks -- --versions api.github.com && npm run dev'), + `${chalk.cyan.bold(' - Webhooks Dotcom only:')} ${chalk.magenta( + 'npm run sync-webhooks -- --versions api.github.com && npm run dev', + )}`, ) log( - chalk.cyan.bold(' - Webhooks Two versions:') + - ' ' + - chalk.magenta('npm run sync-webhooks -- --versions ghes-3.7 ghes-3.8 && npm run dev'), + `${chalk.cyan.bold(' - Webhooks Two versions:')} ${chalk.magenta( + 'npm run sync-webhooks -- --versions ghes-3.7 ghes-3.8 && npm run dev', + )}`, ) log(chalk.green.bold('\nFor more info and additional options, run:\n')) log(chalk.white.bold(' npm run sync-rest -- --help')) diff --git a/src/rest/scripts/utils/create-rest-examples.ts b/src/rest/scripts/utils/create-rest-examples.ts index f1fdf3a73358..e6d71b715316 100644 --- a/src/rest/scripts/utils/create-rest-examples.ts +++ b/src/rest/scripts/utils/create-rest-examples.ts @@ -72,11 +72,7 @@ export default async function getCodeSamples(operation: Operation): Promise 1 - ? example.request.description + - ' ' + - (i + 1) + - ': Status Code ' + - example.response!.statusCode + ? 
`${example.request.description} ${i + 1}: Status Code ${example.response!.statusCode}` : example.request.description, }, })) diff --git a/src/rest/tests/api.ts b/src/rest/tests/api.ts index 837301d0f0e3..2341262d0241 100644 --- a/src/rest/tests/api.ts +++ b/src/rest/tests/api.ts @@ -20,7 +20,7 @@ describe('anchor-redirect api', () => { const sp = new URLSearchParams() sp.set('path', path) sp.set('hash', hash) - const res = await get('/api/anchor-redirect?' + sp) + const res = await get(`/api/anchor-redirect?${sp}`) expect(res.statusCode).toBe(200) const { to } = JSON.parse(res.body) expect(to).toBe(value) @@ -31,7 +31,7 @@ describe('anchor-redirect api', () => { const hash = key.split('#')[1] const sp = new URLSearchParams() sp.set('hash', hash) - const res = await get('/api/anchor-redirect?' + sp) + const res = await get(`/api/anchor-redirect?${sp}`) expect(res.statusCode).toBe(400) }) test('errors when path is not passed', async () => { @@ -40,14 +40,14 @@ describe('anchor-redirect api', () => { const path = key.split('#')[0] const sp = new URLSearchParams() sp.set('path', path) - const res = await get('/api/anchor-redirect?' + sp) + const res = await get(`/api/anchor-redirect?${sp}`) expect(res.statusCode).toBe(400) }) test('unfound redirect returns undefined', async () => { const sp = new URLSearchParams() sp.set('path', 'foo') sp.set('hash', 'bar') - const res = await get('/api/anchor-redirect?' + sp) + const res = await get(`/api/anchor-redirect?${sp}`) const { to } = JSON.parse(res.body) expect(to).toBe(undefined) }) @@ -55,7 +55,7 @@ describe('anchor-redirect api', () => { const sp = new URLSearchParams() sp.set('path', 'foo') sp.set('hash', 'bar') - const res = await get('/api/anchor-redirect?' + sp) + const res = await get(`/api/anchor-redirect?${sp}`) expect(res.headers['cache-control']).toContain('public') expect(res.headers['cache-control']).toMatch(/max-age=[1-9]/) expect(res.headers['surrogate-control']).toContain('public') diff --git a/src/rest/tests/create-rest-examples.ts b/src/rest/tests/create-rest-examples.ts index 09b021fe0307..239f97e6de29 100644 --- a/src/rest/tests/create-rest-examples.ts +++ b/src/rest/tests/create-rest-examples.ts @@ -55,7 +55,7 @@ describe('rest example requests and responses', () => { // example is any because getCodeSamples returns objects from untyped JavaScript module mergedExamples.forEach((example: any, index: number) => { expect(example.request.description).toBe( - 'Example ' + (index + 1) + ': Status Code ' + example.response.statusCode, + `Example ${index + 1}: Status Code ${example.response.statusCode}`, ) }) }) diff --git a/src/rest/tests/rendering.ts b/src/rest/tests/rendering.ts index 56c9e85006ef..645401bf0072 100644 --- a/src/rest/tests/rendering.ts +++ b/src/rest/tests/rendering.ts @@ -109,7 +109,7 @@ describe('REST references docs', () => { .text() .trim() if (apiVersion === allVersions[version].latestApiVersion) { - expect(versionName).toBe(apiVersion + ' (latest)') + expect(versionName).toBe(`${apiVersion} (latest)`) } else { expect(versionName).toBe(apiVersion) } @@ -148,12 +148,11 @@ describe('REST references docs', () => { function formatErrors(differences: Record): string { let errorMessage = 'There are differences in Categories/Subcategories in:\n' for (const schema in differences) { - errorMessage += 'Version: ' + schema + '\n' + errorMessage += `Version: ${schema}\n` for (const category in differences[schema]) { - errorMessage += 'Category: ' + category + '\nSubcategories: \n' - errorMessage += - ' - content/rest 
directory: ' + differences[schema][category].contentDir + '\n' - errorMessage += ' - OpenAPI Schema: ' + differences[schema][category].openAPI + '\n' + errorMessage += `Category: ${category}\nSubcategories: \n` + errorMessage += ` - content/rest directory: ${differences[schema][category].contentDir}\n` + errorMessage += ` - OpenAPI Schema: ${differences[schema][category].openAPI}\n` errorMessage += '---\n' } } diff --git a/src/search/components/input/SearchOverlay.tsx b/src/search/components/input/SearchOverlay.tsx index af97f3a04992..bf9d40ea8ebd 100644 --- a/src/search/components/input/SearchOverlay.tsx +++ b/src/search/components/input/SearchOverlay.tsx @@ -299,7 +299,7 @@ export function SearchOverlay({ // When loading, capture the last height of the suggestions list so we can use it for the loading div const previousSuggestionsListHeight = useMemo(() => { if (generalSearchResults.length || aiAutocompleteOptions.length) { - return 7 * (generalSearchResults.length + aiAutocompleteOptions.length) + '' + return `${7 * (generalSearchResults.length + aiAutocompleteOptions.length)}` } else { return '150' // Default height for just 2 suggestions } diff --git a/src/search/lib/ai-search-proxy.ts b/src/search/lib/ai-search-proxy.ts index 75a4a42b4cbf..dd84458da35f 100644 --- a/src/search/lib/ai-search-proxy.ts +++ b/src/search/lib/ai-search-proxy.ts @@ -132,8 +132,7 @@ export const aiSearchProxy = async (req: ExtendedRequest, res: Response) => { res.status(500).json({ errors: [{ message: 'Internal server error' }] }) } else { // Send error message via the stream - const errorMessage = - JSON.stringify({ errors: [{ message: 'Internal server error' }] }) + '\n' + const errorMessage = `${JSON.stringify({ errors: [{ message: 'Internal server error' }] })}\n` res.write(errorMessage) res.end() } diff --git a/src/search/lib/helpers/strings.ts b/src/search/lib/helpers/strings.ts index d8ca26383cc4..f7e546377e9e 100644 --- a/src/search/lib/helpers/strings.ts +++ b/src/search/lib/helpers/strings.ts @@ -4,7 +4,7 @@ export function safeUrlDisplay(url: string): string { parsed.password = '***' } if (parsed.username) { - parsed.username = parsed.username.slice(0, 4) + '***' + parsed.username = `${parsed.username.slice(0, 4)}***` } return parsed.toString() } diff --git a/src/search/lib/helpers/time.ts b/src/search/lib/helpers/time.ts index 36579358d4ef..2353d513c1ea 100644 --- a/src/search/lib/helpers/time.ts +++ b/src/search/lib/helpers/time.ts @@ -29,7 +29,7 @@ export function utcTimestamp() { d.getUTCSeconds(), ] // If it's a number make it a zero-padding 2 character string - .map((x) => (typeof x === 'number' ? ('0' + x).slice(-2) : x)) + .map((x) => (typeof x === 'number' ? `0${x}`.slice(-2) : x)) .join('') ) } diff --git a/src/search/scripts/scrape/lib/build-records.ts b/src/search/scripts/scrape/lib/build-records.ts index b50ca772406e..7f40baac8bd3 100644 --- a/src/search/scripts/scrape/lib/build-records.ts +++ b/src/search/scripts/scrape/lib/build-records.ts @@ -178,36 +178,35 @@ export default async function buildRecords( // Report failed pages if any if (failedPages.length > 0) { - console.log( - '\n' + - boxen( - chalk.bold.red(`${failedPages.length} page(s) failed to scrape\n\n`) + - failedPages - .slice(0, 10) // Show first 10 failures - .map((failure, idx) => { - return ( - chalk.gray(`${idx + 1}. `) + - chalk.yellow(failure.errorType) + - '\n' + - (failure.relativePath - ? chalk.cyan(` Path: `) + failure.relativePath + '\n' - : '') + - (failure.url ? 
chalk.cyan(` URL: `) + failure.url + '\n' : '') + - chalk.gray(` Error: ${failure.error}`) - ) - }) - .join('\n\n') + - (failedPages.length > 10 - ? `\n\n${chalk.gray(`... and ${failedPages.length - 10} more`)}` - : ''), - { - title: chalk.red('⚠ Failed Pages'), - padding: 1, - borderColor: 'yellow', - }, - ) + - '\n', - ) + const failureCount = failedPages.length + const header = chalk.bold.red(`${failureCount} page(s) failed to scrape\n\n`) + + const failureList = failedPages + .slice(0, 10) // Show first 10 failures + .map((failure, idx) => { + const number = chalk.gray(`${idx + 1}. `) + const errorType = chalk.yellow(failure.errorType) + const pathLine = failure.relativePath + ? `\n${chalk.cyan(' Path: ')}${failure.relativePath}` + : '' + const urlLine = failure.url ? `\n${chalk.cyan(' URL: ')}${failure.url}` : '' + const errorLine = `\n${chalk.gray(` Error: ${failure.error}`)}` + + return `${number}${errorType}${pathLine}${urlLine}${errorLine}` + }) + .join('\n\n') + + const remaining = + failureCount > 10 ? `\n\n${chalk.gray(`... and ${failureCount - 10} more`)}` : '' + + const boxContent = header + failureList + remaining + const box = boxen(boxContent, { + title: chalk.red('⚠ Failed Pages'), + padding: 1, + borderColor: 'yellow', + }) + + console.log(`\n${box}\n`) // Log suggestion console.log( diff --git a/src/search/tests/api-search.ts b/src/search/tests/api-search.ts index a77abcdb9144..b20227b40b24 100644 --- a/src/search/tests/api-search.ts +++ b/src/search/tests/api-search.ts @@ -33,7 +33,7 @@ describeIfElasticsearchURL('search v1 middleware', () => { // see src/search/tests/fixtures/search-indexes/github-docs-dotcom-en-records.json // which clearly has a record with the title "Foo" sp.set('query', 'foo') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(200) const results: GeneralSearchResponse = JSON.parse(res.body) @@ -75,7 +75,7 @@ describeIfElasticsearchURL('search v1 middleware', () => { const sp = new URLSearchParams() sp.set('query', 'foo') sp.set('debug', '1') // Note! - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(200) const results: GeneralSearchResponse = JSON.parse(res.body) // safe because we know exactly the fixtures @@ -90,7 +90,7 @@ describeIfElasticsearchURL('search v1 middleware', () => { { const sp = new URLSearchParams() sp.set('query', 'sill') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(200) const results: GeneralSearchResponse = JSON.parse(res.body) // Fixtures contains no word called 'sill'. It does contain the term @@ -105,7 +105,7 @@ describeIfElasticsearchURL('search v1 middleware', () => { const sp = new URLSearchParams() sp.set('query', 'sill') sp.set('autocomplete', 'true') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(200) const results: GeneralSearchResponse = JSON.parse(res.body) expect(results.meta.found.value).toBeGreaterThanOrEqual(1) @@ -119,7 +119,7 @@ describeIfElasticsearchURL('search v1 middleware', () => { test('find nothing', async () => { const sp = new URLSearchParams() sp.set('query', 'xojixjoiwejhfoiuwehjfioweufhj') - const res = await get('/api/search/v1?' 
+ sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(200) const results: GeneralSearchResponse = JSON.parse(res.body) expect(results.hits.length).toBe(0) @@ -130,7 +130,7 @@ describeIfElasticsearchURL('search v1 middleware', () => { const sp = new URLSearchParams() sp.set('query', 'introduction heading') sp.append('highlights', 'content') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(200) const results: GeneralSearchResponse = JSON.parse(res.body) expect(results.meta.found.value).toBeGreaterThanOrEqual(1) @@ -145,7 +145,7 @@ describeIfElasticsearchURL('search v1 middleware', () => { // This will match because it's in the 'content' but not in 'headings' sp.set('query', 'Fact of life') sp.set('highlights', 'title') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(200) const results: GeneralSearchResponse = JSON.parse(res.body) expect(results.meta.found.value).toBeGreaterThanOrEqual(1) @@ -159,12 +159,12 @@ describeIfElasticsearchURL('search v1 middleware', () => { const sp = new URLSearchParams() sp.set('query', 'foo') sp.set('version', 'dotcom') - const res1 = await get('/api/search/v1?' + sp.toString()) + const res1 = await get(`/api/search/v1?${sp.toString()}`) expect(res1.statusCode).toBe(200) const results1: GeneralSearchResponse = JSON.parse(res1.body) sp.set('version', 'free-pro-team@latest') - const res2 = await get('/api/search/v1?' + sp.toString()) + const res2 = await get(`/api/search/v1?${sp.toString()}`) expect(res2.statusCode).toBe(200) const results2: GeneralSearchResponse = JSON.parse(res2.body) expect(results1.hits[0].id).toBe(results2.hits[0].id) @@ -185,7 +185,7 @@ describeIfElasticsearchURL('search v1 middleware', () => { { const sp = new URLSearchParams() sp.set('query', ' ') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(400) const errorResponse = JSON.parse(res.body) as { error: string @@ -198,7 +198,7 @@ describeIfElasticsearchURL('search v1 middleware', () => { const sp = new URLSearchParams() sp.set('query', 'test') sp.set('language', 'xxx') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(400) const errorResponse = JSON.parse(res.body) as { error: string @@ -211,7 +211,7 @@ describeIfElasticsearchURL('search v1 middleware', () => { const sp = new URLSearchParams() sp.set('query', 'test') sp.set('page', '9999') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(400) const errorResponse = JSON.parse(res.body) as { error: string @@ -224,7 +224,7 @@ describeIfElasticsearchURL('search v1 middleware', () => { const sp = new URLSearchParams() sp.set('query', 'test') sp.set('version', 'xxxxx') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(400) const errorResponse = JSON.parse(res.body) as { error: string @@ -238,7 +238,7 @@ describeIfElasticsearchURL('search v1 middleware', () => { const sp = new URLSearchParams() sp.set('query', 'test') sp.set('size', 'not a number') - const res = await get('/api/search/v1?' 
+ sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(400) const errorResponse = JSON.parse(res.body) as { error: string @@ -251,7 +251,7 @@ describeIfElasticsearchURL('search v1 middleware', () => { const sp = new URLSearchParams() sp.set('query', 'test') sp.set('sort', 'neverheardof') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(400) const errorResponse = JSON.parse(res.body) as { error: string @@ -264,7 +264,7 @@ describeIfElasticsearchURL('search v1 middleware', () => { const sp = new URLSearchParams() sp.set('query', 'test') sp.set('highlights', 'neverheardof') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(400) const errorResponse = JSON.parse(res.body) as { error: string @@ -277,7 +277,7 @@ describeIfElasticsearchURL('search v1 middleware', () => { const sp = new URLSearchParams() sp.append('query', 'test1') sp.append('query', 'test2') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(400) const errorResponse = JSON.parse(res.body) as { error: string @@ -290,7 +290,7 @@ describeIfElasticsearchURL('search v1 middleware', () => { test('breadcrumbless records should always return a string', async () => { const sp = new URLSearchParams() sp.set('query', 'breadcrumbs') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(200) const results: GeneralSearchResponse = JSON.parse(res.body) // safe because we know exactly the fixtures @@ -305,7 +305,7 @@ describeIfElasticsearchURL("additional fields with 'include'", () => { test("'intro' and 'headings' are omitted by default", async () => { const sp = new URLSearchParams() sp.set('query', 'foo') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(200) const results: GeneralSearchResponse = JSON.parse(res.body) const firstKeys = Object.keys(results.hits[0]) @@ -317,7 +317,7 @@ describeIfElasticsearchURL("additional fields with 'include'", () => { const sp = new URLSearchParams() sp.set('query', 'foo') sp.set('include', 'intro') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(200) const results: GeneralSearchResponse = JSON.parse(res.body) const firstKeys = Object.keys(results.hits[0]) @@ -330,7 +330,7 @@ describeIfElasticsearchURL("additional fields with 'include'", () => { sp.set('query', 'foo') sp.append('include', 'intro') sp.append('include', 'headings') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(200) const results: GeneralSearchResponse = JSON.parse(res.body) const firstKeys = Object.keys(results.hits[0]) @@ -342,7 +342,7 @@ describeIfElasticsearchURL("additional fields with 'include'", () => { const sp = new URLSearchParams() sp.set('query', 'foo') sp.set('include', 'xxxxx') - const res = await get('/api/search/v1?' 
+ sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(400) const results = JSON.parse(res.body) as { error: string @@ -359,7 +359,7 @@ describeIfElasticsearchURL('filter by toplevel', () => { const sp = new URLSearchParams() sp.set('query', 'foo') sp.set('include', 'toplevel') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(200) const results: GeneralSearchResponse = JSON.parse(res.body) // In the fixtures, there are two distinct `toplevel` that @@ -373,7 +373,7 @@ describeIfElasticsearchURL('filter by toplevel', () => { sp.set('query', 'foo') sp.set('include', 'toplevel') sp.set('toplevel', 'Baring') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(200) const results: GeneralSearchResponse = JSON.parse(res.body) const toplevels = new Set(results.hits.map((hit) => hit.toplevel)) @@ -386,7 +386,7 @@ describeIfElasticsearchURL('filter by toplevel', () => { sp.set('include', 'toplevel') sp.append('toplevel', 'Baring') sp.append('toplevel', 'Fooing') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(200) const results: GeneralSearchResponse = JSON.parse(res.body) const toplevels = new Set(results.hits.map((hit) => hit.toplevel)) @@ -398,7 +398,7 @@ describeIfElasticsearchURL('filter by toplevel', () => { sp.set('query', 'foo') sp.set('include', 'toplevel') sp.set('toplevel', 'Never heard of') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(200) const results: GeneralSearchResponse = JSON.parse(res.body) expect(results.meta.found.value).toBe(0) @@ -412,7 +412,7 @@ describeIfElasticsearchURL('aggregate', () => { const sp = new URLSearchParams() sp.set('query', 'foo') sp.set('aggregate', 'toplevel') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(200) const results: GeneralSearchResponse & { aggregations?: SearchResultAggregations } = JSON.parse( res.body, @@ -428,7 +428,7 @@ describeIfElasticsearchURL('aggregate', () => { const sp = new URLSearchParams() sp.set('query', 'foo') sp.set('aggregate', 'unrecognizedxxx') - const res = await get('/api/search/v1?' + sp.toString()) + const res = await get(`/api/search/v1?${sp.toString()}`) expect(res.statusCode).toBe(400) const results = JSON.parse(res.body) as { error: string diff --git a/src/webhooks/tests/api.ts b/src/webhooks/tests/api.ts index 102738e413ef..a9e136f794c8 100644 --- a/src/webhooks/tests/api.ts +++ b/src/webhooks/tests/api.ts @@ -14,7 +14,7 @@ describe('webhooks v1 middleware', () => { // field which all webhook types should have. sp.set('category', 'branch_protection_rule') sp.set('version', 'free-pro-team@latest') - const res = await get('/api/webhooks/v1?' + sp) + const res = await get(`/api/webhooks/v1?${sp}`) expect(res.statusCode).toBe(200) const results = JSON.parse(res.body) const actionTypes = Object.keys(results) @@ -36,7 +36,7 @@ describe('webhooks v1 middleware', () => { const sp = new URLSearchParams() sp.set('category', 'branch_protection_rule') sp.set('version', 'enterprise-cloud@latest') - const res = await get('/api/webhooks/v1?' 
+ sp) + const res = await get(`/api/webhooks/v1?${sp}`) expect(res.statusCode).toBe(200) const results = JSON.parse(res.body) const actionTypes = Object.keys(results) @@ -50,7 +50,7 @@ describe('webhooks v1 middleware', () => { const sp = new URLSearchParams() sp.set('category', 'no-such-category') sp.set('version', 'free-pro-team@latest') - const res = await get('/api/webhooks/v1?' + sp) + const res = await get(`/api/webhooks/v1?${sp}`) expect(res.statusCode).toBe(404) expect(JSON.parse(res.body).error).toBeTruthy() @@ -60,7 +60,7 @@ describe('webhooks v1 middleware', () => { const sp = new URLSearchParams() sp.set('category', 'branch_protection_rule') sp.set('version', 'no-such-version') - const res = await get('/api/webhooks/v1?' + sp) + const res = await get(`/api/webhooks/v1?${sp}`) expect(res.statusCode).toBe(404) expect(JSON.parse(res.body).error).toBeTruthy() diff --git a/src/workflows/content-changes-table-comment.ts b/src/workflows/content-changes-table-comment.ts index 26750f4658d3..7b49a7e9aa82 100755 --- a/src/workflows/content-changes-table-comment.ts +++ b/src/workflows/content-changes-table-comment.ts @@ -167,13 +167,13 @@ async function main(owner: string, repo: string, baseSHA: string, headSHA: strin `| ${headings.map((heading) => `**${heading}**`).join(' | ')} |`, `| ${headings.map(() => ':---').join(' | ')} |`, ] - let markdownTable = markdownTableHead.join('\n') + '\n' + let markdownTable = `${markdownTableHead.join('\n')}\n` for (const filteredLine of filteredLines) { if ((markdownTable + filteredLine).length > MAX_COMMENT_SIZE) { markdownTable += '\n**Note** There are more changes in this PR than we can show.' break } - markdownTable += filteredLine + '\n' + markdownTable += `${filteredLine}\n` } return markdownTable diff --git a/src/workflows/enable-automerge.ts b/src/workflows/enable-automerge.ts index 54217e763583..740a201ddab6 100644 --- a/src/workflows/enable-automerge.ts +++ b/src/workflows/enable-automerge.ts @@ -37,12 +37,13 @@ async function main() { } const graph: Record = await github.graphql(mutation, variables) - console.log('GraphQL mutation result:\n' + JSON.stringify(graph)) + console.log(`GraphQL mutation result:\n${JSON.stringify(graph)}`) if (graph.errors && graph.errors.length > 0) { console.error( - 'ERROR! Failed to enable auto-merge:\n - ' + - graph.errors.map((error: any) => error.message).join('\n - '), + `ERROR! 
Failed to enable auto-merge:\n - ${graph.errors + .map((error: any) => error.message) + .join('\n - ')}`, ) } else { console.log('Auto-merge enabled!') diff --git a/src/workflows/experimental/readability-report.ts b/src/workflows/experimental/readability-report.ts index 0ca6d7232415..c31ffe55d427 100644 --- a/src/workflows/experimental/readability-report.ts +++ b/src/workflows/experimental/readability-report.ts @@ -118,7 +118,7 @@ Note: Requires a local server running on localhost:4000 (npm start) const report = generateReport(results) // Always output to console for local development - console.log('\n' + report) + console.log(`\n${report}`) // If running in CI, also save report for commenting on PR if (process.env.GITHUB_ACTIONS) { @@ -198,12 +198,10 @@ async function waitForServer(): Promise { async function analyzeFile(filePath: string): Promise { // Convert file path to URL path // content/get-started/foo.md -> /get-started/foo - const urlPath = - '/' + - filePath - .replace(/^content\//, '') - .replace(/\.md$/, '') - .replace(/\/index$/, '') + const urlPath = `/${filePath + .replace(/^content\//, '') + .replace(/\.md$/, '') + .replace(/\/index$/, '')}` try { // Fetch the rendered page From 3d77b7e3ca336aa39ca90ab5e4a03434f17fe661 Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Thu, 30 Oct 2025 17:09:15 +0000 Subject: [PATCH 5/9] Rename Actions minute multipliers article to Actions runner pricing (#58245) Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: felicitymay <1877141+felicitymay@users.noreply.github.com> --- .../reference/actions-minute-multipliers.md | 78 ------------------ .../reference/actions-runner-pricing.md | 80 +++++++++++++++++++ content/billing/reference/index.md | 8 +- 3 files changed, 84 insertions(+), 82 deletions(-) delete mode 100644 content/billing/reference/actions-minute-multipliers.md create mode 100644 content/billing/reference/actions-runner-pricing.md diff --git a/content/billing/reference/actions-minute-multipliers.md b/content/billing/reference/actions-minute-multipliers.md deleted file mode 100644 index 15c83973ba2e..000000000000 --- a/content/billing/reference/actions-minute-multipliers.md +++ /dev/null @@ -1,78 +0,0 @@ ---- -title: Actions minute multiplier reference -shortTitle: Actions minute multipliers -intro: 'Reference information for calculating the cost of using different {% data variables.product.github %}-hosted runners.' -versions: - fpt: '*' - ghec: '*' - ghes: '*' -topics: - - Billing - - Actions -contentType: reference ---- - -{% data variables.product.github %} rounds the minutes and partial minutes each job uses up to the nearest whole minute. 
- -## Standard runners - -| Operating system | Per-minute rate (USD) | -|---------------------------------------| ----------------------| -| Linux 1-core | $0.002 | -| Linux 2-core | $0.008 | -| Windows 2-core | $0.016 | -| macOS 3-core or 4-core (M1 or Intel) | $0.08 | - -## x64-powered {% data variables.actions.hosted_runners %} - -| Operating system | Per-minute rate (USD) | -|------------------------| ----------------------| -| Linux Advanced 2-core | $0.008 | -| Linux 4-core | $0.016 | -| Linux 8-core | $0.032 | -| Linux 16-core | $0.064 | -| Linux 32-core | $0.128 | -| Linux 64-core | $0.256 | -| Linux 96-core | $0.384 | -| Windows 4-core | $0.032 | -| Windows 8-core | $0.064 | -| Windows 16-core | $0.128 | -| Windows 32-core | $0.256 | -| Windows 64-core | $0.512 | -| Windows 96-core | $0.768 | -| macOS 12-core | $0.12 | - -## arm64-powered {% data variables.actions.hosted_runners %} - -| Operating system | Per-minute rate (USD) | -|---------------------| -----------| -| Linux 2-core | $0.005 | -| Linux 4-core | $0.01 | -| Linux 8-core | $0.02 | -| Linux 16-core | $0.04 | -| Linux 32-core | $0.08 | -| Linux 64-core | $0.16 | -| Windows 2-core | $0.01 | -| Windows 4-core | $0.02 | -| Windows 8-core | $0.04 | -| Windows 16-core | $0.08 | -| Windows 32-core | $0.16 | -| Windows 64-core | $0.32 | -| macOS 6-core (M1) | $0.16 | - -## GPU-powered {% data variables.actions.hosted_runners %} - -| Operating system | Per-minute rate (USD) | -|---------------------| -----------| -| Linux 4-core | $0.07 | -| Windows 4-core | $0.14 | - -## Points to note about rates for runners - -* The number of jobs you can run concurrently across all repositories in your user or organization account depends on your {% data variables.product.github %} plan. For more information, see [AUTOTITLE](/actions/learn-github-actions/usage-limits-billing-and-administration) for {% data variables.product.github %}-hosted runners and [AUTOTITLE](/actions/hosting-your-own-runners/managing-self-hosted-runners/usage-limits-for-self-hosted-runners) for self-hosted runner usage limits. -* {% data reusables.actions.larger-runner-permissions %} -* {% data reusables.actions.about-larger-runners-billing %} -* For {% data variables.actions.hosted_runner %}s, there is no additional cost for configurations that assign public static IP addresses to a {% data variables.actions.hosted_runner %}. For more information on {% data variables.actions.hosted_runner %}s, see [AUTOTITLE](/actions/using-github-hosted-runners/using-larger-runners/about-larger-runners). -* Included minutes cannot be used for {% data variables.actions.hosted_runner %}s. -* The {% data variables.actions.hosted_runner %}s are not free for public repositories. -* Custom images can only be used with larger runners and are billed at the same per-minute rates as those runners. diff --git a/content/billing/reference/actions-runner-pricing.md b/content/billing/reference/actions-runner-pricing.md new file mode 100644 index 000000000000..f89a247b2ca1 --- /dev/null +++ b/content/billing/reference/actions-runner-pricing.md @@ -0,0 +1,80 @@ +--- +title: Actions runner pricing +shortTitle: Actions runner pricing +intro: Reference information for calculating the cost of using different {% data variables.product.github %}-hosted runners. 
+versions: + fpt: "*" + ghec: "*" + ghes: "*" +topics: + - Billing + - Actions +contentType: reference +redirect_from: + - /billing/reference/actions-minute-multipliers +--- + +{% data variables.product.github %} rounds the minutes and partial minutes each job uses up to the nearest whole minute. + +## Standard runners + +| Operating system | Per-minute rate (USD) | +| ------------------------------------ | --------------------- | +| Linux 1-core | $0.002 | +| Linux 2-core | $0.008 | +| Windows 2-core | $0.016 | +| macOS 3-core or 4-core (M1 or Intel) | $0.08 | + +## x64-powered {% data variables.actions.hosted_runners %} + +| Operating system | Per-minute rate (USD) | +| --------------------- | --------------------- | +| Linux Advanced 2-core | $0.008 | +| Linux 4-core | $0.016 | +| Linux 8-core | $0.032 | +| Linux 16-core | $0.064 | +| Linux 32-core | $0.128 | +| Linux 64-core | $0.256 | +| Linux 96-core | $0.384 | +| Windows 4-core | $0.032 | +| Windows 8-core | $0.064 | +| Windows 16-core | $0.128 | +| Windows 32-core | $0.256 | +| Windows 64-core | $0.512 | +| Windows 96-core | $0.768 | +| macOS 12-core | $0.12 | + +## arm64-powered {% data variables.actions.hosted_runners %} + +| Operating system | Per-minute rate (USD) | +| ----------------- | --------------------- | +| Linux 2-core | $0.005 | +| Linux 4-core | $0.01 | +| Linux 8-core | $0.02 | +| Linux 16-core | $0.04 | +| Linux 32-core | $0.08 | +| Linux 64-core | $0.16 | +| Windows 2-core | $0.01 | +| Windows 4-core | $0.02 | +| Windows 8-core | $0.04 | +| Windows 16-core | $0.08 | +| Windows 32-core | $0.16 | +| Windows 64-core | $0.32 | +| macOS 6-core (M1) | $0.16 | + +## GPU-powered {% data variables.actions.hosted_runners %} + +| Operating system | Per-minute rate (USD) | +| ---------------- | --------------------- | +| Linux 4-core | $0.07 | +| Windows 4-core | $0.14 | + +## Points to note about rates for runners + +* The number of jobs you can run concurrently across all repositories in your user or organization account depends on your {% data variables.product.github %} plan. For more information, see [AUTOTITLE](/actions/learn-github-actions/usage-limits-billing-and-administration) for {% data variables.product.github %}-hosted runners and [AUTOTITLE](/actions/hosting-your-own-runners/managing-self-hosted-runners/usage-limits-for-self-hosted-runners) for self-hosted runner usage limits. +* {% data reusables.actions.larger-runner-permissions %} +* {% data reusables.actions.about-larger-runners-billing %} +* For {% data variables.actions.hosted_runner %}s, there is no additional cost for configurations that assign public static IP addresses to a {% data variables.actions.hosted_runner %}. For more information on {% data variables.actions.hosted_runner %}s, see [AUTOTITLE](/actions/using-github-hosted-runners/using-larger-runners/about-larger-runners). +* Included minutes cannot be used for {% data variables.actions.hosted_runner %}s. +* The {% data variables.actions.hosted_runner %}s are not free for public repositories. +* Custom images can only be used with larger runners and are billed at the same per-minute rates as those runners. diff --git a/content/billing/reference/index.md b/content/billing/reference/index.md index b10dcc1ea86a..2184863c9582 100644 --- a/content/billing/reference/index.md +++ b/content/billing/reference/index.md @@ -3,14 +3,14 @@ title: Reference for billing shortTitle: Reference intro: Find information to support your use of billing. 
versions: - fpt: '*' - ghec: '*' - ghes: '*' + fpt: "*" + ghec: "*" + ghes: "*" topics: - Billing children: - /product-usage-included - - /actions-minute-multipliers + - /actions-runner-pricing - /billing-reports - /supported-payment-methods - /azure-billing From 48aaa70a396461a6bfc6495eed9b749f1a10d81e Mon Sep 17 00:00:00 2001 From: Steve-Glass <84886334+Steve-Glass@users.noreply.github.com> Date: Thu, 30 Oct 2025 13:54:57 -0400 Subject: [PATCH 6/9] Update limitations section for coding agent (#58275) --- .../copilot/concepts/agents/coding-agent/about-coding-agent.md | 1 - 1 file changed, 1 deletion(-) diff --git a/content/copilot/concepts/agents/coding-agent/about-coding-agent.md b/content/copilot/concepts/agents/coding-agent/about-coding-agent.md index dfadd483dd04..10bd9da4a7b2 100644 --- a/content/copilot/concepts/agents/coding-agent/about-coding-agent.md +++ b/content/copilot/concepts/agents/coding-agent/about-coding-agent.md @@ -153,7 +153,6 @@ Users can include hidden messages in issues assigned to {% data variables.produc ### Limitations in Copilot's compatibility with other features * **{% data variables.product.prodname_copilot_short %} does not sign its commits**. If you have the "Require signed commits" rule or branch protection enabled, you must rewrite the commit history in order to merge {% data variables.product.prodname_copilot_short %}'s pull requests. See [AUTOTITLE](/repositories/configuring-branches-and-merges-in-your-repository/managing-rulesets/available-rules-for-rulesets#require-signed-commits). -* **{% data variables.product.prodname_copilot_short %} does not work with self-hosted {% data variables.product.prodname_actions %} runners**. {% data variables.product.prodname_copilot_short %} has access to its own development environment, running in {% data variables.product.prodname_actions %}, and must use {% data variables.product.prodname_dotcom %}-hosted runners. See [AUTOTITLE](/copilot/customizing-copilot/customizing-the-development-environment-for-copilot-coding-agent#upgrading-to-larger-github-hosted-github-actions-runners). * **{% data variables.copilot.copilot_coding_agent %} does not work in personal repositories owned by {% data variables.enterprise.prodname_managed_users %}**. This is because {% data variables.copilot.copilot_coding_agent %} requires {% data variables.product.company_short %}-hosted runners, which are not available to repositories owned by {% data variables.enterprise.prodname_managed_users %}. See [AUTOTITLE](/actions/using-github-hosted-runners/using-github-hosted-runners/about-github-hosted-runners). * **{% data variables.copilot.copilot_coding_agent %} doesn't account for content exclusions**. Content exclusions allow administrators to configure {% data variables.product.prodname_copilot_short %} to ignore certain files. When using {% data variables.copilot.copilot_coding_agent %}, {% data variables.product.prodname_copilot_short %} will not ignore these files, and will be able to see and update them. See [AUTOTITLE](/copilot/managing-copilot/configuring-and-auditing-content-exclusion/excluding-content-from-github-copilot). * **{% data variables.copilot.copilot_coding_agent %} only works with repositories hosted on {% data variables.product.github %}**. If your repository is stored using a different code hosting platform, {% data variables.product.prodname_copilot_short %} won't be able to work on it. 
From db3f4bfa9f8d88aaf68eea87dfc50ae48e9a5c34 Mon Sep 17 00:00:00 2001 From: Evan Bonsignori Date: Thu, 30 Oct 2025 12:24:38 -0700 Subject: [PATCH 7/9] fix Dockerfile for openapi check & add validator workflow (#58261) --- .github/workflows/validate-openapi-check.yml | 48 ++++++++++++++++++++ Dockerfile.openapi_decorator | 7 ++- 2 files changed, 51 insertions(+), 4 deletions(-) create mode 100644 .github/workflows/validate-openapi-check.yml diff --git a/.github/workflows/validate-openapi-check.yml b/.github/workflows/validate-openapi-check.yml new file mode 100644 index 000000000000..510f29752ef0 --- /dev/null +++ b/.github/workflows/validate-openapi-check.yml @@ -0,0 +1,48 @@ +name: Validate OpenAPI Check Docker + +# **What it does**: Tests building and running the OpenAPI check Docker container +# **Why we have it**: To ensure the Dockerfile and openapi-check script work correctly +# **Who does it impact**: Docs engineering. + +on: + workflow_dispatch: + pull_request: + paths: + - 'Dockerfile.openapi_decorator' + - 'src/rest/scripts/openapi-check.ts' + - 'src/rest/scripts/utils/get-operations.ts' + - 'src/rest/scripts/utils/operation.ts' + # In case dependencies change + - 'package.json' + - 'package-lock.json' + - 'tsconfig.json' + # Self-test + - '.github/workflows/validate-openapi-check.yml' + +permissions: + contents: read + +jobs: + validate-openapi-check: + runs-on: ubuntu-latest + if: github.repository == 'github/docs-internal' + steps: + - name: Checkout + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3.7.1 + + - name: Build Docker image + run: | + docker build -f Dockerfile.openapi_decorator -t openapi-decorator:test . 
+ + - name: Test Docker image with sample OpenAPI file + run: | + docker run --rm openapi-decorator:test -f "src/rest/data/fpt-2022-11-28/schema.json" + + - name: Test Docker image with multiple OpenAPI files + run: | + docker run --rm openapi-decorator:test \ + -f "src/rest/data/fpt-2022-11-28/schema.json" \ + "src/rest/data/ghec-2022-11-28/schema.json" diff --git a/Dockerfile.openapi_decorator b/Dockerfile.openapi_decorator index ddda5d8bf94d..8e8ca98b6e95 100644 --- a/Dockerfile.openapi_decorator +++ b/Dockerfile.openapi_decorator @@ -1,6 +1,4 @@ -FROM node:18.15-alpine - -RUN apk add --no-cache git python make g++ +FROM node:24-alpine WORKDIR /openapi-check @@ -10,10 +8,11 @@ USER node COPY --chown=node:node package.json /openapi-check COPY --chown=node:node package-lock.json /openapi-check +COPY --chown=node:node tsconfig.json /openapi-check ADD --chown=node:node src /openapi-check/src ADD --chown=node:node content /openapi-check/content ADD --chown=node:node data /openapi-check/data RUN npm ci -D -ENTRYPOINT ["node", "/openapi-check/src/rest/scripts/openapi-check.ts"] +ENTRYPOINT ["npx", "tsx", "/openapi-check/src/rest/scripts/openapi-check.ts"] From 23c9d95cb9c1bb3f5b163a790adb18821d2c9164 Mon Sep 17 00:00:00 2001 From: Kevin Heis Date: Thu, 30 Oct 2025 12:53:05 -0700 Subject: [PATCH 8/9] Remove 'github/no-then' eslint rule (#58220) --- eslint.config.ts | 1 - .../scripts/add-content-type.ts | 6 +- src/events/components/dotcom-cookies.ts | 18 +- src/frame/middleware/index.ts | 8 +- .../scripts/deprecate/archive-version.ts | 28 +-- src/ghes-releases/scripts/release-banner.ts | 6 +- src/links/components/LinkPreviewPopover.tsx | 15 +- .../scripts/rendered-content-link-checker.ts | 6 +- src/metrics/scripts/docstat.ts | 195 +++++------------- .../middleware/catch-middleware-error.ts | 8 +- src/search/scripts/analyze-text.ts | 6 +- .../scripts/scrape/lib/build-records.ts | 93 +++++---- src/search/scripts/scrape/lib/domwaiter.ts | 12 +- src/versions/scripts/use-short-versions.ts | 16 +- .../experimental/readability-report.ts | 6 +- .../fr-add-docs-reviewers-requests.ts | 6 +- src/workflows/ready-for-docs-review.ts | 6 +- 17 files changed, 188 insertions(+), 248 deletions(-) diff --git a/eslint.config.ts b/eslint.config.ts index 9e40b282ae79..2d4fdfca6433 100644 --- a/eslint.config.ts +++ b/eslint.config.ts @@ -96,7 +96,6 @@ export default [ camelcase: 'off', // Many gh apis use underscores, 600+ uses // Disabled rules to review - 'github/no-then': 'off', // 30+ '@typescript-eslint/ban-ts-comment': 'off', // 50+ 'no-shadow': 'off', // 150+ 'github/array-foreach': 'off', // 250+ diff --git a/src/content-render/scripts/add-content-type.ts b/src/content-render/scripts/add-content-type.ts index 4e52f67b3900..a73565a441d5 100644 --- a/src/content-render/scripts/add-content-type.ts +++ b/src/content-render/scripts/add-content-type.ts @@ -192,4 +192,8 @@ function determineContentType(relativePath: string, legacyType: string): string return OTHER_TYPE } -main().catch(console.error) +try { + await main() +} catch (error) { + console.error(error) +} diff --git a/src/events/components/dotcom-cookies.ts b/src/events/components/dotcom-cookies.ts index aebd8710ab25..e31369080a04 100644 --- a/src/events/components/dotcom-cookies.ts +++ b/src/events/components/dotcom-cookies.ts @@ -45,14 +45,13 @@ async function fetchCookies(): Promise { } // Make a single fetch request to the backend. 
- inFlightPromise = fetch(GET_COOKIES_ENDPOINT) - .then((response) => { + inFlightPromise = (async () => { + try { + const response = await fetch(GET_COOKIES_ENDPOINT) if (!response.ok) { throw new Error(`Failed to fetch cookies: ${response.statusText}`) } - return response.json() as Promise - }) - .then((data) => { + const data = (await response.json()) as DotcomCookies cachedCookies = data // Store the fetched cookies in local storage for future use. try { @@ -61,8 +60,7 @@ async function fetchCookies(): Promise { console.error('Error storing cookies in local storage:', e) } return data - }) - .catch((err) => { + } catch (err) { console.error('Error fetching cookies:', err) // On failure, return default values. const defaultCookies: DotcomCookies = { @@ -70,11 +68,11 @@ async function fetchCookies(): Promise { } cachedCookies = defaultCookies return defaultCookies - }) - .finally(() => { + } finally { // Clear the in-flight promise regardless of success or failure. inFlightPromise = null - }) + } + })() return inFlightPromise } diff --git a/src/frame/middleware/index.ts b/src/frame/middleware/index.ts index 54c543eae829..929d12a099ae 100644 --- a/src/frame/middleware/index.ts +++ b/src/frame/middleware/index.ts @@ -82,8 +82,12 @@ const asyncMiddleware = ( fn: (req: TReq, res: Response, next: NextFunction) => T | Promise, ) => - (req: Request, res: Response, next: NextFunction) => { - Promise.resolve(fn(req as TReq, res, next)).catch(next) + async (req: Request, res: Response, next: NextFunction) => { + try { + await fn(req as TReq, res, next) + } catch (error) { + next(error) + } } export default function index(app: Express) { diff --git a/src/ghes-releases/scripts/deprecate/archive-version.ts b/src/ghes-releases/scripts/deprecate/archive-version.ts index b0e037fe87e5..92eca838debc 100755 --- a/src/ghes-releases/scripts/deprecate/archive-version.ts +++ b/src/ghes-releases/scripts/deprecate/archive-version.ts @@ -96,21 +96,23 @@ async function main() { .listen(port, async () => { console.log(`started server on ${host}`) - await scrape({ - urls, - urlFilter: (url: string) => { - // Do not download assets from other hosts like S3 or octodex.github.com - // (this will keep them as remote references in the downloaded pages) - return url.startsWith(`http://localhost:${port}/`) - }, - directory: tmpArchivalDirectory, - filenameGenerator: 'bySiteStructure', - requestConcurrency: 6, - plugins: [new RewriteAssetPathsPlugin(tmpArchivalDirectory, localDev, GH_PAGES_URL)], - }).catch((err: Error) => { + try { + await scrape({ + urls, + urlFilter: (url: string) => { + // Do not download assets from other hosts like S3 or octodex.github.com + // (this will keep them as remote references in the downloaded pages) + return url.startsWith(`http://localhost:${port}/`) + }, + directory: tmpArchivalDirectory, + filenameGenerator: 'bySiteStructure', + requestConcurrency: 6, + plugins: [new RewriteAssetPathsPlugin(tmpArchivalDirectory, localDev, GH_PAGES_URL)], + }) + } catch (err) { console.error('scraping error') console.error(err) - }) + } fs.renameSync( path.join(tmpArchivalDirectory, `/localhost_${port}`), diff --git a/src/ghes-releases/scripts/release-banner.ts b/src/ghes-releases/scripts/release-banner.ts index 6d467f1667f6..6b2ac42ce897 100644 --- a/src/ghes-releases/scripts/release-banner.ts +++ b/src/ghes-releases/scripts/release-banner.ts @@ -68,7 +68,9 @@ async function main(): Promise { `) } -main().catch((error) => { +try { + await main() +} catch (error) { console.error('Error:', error) 
process.exit(1) -}) +} diff --git a/src/links/components/LinkPreviewPopover.tsx b/src/links/components/LinkPreviewPopover.tsx index 67bb9770e41b..75f26b466ad4 100644 --- a/src/links/components/LinkPreviewPopover.tsx +++ b/src/links/components/LinkPreviewPopover.tsx @@ -248,16 +248,19 @@ function popoverWrap(element: HTMLLinkElement, filledCallback?: (popover: HTMLDi const { pathname } = new URL(element.href) - fetch(`/api/article/meta?${new URLSearchParams({ pathname })}`, { - headers: { - 'X-Request-Source': 'hovercards', - }, - }).then(async (response) => { + async function fetchAndFillPopover() { + const response = await fetch(`/api/article/meta?${new URLSearchParams({ pathname })}`, { + headers: { + 'X-Request-Source': 'hovercards', + }, + }) if (response.ok) { const meta = (await response.json()) as PageMetadata fillPopover(element, meta, filledCallback) } - }) + } + + fetchAndFillPopover() } function fillPopover( diff --git a/src/links/scripts/rendered-content-link-checker.ts b/src/links/scripts/rendered-content-link-checker.ts index fbc81c0eddb6..e7b12b1af59b 100755 --- a/src/links/scripts/rendered-content-link-checker.ts +++ b/src/links/scripts/rendered-content-link-checker.ts @@ -128,10 +128,12 @@ async function limitConcurrency( const executing = new Set>() for (const item of items) { - const promise = asyncFn(item).then((result) => { + const createPromise = async () => { + const result = await asyncFn(item) executing.delete(promise) return result - }) + } + const promise = createPromise() results.push(promise) executing.add(promise) diff --git a/src/metrics/scripts/docstat.ts b/src/metrics/scripts/docstat.ts index 261bb0c821dc..72ca73dcc5a7 100644 --- a/src/metrics/scripts/docstat.ts +++ b/src/metrics/scripts/docstat.ts @@ -39,21 +39,6 @@ interface CliOptions { allVersions?: boolean } -interface QueryResults { - views?: string - viewsDocset?: string - users?: string - usersDocset?: string - viewDuration?: string - viewDurationDocset?: string - bounces?: string - bouncesDocset?: string - score?: string - scoreDocset?: string - exits?: string - exitsDocset?: string -} - interface JsonOutput { daysRange: string startDate: string @@ -222,130 +207,8 @@ async function main(): Promise { console.log(`\n\nSkipping comparison, since '${cleanPath}' is already a docset.\n`) } - // Create query promises for all requested metrics - const queryPromises: Promise[] = [] - const results: QueryResults = {} - - // Setup all the promises for parallel execution - if (options.views) { - const queryType = 'views' - queryPromises.push( - getViews(queryPaths, client, dates, version, options.verbose, queryType).then((data) => { - results.views = data - }), - ) - if (options.showDocset) { - const queryType = 'docset views' - queryPromises.push( - getViews(docsetPath, client, dates, version, options.verbose, queryType).then((data) => { - results.viewsDocset = data - }), - ) - } - } - - if (options.users) { - const queryType = 'users' - queryPromises.push( - getUsers(queryPaths, client, dates, version, options.verbose, queryType).then((data) => { - results.users = data - }), - ) - if (options.showDocset) { - const queryType = 'docset users' - queryPromises.push( - getUsers(docsetPath, client, dates, version, options.verbose, queryType).then((data) => { - results.usersDocset = data - }), - ) - } - } - - if (options.viewDuration) { - const queryType = 'view duration' - queryPromises.push( - getViewDuration(queryPaths, client, dates, version, options.verbose, queryType).then( - (data) => { - 
results.viewDuration = data - }, - ), - ) - if (options.showDocset) { - const queryType = 'docset view duration' - queryPromises.push( - getViewDuration(docsetPath, client, dates, version, options.verbose, queryType).then( - (data) => { - results.viewDurationDocset = data - }, - ), - ) - } - } - - if (options.bounces) { - const queryType = 'bounces' - queryPromises.push( - getBounces(queryPaths, client, dates, version, options.verbose, queryType).then((data) => { - results.bounces = data - }), - ) - if (options.showDocset) { - const queryType = 'docset bounces' - queryPromises.push( - getBounces(docsetPath, client, dates, version, options.verbose, queryType).then( - (data) => { - results.bouncesDocset = data - }, - ), - ) - } - } - - if (options.score) { - const queryType = 'score' - queryPromises.push( - getScore(queryPaths, client, dates, version, options.verbose, queryType).then((data) => { - results.score = data - }), - ) - if (options.showDocset) { - const queryType = 'docset score' - queryPromises.push( - getScore(docsetPath, client, dates, version, options.verbose, queryType).then((data) => { - results.scoreDocset = data - }), - ) - } - } - - if (options.exits) { - const queryType = 'exits' - queryPromises.push( - getExitsToSupport(queryPaths, client, dates, version, options.verbose, queryType).then( - (data) => { - results.exits = data - }, - ), - ) - if (options.showDocset) { - const queryType = 'docset exits' - queryPromises.push( - getExitsToSupport(docsetPath, client, dates, version, options.verbose, queryType).then( - (data) => { - results.exitsDocset = data - }, - ), - ) - } - } - - // Execute all queries in parallel - await Promise.all(queryPromises) - - spinner.succeed('Data retrieved successfully!\n') - - // Extract all results from the results object - const { + // Execute all queries in parallel and destructure results + const [ views, viewsDocset, users, @@ -358,7 +221,53 @@ async function main(): Promise { scoreDocset, exits, exitsDocset, - } = results + ] = await Promise.all([ + options.views + ? getViews(queryPaths, client, dates, version, options.verbose, 'views') + : undefined, + options.views && options.showDocset + ? getViews(docsetPath, client, dates, version, options.verbose, 'docset views') + : undefined, + options.users + ? getUsers(queryPaths, client, dates, version, options.verbose, 'users') + : undefined, + options.users && options.showDocset + ? getUsers(docsetPath, client, dates, version, options.verbose, 'docset users') + : undefined, + options.viewDuration + ? getViewDuration(queryPaths, client, dates, version, options.verbose, 'view duration') + : undefined, + options.viewDuration && options.showDocset + ? getViewDuration( + docsetPath, + client, + dates, + version, + options.verbose, + 'docset view duration', + ) + : undefined, + options.bounces + ? getBounces(queryPaths, client, dates, version, options.verbose, 'bounces') + : undefined, + options.bounces && options.showDocset + ? getBounces(docsetPath, client, dates, version, options.verbose, 'docset bounces') + : undefined, + options.score + ? getScore(queryPaths, client, dates, version, options.verbose, 'score') + : undefined, + options.score && options.showDocset + ? getScore(docsetPath, client, dates, version, options.verbose, 'docset score') + : undefined, + options.exits + ? getExitsToSupport(queryPaths, client, dates, version, options.verbose, 'exits') + : undefined, + options.exits && options.showDocset + ? 
getExitsToSupport(docsetPath, client, dates, version, options.verbose, 'docset exits') + : undefined, + ]) + + spinner.succeed('Data retrieved successfully!\n') // Output JSON and exit if (options.json) { @@ -491,11 +400,13 @@ async function main(): Promise { } } -main().catch((error) => { +try { + await main() +} catch (error) { console.error(red('Unexpected error:')) console.error(error) process.exit(1) -}) +} /* -------- UTILITY FUNCTIONS -------- */ diff --git a/src/observability/middleware/catch-middleware-error.ts b/src/observability/middleware/catch-middleware-error.ts index d8bc4e30666d..e937a73d4ff1 100644 --- a/src/observability/middleware/catch-middleware-error.ts +++ b/src/observability/middleware/catch-middleware-error.ts @@ -4,5 +4,11 @@ import type { NextFunction } from 'express' // This matches the original JavaScript behavior while providing some type safety // The assertion is necessary because Express middleware can have various request/response types export default function catchMiddlewareError(fn: any) { - return (req: any, res: any, next: NextFunction) => Promise.resolve(fn(req, res, next)).catch(next) + return async (req: any, res: any, next: NextFunction) => { + try { + await fn(req, res, next) + } catch (error) { + next(error) + } + } } diff --git a/src/search/scripts/analyze-text.ts b/src/search/scripts/analyze-text.ts index a2be919aca83..60833e28e3a5 100755 --- a/src/search/scripts/analyze-text.ts +++ b/src/search/scripts/analyze-text.ts @@ -72,10 +72,12 @@ program const options = program.opts() const args: string[] = program.args -main(options, args).catch((err) => { +try { + await main(options, args) +} catch (err) { console.error(chalk.red('Error:'), err) process.exit(1) -}) +} async function main(opts: Options, args: string[]): Promise { const texts = [args.join(' ')] diff --git a/src/search/scripts/scrape/lib/build-records.ts b/src/search/scripts/scrape/lib/build-records.ts index 7f40baac8bd3..59b0560fd52b 100644 --- a/src/search/scripts/scrape/lib/build-records.ts +++ b/src/search/scripts/scrape/lib/build-records.ts @@ -173,60 +173,59 @@ export default async function buildRecords( }) // Wait for 'done' event but ignore 'error' events (they're handled by the error listener above) - return eventToPromise(waiter, 'done', { ignoreErrors: true }).then(() => { - console.log('\nrecords in index: ', records.length) - - // Report failed pages if any - if (failedPages.length > 0) { - const failureCount = failedPages.length - const header = chalk.bold.red(`${failureCount} page(s) failed to scrape\n\n`) - - const failureList = failedPages - .slice(0, 10) // Show first 10 failures - .map((failure, idx) => { - const number = chalk.gray(`${idx + 1}. `) - const errorType = chalk.yellow(failure.errorType) - const pathLine = failure.relativePath - ? `\n${chalk.cyan(' Path: ')}${failure.relativePath}` - : '' - const urlLine = failure.url ? 
`\n${chalk.cyan(' URL: ')}${failure.url}` : '' - const errorLine = `\n${chalk.gray(` Error: ${failure.error}`)}` - - return `${number}${errorType}${pathLine}${urlLine}${errorLine}` - }) - .join('\n\n') + await eventToPromise(waiter, 'done', { ignoreErrors: true }) + console.log('\nrecords in index: ', records.length) + + // Report failed pages if any + if (failedPages.length > 0) { + const failureCount = failedPages.length + const header = chalk.bold.red(`${failureCount} page(s) failed to scrape\n\n`) + + const failureList = failedPages + .slice(0, 10) // Show first 10 failures + .map((failure, idx) => { + const number = chalk.gray(`${idx + 1}. `) + const errorType = chalk.yellow(failure.errorType) + const pathLine = failure.relativePath + ? `\n${chalk.cyan(' Path: ')}${failure.relativePath}` + : '' + const urlLine = failure.url ? `\n${chalk.cyan(' URL: ')}${failure.url}` : '' + const errorLine = `\n${chalk.gray(` Error: ${failure.error}`)}` + + return `${number}${errorType}${pathLine}${urlLine}${errorLine}` + }) + .join('\n\n') - const remaining = - failureCount > 10 ? `\n\n${chalk.gray(`... and ${failureCount - 10} more`)}` : '' + const remaining = + failureCount > 10 ? `\n\n${chalk.gray(`... and ${failureCount - 10} more`)}` : '' - const boxContent = header + failureList + remaining - const box = boxen(boxContent, { - title: chalk.red('⚠ Failed Pages'), - padding: 1, - borderColor: 'yellow', - }) + const boxContent = header + failureList + remaining + const box = boxen(boxContent, { + title: chalk.red('⚠ Failed Pages'), + padding: 1, + borderColor: 'yellow', + }) + + console.log(`\n${box}\n`) - console.log(`\n${box}\n`) + // Log suggestion + console.log( + chalk.yellow( + `💡 Tip: These failures won't stop the scraping process. The script will continue with the remaining pages.`, + ), + ) - // Log suggestion + if (failedPages.some((f) => f.errorType === 'Timeout')) { console.log( - chalk.yellow( - `💡 Tip: These failures won't stop the scraping process. 
The script will continue with the remaining pages.`, + chalk.gray( + ` For timeout errors, try: export BUILD_RECORDS_MAX_CONCURRENT=50 (currently ${MAX_CONCURRENT})`, ), ) - - if (failedPages.some((f) => f.errorType === 'Timeout')) { - console.log( - chalk.gray( - ` For timeout errors, try: export BUILD_RECORDS_MAX_CONCURRENT=50 (currently ${MAX_CONCURRENT})`, - ), - ) - } } + } - return { - records, - failedPages, - } - }) + return { + records, + failedPages, + } } diff --git a/src/search/scripts/scrape/lib/domwaiter.ts b/src/search/scripts/scrape/lib/domwaiter.ts index 2b9c0c6f9138..b33bc9a6bf04 100644 --- a/src/search/scripts/scrape/lib/domwaiter.ts +++ b/src/search/scripts/scrape/lib/domwaiter.ts @@ -52,12 +52,16 @@ export default function domwaiter(pages: Permalink[], opts: DomWaiterOptions = { const limiter = new Bottleneck(opts) pages.forEach((page) => { - limiter - .schedule(() => getPage(page, emitter, opts)) - .catch((err) => { + async function schedulePage() { + try { + await limiter.schedule(() => getPage(page, emitter, opts)) + } catch (err) { // Catch any unhandled promise rejections emitter.emit('error', err) - }) + } + } + + schedulePage() }) limiter.on('idle', () => { diff --git a/src/versions/scripts/use-short-versions.ts b/src/versions/scripts/use-short-versions.ts index bbcfc6cb8584..85b702300d74 100755 --- a/src/versions/scripts/use-short-versions.ts +++ b/src/versions/scripts/use-short-versions.ts @@ -117,15 +117,13 @@ async function main() { } } -main().then( - () => { - console.log('Done!') - }, - (err) => { - console.error(err) - process.exit(1) - }, -) +try { + await main() + console.log('Done!') +} catch (err) { + console.error(err) + process.exit(1) +} // Convenience function to help with readability by removing this large but unneded property. 
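// Aside: a minimal sketch of the script entry-point conversion repeated throughout this patch
// (the `main` body and messages are illustrative). The old style chained handlers onto the
// returned promise:
//
//   main().then(
//     () => console.log('Done!'),
//     (err) => { console.error(err); process.exit(1) },
//   )
//
// With top-level await (these scripts run as ESM via tsx), the same control flow reads as:
async function main(): Promise<void> {
  // ... the script's work ...
}

try {
  await main()
  console.log('Done!')
} catch (err) {
  console.error(err)
  process.exit(1)
}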
// Using any for token objects as liquidjs doesn't provide TypeScript types diff --git a/src/workflows/experimental/readability-report.ts b/src/workflows/experimental/readability-report.ts index c31ffe55d427..d3ffa06f13d8 100644 --- a/src/workflows/experimental/readability-report.ts +++ b/src/workflows/experimental/readability-report.ts @@ -635,7 +635,9 @@ function generateReport(results: PageReadability[]): string { return report } -main().catch((error) => { +try { + await main() +} catch (error) { console.error('Readability analysis failed:', error) process.exit(1) -}) +} diff --git a/src/workflows/fr-add-docs-reviewers-requests.ts b/src/workflows/fr-add-docs-reviewers-requests.ts index 01d20dff9bad..2fe491332eb4 100644 --- a/src/workflows/fr-add-docs-reviewers-requests.ts +++ b/src/workflows/fr-add-docs-reviewers-requests.ts @@ -225,7 +225,9 @@ async function run() { return newItemIDs } -run().catch((error) => { +try { + await run() +} catch (error) { console.log(`#ERROR# ${error}`) process.exit(1) -}) +} diff --git a/src/workflows/ready-for-docs-review.ts b/src/workflows/ready-for-docs-review.ts index eee739f24a22..3fa626f748a1 100644 --- a/src/workflows/ready-for-docs-review.ts +++ b/src/workflows/ready-for-docs-review.ts @@ -261,7 +261,9 @@ async function run() { export { run } -run().catch((error) => { +try { + await run() +} catch (error) { console.log(`#ERROR# ${error}`) process.exit(1) -}) +} From 8e897b2b822104b2261b3b1a9fa25a65fee1a8fd Mon Sep 17 00:00:00 2001 From: Kevin Heis Date: Thu, 30 Oct 2025 13:39:59 -0700 Subject: [PATCH 9/9] Fix all no-shadow ESLint violations (#58234) --- eslint.config.ts | 1 - next.config.ts | 10 +-- src/assets/middleware/dynamic-assets.ts | 6 +- src/assets/tests/static-assets.ts | 51 ++++++++------- src/codeql-cli/scripts/sync.ts | 4 +- .../lib/helpers/get-lintable-yml.ts | 12 ++-- .../linting-rules/frontmatter-hero-image.ts | 6 +- .../linting-rules/frontmatter-intro-links.ts | 4 +- .../lib/linting-rules/frontmatter-schema.ts | 2 +- src/content-linter/scripts/lint-content.ts | 63 ++++++++++--------- .../scripts/pretty-print-results.ts | 6 +- src/content-linter/tests/category-pages.ts | 24 +++---- .../tests/lint-frontmatter-links.ts | 6 +- .../scripts/add-content-type.ts | 8 +-- .../scripts/all-documents/cli.ts | 4 +- .../scripts/all-documents/lib.ts | 6 +- src/content-render/scripts/move-content.ts | 6 +- .../scripts/render-content-markdown.ts | 11 ++-- .../scripts/test-moved-content.ts | 14 +++-- .../scripts/update-filepaths.ts | 20 +++--- src/content-render/tests/annotate.ts | 16 ++--- src/content-render/unified/annotate.ts | 8 +-- src/content-render/unified/code-header.ts | 4 +- src/content-render/unified/copilot-prompt.ts | 4 +- .../unified/rewrite-local-links.ts | 10 +-- .../scripts/migrate-early-access-product.ts | 8 +-- src/events/components/Survey.tsx | 10 +-- .../experiments/useShouldShowExperiment.ts | 4 +- src/events/lib/analyze-comment.ts | 13 ++-- src/events/tests/middleware-errors.ts | 4 +- src/fixtures/helpers/turn-off-experiments.ts | 6 +- src/fixtures/tests/liquid.ts | 6 +- src/frame/components/UtmPreserver.tsx | 4 +- src/frame/lib/create-tree.ts | 6 +- src/frame/lib/page-data.ts | 6 +- src/frame/lib/read-json-file.ts | 8 +-- src/frame/middleware/app-router-gateway.ts | 4 +- src/frame/middleware/context/breadcrumbs.ts | 6 +- src/frame/middleware/context/context.ts | 14 ++--- src/frame/middleware/context/glossaries.ts | 4 +- src/frame/middleware/index.ts | 6 +- src/frame/middleware/next.ts | 4 +- src/frame/tests/manifest.ts | 8 +-- 
src/frame/tests/pages.ts | 2 +- src/github-apps/scripts/sync.ts | 14 ++--- src/graphql/components/Changelog.tsx | 12 ++-- src/graphql/scripts/utils/process-schemas.ts | 6 +- src/graphql/scripts/utils/schema-helpers.ts | 4 +- .../components/ProductSelectionCard.tsx | 12 ++-- src/languages/lib/languages-server.ts | 4 +- .../purge-fastly-edge-cache-per-language.ts | 8 +-- src/languages/tests/frame.ts | 4 +- .../tests/translation-error-comments.ts | 10 +-- src/learning-track/tests/lint-data.ts | 6 +- src/links/components/LinkPreviewPopover.tsx | 4 +- .../scripts/rendered-content-link-checker.ts | 4 +- .../generate-new-json.ts | 2 +- src/metrics/scripts/docstat.ts | 26 ++++---- src/observability/middleware/handle-errors.ts | 6 +- .../tests/get-automatic-request-logger.ts | 8 +-- src/redirects/lib/get-redirect.ts | 6 +- .../middleware/ghes-release-notes.ts | 10 ++- src/rest/lib/index.ts | 4 +- src/rest/scripts/test-open-api-schema.ts | 2 +- src/rest/scripts/utils/sync.ts | 8 +-- src/rest/tests/openapi-schema.ts | 6 +- src/rest/tests/rendering.ts | 2 +- src/search/components/input/SearchOverlay.tsx | 24 ++++--- .../components/results/SearchResults.tsx | 24 +++---- src/search/lib/elasticsearch-versions.ts | 10 +-- src/search/scripts/analyze-text.ts | 4 +- .../scripts/index/lib/index-general-search.ts | 6 +- .../utils/indexing-elasticsearch-utils.ts | 6 +- .../handle-invalid-query-string-values.ts | 6 +- src/shielding/tests/invalid-querystrings.ts | 6 +- src/tools/components/PlatformPicker.tsx | 4 +- src/tools/components/ToolPicker.tsx | 4 +- src/versions/middleware/features.ts | 6 +- src/webhooks/tests/rendering.ts | 2 +- .../experimental/readability-report.ts | 4 +- src/workflows/issue-report.ts | 20 +++--- src/workflows/projects.ts | 12 ++-- src/workflows/walk-files.ts | 5 +- 83 files changed, 374 insertions(+), 366 deletions(-) diff --git a/eslint.config.ts b/eslint.config.ts index 2d4fdfca6433..ebbdbc052a67 100644 --- a/eslint.config.ts +++ b/eslint.config.ts @@ -97,7 +97,6 @@ export default [ // Disabled rules to review '@typescript-eslint/ban-ts-comment': 'off', // 50+ - 'no-shadow': 'off', // 150+ 'github/array-foreach': 'off', // 250+ 'no-console': 'off', // 800+ '@typescript-eslint/no-explicit-any': 'off', // 1000+ diff --git a/next.config.ts b/next.config.ts index ce4e8d3a92aa..677f17c7886e 100644 --- a/next.config.ts +++ b/next.config.ts @@ -49,11 +49,11 @@ const config: NextConfig = { } }) }, - webpack: (config) => { - config.experiments = config.experiments || {} - config.experiments.topLevelAwait = true - config.resolve.fallback = { fs: false, async_hooks: false } - return config + webpack: (webpackConfig) => { + webpackConfig.experiments = webpackConfig.experiments || {} + webpackConfig.experiments.topLevelAwait = true + webpackConfig.resolve.fallback = { fs: false, async_hooks: false } + return webpackConfig }, // https://nextjs.org/docs/api-reference/next.config.js/compression diff --git a/src/assets/middleware/dynamic-assets.ts b/src/assets/middleware/dynamic-assets.ts index 662989110317..25ed3fb67c1f 100644 --- a/src/assets/middleware/dynamic-assets.ts +++ b/src/assets/middleware/dynamic-assets.ts @@ -143,9 +143,9 @@ export default async function dynamicAssets( const buffer = await image.webp({ effort }).toBuffer() assetCacheControl(res) return res.type('image/webp').send(buffer) - } catch (error) { - if (error instanceof Error && (error as any).code !== 'ENOENT') { - throw error + } catch (catchError) { + if (catchError instanceof Error && (catchError as any).code !== 'ENOENT') 
{ + throw catchError } } } diff --git a/src/assets/tests/static-assets.ts b/src/assets/tests/static-assets.ts index 272aa862017f..4a1c9ed217e8 100644 --- a/src/assets/tests/static-assets.ts +++ b/src/assets/tests/static-assets.ts @@ -16,13 +16,13 @@ function getNextStaticAsset(directory: string) { return path.join(root, files[0]) } -function mockRequest(path: string, { headers }: { headers?: Record } = {}) { +function mockRequest(requestPath: string, { headers }: { headers?: Record } = {}) { const _headers = Object.fromEntries( Object.entries(headers || {}).map(([key, value]) => [key.toLowerCase(), value]), ) return { - path, - url: path, + path: requestPath, + url: requestPath, get: (header: string) => { return _headers[header.toLowerCase()] }, @@ -74,8 +74,8 @@ const mockResponse = () => { if (typeof key === 'string') { res.headers[key.toLowerCase()] = value } else { - for (const [k, value] of Object.entries(key)) { - res.headers[k.toLowerCase()] = value + for (const [k, v] of Object.entries(key)) { + res.headers[k.toLowerCase()] = v } } } @@ -319,9 +319,9 @@ describe('archived enterprise static assets', () => { }, ])( 'should return $expectStatus for $name', - ({ name, path, referrer, expectStatus, shouldCallNext }) => { + ({ name, path: testPath, referrer, expectStatus, shouldCallNext }) => { test(name, async () => { - const req = mockRequest(path, { + const req = mockRequest(testPath, { headers: { Referrer: referrer, }, @@ -359,22 +359,25 @@ describe('archived enterprise static assets', () => { expectStatus: undefined, shouldCallNext: true, }, - ])('should not suppress $name', ({ name, path, referrer, expectStatus, shouldCallNext }) => { - test(name, async () => { - const req = mockRequest(path, { - headers: { - Referrer: referrer, - }, + ])( + 'should not suppress $name', + ({ name, path: testPath, referrer, expectStatus, shouldCallNext }) => { + test(name, async () => { + const req = mockRequest(testPath, { + headers: { + Referrer: referrer, + }, + }) + const res = mockResponse() + let nexted = false + const next = () => { + nexted = true + } + setDefaultFastlySurrogateKey(req, res, () => {}) + await archivedEnterpriseVersionsAssets(req as any, res as any, next) + expect(nexted).toBe(shouldCallNext) + expect(res.statusCode).toBe(expectStatus) }) - const res = mockResponse() - let nexted = false - const next = () => { - nexted = true - } - setDefaultFastlySurrogateKey(req, res, () => {}) - await archivedEnterpriseVersionsAssets(req as any, res as any, next) - expect(nexted).toBe(shouldCallNext) - expect(res.statusCode).toBe(expectStatus) - }) - }) + }, + ) }) diff --git a/src/codeql-cli/scripts/sync.ts b/src/codeql-cli/scripts/sync.ts index 5f360da6bf72..7ca45f732e7b 100755 --- a/src/codeql-cli/scripts/sync.ts +++ b/src/codeql-cli/scripts/sync.ts @@ -83,8 +83,8 @@ async function setupEnvironment() { // copy the raw rst files to the temp directory and convert them // to Markdownusing pandoc -async function rstToMarkdown(sourceDirectory: string) { - const sourceFiles = walk(sourceDirectory, { +async function rstToMarkdown(rstSourceDirectory: string) { + const sourceFiles = walk(rstSourceDirectory, { includeBasePath: true, globs: ['**/*.rst'], }) diff --git a/src/content-linter/lib/helpers/get-lintable-yml.ts b/src/content-linter/lib/helpers/get-lintable-yml.ts index f697943349ea..087bf4b5ed20 100755 --- a/src/content-linter/lib/helpers/get-lintable-yml.ts +++ b/src/content-linter/lib/helpers/get-lintable-yml.ts @@ -80,15 +80,15 @@ export async function getLintableYml(dataFilePath: 
string): Promise, dataFilePath: string): Map { - const keys = Array.from(mdDict.keys()) +function addPathToKey(mdDictMap: Map, dataFilePath: string): Map { + const keys = Array.from(mdDictMap.keys()) keys.forEach((key) => { const newKey = `${dataFilePath} ${key}` - const value = mdDict.get(key) + const value = mdDictMap.get(key) if (value !== undefined) { - mdDict.delete(key) - mdDict.set(newKey, value) + mdDictMap.delete(key) + mdDictMap.set(newKey, value) } }) - return mdDict + return mdDictMap } diff --git a/src/content-linter/lib/linting-rules/frontmatter-hero-image.ts b/src/content-linter/lib/linting-rules/frontmatter-hero-image.ts index e7cd45bd8b17..e163cb641c8b 100644 --- a/src/content-linter/lib/linting-rules/frontmatter-hero-image.ts +++ b/src/content-linter/lib/linting-rules/frontmatter-hero-image.ts @@ -45,7 +45,7 @@ export const frontmatterHeroImage: Rule = { // Check if heroImage is an absolute path if (!heroImage.startsWith('/')) { - const line = params.lines.find((line: string) => line.trim().startsWith('heroImage:')) + const line = params.lines.find((ln: string) => ln.trim().startsWith('heroImage:')) const lineNumber = line ? params.lines.indexOf(line) + 1 : 1 addError( onError, @@ -59,7 +59,7 @@ export const frontmatterHeroImage: Rule = { // Check if heroImage points to banner-images directory if (!heroImage.startsWith('/assets/images/banner-images/')) { - const line = params.lines.find((line: string) => line.trim().startsWith('heroImage:')) + const line = params.lines.find((ln: string) => ln.trim().startsWith('heroImage:')) const lineNumber = line ? params.lines.indexOf(line) + 1 : 1 addError( onError, @@ -74,7 +74,7 @@ export const frontmatterHeroImage: Rule = { // Check if the file actually exists const validHeroImages = getValidHeroImages() if (validHeroImages.length > 0 && !validHeroImages.includes(heroImage)) { - const line = params.lines.find((line: string) => line.trim().startsWith('heroImage:')) + const line = params.lines.find((ln: string) => ln.trim().startsWith('heroImage:')) const lineNumber = line ? params.lines.indexOf(line) + 1 : 1 const availableImages = validHeroImages.join(', ') addError( diff --git a/src/content-linter/lib/linting-rules/frontmatter-intro-links.ts b/src/content-linter/lib/linting-rules/frontmatter-intro-links.ts index 2db601eb6fc6..f07a0394c8dc 100644 --- a/src/content-linter/lib/linting-rules/frontmatter-intro-links.ts +++ b/src/content-linter/lib/linting-rules/frontmatter-intro-links.ts @@ -48,8 +48,8 @@ export const frontmatterIntroLinks: Rule = { for (const key of Object.keys(introLinks)) { if (!validKeys.includes(key)) { // Find the line with this key - const line = params.lines.find((line: string) => { - const trimmed = line.trim() + const line = params.lines.find((ln: string) => { + const trimmed = ln.trim() return trimmed.startsWith(`${key}:`) && !trimmed.startsWith('introLinks:') }) const lineNumber = line ? 
params.lines.indexOf(line) + 1 : 1 diff --git a/src/content-linter/lib/linting-rules/frontmatter-schema.ts b/src/content-linter/lib/linting-rules/frontmatter-schema.ts index dc67da8449da..d616a2021d5a 100644 --- a/src/content-linter/lib/linting-rules/frontmatter-schema.ts +++ b/src/content-linter/lib/linting-rules/frontmatter-schema.ts @@ -24,7 +24,7 @@ export const frontmatterSchema: Rule = { for (const key of deprecatedKeys) { // Early access articles are allowed to have deprecated properties if (params.name.includes('early-access')) continue - const line = params.lines.find((line: string) => line.trim().startsWith(key)) + const line = params.lines.find((ln: string) => ln.trim().startsWith(key)) const lineNumber = params.lines.indexOf(line!) + 1 addError( onError, diff --git a/src/content-linter/scripts/lint-content.ts b/src/content-linter/scripts/lint-content.ts index b516d5f5917b..60b2264e0bcc 100755 --- a/src/content-linter/scripts/lint-content.ts +++ b/src/content-linter/scripts/lint-content.ts @@ -262,7 +262,7 @@ async function main() { } const fixableFiles = Object.entries(formattedResults) - .filter(([, results]) => results.some((result) => result.fixable)) + .filter(([, fileResults]) => fileResults.some((flaw) => flaw.fixable)) .map(([file]) => file) if (fixableFiles.length) { console.log('') // Just for some whitespace before the next message @@ -302,7 +302,7 @@ function pluralize(things, word, pluralForm = null) { // (e.g., heading linters) so we need to separate the // list of data files from all other files to run // through markdownlint individually -function getFilesToLint(paths) { +function getFilesToLint(inputPaths) { const fileList = { length: 0, content: [], @@ -316,7 +316,7 @@ function getFilesToLint(paths) { // The path passed to Markdownlint is what is displayed // in the error report, so we want to normalize it and // and make it relative if it's absolute. - for (const rawPath of paths) { + for (const rawPath of inputPaths) { const absPath = path.resolve(rawPath) if (fs.statSync(rawPath).isDirectory()) { if (isInDir(absPath, contentDir)) { @@ -427,16 +427,16 @@ function reportSummaryByRule(results, config) { result. Results are sorted by severity per file, with errors listed first then warnings. */ -function getFormattedResults(allResults, isPrecommit) { +function getFormattedResults(allResults, isInPrecommitMode) { const output = {} Object.entries(allResults) // Each result key always has an array value, but it may be empty .filter(([, results]) => results.length) - .forEach(([key, results]) => { + .forEach(([key, fileResults]) => { if (verbose) { - output[key] = [...results] + output[key] = [...fileResults] } else { - const formattedResults = results.map((flaw) => formatResult(flaw, isPrecommit)) + const formattedResults = fileResults.map((flaw) => formatResult(flaw, isInPrecommitMode)) // Only add the file to output if there are results after filtering if (formattedResults.length > 0) { @@ -465,8 +465,8 @@ function getErrorCountByFile(results, fixed = false) { return getCountBySeverity(results, 'error', fixed) } function getCountBySeverity(results, severityLookup, fixed) { - return Object.values(results).filter((results) => - results.some((result) => { + return Object.values(results).filter((fileResults) => + fileResults.some((result) => { // If --fix was applied, we don't want to know about files that // no longer have errors or warnings. 
return result.severity === severityLookup && (!fixed || !result.fixable) @@ -477,7 +477,7 @@ function getCountBySeverity(results, severityLookup, fixed) { // Removes null values and properties that are not relevant to content // writers, adds the severity to each result object, and transforms // some error and fix data into a more readable format. -function formatResult(object, isPrecommit) { +function formatResult(object, isInPrecommitMode) { const formattedResult = {} // Add severity to each result object @@ -486,7 +486,8 @@ function formatResult(object, isPrecommit) { throw new Error(`Rule not found in allConfig: '${ruleName}'`) } formattedResult.severity = - allConfig[ruleName].severity || getSearchReplaceRuleSeverity(ruleName, object, isPrecommit) + allConfig[ruleName].severity || + getSearchReplaceRuleSeverity(ruleName, object, isInPrecommitMode) formattedResult.context = allConfig[ruleName].context || '' @@ -540,7 +541,7 @@ function listRules() { Rules that can't be run on partials have the property `partial-markdown-files` set to false. */ -function getMarkdownLintConfig(errorsOnly, runRules) { +function getMarkdownLintConfig(filterErrorsOnly, runRules) { const config = { content: structuredClone(defaultConfig), data: structuredClone(defaultConfig), @@ -559,7 +560,7 @@ function getMarkdownLintConfig(errorsOnly, runRules) { // search-replace is handled differently than other rules because // it has nested metadata and rules. if ( - errorsOnly && + filterErrorsOnly && getRuleSeverity(ruleConfig, isPrecommit) !== 'error' && ruleName !== 'search-replace' ) { @@ -585,7 +586,7 @@ function getMarkdownLintConfig(errorsOnly, runRules) { for (const searchRule of ruleConfig.rules) { const searchRuleSeverity = getRuleSeverity(searchRule, isPrecommit) - if (errorsOnly && searchRuleSeverity !== 'error') continue + if (filterErrorsOnly && searchRuleSeverity !== 'error') continue // Add search-replace rules to frontmatter configuration for rules that make sense in frontmatter // This ensures rules like TODOCS detection work in frontmatter // Rules with applyToFrontmatter should ONLY run in the frontmatter pass (which lints the entire file) @@ -640,14 +641,16 @@ function getMarkdownLintConfig(errorsOnly, runRules) { // Return the severity value of a rule but keep in mind it could be // running as a precommit hook, which means the severity could be // deliberately different. -function getRuleSeverity(rule, isPrecommit) { - return isPrecommit ? rule.precommitSeverity || rule.severity : rule.severity +function getRuleSeverity(ruleConfig, isInPrecommitMode) { + return isInPrecommitMode + ? ruleConfig.precommitSeverity || ruleConfig.severity + : ruleConfig.severity } // Gets a custom rule function from the name of the rule // in the configuration file function getCustomRule(ruleName) { - const rule = customRules.find((rule) => rule.names.includes(ruleName)) + const rule = customRules.find((r) => r.names.includes(ruleName)) if (!rule) throw new Error( `A content-lint rule ('${ruleName}') is configured in the markdownlint config file but does not have a corresponding rule function.`, @@ -696,24 +699,24 @@ export function shouldIncludeRule(ruleName, runRules) { fixInfo: null } */ -function getSearchReplaceRuleSeverity(ruleName, object, isPrecommit) { +function getSearchReplaceRuleSeverity(ruleName, object, isInPrecommitMode) { const pluginRuleName = object.errorDetail.split(':')[0].trim() - const rule = allConfig[ruleName].rules.find((rule) => rule.name === pluginRuleName) - return isPrecommit ? 
rule.precommitSeverity || rule.severity : rule.severity + const rule = allConfig[ruleName].rules.find((r) => r.name === pluginRuleName) + return isInPrecommitMode ? rule.precommitSeverity || rule.severity : rule.severity } function isOptionsValid() { // paths should only contain existing files and directories - const paths = program.opts().paths || [] - for (const path of paths) { + const optionPaths = program.opts().paths || [] + for (const filePath of optionPaths) { try { - fs.statSync(path) + fs.statSync(filePath) } catch { - if ('paths'.includes(path)) { + if ('paths'.includes(filePath)) { console.log('error: did you mean --paths') } else { console.log( - `error: invalid --paths (-p) option. The value '${path}' is not a valid file or directory`, + `error: invalid --paths (-p) option. The value '${filePath}' is not a valid file or directory`, ) } return false @@ -722,14 +725,14 @@ function isOptionsValid() { // rules should only contain existing, correctly spelled rules const allRulesList = [...allRules.map((rule) => rule.names).flat(), ...Object.keys(allConfig)] - const rules = program.opts().rules || [] - for (const rule of rules) { - if (!allRulesList.includes(rule)) { - if ('rules'.includes(rule)) { + const optionRules = program.opts().rules || [] + for (const ruleName of optionRules) { + if (!allRulesList.includes(ruleName)) { + if ('rules'.includes(ruleName)) { console.log('error: did you mean --rules') } else { console.log( - `error: invalid --rules (-r) option. The value '${rule}' is not a valid rule name.`, + `error: invalid --rules (-r) option. The value '${ruleName}' is not a valid rule name.`, ) } return false diff --git a/src/content-linter/scripts/pretty-print-results.ts b/src/content-linter/scripts/pretty-print-results.ts index d447b1170c01..3c0a8124a095 100644 --- a/src/content-linter/scripts/pretty-print-results.ts +++ b/src/content-linter/scripts/pretty-print-results.ts @@ -53,10 +53,10 @@ export function prettyPrintResults( let ruleDescription = '' const errorDetailsByDescription = new Map() - for (const { errorDetail, ruleDescription } of sorted) { - const details = errorDetailsByDescription.get(ruleDescription) || new Set() + for (const { errorDetail, ruleDescription: ruleDesc } of sorted) { + const details = errorDetailsByDescription.get(ruleDesc) || new Set() details.add(errorDetail) - errorDetailsByDescription.set(ruleDescription, details) + errorDetailsByDescription.set(ruleDesc, details) } for (const result of sorted) { diff --git a/src/content-linter/tests/category-pages.ts b/src/content-linter/tests/category-pages.ts index 6366e3d66f6a..6ea2edb1a738 100644 --- a/src/content-linter/tests/category-pages.ts +++ b/src/content-linter/tests/category-pages.ts @@ -98,10 +98,10 @@ describe.skip('category pages', () => { const indexContents = await fs.promises.readFile(indexAbsPath, 'utf8') const parsed = matter(indexContents) if (!parsed.data) throw new Error('No frontmatter') - const data = parsed.data as MarkdownFrontmatter - categoryVersions = getApplicableVersions(data.versions, indexAbsPath) - allowTitleToDifferFromFilename = data.allowTitleToDifferFromFilename - const articleLinks = data.children.filter((child) => { + const categoryData = parsed.data as MarkdownFrontmatter + categoryVersions = getApplicableVersions(categoryData.versions, indexAbsPath) + allowTitleToDifferFromFilename = categoryData.allowTitleToDifferFromFilename + const articleLinks = categoryData.children.filter((child) => { const mdPath = getPath(productDir, indexLink, child) const fileExists 
= fs.existsSync(mdPath) return fileExists && fs.statSync(mdPath).isFile() @@ -137,10 +137,10 @@ describe.skip('category pages', () => { articleLinks.map(async (articleLink) => { const articlePath = getPath(productDir, indexLink, articleLink) const articleContents = await fs.promises.readFile(articlePath, 'utf8') - const data = getFrontmatterData(articleContents) + const articleData = getFrontmatterData(articleContents) // Do not include subcategories in list of published articles - if (data.subcategory || data.hidden) return null + if (articleData.subcategory || articleData.hidden) return null // ".../content/github/{category}/{article}.md" => "/{article}" return `/${path.relative(categoryDir, articlePath).replace(/\.md$/, '')}` @@ -159,10 +159,10 @@ describe.skip('category pages', () => { await Promise.all( childFilePaths.map(async (articlePath) => { const articleContents = await fs.promises.readFile(articlePath, 'utf8') - const data = getFrontmatterData(articleContents) + const availableArticleData = getFrontmatterData(articleContents) // Do not include subcategories nor hidden pages in list of available articles - if (data.subcategory || data.hidden) return null + if (availableArticleData.subcategory || availableArticleData.hidden) return null // ".../content/github/{category}/{article}.md" => "/{article}" return `/${path.relative(categoryDir, articlePath).replace(/\.md$/, '')}` @@ -173,10 +173,10 @@ describe.skip('category pages', () => { await Promise.all( childFilePaths.map(async (articlePath) => { const articleContents = await fs.promises.readFile(articlePath, 'utf8') - const data = getFrontmatterData(articleContents) + const versionData = getFrontmatterData(articleContents) articleVersions[articlePath] = getApplicableVersions( - data.versions, + versionData.versions, articlePath, ) as string[] }), @@ -196,8 +196,8 @@ describe.skip('category pages', () => { }) test('contains only articles and subcategories with versions that are also available in the parent category', () => { - Object.entries(articleVersions).forEach(([articleName, articleVersions]) => { - const unexpectedVersions = difference(articleVersions, categoryVersions) + Object.entries(articleVersions).forEach(([articleName, versions]) => { + const unexpectedVersions = difference(versions, categoryVersions) const errorMessage = `${articleName} has versions that are not available in parent category` expect(unexpectedVersions.length, errorMessage).toBe(0) }) diff --git a/src/content-linter/tests/lint-frontmatter-links.ts b/src/content-linter/tests/lint-frontmatter-links.ts index 5e5985dc9fb5..1b5fa16c7a9d 100644 --- a/src/content-linter/tests/lint-frontmatter-links.ts +++ b/src/content-linter/tests/lint-frontmatter-links.ts @@ -24,10 +24,10 @@ describe('front matter', () => { } // Using any type because trouble array contains objects with varying error properties const nonWarnings = trouble.filter((t: any) => !t.warning) - for (const { uri, index, redirects } of nonWarnings) { + for (const { uri, index, redirects: redirectTo } of nonWarnings) { customErrorMessage += `\nindex: ${index} URI: ${uri}` - if (redirects) { - customErrorMessage += `\n\tredirects to ${redirects}` + if (redirectTo) { + customErrorMessage += `\n\tredirects to ${redirectTo}` } else { customErrorMessage += '\tPage not found' } diff --git a/src/content-render/scripts/add-content-type.ts b/src/content-render/scripts/add-content-type.ts index a73565a441d5..15c642f6977a 100644 --- a/src/content-render/scripts/add-content-type.ts +++ 
b/src/content-render/scripts/add-content-type.ts @@ -88,7 +88,7 @@ async function main() { console.log(`\nUpdated ${updatedCount} files out of ${processedCount}`) } -function processFile(filePath: string, options: ScriptOptions) { +function processFile(filePath: string, scriptOptions: ScriptOptions) { const fileContent = fs.readFileSync(filePath, 'utf8') const relativePath = path.relative(contentDir, filePath) @@ -100,11 +100,11 @@ function processFile(filePath: string, options: ScriptOptions) { if (!data) return { processed: false, updated: false } // Remove the legacy type property if option is passed - const removeLegacyType = Boolean(options.removeType && data.type) + const removeLegacyType = Boolean(scriptOptions.removeType && data.type) const newContentType = determineContentType(relativePath, data.type || '') - if (options.dryRun) { + if (scriptOptions.dryRun) { console.log(`\n${relativePath}`) if (!data.contentType) { console.log(` ✅ Would set contentType: "${newContentType}"`) @@ -144,7 +144,7 @@ function processFile(filePath: string, options: ScriptOptions) { // Write the file back fs.writeFileSync(filePath, frontmatter.stringify(content, data, { lineWidth: -1 } as any)) - if (options.verbose) { + if (scriptOptions.verbose) { console.log(`\n${relativePath}`) console.log(` ✅ Set contentType: "${newContentType}"`) if (removeLegacyType) { diff --git a/src/content-render/scripts/all-documents/cli.ts b/src/content-render/scripts/all-documents/cli.ts index 45c7701cb3bb..3e4893ef9793 100644 --- a/src/content-render/scripts/all-documents/cli.ts +++ b/src/content-render/scripts/all-documents/cli.ts @@ -115,10 +115,10 @@ async function main(options: Options) { const toJson: AllDocument[] = [] for (const doc of documents) { - const { documents, ...rest } = doc + const { documents: docDocuments, ...rest } = doc toJson.push({ ...rest, - documents, + documents: docDocuments, }) } diff --git a/src/content-render/scripts/all-documents/lib.ts b/src/content-render/scripts/all-documents/lib.ts index 4f2e393180e8..fc7f30427613 100644 --- a/src/content-render/scripts/all-documents/lib.ts +++ b/src/content-render/scripts/all-documents/lib.ts @@ -34,7 +34,7 @@ export async function allDocuments(options: Options): Promise { const site = await warmServer(options.languages) const pages: Page[] = site.pageList - const allDocuments: AllDocument[] = [] + const allDocumentsResult: AllDocument[] = [] type ByVersion = Map const byLanguage = new Map() @@ -96,8 +96,8 @@ export async function allDocuments(options: Options): Promise { } for (const [language, byVersion] of byLanguage) { for (const [version, documents] of byVersion) { - allDocuments.push({ version, language, documents }) + allDocumentsResult.push({ version, language, documents }) } } - return allDocuments + return allDocumentsResult } diff --git a/src/content-render/scripts/move-content.ts b/src/content-render/scripts/move-content.ts index 2a699d7c00e1..f4b7c86eb680 100755 --- a/src/content-render/scripts/move-content.ts +++ b/src/content-render/scripts/move-content.ts @@ -400,11 +400,11 @@ function addToChildren(newPath, positions, opts) { } if (CHILDGROUPS_KEY in data) { - for (const [groupIndex, childrenPosition] of childGroupPositions) { + for (const [groupIndex, groupChildPosition] of childGroupPositions) { if (groupIndex < data[CHILDGROUPS_KEY].length) { const group = data[CHILDGROUPS_KEY][groupIndex] - if (childrenPosition < group.children.length) { - group.children.splice(childrenPosition, 0, newName) + if (groupChildPosition < 
group.children.length) { + group.children.splice(groupChildPosition, 0, newName) } else { group.children.push(newName) } diff --git a/src/content-render/scripts/render-content-markdown.ts b/src/content-render/scripts/render-content-markdown.ts index 8e976014d324..76e0f7e1f5cf 100755 --- a/src/content-render/scripts/render-content-markdown.ts +++ b/src/content-render/scripts/render-content-markdown.ts @@ -35,11 +35,12 @@ for (const page of pages) { fs.mkdirSync(`${contentCopilotDir}/${dirnames}`, { recursive: true }) // Context needed to render the content liquid const req = { language: 'en' } as ExtendedRequest - const contextualize = (req: ExtendedRequest): void => { - if (!req.context) return - if (!req.context.currentVersion) return - req.context.currentVersionObj = req.context.allVersions?.[req.context.currentVersion] - shortVersionsMiddleware(req, null, () => {}) + const contextualize = (request: ExtendedRequest): void => { + if (!request.context) return + if (!request.context.currentVersion) return + request.context.currentVersionObj = + request.context.allVersions?.[request.context.currentVersion] + shortVersionsMiddleware(request, null, () => {}) } req.context = { diff --git a/src/content-render/scripts/test-moved-content.ts b/src/content-render/scripts/test-moved-content.ts index 7b5dcac10151..20983915c136 100644 --- a/src/content-render/scripts/test-moved-content.ts +++ b/src/content-render/scripts/test-moved-content.ts @@ -29,10 +29,11 @@ async function main(nameTuple: [string, string]) { if (data) assert(data.redirect_from.includes(oldHref), `Redirect from ${oldHref} not found`) { const parentIndexMd = path.join(path.dirname(after), 'index.md') - const fileContent = fs.readFileSync(parentIndexMd, 'utf-8') - const { data } = readFrontmatter(fileContent) + const parentFileContent = fs.readFileSync(parentIndexMd, 'utf-8') + const { data: parentData } = readFrontmatter(parentFileContent) const afterShortname = `/${after.split('/').slice(-1)[0].replace(/\.md$/, '')}` - if (data) assert(data.children.includes(afterShortname), `Child ${afterShortname} not found`) + if (parentData) + assert(parentData.children.includes(afterShortname), `Child ${afterShortname} not found`) } } else { const fileContent = fs.readFileSync(path.join(after, 'index.md'), 'utf-8') @@ -41,10 +42,11 @@ async function main(nameTuple: [string, string]) { if (data) assert(data.redirect_from.includes(oldHref), `Redirect from ${oldHref} not found`) { const parentIndexMd = path.join(path.dirname(after), 'index.md') - const fileContent = fs.readFileSync(parentIndexMd, 'utf-8') - const { data } = readFrontmatter(fileContent) + const parentFileContent = fs.readFileSync(parentIndexMd, 'utf-8') + const { data: parentData } = readFrontmatter(parentFileContent) const afterShortname = `/${after.split('/').slice(-1)}` - if (data) assert(data.children.includes(afterShortname), `Child ${afterShortname} not found`) + if (parentData) + assert(parentData.children.includes(afterShortname), `Child ${afterShortname} not found`) } } } diff --git a/src/content-render/scripts/update-filepaths.ts b/src/content-render/scripts/update-filepaths.ts index 9ee8a696b897..221a1522dcee 100755 --- a/src/content-render/scripts/update-filepaths.ts +++ b/src/content-render/scripts/update-filepaths.ts @@ -96,7 +96,7 @@ async function main(): Promise { async function processFile( file: string, slugger: GithubSlugger, - options: ScriptOptions, + scriptOptions: ScriptOptions, ): Promise { const { data } = frontmatter(fs.readFileSync(file, 'utf8')) as 
unknown as { data: PageFrontmatter @@ -105,7 +105,7 @@ async function processFile( const isDirectory = isDirectoryCheck(file) // Assess the frontmatter and other conditions to determine if we want to process the path. - const processPage: boolean = determineProcessStatus(data, isDirectory, options) + const processPage: boolean = determineProcessStatus(data, isDirectory, scriptOptions) if (!processPage) return null let stringToSlugify: string = data.shortTitle || data.title @@ -153,10 +153,10 @@ async function processFile( return [contentPath, newContentPath] } -function moveFile(result: string[], options: ScriptOptions): void { +function moveFile(result: string[], scriptOptions: ScriptOptions): void { const [contentPath, newContentPath] = result - if (options.dryRun) { + if (scriptOptions.dryRun) { console.log('Move:\n', contentPath, '\nto:\n', newContentPath, '\n') return } @@ -214,7 +214,7 @@ function sortFiles(filesArray: string[]): string[] { }) } -function filterFiles(contentDir: string, options: ScriptOptions) { +function filterFiles(contentDir: string, scriptOptions: ScriptOptions) { return walkFiles(contentDir, ['.md']).filter((file: string) => { // Never move readmes if (file.endsWith('README.md')) return false @@ -226,9 +226,9 @@ function filterFiles(contentDir: string, options: ScriptOptions) { if (path.relative(contentDir, file).split(path.sep)[1] === 'index.md') return false // If no specific paths are passed, we are done filtering. - if (!options.paths) return true + if (!scriptOptions.paths) return true - return options.paths.some((p: string) => { + return scriptOptions.paths.some((p: string) => { // Allow either a full content path like "content/foo/bar.md" // or a top-level directory name like "copilot" if (!p.startsWith('content')) { @@ -247,15 +247,15 @@ function filterFiles(contentDir: string, options: ScriptOptions) { function determineProcessStatus( data: PageFrontmatter, isDirectory: boolean, - options: ScriptOptions, + scriptOptions: ScriptOptions, ): boolean { // Assess the conditions in this order: // If it's a directory AND we're excluding dirs, do not process it no matter what. - if (isDirectory && options.excludeDirs) { + if (isDirectory && scriptOptions.excludeDirs) { return false } // If the force option is passed, process it no matter what. - if (options.force) { + if (scriptOptions.force) { return true } // If the page has the override set, do not process it. diff --git a/src/content-render/tests/annotate.ts b/src/content-render/tests/annotate.ts index bd1b3be98279..03a3f50b56fd 100644 --- a/src/content-render/tests/annotate.ts +++ b/src/content-render/tests/annotate.ts @@ -66,7 +66,7 @@ describe('annotate', () => { }) test('renders bash with hash bang annotations', async () => { - const example = ` + const bashExample = ` \`\`\`bash annotate # The next line is the hash bang #!/usr/bin/env bash @@ -75,11 +75,11 @@ describe('annotate', () => { echo "Hello, world!" \`\`\` `.trim() - const res = await renderContent(example) + const res = await renderContent(bashExample) const $ = cheerio.load(res) const headerCode = $('header pre').text() - expect(headerCode).toMatch(example.split('\n').slice(1, -1).join('\n')) + expect(headerCode).toMatch(bashExample.split('\n').slice(1, -1).join('\n')) const rows = $('.annotate-row') const notes = $('.annotate-note p', rows) const noteTexts = notes.map((i, el) => $(el).text()).get() @@ -90,7 +90,7 @@ echo "Hello, world!" 
}) test("doesn't complain if the first comment is empty", async () => { - const example = ` + const emptyCommentExample = ` \`\`\`yaml annotate copy # name: Create and publish a Docker image @@ -103,11 +103,11 @@ on: \`\`\` `.trim() - const res = await renderContent(example) + const res = await renderContent(emptyCommentExample) const $ = cheerio.load(res) const headerCode = $('header pre').text() - expect(headerCode).toMatch(example.split('\n').slice(1, -1).join('\n')) + expect(headerCode).toMatch(emptyCommentExample.split('\n').slice(1, -1).join('\n')) const rows = $('.annotate-row') const notes = $('.annotate-note p', rows) const noteTexts = notes.map((i, el) => $(el).text()).get() @@ -121,7 +121,7 @@ on: }) test('supports AUTOTITLE links in annotations', async () => { - const example = ` + const autotitleExample = ` \`\`\`yaml annotate copy # For more information about workflow syntax, see [AUTOTITLE](/get-started/start-your-journey/hello-world). name: Test workflow @@ -151,7 +151,7 @@ on: [push] // Mock test object doesn't need all Context properties, using 'as unknown as' to bypass strict type checking } as unknown as Context - const res = await renderContent(example, mockContext) + const res = await renderContent(autotitleExample, mockContext) const $ = cheerio.load(res) const rows = $('.annotate-row') diff --git a/src/content-render/unified/annotate.ts b/src/content-render/unified/annotate.ts index 02f6a9331d64..bca73be290e6 100644 --- a/src/content-render/unified/annotate.ts +++ b/src/content-render/unified/annotate.ts @@ -122,8 +122,8 @@ function createAnnotatedNode(node: ElementNode, context: any): any { const rows = chunk(groups, 2) // Check the rows are formatted correctly - for (const [note, code] of rows) { - if (note === undefined || code === undefined) { + for (const [note, codeBlock] of rows) { + if (note === undefined || codeBlock === undefined) { throw new Error( "Each annotation must have a note and a code block. If you're trying to create a blank annotation, you can use a single line comment with a space after it.", ) @@ -231,13 +231,13 @@ function template({ h( 'div', { className: 'annotate-beside' }, - rows.map(([note, code]) => + rows.map(([note, codeBlock]) => h('div', { className: 'annotate-row' }, [ h( 'div', { className: 'annotate-code' }, // pre > code matches the mdast -> hast tree of a regular fenced code block. 
- h('pre', h('code', { className: `language-${lang}` }, code.join('\n'))), + h('pre', h('code', { className: `language-${lang}` }, codeBlock.join('\n'))), ), h( 'div', diff --git a/src/content-render/unified/code-header.ts b/src/content-render/unified/code-header.ts index 958c01b21595..80c670dd3131 100644 --- a/src/content-render/unified/code-header.ts +++ b/src/content-render/unified/code-header.ts @@ -108,8 +108,8 @@ function btnIcon(): Element { const btnIconHtml: string = octicons.copy.toSVG() const btnIconAst = parse(String(btnIconHtml), { sourceCodeLocationInfo: true }) // @ts-ignore - fromParse5 file option typing issue - const btnIcon = fromParse5(btnIconAst, { file: btnIconHtml }) - return btnIcon as Element + const btnIconElement = fromParse5(btnIconAst, { file: btnIconHtml }) + return btnIconElement as Element } // Using any due to conflicting unist/hast type definitions between dependencies diff --git a/src/content-render/unified/copilot-prompt.ts b/src/content-render/unified/copilot-prompt.ts index 1874b4aec91e..5def4ee14052 100644 --- a/src/content-render/unified/copilot-prompt.ts +++ b/src/content-render/unified/copilot-prompt.ts @@ -85,6 +85,6 @@ function findMatchingCode(ref: string, tree: any): any { function copilotIcon(): any { const copilotIconHtml = octicons.copilot.toSVG() const copilotIconAst = parse(String(copilotIconHtml), { sourceCodeLocationInfo: true }) - const copilotIcon = fromParse5(copilotIconAst, { file: copilotIconHtml }) - return copilotIcon + const copilotIconElement = fromParse5(copilotIconAst, { file: copilotIconHtml }) + return copilotIconElement } diff --git a/src/content-render/unified/rewrite-local-links.ts b/src/content-render/unified/rewrite-local-links.ts index c33e7e512be4..53687ffdd31e 100644 --- a/src/content-render/unified/rewrite-local-links.ts +++ b/src/content-render/unified/rewrite-local-links.ts @@ -185,14 +185,14 @@ function processLinkNode(node: Link, language: string, version: string, nodes: N language === 'en' ) { // Throw if the link text *almost* is AUTOTITLE - const textChild = child as Text + const childText = child as Text if ( - textChild.value.toUpperCase() === 'AUTOTITLE' || - distance(textChild.value.toUpperCase(), 'AUTOTITLE') <= 2 + childText.value.toUpperCase() === 'AUTOTITLE' || + distance(childText.value.toUpperCase(), 'AUTOTITLE') <= 2 ) { throw new Error( - `Found link text '${textChild.value}', expected 'AUTOTITLE'. ` + - `Find the mention of the link text '${textChild.value}' and change it to 'AUTOTITLE'. Case matters.`, + `Found link text '${childText.value}', expected 'AUTOTITLE'. ` + + `Find the mention of the link text '${childText.value}' and change it to 'AUTOTITLE'. Case matters.`, ) } } diff --git a/src/early-access/scripts/migrate-early-access-product.ts b/src/early-access/scripts/migrate-early-access-product.ts index a242feeab1fd..a2b253bcc92c 100644 --- a/src/early-access/scripts/migrate-early-access-product.ts +++ b/src/early-access/scripts/migrate-early-access-product.ts @@ -177,23 +177,23 @@ function moveVariable(dataRef: string): void { const nonAltPath: string = newVariablePath.replace('-alt.yml', '.yml') const oldAltPath: string = oldVariablePath.replace('.yml', '-alt.yml') - let oldPath: string = oldVariablePath + let oldVariableFinalPath: string = oldVariablePath // If the old variable path doesn't exist, assume no migration needed. 
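// Aside: a minimal sketch of the `no-shadow` fix pattern applied across this patch
// (identifiers are illustrative). The rule flags an inner binding that reuses an enclosing
// name, e.g.
//
//   const line = lines.find((line) => line.trim().startsWith('heroImage:'))
//
// where the callback parameter `line` shadows the `line` being declared. The fix is a rename
// that leaves behavior unchanged:
const lines: string[] = ['title: Example', 'heroImage: /assets/hero.png']
const line: string | undefined = lines.find((ln) => ln.trim().startsWith('heroImage:'))
console.log(line)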
- if (!fs.existsSync(oldVariablePath)) { + if (!fs.existsSync(oldVariableFinalPath)) { if (!fs.existsSync(newVariablePath)) { console.log(`Problem migrating files for ${dataRef}`) return } if (fs.existsSync(oldAltPath)) { - oldPath = oldAltPath + oldVariableFinalPath = oldAltPath } else { return } } const variableFileContent: Record = yaml.load( - fs.readFileSync(oldPath, 'utf8'), + fs.readFileSync(oldVariableFinalPath, 'utf8'), ) as Record const value: any = variableFileContent[variableKey] diff --git a/src/events/components/Survey.tsx b/src/events/components/Survey.tsx index 9beac8fe77fb..447740d3477b 100644 --- a/src/events/components/Survey.tsx +++ b/src/events/components/Survey.tsx @@ -59,10 +59,10 @@ export const Survey = () => { } }, [state]) - function vote(vote: VoteState) { + function vote(userVote: VoteState) { return () => { - trackEvent(getEventData(vote === VoteState.YES)) - setVoteState(vote) + trackEvent(getEventData(userVote === VoteState.YES)) + setVoteState(userVote) } } @@ -93,9 +93,9 @@ export const Survey = () => { setComment('') } - function getEventData(vote: boolean): EventData { + function getEventData(voteValue: boolean): EventData { return { - vote, + vote: voteValue, comment, email, token, diff --git a/src/events/components/experiments/useShouldShowExperiment.ts b/src/events/components/experiments/useShouldShowExperiment.ts index 490b9133cfff..28211883f69b 100644 --- a/src/events/components/experiments/useShouldShowExperiment.ts +++ b/src/events/components/experiments/useShouldShowExperiment.ts @@ -46,13 +46,13 @@ export function useShouldShowExperiment(experimentKey: ExperimentNames | { key: useEffect(() => { const updateShouldShow = async () => { - const isStaff = await getIsStaff() + const staffStatus = await getIsStaff() setShowExperiment( shouldShowExperiment( experimentKey, router.locale || '', mainContext.currentVersion || '', - isStaff, + staffStatus, router.query, ), ) diff --git a/src/events/lib/analyze-comment.ts b/src/events/lib/analyze-comment.ts index d27de5ccee56..931b01782969 100644 --- a/src/events/lib/analyze-comment.ts +++ b/src/events/lib/analyze-comment.ts @@ -48,17 +48,20 @@ export const SIGNAL_RATINGS = [ { reduction: 0.2, name: 'not-language', - validator: (comment: string, language: string) => isNotLanguage(comment, language), + validator: (comment: string, commentLanguage: string) => + isNotLanguage(comment, commentLanguage), }, { reduction: 0.3, name: 'cuss-words-likely', - validator: (comment: string, language: string) => isLikelyCussWords(comment, language), + validator: (comment: string, commentLanguage: string) => + isLikelyCussWords(comment, commentLanguage), }, { reduction: 0.1, name: 'cuss-words-maybe', - validator: (comment: string, language: string) => isMaybeCussWords(comment, language), + validator: (comment: string, commentLanguage: string) => + isMaybeCussWords(comment, commentLanguage), }, { reduction: 0.2, @@ -91,11 +94,11 @@ export async function getGuessedLanguage(comment: string) { return bestGuess.alpha2 || undefined } -export async function analyzeComment(text: string, language = 'en') { +export async function analyzeComment(text: string, commentLanguage = 'en') { const signals = [] let rating = 1.0 for (const { reduction, name, validator } of SIGNAL_RATINGS) { - if (validator(text, language)) { + if (validator(text, commentLanguage)) { signals.push(name) rating -= reduction } diff --git a/src/events/tests/middleware-errors.ts b/src/events/tests/middleware-errors.ts index 8da3ed798b06..2f139590d317 100644 --- 
a/src/events/tests/middleware-errors.ts +++ b/src/events/tests/middleware-errors.ts @@ -10,9 +10,9 @@ describe('formatErrors', () => { const { errors } = validateJson({ type: 'string' }, 0) const formattedErrors = formatErrors(errors || [], '') for (const formatted of formattedErrors) { - const { isValid, errors } = validateJson(schemas.validation, formatted) + const { isValid, errors: validationErrors } = validateJson(schemas.validation, formatted) if (!isValid) { - throw new Error(errors?.map((e) => e.message).join(' -- ')) + throw new Error(validationErrors?.map((e) => e.message).join(' -- ')) } } }) diff --git a/src/fixtures/helpers/turn-off-experiments.ts b/src/fixtures/helpers/turn-off-experiments.ts index 9767310f3edd..cd4065eb7968 100644 --- a/src/fixtures/helpers/turn-off-experiments.ts +++ b/src/fixtures/helpers/turn-off-experiments.ts @@ -27,11 +27,11 @@ async function alterExperimentsInPage( } for (const experiment of getActiveExperiments('all')) { await page.evaluate( - ({ experimentKey, variation }) => { + ({ experimentKey, variationType }) => { // @ts-expect-error overrideControlGroup is a custom function added to the window object - window.overrideControlGroup(experimentKey, variation) + window.overrideControlGroup(experimentKey, variationType) }, - { experimentKey: experiment.key, variation }, + { experimentKey: experiment.key, variationType: variation }, ) } } diff --git a/src/fixtures/tests/liquid.ts b/src/fixtures/tests/liquid.ts index 21208f1e75ad..545bdfc879c5 100644 --- a/src/fixtures/tests/liquid.ts +++ b/src/fixtures/tests/liquid.ts @@ -78,13 +78,13 @@ describe('post', () => { // Test what happens to `Cram{% ifversion fpt %}FPT{% endif %}ped.` // when it's not free-pro-team. { - const $: cheerio.Root = await getDOM( + const $inner: cheerio.Root = await getDOM( '/enterprise-server@latest/get-started/liquid/whitespace', ) - const html = $('#article-contents').html() + const innerHtml = $inner('#article-contents').html() // Assures that there's not whitespace left when the `{% ifversion %}` // yields an empty string. - expect(html).toMatch('Cramped') + expect(innerHtml).toMatch('Cramped') } }) }) diff --git a/src/frame/components/UtmPreserver.tsx b/src/frame/components/UtmPreserver.tsx index 987df9fd7408..6e829c706891 100644 --- a/src/frame/components/UtmPreserver.tsx +++ b/src/frame/components/UtmPreserver.tsx @@ -36,11 +36,11 @@ export const UtmPreserver = () => { } // Add UTM parameters to a URL - const addUtmParamsToUrl = (url: string, utmParams: URLSearchParams): string => { + const addUtmParamsToUrl = (url: string, params: URLSearchParams): string => { try { const urlObj = new URL(url) - for (const [key, value] of utmParams) { + for (const [key, value] of params) { urlObj.searchParams.set(key, value) } diff --git a/src/frame/lib/create-tree.ts b/src/frame/lib/create-tree.ts index 8f47312a9d35..bd0e7dff6e5b 100644 --- a/src/frame/lib/create-tree.ts +++ b/src/frame/lib/create-tree.ts @@ -34,9 +34,9 @@ export default async function createTree( // wrong. try { mtime = await getMtime(filepath) - } catch (error) { - if ((error as NodeJS.ErrnoException).code !== 'ENOENT') { - throw error + } catch (innerError) { + if ((innerError as NodeJS.ErrnoException).code !== 'ENOENT') { + throw innerError } // Throw an error if we can't find a content file associated with the children: entry. // But don't throw an error if the user is running the site locally and hasn't cloned the Early Access repo. 
diff --git a/src/frame/lib/page-data.ts b/src/frame/lib/page-data.ts index 560fb57ac9b9..e301bd223e31 100644 --- a/src/frame/lib/page-data.ts +++ b/src/frame/lib/page-data.ts @@ -391,11 +391,11 @@ export const loadPages = loadPageList // Create an object from the list of all pages with permalinks as keys for fast lookup. export function createMapFromArray(pageList: Page[]): Record { const pageMap = pageList.reduce( - (pageMap: Record, page: Page) => { + (accumulatedMap: Record, page: Page) => { for (const permalink of page.permalinks) { - pageMap[permalink.href] = page + accumulatedMap[permalink.href] = page } - return pageMap + return accumulatedMap }, {} as Record, ) diff --git a/src/frame/lib/read-json-file.ts b/src/frame/lib/read-json-file.ts index 44b6758fd2ea..1c8ebe6d1f33 100644 --- a/src/frame/lib/read-json-file.ts +++ b/src/frame/lib/read-json-file.ts @@ -58,12 +58,12 @@ export function readCompressedJsonFileFallbackLazily(xpath: string): () => any { if (err.code === 'ENOENT') { try { fs.accessSync(`${xpath}.br`) - } catch (err: any) { - // err is any because fs errors can have various shapes with code property - if (err.code === 'ENOENT') { + } catch (innerErr: any) { + // innerErr is any because fs errors can have various shapes with code property + if (innerErr.code === 'ENOENT') { throw new Error(`Neither ${xpath} nor ${xpath}.br is accessible`) } - throw err + throw innerErr } } else { throw err diff --git a/src/frame/middleware/app-router-gateway.ts b/src/frame/middleware/app-router-gateway.ts index 1c2318277b4f..b84e4f3f5b4c 100644 --- a/src/frame/middleware/app-router-gateway.ts +++ b/src/frame/middleware/app-router-gateway.ts @@ -68,10 +68,10 @@ export default function appRouterGateway(req: ExtendedRequest, res: Response, ne if (shouldUseAppRouter(path, pageFound)) { console.log(`[INFO] Using App Router for path: ${path} (pageFound: ${!!pageFound})`) - const strippedPath = stripLocalePrefix(path) + const innerStrippedPath = stripLocalePrefix(path) // For 404 routes, always route to our 404 page - if (strippedPath === '/404' || strippedPath === '/_not-found' || !pageFound) { + if (innerStrippedPath === '/404' || innerStrippedPath === '/_not-found' || !pageFound) { req.url = '/404' res.status(404) defaultCacheControl(res) diff --git a/src/frame/middleware/context/breadcrumbs.ts b/src/frame/middleware/context/breadcrumbs.ts index 166d125fd35c..5a4659588f54 100644 --- a/src/frame/middleware/context/breadcrumbs.ts +++ b/src/frame/middleware/context/breadcrumbs.ts @@ -44,13 +44,13 @@ function getBreadcrumbs(req: ExtendedRequest, isEarlyAccess: boolean) { } } - const breadcrumbs = traverseTreeTitles( + const breadcrumbsResult = traverseTreeTitles( req.context.currentPath, req.context.currentProductTreeTitles, ) - ;[...Array(cutoff)].forEach(() => breadcrumbs.shift()) + ;[...Array(cutoff)].forEach(() => breadcrumbsResult.shift()) - return breadcrumbs + return breadcrumbsResult } // Return an array as if you'd traverse down a tree. Imagine a tree like diff --git a/src/frame/middleware/context/context.ts b/src/frame/middleware/context/context.ts index ef4caa955a8d..8fa16fc2e2e4 100644 --- a/src/frame/middleware/context/context.ts +++ b/src/frame/middleware/context/context.ts @@ -91,20 +91,20 @@ export default async function contextualize( // The reason this is a function is because most of the time, we don't // need to know the English equivalent. 
It only comes into play if a // translated - req.context.getEnglishPage = (context) => { - if (!context.enPage) { - const { page } = context + req.context.getEnglishPage = (ctx) => { + if (!ctx.enPage) { + const { page } = ctx if (!page) { throw new Error("The 'page' has not been put into the context yet.") } - const enPath = context.currentPath!.replace(`/${page.languageCode}`, '/en') - const enPage = context.pages![enPath] + const enPath = ctx.currentPath!.replace(`/${page.languageCode}`, '/en') + const enPage = ctx.pages![enPath] if (!enPage) { throw new Error(`Unable to find equivalent English page by the path '${enPath}'`) } - context.enPage = enPage + ctx.enPage = enPage } - return context.enPage + return ctx.enPage } } diff --git a/src/frame/middleware/context/glossaries.ts b/src/frame/middleware/context/glossaries.ts index 047bf246e671..d76d34d4a6d3 100644 --- a/src/frame/middleware/context/glossaries.ts +++ b/src/frame/middleware/context/glossaries.ts @@ -41,7 +41,7 @@ export default async function glossaries(req: ExtendedRequest, res: Response, ne 'glossaries.external', req.context.currentLanguage!, ) - const glossaries = ( + const glossariesList = ( await Promise.all( glossariesRaw.map(async (glossary) => { let { description } = glossary @@ -80,7 +80,7 @@ export default async function glossaries(req: ExtendedRequest, res: Response, ne ) ).filter(Boolean) - req.context.glossaries = glossaries.sort((a, b) => + req.context.glossaries = glossariesList.sort((a, b) => a.term.localeCompare(b.term, req.context!.currentLanguage), ) diff --git a/src/frame/middleware/index.ts b/src/frame/middleware/index.ts index 929d12a099ae..eb69e7acffac 100644 --- a/src/frame/middleware/index.ts +++ b/src/frame/middleware/index.ts @@ -82,11 +82,11 @@ const asyncMiddleware = ( fn: (req: TReq, res: Response, next: NextFunction) => T | Promise, ) => - async (req: Request, res: Response, next: NextFunction) => { + async (req: Request, res: Response, nextFn: NextFunction) => { try { - await fn(req as TReq, res, next) + await fn(req as TReq, res, nextFn) } catch (error) { - next(error) + nextFn(error) } } diff --git a/src/frame/middleware/next.ts b/src/frame/middleware/next.ts index 5ee2f41f9302..ebe123a365e1 100644 --- a/src/frame/middleware/next.ts +++ b/src/frame/middleware/next.ts @@ -11,7 +11,7 @@ export const nextApp = next({ dev: isDevelopment }) export const nextHandleRequest = nextApp.getRequestHandler() await nextApp.prepare() -function renderPageWithNext(req: ExtendedRequest, res: Response, next: NextFunction) { +function renderPageWithNext(req: ExtendedRequest, res: Response, nextFn: NextFunction) { if (req.path.startsWith('/_next') && !req.path.startsWith('/_next/data')) { return nextHandleRequest(req, res) } @@ -20,7 +20,7 @@ function renderPageWithNext(req: ExtendedRequest, res: Response, next: NextFunct // '/_next/static/webpack/64e44ef62e261d3a.webpack.hot-update.json' has to // go through here. 
- return next() + return nextFn() } export default renderPageWithNext diff --git a/src/frame/tests/manifest.ts b/src/frame/tests/manifest.ts index aff2fd9f7906..7a2d27d794fc 100644 --- a/src/frame/tests/manifest.ts +++ b/src/frame/tests/manifest.ts @@ -42,11 +42,11 @@ describe('manifest', () => { expect(manifest.icons.length).toBeGreaterThan(0) await Promise.all( manifest.icons.map(async (icon) => { - const res = await get(icon.src, { responseType: 'buffer' }) - expect(res.statusCode).toBe(200) - expect(res.headers['content-type']).toBe(icon.type) + const iconRes = await get(icon.src, { responseType: 'buffer' }) + expect(iconRes.statusCode).toBe(200) + expect(iconRes.headers['content-type']).toBe(icon.type) // The `sizes` should match the payload - const image = sharp(res.body) + const image = sharp(iconRes.body) const [width, height] = icon.sizes.split('x').map((s) => parseInt(s)) const dimensions = await image.metadata() expect(dimensions.width).toBe(width) diff --git a/src/frame/tests/pages.ts b/src/frame/tests/pages.ts index 99d000e884e5..99bddf054210 100644 --- a/src/frame/tests/pages.ts +++ b/src/frame/tests/pages.ts @@ -75,7 +75,7 @@ describe('pages module', () => { // Only consider as duplicate if more than one unique file defines the same redirect const duplicates = Array.from(redirectToFiles.entries()) .filter(([, files]) => files.size > 1) - .map(([path]) => path) + .map(([redirectPath]) => redirectPath) // Build a detailed message with sources for each duplicate const message = `Found ${duplicates.length} duplicate redirect_from path${duplicates.length === 1 ? '' : 's'}. diff --git a/src/github-apps/scripts/sync.ts b/src/github-apps/scripts/sync.ts index 01a6cfa91443..47ceb5a6d7f8 100755 --- a/src/github-apps/scripts/sync.ts +++ b/src/github-apps/scripts/sync.ts @@ -126,7 +126,7 @@ export async function syncGitHubAppsData( const { progAccessData, progActorResources } = await getProgAccessData(progAccessSource) for (const schemaName of sourceSchemas) { - const data = JSON.parse( + const schemaData = JSON.parse( await readFile(path.join(openApiSource, schemaName), 'utf8'), ) as OpenApiData const appsDataConfig = JSON.parse(await readFile(CONFIG_FILE, 'utf8')) as AppsDataConfig @@ -138,7 +138,7 @@ export async function syncGitHubAppsData( } // Because the information used on the apps page doesn't require any // rendered content we can parse the dereferenced files directly - for (const [requestPath, operationsAtPath] of Object.entries(data.paths)) { + for (const [requestPath, operationsAtPath] of Object.entries(schemaData.paths)) { for (const [verb, operation] of Object.entries(operationsAtPath)) { // We only want to process operations that have programmatic access data if (!progAccessData[operation.operationId]) continue @@ -491,17 +491,17 @@ export function shouldFilterMetadataPermission( export function isActorExcluded( excludedActors: string[] | undefined | null | unknown, actorType: string, - actorTypeMap: Record = {}, + actorMapping: Record = {}, ): boolean { if (!excludedActors || !Array.isArray(excludedActors)) { return false } // Map generic actor type to actual YAML value if mapping exists - const actualActorType = actorTypeMap[actorType] || actorType + const mappedActorType = actorMapping[actorType] || actorType // Check if the mapped actor type is excluded - if (excludedActors.includes(actualActorType)) { + if (excludedActors.includes(mappedActorType)) { return true } @@ -571,7 +571,7 @@ async function getProgActorResourceContent({ owner, repo, branch, - path, + path: 
resourcePath, gitHubSourceDirectory = null, }: ProgActorResourceContentOptions): Promise { // Get files either locally from disk or from the GitHub remote repo @@ -579,7 +579,7 @@ async function getProgActorResourceContent({ if (gitHubSourceDirectory) { files = await getProgActorContentFromDisk(gitHubSourceDirectory) } else { - files = await getDirectoryContents(owner!, repo!, branch!, path!) + files = await getDirectoryContents(owner!, repo!, branch!, resourcePath!) } // We need to format the file content into a single object. Each file diff --git a/src/graphql/components/Changelog.tsx b/src/graphql/components/Changelog.tsx index 64550eee2043..9976d8e095ef 100644 --- a/src/graphql/components/Changelog.tsx +++ b/src/graphql/components/Changelog.tsx @@ -20,8 +20,8 @@ export function Changelog({ changelogItems }: Props) {

{change.title}

-              {change.changes.map((change) => (
-                •
+              {change.changes.map((changeItem) => (
+                •
              ))}
            ))}
@@ -30,8 +30,8 @@ export function Changelog({ changelogItems }: Props) {

{change.title}

-              {change.changes.map((change) => (
-                •
+              {change.changes.map((changeItem) => (
+                •
              ))}
            ))}
@@ -39,8 +39,8 @@ export function Changelog({ changelogItems }: Props) { {(item.upcomingChanges || []).map((change, index) => (

{change.title}

- {change.changes.map((change) => ( -
  • + {change.changes.map((changeItem) => ( +
  • ))} ))} diff --git a/src/graphql/scripts/utils/process-schemas.ts b/src/graphql/scripts/utils/process-schemas.ts index b2a2c95edda3..fe8aa867e3ed 100755 --- a/src/graphql/scripts/utils/process-schemas.ts +++ b/src/graphql/scripts/utils/process-schemas.ts @@ -389,10 +389,10 @@ export default async function processSchemas( } await Promise.all( - mutationReturnFields.fields!.map(async (field: FieldDefinitionNode) => { + mutationReturnFields.fields!.map(async (returnFieldDef: FieldDefinitionNode) => { const returnField: Partial = {} - returnField.name = field.name.value - const fieldType = helpers.getType(field) + returnField.name = returnFieldDef.name.value + const fieldType = helpers.getType(returnFieldDef) if (!fieldType) return returnField.type = fieldType returnField.id = helpers.getId(returnField.type) diff --git a/src/graphql/scripts/utils/schema-helpers.ts b/src/graphql/scripts/utils/schema-helpers.ts index 0bd516f1e257..846e9c623035 100644 --- a/src/graphql/scripts/utils/schema-helpers.ts +++ b/src/graphql/scripts/utils/schema-helpers.ts @@ -128,8 +128,8 @@ function getFullLink(baseType: string, id: string): string { return `/graphql/reference/${baseType}#${id}` } -function getId(path: string): string { - return removeMarkers(path).toLowerCase() +function getId(typeName: string): string { + return removeMarkers(typeName).toLowerCase() } // e.g., given `ObjectTypeDefinition`, get `objects` diff --git a/src/landings/components/ProductSelectionCard.tsx b/src/landings/components/ProductSelectionCard.tsx index 415d217cc941..8db2f8d4eb5f 100644 --- a/src/landings/components/ProductSelectionCard.tsx +++ b/src/landings/components/ProductSelectionCard.tsx @@ -45,18 +45,18 @@ export const ProductSelectionCard = ({ group }: ProductSelectionCardProps) => { height: '22px', } - function icon(group: ProductGroupT) { - if (group.icon) { + function icon(productGroup: ProductGroupT) { + if (productGroup.icon) { return (
-            {group.name}
+            {productGroup.name}
    ) - } else if (group.octicon) { - const octicon: React.FunctionComponent = octiconMap[group.octicon] + } else if (productGroup.octicon) { + const octicon: React.FunctionComponent = octiconMap[productGroup.octicon] if (!octicon) { - throw new Error(`Octicon ${group.octicon} not found`) + throw new Error(`Octicon ${productGroup.octicon} not found`) } return ( diff --git a/src/languages/lib/languages-server.ts b/src/languages/lib/languages-server.ts index 97e41f2ff2b7..61dff66d6b49 100644 --- a/src/languages/lib/languages-server.ts +++ b/src/languages/lib/languages-server.ts @@ -90,8 +90,8 @@ export const languagePrefixPathRegex: RegExp = new RegExp(`^/(${languageKeys.joi * if it's something like /foo or /foo/bar or /fr (because French (fr) * is currently not an active language) */ -export function pathLanguagePrefixed(path: string): boolean { - return languagePrefixPathRegex.test(path) +export function pathLanguagePrefixed(urlPath: string): boolean { + return languagePrefixPathRegex.test(urlPath) } export default languages diff --git a/src/languages/scripts/purge-fastly-edge-cache-per-language.ts b/src/languages/scripts/purge-fastly-edge-cache-per-language.ts index bb4f2fa1754e..df987328d05e 100644 --- a/src/languages/scripts/purge-fastly-edge-cache-per-language.ts +++ b/src/languages/scripts/purge-fastly-edge-cache-per-language.ts @@ -41,14 +41,14 @@ for (const language of languages) { } function languagesFromString(str: string): string[] { - const languages = str + const parsedLanguages = str .split(/,/) .map((x) => x.trim()) .filter(Boolean) - if (!languages.every((lang) => languageKeys.includes(lang))) { + if (!parsedLanguages.every((lang) => languageKeys.includes(lang))) { throw new Error( - `Unrecognized language code (${languages.find((lang) => !languageKeys.includes(lang))})`, + `Unrecognized language code (${parsedLanguages.find((lang) => !languageKeys.includes(lang))})`, ) } - return languages + return parsedLanguages } diff --git a/src/languages/tests/frame.ts b/src/languages/tests/frame.ts index 55122e69d787..fca75c4a18e7 100644 --- a/src/languages/tests/frame.ts +++ b/src/languages/tests/frame.ts @@ -96,7 +96,7 @@ describe('release notes', () => { // // This is useful because if we test every single individual version of // every plan the test just takes way too long. 
- const getReleaseNotesVersionCombinations = (langs: string[]) => { + const getReleaseNotesVersionCombinations = (languages: string[]) => { const combinations = [] const prefixes: string[] = [] for (const version of page!.applicableVersions) { @@ -105,7 +105,7 @@ describe('release notes', () => { continue } prefixes.push(prefix) - combinations.push(...langs.map((lang) => [lang, version])) + combinations.push(...languages.map((lang) => [lang, version])) } return combinations } diff --git a/src/languages/tests/translation-error-comments.ts b/src/languages/tests/translation-error-comments.ts index 26c163cb8ce4..19bf22a914bb 100644 --- a/src/languages/tests/translation-error-comments.ts +++ b/src/languages/tests/translation-error-comments.ts @@ -320,8 +320,8 @@ describe('Translation Error Comments', () => { } // Mock renderContent to simulate error for Japanese, success for English - mockRenderContent.mockImplementation((template: string, context: any) => { - if (context.currentLanguage !== 'en' && template.includes('badtag')) { + mockRenderContent.mockImplementation((template: string, innerContext: any) => { + if (innerContext.currentLanguage !== 'en' && template.includes('badtag')) { const error = new Error("Unknown tag 'badtag'") error.name = 'ParseError' ;(error as any).token = { @@ -330,7 +330,7 @@ describe('Translation Error Comments', () => { } throw error } - return context.currentLanguage === 'en' ? 'English Title' : template + return innerContext.currentLanguage === 'en' ? 'English Title' : template }) const result = await renderContentWithFallback(mockPage, 'rawTitle', context) @@ -357,8 +357,8 @@ describe('Translation Error Comments', () => { }, } - mockRenderContent.mockImplementation((template: string, context: any) => { - if (context.currentLanguage !== 'en' && template.includes('badtag')) { + mockRenderContent.mockImplementation((template: string, innerContext: any) => { + if (innerContext.currentLanguage !== 'en' && template.includes('badtag')) { const error = new Error("Unknown tag 'badtag'") error.name = 'ParseError' throw error diff --git a/src/learning-track/tests/lint-data.ts b/src/learning-track/tests/lint-data.ts index b7ef60d3e99d..ef4f2a9ba1c9 100644 --- a/src/learning-track/tests/lint-data.ts +++ b/src/learning-track/tests/lint-data.ts @@ -57,10 +57,10 @@ describe('learning tracks', () => { let fixables = 0 for (const [key, guides] of troubles) { errorMessage += `Under "${key}"...\n` - for (const { uri, index, redirects } of guides) { - if (redirects) { + for (const { uri, index, redirects: redirectTo } of guides) { + if (redirectTo) { fixables += 1 - errorMessage += ` guide: #${index + 1} ${uri} redirects to ${redirects}\n` + errorMessage += ` guide: #${index + 1} ${uri} redirects to ${redirectTo}\n` } else { errorMessage += ` guide: #${index + 1} ${uri} is broken.\n` } diff --git a/src/links/components/LinkPreviewPopover.tsx b/src/links/components/LinkPreviewPopover.tsx index 75f26b466ad4..47b5e2d32bc0 100644 --- a/src/links/components/LinkPreviewPopover.tsx +++ b/src/links/components/LinkPreviewPopover.tsx @@ -284,8 +284,8 @@ function fillPopover( const regex = /^\/(?\w{2}\/)?(?[\w-]+@[\w-.]+\/)?(?[\w-]+\/)?/ const match = regex.exec(linkURL.pathname) if (match?.groups) { - const { lang, version, product } = match.groups - const productURL = [lang, version, product].map((n) => n || '').join('') + const { lang, version, product: productPath } = match.groups + const productURL = [lang, version, productPath].map((n) => n || '').join('') productHeadLink.href = 
`${linkURL.origin}/${productURL}` } productHead.style.display = 'block' diff --git a/src/links/scripts/rendered-content-link-checker.ts b/src/links/scripts/rendered-content-link-checker.ts index e7b12b1af59b..bea6ed6b2567 100755 --- a/src/links/scripts/rendered-content-link-checker.ts +++ b/src/links/scripts/rendered-content-link-checker.ts @@ -483,8 +483,8 @@ async function commentOnPR(core: CoreInject, octokit: Octokit, flaws: LinkFlaw[] issue_number: pullNumber, }) let previousCommentId - for (const { body, id } of data) { - if (body && body.includes(findAgainSymbol)) { + for (const { body: commentBody, id } of data) { + if (commentBody && commentBody.includes(findAgainSymbol)) { previousCommentId = id } } diff --git a/src/links/scripts/validate-github-github-docs-urls/generate-new-json.ts b/src/links/scripts/validate-github-github-docs-urls/generate-new-json.ts index 20ad6e3ab5bb..c30ebaef2e3f 100644 --- a/src/links/scripts/validate-github-github-docs-urls/generate-new-json.ts +++ b/src/links/scripts/validate-github-github-docs-urls/generate-new-json.ts @@ -20,7 +20,7 @@ export function generateNewJSON( let countChanges = 0 for (const [identifier, url] of Object.entries(destination)) { - const check = checks.find((check) => check.identifier === identifier) + const check = checks.find((foundCheck) => foundCheck.identifier === identifier) if (check) { // At the moment, the only possible correction is if the URL is // found but required a redirect. diff --git a/src/metrics/scripts/docstat.ts b/src/metrics/scripts/docstat.ts index 72ca73dcc5a7..2e291ccc4a61 100644 --- a/src/metrics/scripts/docstat.ts +++ b/src/metrics/scripts/docstat.ts @@ -412,8 +412,8 @@ try { // Given input: https://docs.github.com/en/copilot/managing-copilot/ // Use: copilot/managing-copilot -function getCleanPath(providedPath: string): string { - let clean = providedPath +function getCleanPath(inputPath: string): string { + let clean = inputPath const cleanArr = clean.split('?') // remove query params if (cleanArr.length > 1) cleanArr.pop() clean = cleanArr.join('/') @@ -431,29 +431,29 @@ function getCleanPath(providedPath: string): string { return clean } -function getVersion(cleanPath: string): string { - const pathParts = cleanPath.split('/') - const version = ENTERPRISE_REGEX.test(pathParts[0]) ? pathParts[0] : FREE_PRO_TEAM - return version +function getVersion(pathToCheck: string): string { + const pathParts = pathToCheck.split('/') + const versionString = ENTERPRISE_REGEX.test(pathParts[0]) ? pathParts[0] : FREE_PRO_TEAM + return versionString } -function removeVersionSegment(cleanPath: string, version: string): string { - if (version === FREE_PRO_TEAM) return cleanPath - const pathParts = cleanPath.split('/') +function removeVersionSegment(pathToProcess: string, versionString: string): string { + if (versionString === FREE_PRO_TEAM) return pathToProcess + const pathParts = pathToProcess.split('/') pathParts.shift() if (!pathParts.length) return 'index' return pathParts.join('/') } // Try to find the path in the list of valid pages at https://docs.github.com/api/pagelist/en -async function validatePath(cleanPath: string, version: string): Promise { +async function validatePath(pathToValidate: string, versionToValidate: string): Promise { // Only Kusto uses 'index' for the homepage; the Docs API uses '/en' - const basePath = cleanPath === 'index' ? '' : cleanPath + const basePath = pathToValidate === 'index' ? 
'' : pathToValidate const pathToCheck = - version === FREE_PRO_TEAM + versionToValidate === FREE_PRO_TEAM ? path.join('/', 'en', basePath) - : path.join('/', 'en', version, basePath) + : path.join('/', 'en', versionToValidate, basePath) let data: string try { diff --git a/src/observability/middleware/handle-errors.ts b/src/observability/middleware/handle-errors.ts index dd637d91a1be..4b1dcda5636d 100644 --- a/src/observability/middleware/handle-errors.ts +++ b/src/observability/middleware/handle-errors.ts @@ -141,9 +141,9 @@ async function handleError( // Report to Failbot AFTER responding to the user await logException(error, req) } - } catch (error) { - console.error('An error occurred in the error handling middleware!', error) - next(error) + } catch (handlingError) { + console.error('An error occurred in the error handling middleware!', handlingError) + next(handlingError) return } } diff --git a/src/observability/tests/get-automatic-request-logger.ts b/src/observability/tests/get-automatic-request-logger.ts index 1f66fe812ff9..9b05c169ef14 100644 --- a/src/observability/tests/get-automatic-request-logger.ts +++ b/src/observability/tests/get-automatic-request-logger.ts @@ -116,7 +116,7 @@ describe('getAutomaticRequestLogger', () => { // Create a completely isolated test environment for each iteration const isolatedLogs: string[] = [] - const originalConsoleLog = console.log + const savedConsoleLog = console.log // Replace console.log with isolated capture console.log = vi.fn((message: string) => { @@ -174,7 +174,7 @@ describe('getAutomaticRequestLogger', () => { expect(isolatedLogs[0]).toContain(testCase.expectedInLog) } finally { // Always restore console.log - console.log = originalConsoleLog + console.log = savedConsoleLog } } }) @@ -281,7 +281,7 @@ describe('getAutomaticRequestLogger', () => { // Create isolated log capture for this specific test const isolatedLogs: string[] = [] - const originalConsoleLog = console.log + const savedConsoleLog = console.log console.log = vi.fn((message: string) => { isolatedLogs.push(message) @@ -299,7 +299,7 @@ describe('getAutomaticRequestLogger', () => { expect(isolatedLogs).toHaveLength(0) } finally { // Always restore console.log - console.log = originalConsoleLog + console.log = savedConsoleLog } }) diff --git a/src/redirects/lib/get-redirect.ts b/src/redirects/lib/get-redirect.ts index 99b1d7556453..ff5c89640086 100644 --- a/src/redirects/lib/get-redirect.ts +++ b/src/redirects/lib/get-redirect.ts @@ -312,14 +312,14 @@ function tryReplacements(prefix: string, suffix: string, context: Context): stri return undefined } - const test = (suffix: string): boolean => { + const test = (testSuffix: string): boolean => { // This is a generally broad search and replace and this particular // replacement has never been present in api documentation only enterprise // admin documentation, so we're excluding the REST api pages - if (suffix.includes('/rest')) { + if (testSuffix.includes('/rest')) { return false } - const candidateAsRedirect = prefix + suffix + const candidateAsRedirect = prefix + testSuffix const candidateAsURL = `/en${candidateAsRedirect}` return candidateAsRedirect in redirects || candidateAsURL in pages } diff --git a/src/release-notes/middleware/ghes-release-notes.ts b/src/release-notes/middleware/ghes-release-notes.ts index 0e3540ffa05d..cebe47ab4678 100644 --- a/src/release-notes/middleware/ghes-release-notes.ts +++ b/src/release-notes/middleware/ghes-release-notes.ts @@ -71,12 +71,10 @@ export default async function 
ghesReleaseNotesContext( // notes instead. enContext.ghesReleases = formatReleases(ghesReleaseNotes) - const matchedReleaseNotes = enContext.ghesReleases!.find( - (r) => r.version === requestedRelease, - ) - if (!matchedReleaseNotes) throw new Error('Release notes not found') - const currentReleaseNotes = matchedReleaseNotes.patches - return renderPatchNotes(currentReleaseNotes, enContext) + const enMatchedNotes = enContext.ghesReleases!.find((r) => r.version === requestedRelease) + if (!enMatchedNotes) throw new Error('Release notes not found') + const enCurrentNotes = enMatchedNotes.patches + return renderPatchNotes(enCurrentNotes, enContext) }, ) } finally { diff --git a/src/rest/lib/index.ts b/src/rest/lib/index.ts index 9702abf218db..49a191a30bfb 100644 --- a/src/rest/lib/index.ts +++ b/src/rest/lib/index.ts @@ -120,7 +120,7 @@ export async function getRestMiniTocItems( category: string, subCategory: string, apiVersion: string | undefined, - restOperations: Operation[], + operations: Operation[], language: string, version: string, context: Context, @@ -148,7 +148,7 @@ export async function getRestMiniTocItems( const categoryData = apiData.get(category)! if (!categoryData.get(subCategory)) { - const titles = restOperations.map((operation: Operation) => operation.title) + const titles = operations.map((operation: Operation) => operation.title) const restOperationsMiniTocItems = await getAutomatedPageMiniTocItems(titles, context, 3) categoryData.set(subCategory, { restOperationsMiniTocItems, diff --git a/src/rest/scripts/test-open-api-schema.ts b/src/rest/scripts/test-open-api-schema.ts index bf006936a5ec..4efddd638f50 100755 --- a/src/rest/scripts/test-open-api-schema.ts +++ b/src/rest/scripts/test-open-api-schema.ts @@ -163,5 +163,5 @@ function difference(obj1: Record, obj2: Record !file.includes('index.md')) - .filter((file) => !nonAutomatedRestPaths.some((path) => file.includes(path))) + .filter((file) => !nonAutomatedRestPaths.some((excludePath) => file.includes(excludePath))) } diff --git a/src/rest/scripts/utils/sync.ts b/src/rest/scripts/utils/sync.ts index b245f127d090..3215caae5ee7 100644 --- a/src/rest/scripts/utils/sync.ts +++ b/src/rest/scripts/utils/sync.ts @@ -151,23 +151,21 @@ export async function getOpenApiSchemaFiles( // bundling the OpenAPI in github/github const schemaNames = schemas.map((schema) => path.basename(schema, '.json')) - const OPENAPI_VERSION_NAMES = Object.keys(allVersions).map( - (elem) => allVersions[elem].openApiVersionName, - ) + const versionNames = Object.keys(allVersions).map((elem) => allVersions[elem].openApiVersionName) for (const schema of schemaNames) { const schemaBasename = `${schema}.json` // If the version doesn't have calendar date versioning // it should have an exact match with one of the versions defined // in the allVersions object. - if (OPENAPI_VERSION_NAMES.includes(schema)) { + if (versionNames.includes(schema)) { webhookSchemas.push(schemaBasename) } // If the schema version has calendar date versioning, then one of // the versions defined in allVersions should be a substring of the // schema version. This means the schema version is a supported version - if (OPENAPI_VERSION_NAMES.some((elem) => schema.startsWith(elem))) { + if (versionNames.some((elem) => schema.startsWith(elem))) { // If the schema being evaluated is a calendar-date version, then // there would only be one exact match in the list of schema names. 
// If the schema being evaluated is a non-calendar-date version, then diff --git a/src/rest/tests/openapi-schema.ts b/src/rest/tests/openapi-schema.ts index 810489d5bed0..f479a0ae651e 100644 --- a/src/rest/tests/openapi-schema.ts +++ b/src/rest/tests/openapi-schema.ts @@ -190,10 +190,12 @@ describe('OpenAPI schema validation', () => { }) }) -async function findOperation(version: string, method: string, path: string) { +async function findOperation(version: string, method: string, requestPath: string) { const allOperations = await getFlatListOfOperations(version) return allOperations.find((operation) => { - return operation.requestPath === path && operation.verb.toLowerCase() === method.toLowerCase() + return ( + operation.requestPath === requestPath && operation.verb.toLowerCase() === method.toLowerCase() + ) }) } diff --git a/src/rest/tests/rendering.ts b/src/rest/tests/rendering.ts index 645401bf0072..ce2718f98cad 100644 --- a/src/rest/tests/rendering.ts +++ b/src/rest/tests/rendering.ts @@ -23,7 +23,7 @@ describe('REST references docs', () => { .map((i, h2) => $(h2).attr('id')) .get() const schemaSlugs = checksRestOperations.map((operation) => slug(operation.title)) - expect(schemaSlugs.every((slug) => domH2Ids.includes(slug))).toBe(true) + expect(schemaSlugs.every((operationSlug) => domH2Ids.includes(operationSlug))).toBe(true) } }) diff --git a/src/search/components/input/SearchOverlay.tsx b/src/search/components/input/SearchOverlay.tsx index bf9d40ea8ebd..1b2894f77ca4 100644 --- a/src/search/components/input/SearchOverlay.tsx +++ b/src/search/components/input/SearchOverlay.tsx @@ -187,47 +187,45 @@ export function SearchOverlay({ // Combine options for key navigation const [combinedOptions, generalOptionsWithViewStatus, aiOptionsWithUserInput] = useMemo(() => { setAnnouncement('') - let generalOptionsWithViewStatus = [...generalSearchResults] - const aiOptionsWithUserInput = [...userInputOptions, ...filteredAIOptions] - const combinedOptions = [] as Array<{ + let generalWithView = [...generalSearchResults] + const aiWithUser = [...userInputOptions, ...filteredAIOptions] + const combined = [] as Array<{ group: 'general' | 'ai' | string url?: string option: AutocompleteSearchHitWithUserQuery | GeneralSearchHitWithOptions }> if (generalSearchResults.length > 0) { - generalOptionsWithViewStatus.push({ + generalWithView.push({ title: t('search.overlay.view_all_search_results'), isViewAllResults: true, } as any) } else if (autoCompleteSearchError) { if (urlSearchInputQuery.trim() !== '') { - generalOptionsWithViewStatus.push({ + generalWithView.push({ ...(userInputOptions[0] || {}), isSearchDocsOption: true, } as unknown as GeneralSearchHit) } } else if (urlSearchInputQuery.trim() !== '' && !searchLoading) { setAnnouncement(t('search.overlay.no_results_found_announcement')) - generalOptionsWithViewStatus.push({ + generalWithView.push({ title: t('search.overlay.no_results_found'), isNoResultsFound: true, } as any) } else { - generalOptionsWithViewStatus = [] + generalWithView = [] } // NOTE: Order of combinedOptions is important, since 'selectedIndex' is used to navigate the combinedOptions array // Add general options _before_ AI options - combinedOptions.push( - ...generalOptionsWithViewStatus.map((option) => ({ group: 'general', option })), - ) + combined.push(...generalWithView.map((option) => ({ group: 'general', option }))) // On AI Error, don't include AI suggestions, only user input if (!aiSearchError && !isAskAIState) { - combinedOptions.push(...aiOptionsWithUserInput.map((option) 
=> ({ group: 'ai', option }))) + combined.push(...aiWithUser.map((option) => ({ group: 'ai', option }))) } else if (isAskAIState && !aiCouldNotAnswer) { // When "ask ai" state is reached, we have references that are ActionList items. // We want to navigate these items via the keyboard, so include them in the combinedOptions array - combinedOptions.push( + combined.push( ...aiReferences.map((option) => ({ group: 'reference', // The references are actually article URLs that we want to navigate to url: option.url, @@ -240,7 +238,7 @@ export function SearchOverlay({ ) } - return [combinedOptions, generalOptionsWithViewStatus, aiOptionsWithUserInput] + return [combined, generalWithView, aiWithUser] }, [ generalSearchResults, totalGeneralSearchResults, diff --git a/src/search/components/results/SearchResults.tsx b/src/search/components/results/SearchResults.tsx index 27c3a0d5b260..399d011cc6f0 100644 --- a/src/search/components/results/SearchResults.tsx +++ b/src/search/components/results/SearchResults.tsx @@ -159,12 +159,12 @@ function ResultsPagination({ page, totalPages }: { page: number; totalPages: num } }, [asPath]) - function hrefBuilder(page: number) { + function hrefBuilder(pageNumber: number) { const params = new URLSearchParams(asPathQuery) - if (page === 1) { + if (pageNumber === 1) { params.delete('page') } else { - params.set('page', `${page}`) + params.set('page', `${pageNumber}`) } return `/${router.locale}${asPathRoot}?${params}` } @@ -176,22 +176,22 @@ function ResultsPagination({ page, totalPages }: { page: number; totalPages: num pageCount={Math.min(totalPages, 10)} currentPage={page} hrefBuilder={hrefBuilder} - onPageChange={(event, page) => { + onPageChange={(event, pageNum) => { event.preventDefault() - const [asPathRoot, asPathQuery = ''] = router.asPath.split('#')[0].split('?') - const params = new URLSearchParams(asPathQuery) - if (page !== 1) { - params.set('page', `${page}`) + const [pathRoot, pathQuery = ''] = router.asPath.split('#')[0].split('?') + const params = new URLSearchParams(pathQuery) + if (pageNum !== 1) { + params.set('page', `${pageNum}`) } else { params.delete('page') } - let asPath = `/${router.locale}${asPathRoot}` + let newPath = `/${router.locale}${pathRoot}` if (params.toString()) { - asPath += `?${params}` + newPath += `?${params}` } - setAsPath(asPath) - router.push(asPath) + setAsPath(newPath) + router.push(newPath) }} /> diff --git a/src/search/lib/elasticsearch-versions.ts b/src/search/lib/elasticsearch-versions.ts index d365ddda57a7..7e09dc27181b 100644 --- a/src/search/lib/elasticsearch-versions.ts +++ b/src/search/lib/elasticsearch-versions.ts @@ -100,11 +100,11 @@ export function getPlanVersionFromIndexVersion(indexVersion: string): string { // This is needed for scraping since the pages use the 'allVersions' key as their version export function getAllVersionsKeyFromIndexVersion(indexVersion: string): string { const key = Object.keys(allVersions).find( - (key) => - key === indexVersion || - allVersions[key].shortName === indexVersion || - allVersions[key].plan === indexVersion || - allVersions[key].miscVersionName === indexVersion, + (versionKey) => + versionKey === indexVersion || + allVersions[versionKey].shortName === indexVersion || + allVersions[versionKey].plan === indexVersion || + allVersions[versionKey].miscVersionName === indexVersion, ) if (!key) { diff --git a/src/search/scripts/analyze-text.ts b/src/search/scripts/analyze-text.ts index 60833e28e3a5..3a13566ab666 100755 --- a/src/search/scripts/analyze-text.ts +++ 
b/src/search/scripts/analyze-text.ts @@ -79,8 +79,8 @@ try { process.exit(1) } -async function main(opts: Options, args: string[]): Promise { - const texts = [args.join(' ')] +async function main(opts: Options, textArgs: string[]): Promise { + const texts = [textArgs.join(' ')] if (!opts.elasticsearchUrl && !process.env.ELASTICSEARCH_URL) { throw new Error( 'Must pass the elasticsearch URL option or ' + diff --git a/src/search/scripts/index/lib/index-general-search.ts b/src/search/scripts/index/lib/index-general-search.ts index c588489899c8..7116794379e7 100644 --- a/src/search/scripts/index/lib/index-general-search.ts +++ b/src/search/scripts/index/lib/index-general-search.ts @@ -98,7 +98,7 @@ export async function indexGeneralSearch(sourceDirectory: string, opts: Options) versionsToIndex, ) - for (const language of languages) { + for (const lang of languages) { let count = 0 for (const versionKey of versionsToIndex) { const startTime = new Date() @@ -106,11 +106,11 @@ export async function indexGeneralSearch(sourceDirectory: string, opts: Options) const { indexName, indexAlias } = getElasticSearchIndex( 'generalSearch', versionKey, - language, + lang, opts.indexPrefix || '', ) - await indexVersion(client, indexName, indexAlias, language, sourceDirectory, opts) + await indexVersion(client, indexName, indexAlias, lang, sourceDirectory, opts) count++ if (opts.staggerSeconds && count < versionsToIndex.length - 1) { diff --git a/src/search/scripts/index/utils/indexing-elasticsearch-utils.ts b/src/search/scripts/index/utils/indexing-elasticsearch-utils.ts index 5b2b559d5499..0db377ab6798 100644 --- a/src/search/scripts/index/utils/indexing-elasticsearch-utils.ts +++ b/src/search/scripts/index/utils/indexing-elasticsearch-utils.ts @@ -64,11 +64,11 @@ export async function populateIndex( { attempts, sleepTime, - onError: (_, attempts, sleepTime) => { + onError: (_, remainingAttempts, sleepMs) => { console.warn( chalk.yellow( - `Failed to bulk index ${indexName}. Will attempt ${attempts} more times (after ${ - sleepTime / 1000 + `Failed to bulk index ${indexName}. Will attempt ${remainingAttempts} more times (after ${ + sleepMs / 1000 }s sleep).`, ), ) diff --git a/src/shielding/middleware/handle-invalid-query-string-values.ts b/src/shielding/middleware/handle-invalid-query-string-values.ts index b0a660dfc2cd..6efa459610d3 100644 --- a/src/shielding/middleware/handle-invalid-query-string-values.ts +++ b/src/shielding/middleware/handle-invalid-query-string-values.ts @@ -42,9 +42,9 @@ export default function handleInvalidQuerystringValues( for (const [key, value] of Object.entries(query)) { if (RECOGNIZED_VALUES_KEYS.has(key)) { const validValues = RECOGNIZED_VALUES[key as keyof typeof RECOGNIZED_VALUES] - const value = query[key] - const values = Array.isArray(value) ? value : [value] - if (values.some((value) => typeof value === 'string' && !validValues.includes(value))) { + const queryValue = query[key] + const values = Array.isArray(queryValue) ? queryValue : [queryValue] + if (values.some((val) => typeof val === 'string' && !validValues.includes(val))) { if (process.env.NODE_ENV === 'development') { console.warn( 'Warning! Invalid query string *value* detected. 
%O is not one of %O', diff --git a/src/shielding/tests/invalid-querystrings.ts b/src/shielding/tests/invalid-querystrings.ts index 992d805c389c..3527f2ca662b 100644 --- a/src/shielding/tests/invalid-querystrings.ts +++ b/src/shielding/tests/invalid-querystrings.ts @@ -53,9 +53,9 @@ describe('invalid query strings', () => { expect(res.headers.location).toBe('/en') // But note that it only applies to the home page! { - const url = `/en/get-started?${randomCharacters(8)}` - const res = await get(url) - expect(res.statusCode).toBe(200) + const nestedUrl = `/en/get-started?${randomCharacters(8)}` + const nestedRes = await get(nestedUrl) + expect(nestedRes.statusCode).toBe(200) } }) diff --git a/src/tools/components/PlatformPicker.tsx b/src/tools/components/PlatformPicker.tsx index c2d427fdd8fd..ceb02cbeb7ae 100644 --- a/src/tools/components/PlatformPicker.tsx +++ b/src/tools/components/PlatformPicker.tsx @@ -19,7 +19,7 @@ const platforms = [ function showPlatformSpecificContent(platform: string) { const markdowns = Array.from(document.querySelectorAll('.ghd-tool')) markdowns - .filter((el) => platforms.some((platform) => el.classList.contains(platform.value))) + .filter((el) => platforms.some((platformValue) => el.classList.contains(platformValue.value))) .forEach((el) => { el.style.display = el.classList.contains(platform) ? '' : 'none' @@ -36,7 +36,7 @@ function showPlatformSpecificContent(platform: string) { // example: inline content const platformEls = Array.from( document.querySelectorAll( - platforms.map((platform) => `.platform-${platform.value}`).join(', '), + platforms.map((platformOption) => `.platform-${platformOption.value}`).join(', '), ), ) platformEls.forEach((el) => { diff --git a/src/tools/components/ToolPicker.tsx b/src/tools/components/ToolPicker.tsx index df023209f96c..32472ac8fa15 100644 --- a/src/tools/components/ToolPicker.tsx +++ b/src/tools/components/ToolPicker.tsx @@ -14,7 +14,7 @@ import { InArticlePicker } from './InArticlePicker' function showToolSpecificContent(tool: string, supportedTools: Array) { const markdowns = Array.from(document.querySelectorAll('.ghd-tool')) markdowns - .filter((el) => supportedTools.some((tool) => el.classList.contains(tool))) + .filter((el) => supportedTools.some((toolName) => el.classList.contains(toolName))) .forEach((el) => { el.style.display = el.classList.contains(tool) ? '' : 'none' @@ -31,7 +31,7 @@ function showToolSpecificContent(tool: string, supportedTools: Array) { // example: inline content const toolEls = Array.from( document.querySelectorAll( - supportedTools.map((tool) => `.tool-${tool}`).join(', '), + supportedTools.map((toolOption) => `.tool-${toolOption}`).join(', '), ), ) toolEls.forEach((el) => { diff --git a/src/versions/middleware/features.ts b/src/versions/middleware/features.ts index 45aa77eb21ab..4b170e932984 100644 --- a/src/versions/middleware/features.ts +++ b/src/versions/middleware/features.ts @@ -37,7 +37,7 @@ function getFeaturesByVersion(currentVersion: string): Record { allFeatures = getDeepDataByLanguage('features', 'en') as Record } - const features: { + const featureFlags: { [feature: string]: boolean } = {} // Determine whether the currentVersion belongs to the list of versions the feature is available in. @@ -51,9 +51,9 @@ function getFeaturesByVersion(currentVersion: string): Record { // Adding the resulting boolean to the context object gives us the ability to use // `{% if featureName ... %}` conditionals in content files. 
const isFeatureAvailableInCurrentVersion = applicableVersions.includes(currentVersion) - features[featureName] = isFeatureAvailableInCurrentVersion + featureFlags[featureName] = isFeatureAvailableInCurrentVersion } - cache.set(currentVersion, features) + cache.set(currentVersion, featureFlags) } return cache.get(currentVersion) diff --git a/src/webhooks/tests/rendering.ts b/src/webhooks/tests/rendering.ts index cec11e66dd28..f290eaeaf995 100644 --- a/src/webhooks/tests/rendering.ts +++ b/src/webhooks/tests/rendering.ts @@ -78,7 +78,7 @@ describe('webhooks events and payloads', () => { payloadExampleElem.each((i, elem) => { const siblings = $(elem) .nextUntil('[id^=webhook-payload-example]') - .filter((i, elem) => $(elem).hasClass('height-constrained-code-block')) + .filter((idx, sibling) => $(sibling).hasClass('height-constrained-code-block')) expect(siblings.length).toBeGreaterThan(0) }) } diff --git a/src/workflows/experimental/readability-report.ts b/src/workflows/experimental/readability-report.ts index d3ffa06f13d8..98ab9a67d2cc 100644 --- a/src/workflows/experimental/readability-report.ts +++ b/src/workflows/experimental/readability-report.ts @@ -162,8 +162,8 @@ function getChangedContentFiles(): string[] { }) } -function makeURL(path: string): string { - return `http://localhost:4000${path}` +function makeURL(urlPath: string): string { + return `http://localhost:4000${urlPath}` } async function waitForServer(): Promise { diff --git a/src/workflows/issue-report.ts b/src/workflows/issue-report.ts index e6fa75e09fc0..7d0c3873005a 100644 --- a/src/workflows/issue-report.ts +++ b/src/workflows/issue-report.ts @@ -101,13 +101,13 @@ export async function linkReports({ } // Comment on all previous reports that are still open - for (const previousReport of previousReports) { - if (previousReport.state === 'closed' || previousReport.html_url === newReport.html_url) { + for (const oldReport of previousReports) { + if (oldReport.state === 'closed' || oldReport.html_url === newReport.html_url) { continue } // If an old report is not assigned to someone we close it - const shouldClose = !previousReport.assignees?.length + const shouldClose = !oldReport.assignees?.length let body = `➡️ [Newer report](${newReport.html_url})` if (shouldClose) { body += '\n\nClosing in favor of newer report since there are no assignees on this issue' @@ -116,14 +116,12 @@ export async function linkReports({ await octokit.rest.issues.createComment({ owner, repo, - issue_number: previousReport.number, + issue_number: oldReport.number, body, }) - core.info( - `Linked old report to new report via comment on old report: #${previousReport.number}.`, - ) + core.info(`Linked old report to new report via comment on old report: #${oldReport.number}.`) } catch (error) { - core.setFailed(`Error commenting on previousReport, #${previousReport.number}`) + core.setFailed(`Error commenting on previousReport, #${oldReport.number}`) throw error } if (shouldClose) { @@ -131,12 +129,12 @@ export async function linkReports({ await octokit.rest.issues.update({ owner, repo, - issue_number: previousReport.number, + issue_number: oldReport.number, state: 'closed', }) - core.info(`Closing old report: #${previousReport.number} because it doesn't have assignees`) + core.info(`Closing old report: #${oldReport.number} because it doesn't have assignees`) } catch (error) { - core.setFailed(`Error closing previousReport, #${previousReport.number}`) + core.setFailed(`Error closing previousReport, #${oldReport.number}`) throw error } } diff --git 
a/src/workflows/projects.ts b/src/workflows/projects.ts index 10d9ba747a1d..fc953142d864 100644 --- a/src/workflows/projects.ts +++ b/src/workflows/projects.ts @@ -6,7 +6,7 @@ import { graphql } from '@octokit/graphql' // Pull out the node ID of a project field export function findFieldID(fieldName: string, data: Record) { const field = data.organization.projectV2.fields.nodes.find( - (field: Record) => field.name === fieldName, + (fieldNode: Record) => fieldNode.name === fieldName, ) if (field && field.id) { @@ -23,14 +23,14 @@ export function findSingleSelectID( data: Record, ) { const field = data.organization.projectV2.fields.nodes.find( - (field: Record) => field.name === fieldName, + (fieldData: Record) => fieldData.name === fieldName, ) if (!field) { throw new Error(`A field called "${fieldName}" was not found. Check if the field was renamed.`) } const singleSelect = field.options.find( - (field: Record) => field.name === singleSelectName, + (option: Record) => option.name === singleSelectName, ) if (singleSelect && singleSelect.id) { @@ -203,7 +203,7 @@ export function generateUpdateProjectV2ItemFieldMutation({ // Build the mutation to update a single project field // Specify literal=true to indicate that the value should be used as a string, not a variable function generateMutationToUpdateField({ - item, + item: itemId, fieldID, value, fieldType, @@ -220,12 +220,12 @@ export function generateUpdateProjectV2ItemFieldMutation({ // Strip all non-alphanumeric out of the item ID when creating the mutation ID to avoid a GraphQL parsing error // (statistically, this should still give us a unique mutation ID) return ` - set_${fieldID.slice(1)}_item_${item.replaceAll( + set_${fieldID.slice(1)}_item_${itemId.replaceAll( /[^a-z0-9]/g, '', )}: updateProjectV2ItemFieldValue(input: { projectId: $project - itemId: "${item}" + itemId: "${itemId}" fieldId: ${fieldID} value: { ${parsedValue} } }) { diff --git a/src/workflows/walk-files.ts b/src/workflows/walk-files.ts index 017855e7f982..9c4c6cc2cb11 100644 --- a/src/workflows/walk-files.ts +++ b/src/workflows/walk-files.ts @@ -16,7 +16,10 @@ export default function walkFiles( const walkSyncOpts = { includeBasePath: true, directories: false } return walk(dir, walkSyncOpts) - .filter((file) => extensions.some((ext) => file.endsWith(ext)) && !file.endsWith('README.md')) + .filter( + (file) => + extensions.some((extension) => file.endsWith(extension)) && !file.endsWith('README.md'), + ) .filter((file) => (opts.includeEarlyAccess ? file : !file.includes('/early-access/'))) }