Skip to content

Commit 9f60f32

Browse files
committed
Update dev-dependencies
1 parent 95687c8 commit 9f60f32

File tree

2 files changed

+5
-34
lines changed

2 files changed

+5
-34
lines changed

lib/index.js

Lines changed: 3 additions & 32 deletions
Original file line number | Diff line number | Diff line change
@@ -199,10 +199,8 @@ function text(node, state) {
199199

200200
resetTokenizer(state, pointStart(node))
201201
// @ts-expect-error: private.
202-
// type-coverage:ignore-next-line
203202
state.parser.currentToken = token
204203
// @ts-expect-error: private.
205-
// type-coverage:ignore-next-line
206204
state.parser._processToken(state.parser.currentToken)
207205
}
208206

@@ -229,10 +227,8 @@ function doctype(node, state) {
229227

230228
resetTokenizer(state, pointStart(node))
231229
// @ts-expect-error: private.
232-
// type-coverage:ignore-next-line
233230
state.parser.currentToken = token
234231
// @ts-expect-error: private.
235-
// type-coverage:ignore-next-line
236232
state.parser._processToken(state.parser.currentToken)
237233
}
238234

@@ -292,10 +288,8 @@ function comment(node, state) {
292288
}
293289
resetTokenizer(state, pointStart(node))
294290
// @ts-expect-error: private.
295-
// type-coverage:ignore-next-line
296291
state.parser.currentToken = token
297292
// @ts-expect-error: private.
298-
// type-coverage:ignore-next-line
299293
state.parser._processToken(state.parser.currentToken)
300294
}
301295

@@ -313,8 +307,6 @@ function handleRaw(node, state) {
313307
// Reset preprocessor:
314308
// See: <https://github.com/inikulin/parse5/blob/6f7ca60/packages/parse5/lib/tokenizer/preprocessor.ts#L18-L31>.
315309
state.parser.tokenizer.preprocessor.html = ''
316-
// @ts-expect-error: private.
317-
// type-coverage:ignore-next-line
318310
state.parser.tokenizer.preprocessor.pos = -1
319311
// @ts-expect-error: private.
320312
// type-coverage:ignore-next-line
@@ -325,8 +317,6 @@ function handleRaw(node, state) {
325317
// @ts-expect-error: private.
326318
// type-coverage:ignore-next-line
327319
state.parser.tokenizer.preprocessor.skipNextNewLine = false
328-
// @ts-expect-error: private.
329-
// type-coverage:ignore-next-line
330320
state.parser.tokenizer.preprocessor.lastChunkWritten = false
331321
state.parser.tokenizer.preprocessor.endOfChunkHit = false
332322
// @ts-expect-error: private.
@@ -337,7 +327,6 @@ function handleRaw(node, state) {
337327
setPoint(state, pointStart(node))
338328
state.parser.tokenizer.write(node.value, false)
339329
// @ts-expect-error: private.
340-
// type-coverage:ignore-next-line
341330
state.parser.tokenizer._runParsingLoop()
342331

343332
// Character references hang, so if we ended there, we need to flush
@@ -350,19 +339,18 @@ function handleRaw(node, state) {
350339
// See: <https://github.com/inikulin/parse5/blob/46cba43/packages/parse5/lib/tokenizer/index.ts#L58>
351340
// Note: a change to `parse5`, which breaks this, was merged but not released.
352341
// Investigate when it is.
342+
// To do: remove next major.
343+
/* c8 ignore next 12 -- removed in <https://github.com/inikulin/parse5/pull/897> */
353344
if (
354345
state.parser.tokenizer.state === 72 /* NAMED_CHARACTER_REFERENCE */ ||
346+
// @ts-expect-error: removed.
355347
state.parser.tokenizer.state === 78 /* NUMERIC_CHARACTER_REFERENCE_END */
356348
) {
357-
// @ts-expect-error: private.
358-
// type-coverage:ignore-next-line
359349
state.parser.tokenizer.preprocessor.lastChunkWritten = true
360350
/** @type {number} */
361351
// @ts-expect-error: private.
362-
// type-coverage:ignore-next-line
363352
const cp = state.parser.tokenizer._consume()
364353
// @ts-expect-error: private.
365-
// type-coverage:ignore-next-line
366354
state.parser.tokenizer._callState(cp)
367355
}
368356
}
@@ -413,18 +401,15 @@ function resetTokenizer(state, point) {
413401
// Process final characters if they’re still there after hibernating.
414402
/** @type {Token.CharacterToken} */
415403
// @ts-expect-error: private.
416-
// type-coverage:ignore-next-line
417404
const token = state.parser.tokenizer.currentCharacterToken
418405

419406
if (token && token.location) {
420407
token.location.endLine = state.parser.tokenizer.preprocessor.line
421408
token.location.endCol = state.parser.tokenizer.preprocessor.col + 1
422409
token.location.endOffset = state.parser.tokenizer.preprocessor.offset + 1
423410
// @ts-expect-error: private.
424-
// type-coverage:ignore-next-line
425411
state.parser.currentToken = token
426412
// @ts-expect-error: private.
427-
// type-coverage:ignore-next-line
428413
state.parser._processToken(state.parser.currentToken)
429414
}
430415

@@ -436,35 +421,26 @@ function resetTokenizer(state, point) {
436421
// But also if broken HTML is in `raw`, and then a correct element is given.
437422
// See GH-11.
438423
// @ts-expect-error: private.
439-
// type-coverage:ignore-next-line
440424
state.parser.tokenizer.paused = false
441425
// @ts-expect-error: private.
442-
// type-coverage:ignore-next-line
443426
state.parser.tokenizer.inLoop = false
444427

445428
// Note: don’t reset `state`, `inForeignNode`, or `lastStartTagName`, we
446429
// manually update those when needed.
447430
state.parser.tokenizer.active = false
448431
// @ts-expect-error: private.
449-
// type-coverage:ignore-next-line
450432
state.parser.tokenizer.returnState = TokenizerMode.DATA
451433
// @ts-expect-error: private.
452-
// type-coverage:ignore-next-line
453434
state.parser.tokenizer.charRefCode = -1
454435
// @ts-expect-error: private.
455-
// type-coverage:ignore-next-line
456436
state.parser.tokenizer.consumedAfterSnapshot = -1
457437
// @ts-expect-error: private.
458-
// type-coverage:ignore-next-line
459438
state.parser.tokenizer.currentLocation = null
460439
// @ts-expect-error: private.
461-
// type-coverage:ignore-next-line
462440
state.parser.tokenizer.currentCharacterToken = null
463441
// @ts-expect-error: private.
464-
// type-coverage:ignore-next-line
465442
state.parser.tokenizer.currentToken = null
466443
// @ts-expect-error: private.
467-
// type-coverage:ignore-next-line
468444
state.parser.tokenizer.currentAttr = {name: '', value: ''}
469445
}
470446

@@ -496,7 +472,6 @@ function setPoint(state, point) {
496472
state.parser.tokenizer.preprocessor.droppedBufferSize = point.offset
497473
state.parser.tokenizer.preprocessor.line = point.line
498474
// @ts-expect-error: private.
499-
// type-coverage:ignore-next-line
500475
state.parser.tokenizer.currentLocation = location
501476
}
502477
}
@@ -552,10 +527,8 @@ function startTag(node, state) {
552527
// So we act *as if* the tokenizer emits tokens:
553528

554529
// @ts-expect-error: private.
555-
// type-coverage:ignore-next-line
556530
state.parser.currentToken = tag
557531
// @ts-expect-error: private.
558-
// type-coverage:ignore-next-line
559532
state.parser._processToken(state.parser.currentToken)
560533

561534
// …but then we still need a bunch of work that the tokenizer would normally
@@ -609,10 +582,8 @@ function endTag(node, state) {
609582
// So we act *as if* the tokenizer emits tokens:
610583

611584
// @ts-expect-error: private.
612-
// type-coverage:ignore-next-line
613585
state.parser.currentToken = tag
614586
// @ts-expect-error: private.
615-
// type-coverage:ignore-next-line
616587
state.parser._processToken(state.parser.currentToken)
617588

618589
// …but then we still need a bunch of work that the tokenizer would normally

package.json

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -49,7 +49,7 @@
4949
"zwitch": "^2.0.0"
5050
},
5151
"devDependencies": {
52-
"@types/node": "^20.0.0",
52+
"@types/node": "^22.0.0",
5353
"@types/ungap__structured-clone": "^1.0.0",
5454
"c8": "^10.0.0",
5555
"hast-util-to-html": "^9.0.0",
@@ -62,7 +62,7 @@
6262
"type-coverage": "^2.0.0",
6363
"typescript": "^5.0.0",
6464
"unist-builder": "^4.0.0",
65-
"xo": "^0.58.0"
65+
"xo": "^0.59.0"
6666
},
6767
"scripts": {
6868
"prepack": "npm run build && npm run format",

0 commit comments

Comments (0)