
fix: Remove unused plus typescript tightening (#3527)

* chore: remove unused build file

* chore: remove unused package

* chore: remove unused function

* chore: remove unnecessary | undefineds

* chore: replace unnecessary &&s with optional chaining

* chore: use .at(-x) instead of .length - x property access

gives stricter TS typing and is more concise (a sketch of the typing difference follows this list)

* chore: tighten TS types

* chore: sort tokens alphabetically

* fix: typeof plus !== null check

* chore: type test for .parse, .use

* fix: if check
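
A minimal sketch of the `.at(-x)` point above (illustrative only, not code from this diff): `Array.prototype.at` is typed as `T | undefined`, so the compiler forces the empty-array case to be handled, whereas bracket indexing via `arr.length - 1` is typed as plain `T` unless `noUncheckedIndexedAccess` is enabled.

    // Requires the "es2022" lib (or newer) for Array.prototype.at.
    const tokens: { raw: string }[] = [];

    // Bracket indexing is typed as { raw: string } even when the array is empty,
    // so this would compile and then throw at runtime:
    //   tokens[tokens.length - 1].raw += '\n';

    // .at(-1) is typed as { raw: string } | undefined, so a check is required:
    const lastToken = tokens.at(-1);
    if (lastToken !== undefined) {
      lastToken.raw += '\n'; // narrowed to { raw: string }
    }
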
Spencer Whitehead, 9 months ago
commit 1f579f7628
17 changed files with 223 additions and 232 deletions
  1. bower.json (+0 -23)
  2. docs/demo/demo.js (+1 -1)
  3. package-lock.json (+0 -7)
  4. package.json (+0 -1)
  5. src/Hooks.ts (+1 -1)
  6. src/Instance.ts (+2 -2)
  7. src/Lexer.ts (+57 -57)
  8. src/MarkedOptions.ts (+14 -14)
  9. src/Parser.ts (+3 -4)
  10. src/Renderer.ts (+1 -1)
  11. src/Tokenizer.ts (+8 -5)
  12. src/Tokens.ts (+104 -104)
  13. src/helpers.ts (+1 -1)
  14. src/marked.ts (+2 -2)
  15. test/rules.js (+2 -2)
  16. test/types/marked.ts (+25 -5)
  17. test/unit/marked.test.js (+2 -2)

bower.json (+0 -23)

@@ -1,23 +0,0 @@
-{
-  "name": "marked",
-  "homepage": "https://github.com/markedjs/marked",
-  "authors": [
-    "Christopher Jeffrey <chjjeffrey@gmail.com>"
-  ],
-  "description": "A markdown parser built for speed",
-  "keywords": [
-    "markdown",
-    "markup",
-    "html"
-  ],
-  "main": "lib/marked.cjs",
-  "license": "MIT",
-  "ignore": [
-    "**/.*",
-    "node_modules",
-    "bower_components",
-    "app/bower_components",
-    "test",
-    "tests"
-  ]
-}

docs/demo/demo.js (+1 -1)

@@ -185,7 +185,7 @@ function setOptions(opts) {
   $optionsElem.value = JSON.stringify(
     opts,
     (key, value) => {
-      if (value && typeof value === 'object' && Object.getPrototypeOf(value) !== Object.prototype) {
+      if (value !== null && typeof value === 'object' && Object.getPrototypeOf(value) !== Object.prototype) {
         return undefined;
       }
       return value;
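
The guard above exists because of a JavaScript quirk: `typeof null === 'object'`, so a bare `typeof value === 'object'` test lets `null` through. The old truthiness test (`value &&`) happened to cover that case too; the explicit `value !== null` states the intent without also filtering other falsy values. A minimal sketch of the pattern (hypothetical replacer, not this file's code):

    // typeof null === 'object' is the quirk the explicit null check guards against.
    function replacer(_key: string, value: unknown) {
      // Drop anything that is an object but not a plain object literal.
      if (value !== null && typeof value === 'object'
        && Object.getPrototypeOf(value) !== Object.prototype) {
        return undefined;
      }
      return value; // primitives, null, and plain objects pass through
    }

    JSON.stringify({ ok: 1, re: /x/ }, replacer); // '{"ok":1}' (the RegExp is dropped)
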

package-lock.json (+0 -7)

@@ -35,7 +35,6 @@
         "rollup": "^4.25.0",
         "semantic-release": "^24.2.0",
         "titleize": "^4.0.0",
-        "ts-expect": "^1.3.0",
         "tslib": "^2.8.1",
         "typescript": "5.6.3"
       },
@@ -8698,12 +8697,6 @@
         "typescript": ">=4.2.0"
       }
     },
-    "node_modules/ts-expect": {
-      "version": "1.3.0",
-      "resolved": "https://registry.npmjs.org/ts-expect/-/ts-expect-1.3.0.tgz",
-      "integrity": "sha512-e4g0EJtAjk64xgnFPD6kTBUtpnMVzDrMb12N1YZV0VvSlhnVT3SGxiYTLdGy8Q5cYHOIC/FAHmZ10eGrAguicQ==",
-      "dev": true
-    },
     "node_modules/tslib": {
       "version": "2.8.1",
       "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",

package.json (+0 -1)

@@ -76,7 +76,6 @@
     "rollup": "^4.25.0",
     "semantic-release": "^24.2.0",
     "titleize": "^4.0.0",
-    "ts-expect": "^1.3.0",
     "tslib": "^2.8.1",
     "typescript": "5.6.3"
   },

src/Hooks.ts (+1 -1)

@@ -6,7 +6,7 @@ import type { Token, TokensList } from './Tokens.ts';
 
 export class _Hooks {
   options: MarkedOptions;
-  block: boolean | undefined;
+  block?: boolean;
 
   constructor(options?: MarkedOptions) {
     this.options = options || _defaults;

src/Instance.ts (+2 -2)

@@ -265,11 +265,11 @@ export class Marked {
     type overloadedParse = {
       (src: string, options: MarkedOptions & { async: true }): Promise<string>;
       (src: string, options: MarkedOptions & { async: false }): string;
-      (src: string, options?: MarkedOptions | undefined | null): string | Promise<string>;
+      (src: string, options?: MarkedOptions | null): string | Promise<string>;
     };
 
     // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    const parse: overloadedParse = (src: string, options?: MarkedOptions | undefined | null): any => {
+    const parse: overloadedParse = (src: string, options?: MarkedOptions | null): any => {
       const origOpt = { ...options };
       const opt = { ...this.defaults, ...origOpt };
 

src/Lexer.ts (+57 -57)

@@ -2,7 +2,7 @@ import { _Tokenizer } from './Tokenizer.ts';
 import { _defaults } from './defaults.ts';
 import { other, block, inline } from './rules.ts';
 import type { Token, TokensList, Tokens } from './Tokens.ts';
-import type { MarkedOptions, TokenizerExtension } from './MarkedOptions.ts';
+import type { MarkedOptions } from './MarkedOptions.ts';
 
 /**
  * Block Lexer
@@ -85,8 +85,7 @@ export class _Lexer {
    * Preprocessing
    */
   lex(src: string) {
-    src = src
-      .replace(other.carriageReturn, '\n');
+    src = src.replace(other.carriageReturn, '\n');
 
     this.blockTokens(src, this.tokens);
 
@@ -109,31 +108,28 @@ export class _Lexer {
       src = src.replace(other.tabCharGlobal, '    ').replace(other.spaceLine, '');
     }
 
-    let token: Tokens.Generic | undefined;
-    let lastToken;
-    let cutSrc;
-
     while (src) {
-      if (this.options.extensions
-        && this.options.extensions.block
-        && this.options.extensions.block.some((extTokenizer: TokenizerExtension['tokenizer']) => {
-          if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
-            src = src.substring(token.raw.length);
-            tokens.push(token);
-            return true;
-          }
-          return false;
-        })) {
+      let token: Tokens.Generic | undefined;
+
+      if (this.options.extensions?.block?.some((extTokenizer) => {
+        if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
+          src = src.substring(token.raw.length);
+          tokens.push(token);
+          return true;
+        }
+        return false;
+      })) {
         continue;
       }
 
       // newline
       if (token = this.tokenizer.space(src)) {
         src = src.substring(token.raw.length);
-        if (token.raw.length === 1 && tokens.length > 0) {
+        const lastToken = tokens.at(-1);
+        if (token.raw.length === 1 && lastToken !== undefined) {
           // if there's a single \n as a spacer, it's terminating the last line,
           // so move it there so that we don't get unnecessary paragraph tags
-          tokens[tokens.length - 1].raw += '\n';
+          lastToken.raw += '\n';
         } else {
           tokens.push(token);
         }
@@ -143,12 +139,12 @@ export class _Lexer {
       // code
       if (token = this.tokenizer.code(src)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
+        const lastToken = tokens.at(-1);
         // An indented code block cannot interrupt a paragraph.
-        if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
+        if (lastToken?.type === 'paragraph' || lastToken?.type === 'text') {
           lastToken.raw += '\n' + token.raw;
           lastToken.text += '\n' + token.text;
-          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+          this.inlineQueue.at(-1)!.src = lastToken.text;
         } else {
           tokens.push(token);
         }
@@ -200,11 +196,11 @@ export class _Lexer {
       // def
       if (token = this.tokenizer.def(src)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
-        if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
+        const lastToken = tokens.at(-1);
+        if (lastToken?.type === 'paragraph' || lastToken?.type === 'text') {
           lastToken.raw += '\n' + token.raw;
           lastToken.text += '\n' + token.raw;
-          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+          this.inlineQueue.at(-1)!.src = lastToken.text;
         } else if (!this.tokens.links[token.tag]) {
           this.tokens.links[token.tag] = {
             href: token.href,
@@ -230,30 +226,32 @@ export class _Lexer {
 
       // top-level paragraph
       // prevent paragraph consuming extensions by clipping 'src' to extension start
-      cutSrc = src;
-      if (this.options.extensions && this.options.extensions.startBlock) {
+      let cutSrc = src;
+      if (this.options.extensions?.startBlock) {
         let startIndex = Infinity;
         const tempSrc = src.slice(1);
         let tempStart;
         this.options.extensions.startBlock.forEach((getStartIndex) => {
           tempStart = getStartIndex.call({ lexer: this }, tempSrc);
-          if (typeof tempStart === 'number' && tempStart >= 0) { startIndex = Math.min(startIndex, tempStart); }
+          if (typeof tempStart === 'number' && tempStart >= 0) {
+            startIndex = Math.min(startIndex, tempStart);
+          }
         });
         if (startIndex < Infinity && startIndex >= 0) {
           cutSrc = src.substring(0, startIndex + 1);
         }
       }
       if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) {
-        lastToken = tokens[tokens.length - 1];
+        const lastToken = tokens.at(-1);
         if (lastParagraphClipped && lastToken?.type === 'paragraph') {
           lastToken.raw += '\n' + token.raw;
           lastToken.text += '\n' + token.text;
           this.inlineQueue.pop();
-          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+          this.inlineQueue.at(-1)!.src = lastToken.text;
         } else {
           tokens.push(token);
         }
-        lastParagraphClipped = (cutSrc.length !== src.length);
+        lastParagraphClipped = cutSrc.length !== src.length;
         src = src.substring(token.raw.length);
         continue;
       }
@@ -261,12 +259,12 @@ export class _Lexer {
       // text
       if (token = this.tokenizer.text(src)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
-        if (lastToken && lastToken.type === 'text') {
+        const lastToken = tokens.at(-1);
+        if (lastToken?.type === 'text') {
           lastToken.raw += '\n' + token.raw;
           lastToken.text += '\n' + token.text;
           this.inlineQueue.pop();
-          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+          this.inlineQueue.at(-1)!.src = lastToken.text;
         } else {
           tokens.push(token);
         }
@@ -297,12 +295,9 @@ export class _Lexer {
    * Lexing/Compiling
    */
   inlineTokens(src: string, tokens: Token[] = []): Token[] {
-    let token, lastToken, cutSrc;
-
     // String with links masked to avoid interference with em and strong
     let maskedSrc = src;
-    let match;
-    let keepPrevChar, prevChar;
+    let match: RegExpExecArray | null = null;
 
     // Mask out reflinks
     if (this.tokens.links) {
@@ -310,7 +305,9 @@ export class _Lexer {
       if (links.length > 0) {
         while ((match = this.tokenizer.rules.inline.reflinkSearch.exec(maskedSrc)) != null) {
           if (links.includes(match[0].slice(match[0].lastIndexOf('[') + 1, -1))) {
-            maskedSrc = maskedSrc.slice(0, match.index) + '[' + 'a'.repeat(match[0].length - 2) + ']' + maskedSrc.slice(this.tokenizer.rules.inline.reflinkSearch.lastIndex);
+            maskedSrc = maskedSrc.slice(0, match.index)
+              + '[' + 'a'.repeat(match[0].length - 2) + ']'
+              + maskedSrc.slice(this.tokenizer.rules.inline.reflinkSearch.lastIndex);
           }
         }
       }
@@ -325,23 +322,25 @@ export class _Lexer {
       maskedSrc = maskedSrc.slice(0, match.index) + '++' + maskedSrc.slice(this.tokenizer.rules.inline.anyPunctuation.lastIndex);
     }
 
+    let keepPrevChar = false;
+    let prevChar = '';
     while (src) {
       if (!keepPrevChar) {
         prevChar = '';
       }
       keepPrevChar = false;
 
+      let token: Tokens.Generic | undefined;
+
       // extensions
-      if (this.options.extensions
-        && this.options.extensions.inline
-        && this.options.extensions.inline.some((extTokenizer) => {
-          if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
-            src = src.substring(token.raw.length);
-            tokens.push(token);
-            return true;
-          }
-          return false;
-        })) {
+      if (this.options.extensions?.inline?.some((extTokenizer) => {
+        if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
+          src = src.substring(token.raw.length);
+          tokens.push(token);
+          return true;
+        }
+        return false;
+      })) {
         continue;
       }
 
@@ -355,7 +354,6 @@ export class _Lexer {
       // tag
       if (token = this.tokenizer.tag(src)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
         tokens.push(token);
         continue;
       }
@@ -370,8 +368,8 @@ export class _Lexer {
       // reflink, nolink
       if (token = this.tokenizer.reflink(src, this.tokens.links)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
-        if (lastToken && token.type === 'text' && lastToken.type === 'text') {
+        const lastToken = tokens.at(-1);
+        if (token.type === 'text' && lastToken?.type === 'text') {
           lastToken.raw += token.raw;
           lastToken.text += token.text;
         } else {
@@ -424,14 +422,16 @@ export class _Lexer {
 
       // text
       // prevent inlineText consuming extensions by clipping 'src' to extension start
-      cutSrc = src;
-      if (this.options.extensions && this.options.extensions.startInline) {
+      let cutSrc = src;
+      if (this.options.extensions?.startInline) {
         let startIndex = Infinity;
         const tempSrc = src.slice(1);
         let tempStart;
         this.options.extensions.startInline.forEach((getStartIndex) => {
           tempStart = getStartIndex.call({ lexer: this }, tempSrc);
-          if (typeof tempStart === 'number' && tempStart >= 0) { startIndex = Math.min(startIndex, tempStart); }
+          if (typeof tempStart === 'number' && tempStart >= 0) {
+            startIndex = Math.min(startIndex, tempStart);
+          }
         });
         if (startIndex < Infinity && startIndex >= 0) {
           cutSrc = src.substring(0, startIndex + 1);
@@ -443,8 +443,8 @@ export class _Lexer {
           prevChar = token.raw.slice(-1);
         }
         keepPrevChar = true;
-        lastToken = tokens[tokens.length - 1];
-        if (lastToken && lastToken.type === 'text') {
+        const lastToken = tokens.at(-1);
+        if (lastToken?.type === 'text') {
           lastToken.raw += token.raw;
           lastToken.text += token.text;
         } else {
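
Two patterns show up for the `T | undefined` that `.at(-1)` introduces: the Lexer keeps a non-null assertion (`this.inlineQueue.at(-1)!.src = ...`) at sites where the surrounding logic has just pushed or merged a token, while the Tokenizer change further down uses an explicit `if (lastItem)` guard instead. A minimal sketch of both patterns (hypothetical names, not this file's code):

    interface InlineItem { src: string; }
    const inlineQueue: InlineItem[] = [{ src: 'first line' }];

    // Non-null assertion: concise, but the compiler no longer checks the
    // "queue is non-empty" invariant for you.
    inlineQueue.at(-1)!.src = 'merged text';

    // Explicit guard: slightly longer, but stays type-safe if the invariant
    // ever stops holding.
    const last = inlineQueue.at(-1);
    if (last) {
      last.src = last.src.trimEnd();
    }
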

src/MarkedOptions.ts (+14 -14)

@@ -16,9 +16,9 @@ export type TokenizerStartFunction = (this: TokenizerThis, src: string) => numbe
 export interface TokenizerExtension {
   name: string;
   level: 'block' | 'inline';
-  start?: TokenizerStartFunction | undefined;
+  start?: TokenizerStartFunction;
   tokenizer: TokenizerExtensionFunction;
-  childTokens?: string[] | undefined;
+  childTokens?: string[];
 }
 
 export interface RendererThis {
@@ -58,19 +58,19 @@ export interface MarkedExtension {
   /**
    * Enable GFM line breaks. This option requires the gfm option to be true.
    */
-  breaks?: boolean | undefined;
+  breaks?: boolean;
 
   /**
    * Add tokenizers and renderers to marked
    */
   extensions?:
     | TokenizerAndRendererExtension[]
-    | undefined | null;
+    | null;
 
   /**
    * Enable GitHub flavored markdown.
    */
-  gfm?: boolean | undefined;
+  gfm?: boolean;
 
   /**
    * Hooks are methods that hook into some part of marked.
@@ -80,29 +80,29 @@ export interface MarkedExtension {
    * provideLexer is called to provide a function to tokenize markdown.
    * provideParser is called to provide a function to parse tokens.
    */
-  hooks?: HooksObject | undefined | null;
+  hooks?: HooksObject | null;
 
   /**
    * Conform to obscure parts of markdown.pl as much as possible. Don't fix any of the original markdown bugs or poor behavior.
    */
-  pedantic?: boolean | undefined;
+  pedantic?: boolean;
 
   /**
    * Type: object Default: new Renderer()
    *
    * An object containing functions to render tokens to HTML.
    */
-  renderer?: RendererObject | undefined | null;
+  renderer?: RendererObject | null;
 
   /**
    * Shows an HTML error message when rendering fails.
    */
-  silent?: boolean | undefined;
+  silent?: boolean;
 
   /**
    * The tokenizer defines how to turn markdown text into tokens.
    */
-  tokenizer?: TokenizerObject | undefined | null;
+  tokenizer?: TokenizerObject | null;
 
   /**
    * The walkTokens function gets called with every token.
@@ -110,26 +110,26 @@ export interface MarkedExtension {
    * Each token is passed by reference so updates are persisted when passed to the parser.
    * The return value of the function is ignored.
    */
-  walkTokens?: ((token: Token) => void | Promise<void>) | undefined | null;
+  walkTokens?: ((token: Token) => void | Promise<void>) | null;
 }
 
 export interface MarkedOptions extends Omit<MarkedExtension, 'hooks' | 'renderer' | 'tokenizer' | 'extensions' | 'walkTokens'> {
   /**
    * Hooks are methods that hook into some part of marked.
    */
-  hooks?: _Hooks | undefined | null;
+  hooks?: _Hooks | null;
 
   /**
    * Type: object Default: new Renderer()
    *
    * An object containing functions to render tokens to HTML.
    */
-  renderer?: _Renderer | undefined | null;
+  renderer?: _Renderer | null;
 
   /**
    * The tokenizer defines how to turn markdown text into tokens.
    */
-  tokenizer?: _Tokenizer | undefined | null;
+  tokenizer?: _Tokenizer | null;
 
   /**
    * Custom extensions
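
All of the properties touched here are already optional (`?:`), and with `exactOptionalPropertyTypes` disabled (the tsc default) `foo?: T` already accepts `undefined`, so the explicit `| undefined` members were redundant; the `| undefined | null` unions likewise collapse to `| null`. The block added to test/types/marked.ts further down passes `undefined` for every option to confirm that callers can still do so. A minimal sketch of the equivalence, assuming default compiler settings:

    // With exactOptionalPropertyTypes disabled, these two declarations
    // accept exactly the same values:
    interface A { breaks?: boolean; }
    interface B { breaks?: boolean | undefined; }

    const a1: A = {};                     // ok: property omitted
    const a2: A = { breaks: undefined };  // ok: `?:` alone already allows undefined
    const a3: A = { breaks: true };       // ok

    // Enabling exactOptionalPropertyTypes would make a2 an error for A but not
    // for B, which is what the explicit-undefined test below exercises.
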

src/Parser.ts (+3 -4)

@@ -46,7 +46,7 @@ export class _Parser {
       const anyToken = tokens[i];
 
       // Run any renderer extensions
-      if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[anyToken.type]) {
+      if (this.options.extensions?.renderers?.[anyToken.type]) {
         const genericToken = anyToken as Tokens.Generic;
         const ret = this.options.extensions.renderers[genericToken.type].call({ parser: this }, genericToken);
         if (ret !== false || !['space', 'hr', 'heading', 'code', 'table', 'blockquote', 'list', 'html', 'paragraph', 'text'].includes(genericToken.type)) {
@@ -132,15 +132,14 @@ export class _Parser {
   /**
    * Parse Inline Tokens
    */
-  parseInline(tokens: Token[], renderer?: _Renderer | _TextRenderer): string {
-    renderer = renderer || this.renderer;
+  parseInline(tokens: Token[], renderer: _Renderer | _TextRenderer = this.renderer): string {
     let out = '';
 
     for (let i = 0; i < tokens.length; i++) {
       const anyToken = tokens[i];
 
       // Run any renderer extensions
-      if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[anyToken.type]) {
+      if (this.options.extensions?.renderers?.[anyToken.type]) {
         const ret = this.options.extensions.renderers[anyToken.type].call({ parser: this }, anyToken);
         if (ret !== false || !['escape', 'html', 'link', 'image', 'strong', 'em', 'codespan', 'br', 'del', 'text'].includes(anyToken.type)) {
           out += ret || '';
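
parseInline now expresses the fallback as a default parameter value instead of the `renderer = renderer || this.renderer` reassignment. One nuance: `||` substituted the fallback for any falsy argument, including an explicit `null`, whereas a default parameter only applies when the argument is `undefined` or omitted. A minimal sketch of that difference (hypothetical types, not the Parser's code):

    class TextRenderer { name = 'text'; }
    const fallbackRenderer = new TextRenderer();

    // Old shape: any falsy argument falls back.
    function renderOld(renderer?: TextRenderer | null): string {
      renderer = renderer || fallbackRenderer;
      return renderer.name;
    }

    // New shape: only undefined (or an omitted argument) falls back.
    function renderNew(renderer: TextRenderer = fallbackRenderer): string {
      return renderer.name;
    }

    renderOld(null);      // 'text'
    renderNew(undefined); // 'text'
    // renderNew(null);   // compile error: null is not assignable to TextRenderer
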

src/Renderer.ts (+1 -1)

@@ -77,7 +77,7 @@ export class _Renderer {
     if (item.task) {
       const checkbox = this.checkbox({ checked: !!item.checked });
       if (item.loose) {
-        if (item.tokens.length > 0 && item.tokens[0].type === 'paragraph') {
+        if (item.tokens[0]?.type === 'paragraph') {
           item.tokens[0].text = checkbox + ' ' + item.tokens[0].text;
           if (item.tokens[0].tokens && item.tokens[0].tokens.length > 0 && item.tokens[0].tokens[0].type === 'text') {
             item.tokens[0].tokens[0].text = checkbox + ' ' + escape(item.tokens[0].tokens[0].text);

src/Tokenizer.ts (+8 -5)

@@ -198,7 +198,7 @@ export class _Tokenizer {
           break;
         }
 
-        const lastToken = tokens[tokens.length - 1];
+        const lastToken = tokens.at(-1);
 
         if (lastToken?.type === 'code') {
           // blockquote continuation cannot be preceded by a code block
@@ -222,7 +222,7 @@ export class _Tokenizer {
 
           raw = raw.substring(0, raw.length - lastToken.raw.length) + newToken.raw;
           text = text.substring(0, text.length - oldToken.raw.length) + newToken.raw;
-          lines = newText.substring(tokens[tokens.length - 1].raw.length).split('\n');
+          lines = newText.substring(tokens.at(-1)!.raw.length).split('\n');
           continue;
         }
       }
@@ -414,8 +414,11 @@ export class _Tokenizer {
       }
 
       // Do not consume newlines at end of final item. Alternatively, make itemRegex *start* with any newlines to simplify/speed up endsWithBlankLine logic
-      list.items[list.items.length - 1].raw = list.items[list.items.length - 1].raw.trimEnd();
-      list.items[list.items.length - 1].text = list.items[list.items.length - 1].text.trimEnd();
+      const lastItem = list.items.at(-1);
+      if (lastItem) {
+        lastItem.raw = lastItem.raw.trimEnd();
+        lastItem.text = lastItem.text.trimEnd();
+      }
       list.raw = list.raw.trimEnd();
 
       // Item child tokens handled here at end because we needed to have the final item to trim it first
@@ -486,7 +489,7 @@ export class _Tokenizer {
 
     const headers = splitCells(cap[1]);
     const aligns = cap[2].replace(this.rules.other.tableAlignChars, '').split('|');
-    const rows = cap[3] && cap[3].trim() ? cap[3].replace(this.rules.other.tableRowBlankLine, '').split('\n') : [];
+    const rows = cap[3]?.trim() ? cap[3].replace(this.rules.other.tableRowBlankLine, '').split('\n') : [];
 
     const item: Tokens.Table = {
       type: 'table',

src/Tokens.ts (+104 -104)

@@ -1,115 +1,112 @@
 /* eslint-disable no-use-before-define */
 
 export type MarkedToken = (
-    Tokens.Space
+  Tokens.Blockquote
+  | Tokens.Br
   | Tokens.Code
+  | Tokens.Codespan
+  | Tokens.Def
+  | Tokens.Del
+  | Tokens.Em
+  | Tokens.Escape
   | Tokens.Heading
-  | Tokens.Table
   | Tokens.Hr
-  | Tokens.Blockquote
-  | Tokens.List
-  | Tokens.ListItem
-  | Tokens.Paragraph
   | Tokens.HTML
-  | Tokens.Text
-  | Tokens.Def
-  | Tokens.Escape
-  | Tokens.Tag
   | Tokens.Image
   | Tokens.Link
+  | Tokens.List
+  | Tokens.ListItem
+  | Tokens.Paragraph
+  | Tokens.Space
   | Tokens.Strong
-  | Tokens.Em
-  | Tokens.Codespan
-  | Tokens.Br
-  | Tokens.Del);
+  | Tokens.Table
+  | Tokens.Tag
+  | Tokens.Text
+);
 
 export type Token = (
-    MarkedToken
+  MarkedToken
   | Tokens.Generic);
 
 export namespace Tokens {
-  export interface Space {
-    type: 'space';
+  export interface Blockquote {
+    type: 'blockquote';
+    raw: string;
+    text: string;
+    tokens: Token[];
+  }
+
+  export interface Br {
+    type: 'br';
     raw: string;
   }
 
+  export interface Checkbox {
+    checked: boolean;
+  }
+
   export interface Code {
     type: 'code';
     raw: string;
-    codeBlockStyle?: 'indented' | undefined;
-    lang?: string | undefined;
+    codeBlockStyle?: 'indented';
+    lang?: string;
     text: string;
     escaped?: boolean;
   }
 
-  export interface Heading {
-    type: 'heading';
+  export interface Codespan {
+    type: 'codespan';
     raw: string;
-    depth: number;
     text: string;
-    tokens: Token[];
   }
 
-  export interface Table {
-    type: 'table';
+  export interface Def {
+    type: 'def';
     raw: string;
-    align: Array<'center' | 'left' | 'right' | null>;
-    header: TableCell[];
-    rows: TableCell[][];
-  }
-
-  export interface TableRow {
-    text: string;
+    tag: string;
+    href: string;
+    title: string;
   }
 
-  export interface TableCell {
+  export interface Del {
+    type: 'del';
+    raw: string;
     text: string;
     tokens: Token[];
-    header: boolean;
-    align: 'center' | 'left' | 'right' | null;
   }
 
-  export interface Hr {
-    type: 'hr';
+  export interface Em {
+    type: 'em';
     raw: string;
+    text: string;
+    tokens: Token[];
   }
 
-  export interface Blockquote {
-    type: 'blockquote';
+  export interface Escape {
+    type: 'escape';
     raw: string;
     text: string;
-    tokens: Token[];
   }
 
-  export interface List {
-    type: 'list';
+  export interface Generic {
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    [index: string]: any;
+    type: string;
     raw: string;
-    ordered: boolean;
-    start: number | '';
-    loose: boolean;
-    items: ListItem[];
+    tokens?: Token[];
   }
 
-  export interface ListItem {
-    type: 'list_item';
+  export interface Heading {
+    type: 'heading';
     raw: string;
-    task: boolean;
-    checked?: boolean | undefined;
-    loose: boolean;
+    depth: number;
     text: string;
     tokens: Token[];
   }
 
-  export interface Checkbox {
-    checked: boolean;
-  }
-
-  export interface Paragraph {
-    type: 'paragraph';
+  export interface Hr {
+    type: 'hr';
     raw: string;
-    pre?: boolean | undefined;
-    text: string;
-    tokens: Token[];
   }
 
   export interface HTML {
@@ -120,52 +117,53 @@ export namespace Tokens {
     block: boolean;
   }
 
-  export interface Text {
-    type: 'text';
+  export interface Image {
+    type: 'image';
     raw: string;
+    href: string;
+    title: string | null;
     text: string;
-    tokens?: Token[];
-    escaped?: boolean;
   }
 
-  export interface Def {
-    type: 'def';
+  export interface Link {
+    type: 'link';
     raw: string;
-    tag: string;
     href: string;
-    title: string;
+    title?: string | null;
+    text: string;
+    tokens: Token[];
   }
 
-  export interface Escape {
-    type: 'escape';
+  export interface List {
+    type: 'list';
     raw: string;
-    text: string;
+    ordered: boolean;
+    start: number | '';
+    loose: boolean;
+    items: ListItem[];
   }
 
-  export interface Tag {
-    type: 'html';
+  export interface ListItem {
+    type: 'list_item';
     raw: string;
-    inLink: boolean;
-    inRawBlock: boolean;
+    task: boolean;
+    checked?: boolean;
+    loose: boolean;
     text: string;
-    block: boolean;
+    tokens: Token[];
   }
 
-  export interface Link {
-    type: 'link';
+  export interface Paragraph {
+    type: 'paragraph';
     raw: string;
-    href: string;
-    title?: string | null;
+    pre?: boolean;
     text: string;
     tokens: Token[];
   }
 
-  export interface Image {
-    type: 'image';
+  export interface Space {
+    type: 'space';
     raw: string;
-    href: string;
-    title: string | null;
-    text: string;
   }
 
   export interface Strong {
@@ -175,38 +173,40 @@ export namespace Tokens {
     tokens: Token[];
   }
 
-  export interface Em {
-    type: 'em';
+  export interface Table {
+    type: 'table';
     raw: string;
-    text: string;
-    tokens: Token[];
+    align: Array<'center' | 'left' | 'right' | null>;
+    header: TableCell[];
+    rows: TableCell[][];
   }
 
-  export interface Codespan {
-    type: 'codespan';
-    raw: string;
+  export interface TableCell {
     text: string;
+    tokens: Token[];
+    header: boolean;
+    align: 'center' | 'left' | 'right' | null;
   }
 
-  export interface Br {
-    type: 'br';
-    raw: string;
+  export interface TableRow {
+    text: string;
   }
 
-  export interface Del {
-    type: 'del';
+  export interface Tag {
+    type: 'html';
     raw: string;
+    inLink: boolean;
+    inRawBlock: boolean;
     text: string;
-    tokens: Token[];
+    block: boolean;
   }
 
-  export interface Generic {
-    // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    [index: string]: any;
-
-    type: string;
+  export interface Text {
+    type: 'text';
     raw: string;
-    tokens?: Token[] | undefined;
+    text: string;
+    tokens?: Token[];
+    escaped?: boolean;
   }
 }
 

src/helpers.ts (+1 -1)

@@ -72,7 +72,7 @@ export function splitCells(tableRow: string, count?: number) {
   if (!cells[0].trim()) {
     cells.shift();
   }
-  if (cells.length > 0 && !cells[cells.length - 1].trim()) {
+  if (cells.length > 0 && !cells.at(-1)?.trim()) {
     cells.pop();
   }
 

src/marked.ts (+2 -2)

@@ -34,8 +34,8 @@ export function marked(src: string, options: MarkedOptions & { async: true }): P
  */
 export function marked(src: string, options: MarkedOptions & { async: false }): string;
 export function marked(src: string, options: MarkedOptions & { async: true }): Promise<string>;
-export function marked(src: string, options?: MarkedOptions | undefined | null): string | Promise<string>;
-export function marked(src: string, opt?: MarkedOptions | undefined | null): string | Promise<string> {
+export function marked(src: string, options?: MarkedOptions | null): string | Promise<string>;
+export function marked(src: string, opt?: MarkedOptions | null): string | Promise<string> {
   return markedInstance.parse(src, opt);
 }
 

test/rules.js (+2 -2)

@@ -34,7 +34,7 @@ function propsToString(obj) {
     return null;
   }
   if (obj.constructor.name === 'Object') {
-    if (obj.exec && obj.exec.name === 'noopTest') {
+    if (obj.exec?.name === 'noopTest') {
       return null;
     }
     for (const prop in obj) {
@@ -61,7 +61,7 @@ if (process.argv.length > 2) {
         rule = rule[prop];
       }
     }
-    rulesList[rulePath[0]] = rule && rule[rulePath[0]] ? rule[rulePath[0]] : null;
+    rulesList[rulePath[0]] = rule?.[rulePath[0]] ?? null;
   }
 } else {
   rulesObj = rules;
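
The `rule?.[key] ?? null` rewrite above matches the old `rule && rule[key] ? rule[key] : null` as long as a looked-up entry is never falsy-but-defined: `??` only falls back on `null`/`undefined`, while the old ternary also mapped `0`, `''`, and `false` to `null`, a difference that presumably never matters for these rule tables. A minimal sketch of the general distinction (hypothetical data):

    const rules: Record<string, RegExp | undefined> = { code: /^ {4}/ };

    const hit = rules?.['code'] ?? null;     // /^ {4}/
    const miss = rules?.['missing'] ?? null; // null

    // Where the two forms differ: falsy values that are not null or undefined.
    const counts: Record<string, number> = { loose: 0 };
    const oldStyle = counts && counts['loose'] ? counts['loose'] : null; // null
    const newStyle = counts?.['loose'] ?? null;                          // 0
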

test/types/marked.ts (+25 -5)

@@ -46,11 +46,6 @@ let options: MarkedOptions = {
 options = marked.getDefaults();
 options = marked.defaults;
 
-function callback(err: Error | null, markdown: string | undefined) {
-  console.log('Callback called!');
-  console.log(markdown);
-}
-
 let myOldMarked: typeof marked = marked.options(options);
 myOldMarked = marked.setOptions(options);
 
@@ -378,3 +373,28 @@ import { inline } from 'marked';
 // Rules is exported
 import type { Rules } from 'marked';
 
+marked.parse('', {
+  async: undefined,
+  breaks: undefined,
+  extensions: undefined,
+  gfm: undefined,
+  hooks: undefined,
+  pedantic: undefined,
+  renderer: undefined,
+  silent: undefined,
+  tokenizer: undefined,
+  walkTokens: undefined,
+});
+
+marked.use({
+  async: undefined,
+  breaks: undefined,
+  extensions: undefined,
+  gfm: undefined,
+  hooks: undefined,
+  pedantic: undefined,
+  renderer: undefined,
+  silent: undefined,
+  tokenizer: undefined,
+  walkTokens: undefined,
+});

test/unit/marked.test.js (+2 -2)

@@ -673,8 +673,8 @@ used extension2 walked</p>
         }],
         walkTokens(token) {
           if (token.tokens) {
-            const finalChildToken = token.tokens[token.tokens.length - 1];
-            if (finalChildToken && finalChildToken.type === 'inlineStyleTag') {
+            const finalChildToken = token.tokens.at(-1);
+            if (finalChildToken?.type === 'inlineStyleTag') {
               token.originalType = token.type;
               token.type = 'styled';
               token.style = `style="color:${finalChildToken.text};"`;