From 95dad58839df30b85e2b60262de5a4b9ea1ec00b Mon Sep 17 00:00:00 2001
From: jaywcjlove
Date: Tue, 4 Apr 2023 17:16:26 +0000
Subject: [PATCH] v3.10.2 released v3.10.2 #399 (#424)
 33487c6f22aa44c1417068ef1b5261c124d486e9

---
 asset-manifest.json                                         | 6 +++---
 index.html                                                  | 2 +-
 static/js/{main.ade238e5.js => main.817cd47f.js}            | 6 +++---
 ...ade238e5.js.LICENSE.txt => main.817cd47f.js.LICENSE.txt} | 0
 static/js/{main.ade238e5.js.map => main.817cd47f.js.map}    | 2 +-
 5 files changed, 8 insertions(+), 8 deletions(-)
 rename static/js/{main.ade238e5.js => main.817cd47f.js} (99%)
 rename static/js/{main.ade238e5.js.LICENSE.txt => main.817cd47f.js.LICENSE.txt} (100%)
 rename static/js/{main.ade238e5.js.map => main.817cd47f.js.map} (99%)

diff --git a/asset-manifest.json b/asset-manifest.json
index 94217179..b33d752e 100644
--- a/asset-manifest.json
+++ b/asset-manifest.json
@@ -1,13 +1,13 @@
 {
   "files": {
     "main.css": "/hotkeys/static/css/main.43d08dfb.css",
-    "main.js": "/hotkeys/static/js/main.ade238e5.js",
+    "main.js": "/hotkeys/static/js/main.817cd47f.js",
     "refractor-vendor.js": "/hotkeys/static/js/refractor-vendor.5a67e873.js",
     "react-vendor.js": "/hotkeys/static/js/react-vendor.a606ee0f.js",
     "static/media/bg.jpg": "/hotkeys/static/media/bg.c118c911b775de37ecf2.jpg",
     "index.html": "/hotkeys/index.html",
     "main.43d08dfb.css.map": "/hotkeys/static/css/main.43d08dfb.css.map",
-    "main.ade238e5.js.map": "/hotkeys/static/js/main.ade238e5.js.map",
+    "main.817cd47f.js.map": "/hotkeys/static/js/main.817cd47f.js.map",
     "refractor-vendor.5a67e873.js.map": "/hotkeys/static/js/refractor-vendor.5a67e873.js.map",
     "react-vendor.a606ee0f.js.map": "/hotkeys/static/js/react-vendor.a606ee0f.js.map"
   },
@@ -15,6 +15,6 @@
     "static/js/refractor-vendor.5a67e873.js",
     "static/js/react-vendor.a606ee0f.js",
     "static/css/main.43d08dfb.css",
-    "static/js/main.ade238e5.js"
+    "static/js/main.817cd47f.js"
   ]
 }
\ No newline at end of file
diff --git a/index.html b/index.html
index ed09977a..9cece998 100644
--- a/index.html
+++ b/index.html
@@ -1 +1 @@
-hotkeys.js - A robust Javascript library for capturing keyboard input.
\ No newline at end of file
+hotkeys.js - A robust Javascript library for capturing keyboard input.
\ No newline at end of file diff --git a/static/js/main.ade238e5.js b/static/js/main.817cd47f.js similarity index 99% rename from static/js/main.ade238e5.js rename to static/js/main.817cd47f.js index 8fcf09a2..8b6d2173 100644 --- a/static/js/main.ade238e5.js +++ b/static/js/main.817cd47f.js @@ -1,3 +1,3 @@ -/*! For license information please see main.ade238e5.js.LICENSE.txt */ -!function(){var e={5469:function(e,t,n){"use strict";var r={};n.r(r),n.d(r,{attentionMarkers:function(){return un},contentInitial:function(){return en},disable:function(){return sn},document:function(){return Jt},flow:function(){return nn},flowInitial:function(){return tn},insideSpan:function(){return an},string:function(){return rn},text:function(){return on}});var o=n(5848),i=n(3824),a=n(5671),u=n(3144),s=n(7326),l=n(136),c=n(3668);function f(){return f=Object.assign?Object.assign.bind():function(e){for(var t=1;t=0||(o[n]=e[n]);return o}var h=n(3433),d=n(1120),m=n(9611);var E=n(8814);function T(e,t,n){return T=(0,E.Z)()?Reflect.construct.bind():function(e,t,n){var r=[null];r.push.apply(r,t);var o=new(Function.bind.apply(e,r));return n&&(0,m.Z)(o,n.prototype),o},T.apply(null,arguments)}function g(e){var t="function"===typeof Map?new Map:void 0;return g=function(e){if(null===e||(n=e,-1===Function.toString.call(n).indexOf("[native code]")))return e;var n;if("function"!==typeof e)throw new TypeError("Super expression must either be null or a function");if("undefined"!==typeof t){if(t.has(e))return t.get(e);t.set(e,r)}function r(){return T(e,arguments,(0,d.Z)(this).constructor)}return r.prototype=Object.create(e.prototype,{constructor:{value:r,enumerable:!1,writable:!0,configurable:!0}}),(0,m.Z)(r,e)},g(e)}var v=document.createElement("template");v.innerHTML='\n\n\n';var y=function(e){(0,l.Z)(n,e);var t=(0,c.Z)(n);function n(){var e;return(0,a.Z)(this,n),(e=t.call(this)).right="0",e.shadow=e.attachShadow({mode:"open"}),e.shadow.appendChild(e.ownerDocument.importNode(v.content,!0)),e.update(),e}return(0,u.Z)(n,[{key:"setAttr",value:function(e,t){var n=this.shadow.querySelector("svg");/(href)/.test(e.toLocaleLowerCase())?n.lastElementChild.setAttribute("xlink:href",t):/(color|fill)/.test(e.toLocaleLowerCase())?n.firstElementChild.style[e]=t:/(z-index|position|top|left|right|bottom|transform)/.test(e.toLocaleLowerCase())?n.style[e]=t:n.setAttribute(e,t)}},{key:"update",value:function(){var e=this;[].concat((0,h.Z)(this.getAttributeNames()),["right"]).forEach((function(t){var n=e.getAttribute(t)||e[t]||"";e.setAttr(t,n)}))}},{key:"attributeChangedCallback",value:function(e,t,n){t!==n&&this.setAttr(e,n)}}],[{key:"observedAttributes",get:function(){return["style","z-index","target","height","width","href","color","fill","position","top","left","right","bottom","transform"]}}]),n}(g(HTMLElement));customElements.define("github-corners",y);var A=n(2466),k=["size","fixed","bottom","zIndex","className","style","bgColor","color","position"];function C(e){var t=e.size,n=void 0===t?80:t,r=e.fixed,o=void 0!==r&&r,i=e.bottom,a=e.zIndex,u=e.style,s=e.bgColor,l=void 0===s?"#151513":s,c=e.color,h=void 0===c?"#fff":c,d=e.position,m=void 0===d?"right":d,E=p(e,k),T="left"===m?{left:0,right:"initial",transform:"scale(-1, 1)"}:{right:0,left:"initial",transform:"scale(1, 1)"};return i?(T.bottom=0,T.top="initial",T.transform="left"===m?"scale(-1, -1)":"scale(1, -1)"):(T.bottom="initial",T.top=0),(0,A.jsx)("github-corners",f({target:"__blank",width:n,height:n,href:e.href,position:o?"fixed":"absolute","z-index":a,style:u,fill:l,color:h},T,E))}var 
_=function(e){(0,l.Z)(n,e);var t=(0,c.Z)(n);function n(){return(0,a.Z)(this,n),t.apply(this,arguments)}return(0,u.Z)(n,[{key:"render",value:function(){var e=this;return(0,A.jsx)(A.Fragment,{children:o.Children.toArray(this.props.children).map((function(t){return o.isValidElement(t)?o.cloneElement(t,f({},e.props,t.props)):null}))})}}]),n}(o.Component),D=function(e){(0,l.Z)(n,e);var t=(0,c.Z)(n);function n(e,r,o){var i;return(0,a.Z)(this,n),(i=t.call(this,e)).getUrl=function(){return""},i.state=Object.assign({},f({},r,e),o),i}return(0,u.Z)(n,[{key:"render",value:function(){var e=this.state.href;return e?(0,A.jsxs)("a",{href:e,children:[" ",(0,A.jsx)("img",{alt:"",src:this.getUrl()})," "]}):(0,A.jsx)("img",{alt:"",src:this.getUrl()})}}]),n}(o.Component);D.defaultProps={platform:"github",base:"https://img.shields.io"};var b=function(e){(0,l.Z)(n,e);var t=(0,c.Z)(n);function n(e){var r;return(0,a.Z)(this,n),(r=t.call(this,e,{platform:"github",type:"issues"})).getUrl=function(){var e=r.state,t=e.type,n=e.platform,o=e.property,i=e.user,a=e.repo,u=e.label,s=e.base;return"github"!==n?"":"issues"===t||/^issues-(raw|closed|closed-raw|pr|pr-raw|pr-closed|pr-closed-raw)/.test(t)?u&&("issues"===t||/^issues-(raw|pr|pr-raw)/.test(t||""))?[s,n,t,i,a,u].join("/"):[s,n,t,i,a].join("/"):"issueKind"===t?[s,n,t,"detail",o,i,a].join("/"):""},r}return(0,u.Z)(n)}(D),N=function(e){(0,l.Z)(n,e);var t=(0,c.Z)(n);function n(e){var r;return(0,a.Z)(this,n),(r=t.call(this,e,{platform:"github",type:"languages"})).getUrl=function(){var e=r.state,t=e.type,n=e.platform,o=e.user,i=e.repo,a=e.base,u=e.path;return"github"!==n?"":"languages"===t?[a,n,t,"code-size",o,i].join("/"):"repo-size"===t?[a,n,t,o,i].join("/"):"size"===t&&u?[a,n,t,o,i,u].join("/"):""},r}return(0,u.Z)(n)}(D),S=function(e){(0,l.Z)(n,e);var t=(0,c.Z)(n);function n(e){var r;return(0,a.Z)(this,n),(r=t.call(this,e,{platform:"github",type:"downloads",total:!0})).getUrl=function(){var e=r.state,t=e.type,n=e.platform,o=e.user,i=e.repo,a=e.base,u=e.tag,s=e.path,l=e.total;if("github"!==n||!/^(downloads|downloads-pre)/.test(t||""))return"";var c=[a,n,t,o,i];return s&&u?[].concat(c,[u,s]).join("/"):l&&u||"downloads-pre"===t&&u&&l?[].concat(c,[u,"total"]).join("/"):l?[].concat(c,["total"]).join("/"):""},r}return(0,u.Z)(n)}(D),O=function(e){(0,l.Z)(n,e);var t=(0,c.Z)(n);function n(e){var r;return(0,a.Z)(this,n),(r=t.call(this,e,{platform:"github",type:"commits-since"})).getUrl=function(){var e=r.state,t=e.type,n=e.platform,o=e.user,i=e.repo,a=e.base,u=e.interval,s=e.variant,l=e.version,c=e.branch;return"github"!==n?"":"commits-since"===t&&l&&c?[a,n,t,o,i,l,c].join("/"):"commit-activity"===t&&u?[a,n,t,u,o,i].join("/"):"variant"===t&&s?[a,n,t,s,o,i].join("/"):"last-commit"===t?c?[a,n,t,o,i,c].join("/"):[a,n,t,o,i].join("/"):t&&/^release-(date|date-pre)/.test(t)?[a,n,t,o,i].join("/"):""},r}return(0,u.Z)(n)}(D),F=function(e){(0,l.Z)(n,e);var t=(0,c.Z)(n);function n(e){var r;return(0,a.Z)(this,n),(r=t.call(this,e,{platform:"github",type:"version-release"})).getUrl=function(){var e=r.state,t=e.type,n=e.platform,o=e.user,i=e.repo,a=e.base,u=e.branch,s="";switch(t){case"version-release":s="v/release";break;case"version-tag":s="v/tag";break;case"package-json":s="package-json/v";break;case"manifest-json":s="manifest-json/v";break;case"go-mod":s="go-mod/go-version"}if("github"!==n||!s)return"";var l=[a,n,s,o,i];return/(go-mod|manifest-json|package-json)/.test(t||"")&&u?[].concat(l,[u]).join("/"):l.join("/")},r}return(0,u.Z)(n)}(D),x=function(e){(0,l.Z)(n,e);var 
t=(0,c.Z)(n);function n(e){var r;return(0,a.Z)(this,n),(r=t.call(this,e,{platform:"github",type:"license"})).getUrl=function(){var e=r.state,t=e.type,n=e.platform,o=e.user,i=e.repo,a=e.base;return"github"!==n?"":[a,n,t,o,i].join("/")},r}return(0,u.Z)(n)}(D),I=function(e){(0,l.Z)(n,e);var t=(0,c.Z)(n);function n(e){var r;return(0,a.Z)(this,n),(r=t.call(this,e,{platform:"github",type:"languages-count"})).getUrl=function(){var e=r.state,t=e.base,n=e.platform,o=e.type,i=e.user,a=e.repo,u=e.query,s="";switch(o){case"languages-count":s="languages/count";break;case"languages-top":s="languages/top";break;case"search":s="search"}if("github"!==n||!s)return"";var l=[t,n,s,i,a];return u?[].concat(l,[u]).join("/"):l.join("/")},r}return(0,u.Z)(n)}(D),R=function(e){(0,l.Z)(n,e);var t=(0,c.Z)(n);function n(e){var r;return(0,a.Z)(this,n),(r=t.call(this,e,{platform:"github",type:"followers"})).getUrl=function(){var e=r.state,t=e.type,n=e.platform,o=e.user,i=e.repo,a=e.base;return"github"!==n?"":"followers"===t?[a,n,t,o].join("/"):[a,n,t,o,i].join("/")},r}return(0,u.Z)(n)}(D),L=function(e){(0,l.Z)(n,e);var t=(0,c.Z)(n);function n(){return(0,a.Z)(this,n),t.apply(this,arguments)}return(0,u.Z)(n)}(_);L.Issues=b,L.Size=N,L.Activity=O,L.Downloads=S,L.Version=F,L.License=x,L.Analysis=I,L.Social=R;var M=L,w=function(e){(0,l.Z)(n,e);var t=(0,c.Z)(n);function n(e){var r;return(0,a.Z)(this,n),(r=t.call(this,e,{type:"github"},{platform:"coveralls"})).getUrl=function(){var e=r.state,t=e.type,n=e.platform,o=e.user,i=e.repo,a=e.base;return"coveralls"!==n?"":[a,n,t,o,i].join("/")},r}return(0,u.Z)(n)}(D),P=function(e){(0,l.Z)(n,e);var t=(0,c.Z)(n);function n(e){var r;return(0,a.Z)(this,n),(r=t.call(this,e,{},{platform:"coveralls"})).getUrl=function(){var e=r.state,t=e.base,n=e.platform,o=e.projectId,i=e.branch;return"coveralls"!==n?"":i?[t,n,o,i].join("/"):[t,"codacy/coverage",o].join("/")},r}return(0,u.Z)(n)}(D),B=function(e){(0,l.Z)(n,e);var t=(0,c.Z)(n);function n(){return(0,a.Z)(this,n),t.apply(this,arguments)}return(0,u.Z)(n)}(_);B.Coverages=w,B.Codacy=P;var H=function(e){(0,l.Z)(n,e);var t=(0,c.Z)(n);function n(e){var r;return(0,a.Z)(this,n),(r=t.call(this,e,{type:"version"},{platform:"npm"})).getUrl=function(){var e=r.state,t=e.base,n=e.platform,o=e.type,i=e.scope,a=e.packageName,u=e.dependency,s=e.dependencyScope,l=e.version,c=e.registryUri;if("npm"!==n||!a)return"";var f="",p=[t,n];return"version"===o&&a?(p.push("v"),i&&p.push(i),p.push(a),l&&p.push(l),f=p.join("/"),c&&(f=f+"?registry_uri="+c)):"peer-dependency"===o&&a&&u?(p.push("dependency-version"),i&&p.push(i),f=[].concat(p,[a,"peer",u]).join("/")):"dev-dependency"===o&&a&&u?(p.push("dependency-version"),i&&p.push(i),p.concat([a,"dev"]),s&&p.push(s),p.push(u),f=p.join("/")):a&&u&&(p.concat([a,u]),f=p.join("/")),f},r}return(0,u.Z)(n)}(D),U=function(e){(0,l.Z)(n,e);var t=(0,c.Z)(n);function n(e){var r;return(0,a.Z)(this,n),(r=t.call(this,e,{format:"min"},{platform:"npm"})).getUrl=function(){var e=r.state,t=e.base,n=e.platform,o=e.format,i=e.packageName,a=e.scope,u=e.version;if("npm"!==n)return"";var s=[t,"bundlephobia"],l="";return i&&o&&(l=[].concat(s,[o,i]).join("/")),i&&o&&a&&(l=[].concat(s,[o,a,i]).join("/")),i&&o&&a&&u&&(l=[].concat(s,[o,a,i,u]).join("/")),i&&o&&u&&(l=[].concat(s,[o,i,u]).join("/")),l},r}return(0,u.Z)(n)}(D),j=function(e){(0,l.Z)(n,e);var t=(0,c.Z)(n);function n(){return(0,a.Z)(this,n),t.apply(this,arguments)}return(0,u.Z)(n)}(_);j.Version=H,j.Size=U;function K(e,t,n){return t in 
e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function G(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function z(e){for(var t=1;te.length){for(;i--;)if(47===e.charCodeAt(i)){if(n){r=i+1;break}}else o<0&&(n=!0,o=i+1);return o<0?"":e.slice(r,o)}if(t===e)return"";var a=-1,u=t.length-1;for(;i--;)if(47===e.charCodeAt(i)){if(n){r=i+1;break}}else a<0&&(n=!0,a=i+1),u>-1&&(e.charCodeAt(i)===t.charCodeAt(u--)?u<0&&(o=i):(u=-1,o=a));r===o?o=a:o<0&&(o=e.length);return e.slice(r,o)},dirname:function(e){if(J(e),0===e.length)return".";var t,n=-1,r=e.length;for(;--r;)if(47===e.charCodeAt(r)){if(t){n=r;break}}else t||(t=!0);return n<0?47===e.charCodeAt(0)?"/":".":1===n&&47===e.charCodeAt(0)?"//":e.slice(0,n)},extname:function(e){J(e);var t,n=e.length,r=-1,o=0,i=-1,a=0;for(;n--;){var u=e.charCodeAt(n);if(47!==u)r<0&&(t=!0,r=n+1),46===u?i<0?i=n:1!==a&&(a=1):i>-1&&(a=-1);else if(t){o=n+1;break}}if(i<0||r<0||0===a||1===a&&i===r-1&&i===o+1)return"";return e.slice(i,r)},join:function(){for(var e,t=-1,n=arguments.length,r=new Array(n),o=0;o2){if((r=o.lastIndexOf("/"))!==o.length-1){r<0?(o="",i=0):i=(o=o.slice(0,r)).length-1-o.lastIndexOf("/"),a=s,u=0;continue}}else if(o.length>0){o="",i=0,a=s,u=0;continue}t&&(o=o.length>0?o+"/..":"..",i=2)}else o.length>0?o+="/"+e.slice(a+1,s):o=e.slice(a+1,s),i=s-a-1;a=s,u=0}else 46===n&&u>-1?u++:u=-1}return o}(e,!t);return 0!==n.length||t||(n="."),n.length>0&&47===e.charCodeAt(e.length-1)&&(n+="/"),t?"/"+n:n}function J(e){if("string"!==typeof e)throw new TypeError("Path must be a string. Received "+JSON.stringify(e))}var ee={cwd:function(){return"/"}};function te(e){return null!==e&&"object"===typeof e&&e.href&&e.origin}function ne(e){if("string"===typeof e)e=new URL(e);else if(!te(e)){var t=new TypeError('The "path" argument must be of type string or an instance of URL. 
Received `'+e+"`");throw t.code="ERR_INVALID_ARG_TYPE",t}if("file:"!==e.protocol){var n=new TypeError("The URL must be of scheme file");throw n.code="ERR_INVALID_URL_SCHEME",n}return function(e){if(""!==e.hostname){var t=new TypeError('File URL host must be "localhost" or empty on darwin');throw t.code="ERR_INVALID_FILE_URL_HOST",t}var n=e.pathname,r=-1;for(;++ri.length;s&&i.push(r);try{u=e.apply(this,i)}catch(c){var l=c;if(s&&n)throw l;return r(l)}s||(u instanceof Promise?u.then(o,r):u instanceof Error?r(u):o(u))};function r(e){if(!n){n=!0;for(var r=arguments.length,o=new Array(r>1?r-1:0),i=1;i1?s-1:0),c=1;c1?n-1:0),u=1;ui?0:i+t:t>i?i:t,n=n>0?n:0,r.length<1e4)(o=Array.from(r)).unshift(t,n),[].splice.apply(e,o);else for(n&&[].splice.apply(e,[t,n]);a0?(Ne(e,e.length,0,t),e):t}var Oe={}.hasOwnProperty;function Fe(e){for(var t={},n=-1;++nr))return;for(var l,c,f=o.events.length,p=f;p--;)if("exit"===o.events[p][0]&&"chunkFlow"===o.events[p][1].type){if(l){c=o.events[p][1].end;break}l=!0}for(T(a),s=f;st;){var r=i[n];o.containerState=r[1],r[0].exit.call(o,e)}i.length=t}function g(){t.write([null]),n=void 0,t=void 0,o.containerState._closeFlow=void 0}}},Ve={tokenize:function(e,t,n){return Ze(e,e.attempt(this.parser.constructs.document,t,n),"linePrefix",this.parser.constructs.disable.null.includes("codeIndented")?void 0:4)}};var qe={tokenize:function(e,t,n){return Ze(e,(function(e){return null===e||je(e)?t(e):n(e)}),"linePrefix")},partial:!0};function Xe(e){for(var t,n,r,o,i,a,u,s={},l=-1;++l=4?t(o):e.interrupt(r.parser.constructs.flow,n,t)(o)}},partial:!0};var tt={tokenize:function(e){var t=this,n=e.attempt(qe,(function(r){if(null===r)return void e.consume(r);return e.enter("lineEndingBlank"),e.consume(r),e.exit("lineEndingBlank"),t.currentConstruct=void 0,n}),e.attempt(this.parser.constructs.flowInitial,r,Ze(e,e.attempt(this.parser.constructs.flow,r,e.attempt(Je,r)),"linePrefix")));return n;function r(r){if(null!==r)return e.enter("lineEnding"),e.consume(r),e.exit("lineEnding"),t.currentConstruct=void 0,n;e.consume(r)}}};var nt={resolveAll:at()},rt=it("string"),ot=it("text");function it(e){return{tokenize:function(t){var n=this,r=this.parser.constructs[e],o=t.attempt(r,i,a);return i;function i(e){return s(e)?o(e):a(e)}function a(e){if(null!==e)return t.enter("data"),t.consume(e),u;t.consume(e)}function u(e){return s(e)?(t.exit("data"),o(e)):(t.consume(e),u)}function s(e){if(null===e)return!0;var t=r[e],o=-1;if(t)for(;++o-1&&(n[0]=n[0].slice(o)),a>0&&n.push(e[i].slice(0,a)));return n}(a,e)}function p(){return Object.assign({},r)}function d(){for(var e;r._indexl?n(o):(e.consume(o),d):41===o?c--?(e.consume(o),d):(e.exit("chunkString"),e.exit(u),e.exit(a),e.exit(r),t(o)):null===o||Ue(o)?c?n(o):(e.exit("chunkString"),e.exit(u),e.exit(a),e.exit(r),t(o)):He(o)?n(o):(e.consume(o),92===o?m:d)}function m(t){return 40===t||41===t||92===t?(e.consume(t),d):d(t)}}function Et(e,t,n,r,o,i){var a,u=this,s=0;return function(t){return e.enter(r),e.enter(o),e.consume(t),e.exit(o),e.enter(i),l};function l(f){return null===f||91===f||93===f&&!a||94===f&&!s&&"_hiddenFootnoteSupport"in u.parser.constructs||s>999?n(f):93===f?(e.exit(i),e.enter(o),e.consume(f),e.exit(o),e.exit(r),t):je(f)?(e.enter("lineEnding"),e.consume(f),e.exit("lineEnding"),l):(e.enter("chunkString",{contentType:"string"}),c(f))}function c(t){return null===t||91===t||93===t||je(t)||s++>999?(e.exit("chunkString"),l(t)):(e.consume(t),a=a||!Ke(t),92===t?f:c)}function f(t){return 91===t||92===t||93===t?(e.consume(t),s++,c):c(t)}}function 
Tt(e,t,n,r,o,i){var a;return function(t){return e.enter(r),e.enter(o),e.consume(t),e.exit(o),a=40===t?41:t,u};function u(n){return n===a?(e.enter(o),e.consume(n),e.exit(o),e.exit(r),t):(e.enter(i),s(n))}function s(t){return t===a?(e.exit(i),u(a)):null===t?n(t):je(t)?(e.enter("lineEnding"),e.consume(t),e.exit("lineEnding"),Ze(e,s,"linePrefix")):(e.enter("chunkString",{contentType:"string"}),l(t))}function l(t){return t===a||null===t||je(t)?(e.exit("chunkString"),s(t)):(e.consume(t),92===t?c:l)}function c(t){return t===a||92===t?(e.consume(t),l):l(t)}}function gt(e,t){var n;return function r(o){if(je(o))return e.enter("lineEnding"),e.consume(o),e.exit("lineEnding"),n=!0,r;if(Ke(o))return Ze(e,r,n?"linePrefix":"lineSuffix")(o);return t(o)}}function vt(e){return e.replace(/[\t\n\r ]+/g," ").replace(/^ | $/g,"").toLowerCase().toUpperCase()}var yt={name:"definition",tokenize:function(e,t,n){var r,o=this;return function(t){return e.enter("definition"),Et.call(o,e,i,n,"definitionLabel","definitionLabelMarker","definitionLabelString")(t)};function i(t){return r=vt(o.sliceSerialize(o.events[o.events.length-1][1]).slice(1,-1)),58===t?(e.enter("definitionMarker"),e.consume(t),e.exit("definitionMarker"),gt(e,mt(e,e.attempt(At,Ze(e,a,"whitespace"),Ze(e,a,"whitespace")),n,"definitionDestination","definitionDestinationLiteral","definitionDestinationLiteralMarker","definitionDestinationRaw","definitionDestinationString"))):n(t)}function a(i){return null===i||je(i)?(e.exit("definition"),o.parser.defined.includes(r)||o.parser.defined.push(r),t(i)):n(i)}}},At={tokenize:function(e,t,n){return function(t){return Ue(t)?gt(e,r)(t):n(t)};function r(t){return 34===t||39===t||40===t?Tt(e,Ze(e,o,"whitespace"),n,"definitionTitle","definitionTitleMarker","definitionTitleString")(t):n(t)}function o(e){return null===e||je(e)?t(e):n(e)}},partial:!0};var kt={name:"codeIndented",tokenize:function(e,t,n){var r=this;return function(t){return e.enter("codeIndented"),Ze(e,o,"linePrefix",5)(t)};function o(e){var t=r.events[r.events.length-1];return t&&"linePrefix"===t[1].type&&t[2].sliceSerialize(t[1],!0).length>=4?i(e):n(e)}function i(t){return null===t?u(t):je(t)?e.attempt(Ct,i,u)(t):(e.enter("codeFlowValue"),a(t))}function a(t){return null===t||je(t)?(e.exit("codeFlowValue"),i(t)):(e.consume(t),a)}function u(n){return e.exit("codeIndented"),t(n)}}},Ct={tokenize:function(e,t,n){var r=this;return o;function o(t){return r.parser.lazy[r.now().line]?n(t):je(t)?(e.enter("lineEnding"),e.consume(t),e.exit("lineEnding"),o):Ze(e,i,"linePrefix",5)(t)}function i(e){var i=r.events[r.events.length-1];return i&&"linePrefix"===i[1].type&&i[2].sliceSerialize(i[1],!0).length>=4?t(e):je(e)?o(e):n(e)}},partial:!0};var _t={name:"headingAtx",tokenize:function(e,t,n){var r=this,o=0;return function(t){return e.enter("atxHeading"),e.enter("atxHeadingSequence"),i(t)};function i(u){return 35===u&&o++<6?(e.consume(u),i):null===u||Ue(u)?(e.exit("atxHeadingSequence"),r.interrupt?t(u):a(u)):n(u)}function a(n){return 35===n?(e.enter("atxHeadingSequence"),u(n)):null===n||je(n)?(e.exit("atxHeading"),t(n)):Ke(n)?Ze(e,a,"whitespace")(n):(e.enter("atxHeadingText"),s(n))}function u(t){return 35===t?(e.consume(t),u):(e.exit("atxHeadingSequence"),a(t))}function s(t){return null===t||35===t||Ue(t)?(e.exit("atxHeadingText"),a(t)):(e.consume(t),s)}},resolve:function(e,t){var 
n,r,o=e.length-2,i=3;"whitespace"===e[i][1].type&&(i+=2);o-2>i&&"whitespace"===e[o][1].type&&(o-=2);"atxHeadingSequence"===e[o][1].type&&(i===o-1||o-4>i&&"whitespace"===e[o-2][1].type)&&(o-=i+1===o?2:4);o>i&&(n={type:"atxHeadingText",start:e[i][1].start,end:e[o][1].end},r={type:"chunkText",start:e[i][1].start,end:e[o][1].end,contentType:"text"},Ne(e,i,o-i+1,[["enter",n,t],["enter",r,t],["exit",r,t],["exit",n,t]]));return e}};var Dt={name:"setextUnderline",tokenize:function(e,t,n){var r,o,i=this,a=i.events.length;for(;a--;)if("lineEnding"!==i.events[a][1].type&&"linePrefix"!==i.events[a][1].type&&"content"!==i.events[a][1].type){o="paragraph"===i.events[a][1].type;break}return function(t){if(!i.parser.lazy[i.now().line]&&(i.interrupt||o))return e.enter("setextHeadingLine"),e.enter("setextHeadingLineSequence"),r=t,u(t);return n(t)};function u(t){return t===r?(e.consume(t),u):(e.exit("setextHeadingLineSequence"),Ze(e,s,"lineSuffix")(t))}function s(r){return null===r||je(r)?(e.exit("setextHeadingLine"),t(r)):n(r)}},resolveTo:function(e,t){var n,r,o,i=e.length;for(;i--;)if("enter"===e[i][0]){if("content"===e[i][1].type){n=i;break}"paragraph"===e[i][1].type&&(r=i)}else"content"===e[i][1].type&&e.splice(i,1),o||"definition"!==e[i][1].type||(o=i);var a={type:"setextHeading",start:Object.assign({},e[r][1].start),end:Object.assign({},e[e.length-1][1].end)};e[r][1].type="setextHeadingText",o?(e.splice(r,0,["enter",a,t]),e.splice(o+1,0,["exit",e[n][1],t]),e[n][1].end=Object.assign({},e[o][1].end)):e[n][1]=a;return e.push(["exit",a,t]),e}};var bt=["address","article","aside","base","basefont","blockquote","body","caption","center","col","colgroup","dd","details","dialog","dir","div","dl","dt","fieldset","figcaption","figure","footer","form","frame","frameset","h1","h2","h3","h4","h5","h6","head","header","hr","html","iframe","legend","li","link","main","menu","menuitem","nav","noframes","ol","optgroup","option","p","param","section","source","summary","table","tbody","td","tfoot","th","thead","title","tr","track","ul"],Nt=["pre","script","style","textarea"],St={name:"htmlFlow",tokenize:function(e,t,n){var r,o,i,a,u,s=this;return function(t){return e.enter("htmlFlow"),e.enter("htmlFlowData"),e.consume(t),l};function l(a){return 33===a?(e.consume(a),c):47===a?(e.consume(a),h):63===a?(e.consume(a),r=3,s.interrupt?t:L):Re(a)?(e.consume(a),i=String.fromCharCode(a),o=!0,d):n(a)}function c(o){return 45===o?(e.consume(o),r=2,f):91===o?(e.consume(o),r=5,i="CDATA[",a=0,p):Re(o)?(e.consume(o),r=4,s.interrupt?t:L):n(o)}function f(r){return 45===r?(e.consume(r),s.interrupt?t:L):n(r)}function p(r){return r===i.charCodeAt(a++)?(e.consume(r),a===i.length?s.interrupt?t:b:p):n(r)}function h(t){return Re(t)?(e.consume(t),i=String.fromCharCode(t),d):n(t)}function d(a){return null===a||47===a||62===a||Ue(a)?47!==a&&o&&Nt.includes(i.toLowerCase())?(r=1,s.interrupt?t(a):b(a)):bt.includes(i.toLowerCase())?(r=6,47===a?(e.consume(a),m):s.interrupt?t(a):b(a)):(r=7,s.interrupt&&!s.parser.lazy[s.now().line]?n(a):o?T(a):E(a)):45===a||we(a)?(e.consume(a),i+=String.fromCharCode(a),d):n(a)}function m(r){return 62===r?(e.consume(r),s.interrupt?t:b):n(r)}function E(t){return Ke(t)?(e.consume(t),E):_(t)}function T(t){return 47===t?(e.consume(t),_):58===t||95===t||Re(t)?(e.consume(t),g):Ke(t)?(e.consume(t),T):_(t)}function g(t){return 45===t||46===t||58===t||95===t||we(t)?(e.consume(t),g):v(t)}function v(t){return 61===t?(e.consume(t),y):Ke(t)?(e.consume(t),v):T(t)}function y(t){return 
null===t||60===t||61===t||62===t||96===t?n(t):34===t||39===t?(e.consume(t),u=t,A):Ke(t)?(e.consume(t),y):(u=null,k(t))}function A(t){return null===t||je(t)?n(t):t===u?(e.consume(t),C):(e.consume(t),A)}function k(t){return null===t||34===t||39===t||60===t||61===t||62===t||96===t||Ue(t)?v(t):(e.consume(t),k)}function C(e){return 47===e||62===e||Ke(e)?T(e):n(e)}function _(t){return 62===t?(e.consume(t),D):n(t)}function D(t){return Ke(t)?(e.consume(t),D):null===t||je(t)?b(t):n(t)}function b(t){return 45===t&&2===r?(e.consume(t),F):60===t&&1===r?(e.consume(t),x):62===t&&4===r?(e.consume(t),M):63===t&&3===r?(e.consume(t),L):93===t&&5===r?(e.consume(t),R):!je(t)||6!==r&&7!==r?null===t||je(t)?N(t):(e.consume(t),b):e.check(Ot,M,N)(t)}function N(t){return e.exit("htmlFlowData"),S(t)}function S(t){return null===t?w(t):je(t)?e.attempt({tokenize:O,partial:!0},S,w)(t):(e.enter("htmlFlowData"),b(t))}function O(e,t,n){return function(t){return e.enter("lineEnding"),e.consume(t),e.exit("lineEnding"),r};function r(e){return s.parser.lazy[s.now().line]?n(e):t(e)}}function F(t){return 45===t?(e.consume(t),L):b(t)}function x(t){return 47===t?(e.consume(t),i="",I):b(t)}function I(t){return 62===t&&Nt.includes(i.toLowerCase())?(e.consume(t),M):Re(t)&&i.length<8?(e.consume(t),i+=String.fromCharCode(t),I):b(t)}function R(t){return 93===t?(e.consume(t),L):b(t)}function L(t){return 62===t?(e.consume(t),M):45===t&&2===r?(e.consume(t),L):b(t)}function M(t){return null===t||je(t)?(e.exit("htmlFlowData"),w(t)):(e.consume(t),M)}function w(n){return e.exit("htmlFlow"),t(n)}},resolveTo:function(e){var t=e.length;for(;t--&&("enter"!==e[t][0]||"htmlFlow"!==e[t][1].type););t>1&&"linePrefix"===e[t-2][1].type&&(e[t][1].start=e[t-2][1].start,e[t+1][1].start=e[t-2][1].start,e.splice(t-2,2));return e},concrete:!0},Ot={tokenize:function(e,t,n){return function(r){return e.exit("htmlFlowData"),e.enter("lineEndingBlank"),e.consume(r),e.exit("lineEndingBlank"),e.attempt(qe,t,n)}},partial:!0};var Ft={name:"codeFenced",tokenize:function(e,t,n){var r,o=this,i={tokenize:function(e,t,n){var o=0;return Ze(e,i,"linePrefix",this.parser.constructs.disable.null.includes("codeIndented")?void 0:4);function i(t){return e.enter("codeFencedFence"),e.enter("codeFencedFenceSequence"),a(t)}function a(t){return t===r?(e.consume(t),o++,a):o1&&e[c][1].end.offset-e[c][1].start.offset>1?2:1;var f=Object.assign({},e[n][1].end),p=Object.assign({},e[c][1].start);Kt(f,-u),Kt(p,u),i={type:u>1?"strongSequence":"emphasisSequence",start:f,end:Object.assign({},e[n][1].end)},a={type:u>1?"strongSequence":"emphasisSequence",start:Object.assign({},e[c][1].start),end:p},o={type:u>1?"strongText":"emphasisText",start:Object.assign({},e[n][1].end),end:Object.assign({},e[c][1].start)},r={type:u>1?"strong":"emphasis",start:Object.assign({},i.start),end:Object.assign({},a.end)},e[n][1].end=Object.assign({},i.start),e[c][1].start=Object.assign({},a.end),s=[],e[n][1].end.offset-e[n][1].start.offset&&(s=Se(s,[["enter",e[n][1],t],["exit",e[n][1],t]])),s=Se(s,[["enter",r,t],["enter",i,t],["exit",i,t],["enter",o,t]]),s=Se(s,st(t.parser.constructs.insideSpan.null,e.slice(n+1,c),t)),s=Se(s,[["exit",o,t],["enter",a,t],["exit",a,t],["exit",r,t]]),e[c][1].end.offset-e[c][1].start.offset?(l=2,s=Se(s,[["enter",e[c][1],t],["exit",e[c][1],t]])):l=0,Ne(e,n-1,c-n+3,s),c=n+s.length-l-2;break}c=-1;for(;++c13&&n<32||n>126&&n<160||n>55295&&n<57344||n>64975&&n<65008||65535===(65535&n)||65534===(65535&n)||n>1114111?"\ufffd":String.fromCharCode(n)}var 
fn=/\\([!-/:-@[-`{-~])|&(#(?:\d{1,7}|x[\da-f]{1,6})|[\da-z]{1,31});/gi;function pn(e){return e.replace(fn,hn)}function hn(e,t,n){if(t)return t;if(35===n.charCodeAt(0)){var r=n.charCodeAt(1),o=120===r||88===r;return cn(n.slice(o?2:1),o?16:10)}return(0,xt.T)(n)||e}var dn={}.hasOwnProperty,mn=function(e,t,n){return"string"!==typeof t&&(n=t,t=void 0),function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=En({transforms:[],canContainEols:["emphasis","fragment","heading","paragraph","strong"],enter:{autolink:s(oe),autolinkProtocol:S,autolinkEmail:S,atxHeading:s(ee),blockQuote:s(V),characterEscape:S,characterReference:S,codeFenced:s(q),codeFencedFenceInfo:l,codeFencedFenceMeta:l,codeIndented:s(q,l),codeText:s(X,l),codeTextData:S,data:S,codeFlowValue:S,definition:s($),definitionDestinationString:l,definitionLabelString:l,definitionTitleString:l,emphasis:s(J),hardBreakEscape:s(te),hardBreakTrailing:s(te),htmlFlow:s(ne,l),htmlFlowData:S,htmlText:s(ne,l),htmlTextData:S,image:s(re),label:l,link:s(oe),listItem:s(ae),listItemValue:m,listOrdered:s(ie,d),listUnordered:s(ie),paragraph:s(ue),reference:K,referenceString:l,resourceDestinationString:l,resourceTitleString:l,setextHeading:s(ee),strong:s(se),thematicBreak:s(ce)},exit:{atxHeading:f(),atxHeadingSequence:_,autolink:f(),autolinkEmail:Q,autolinkProtocol:W,blockQuote:f(),characterEscapeValue:O,characterReferenceMarkerHexadecimal:z,characterReferenceMarkerNumeric:z,characterReferenceValue:Y,codeFenced:f(v),codeFencedFence:g,codeFencedFenceInfo:E,codeFencedFenceMeta:T,codeFlowValue:O,codeIndented:f(y),codeText:f(L),codeTextData:O,data:O,definition:f(),definitionDestinationString:C,definitionLabelString:A,definitionTitleString:k,emphasis:f(),hardBreakEscape:f(x),hardBreakTrailing:f(x),htmlFlow:f(I),htmlFlowData:O,htmlText:f(R),htmlTextData:O,image:f(w),label:B,labelText:P,lineEnding:F,link:f(M),listItem:f(),listOrdered:f(),listUnordered:f(),paragraph:f(),referenceString:G,resourceDestinationString:H,resourceTitleString:U,resource:j,setextHeading:f(N),setextHeadingLineSequence:b,setextHeadingText:D,strong:f(),thematicBreak:f()}},e.mdastExtensions||[]),n={};return r;function r(e){for(var n={type:"root",children:[]},r=[],s=[],f={stack:[n],tokenStack:r,config:t,enter:c,exit:p,buffer:l,resume:h,setData:i,getData:a},d=-1;++d0){var E=r[r.length-1];(E[1]||gn).call(f,void 0,E[0])}for(n.position={start:u(e.length>0?e[0][1].start:{line:1,column:1,offset:0}),end:u(e.length>0?e[e.length-2][1].end:{line:1,column:1,offset:0})},d=-1;++d0&&void 0!==arguments[0]?arguments[0]:{}).extensions||[])),content:t(We),document:t(Qe),flow:t(tt),string:t(rt),text:t(ot)};return e;function t(t){return function(n){return lt(e,t,n)}}}(n).document().write(function(){var e,t=1,n="",r=!0;return function(o,i,a){var u,s,l,c,f,p=[];for(o=n+o.toString(i),l=0,n="",r&&(65279===o.charCodeAt(0)&&l++,r=void 0);l":"")+")"}));return f;function f(){var l,c,f,p=[];if((!t||o(a,u,s[s.length-1]||null))&&(p=function(e){if(Array.isArray(e))return e;if("number"===typeof e)return[true,e];return[e]}(n(a,s)),p[0]===bn))return p;if(a.children&&p[0]!==Dn)for(c=(r?a.children.length:-1)+i,f=s.concat(a);c>-1&&c-1?n.offset:null}}}var In=function(e,t,n,r){"function"===typeof t&&"function"!==typeof n&&(r=n,n=t,t=null);var o=An(t),i=r?-1:1;!function e(a,u,s){var l,c="object"===typeof a&&null!==a?a:{};"string"===typeof c.type&&(l="string"===typeof c.tagName?c.tagName:"string"===typeof c.name?c.name:void 0,Object.defineProperty(f,"name",{value:"node ("+c.type+(l?"<"+l+">":"")+")"}));return f;function 
f(){var l,c,f,p=[];if((!t||o(a,u,s[s.length-1]||null))&&(p=function(e){if(Array.isArray(e))return e;if("number"===typeof e)return[true,e];return[e]}(n(a,s)),false===p[0]))return p;if(a.children&&"skip"!==p[0])for(c=(r?a.children.length:-1)+i,f=s.concat(a);c>-1&&c":"gt"};function Kn(e,t){var n=function(e){return e.replace(/["&<>]/g,(function(e){return"&"+jn[e]+";"}))}(function(e){var t=[],n=-1,r=0,o=0;for(;++n55295&&i<57344){var u=e.charCodeAt(n+1);i<56320&&u>56319&&u<57344?(a=String.fromCharCode(i,u),o=1):a="\ufffd"}else a=String.fromCharCode(i);a&&(t.push(e.slice(r,n),encodeURIComponent(a)),r=n+o+1,a=""),o&&(n+=o,o=0)}return t.join("")+e.slice(r)}(e||""));if(!t)return n;var r=n.indexOf(":"),o=n.indexOf("?"),i=n.indexOf("#"),a=n.indexOf("/");return r<0||a>-1&&r>a||o>-1&&r>o||i>-1&&r>i||t.test(n.slice(0,r))?n:""}function Gn(e,t){var n=[],r=-1;for(t&&n.push(yn("text","\n"));++r0&&n.push(yn("text","\n")),n}function zn(e,t){var n,r=String(t.identifier),o=Kn(r.toLowerCase()),i=e.footnoteOrder.indexOf(r);-1===i?(e.footnoteOrder.push(r),e.footnoteCounts[r]=1,n=e.footnoteOrder.length):(e.footnoteCounts[r]++,n=i+1);var a=e.footnoteCounts[r];return e(t,"sup",[e(t.position,"a",{href:"#"+e.clobberPrefix+"fn-"+o,id:e.clobberPrefix+"fnref-"+o+(a>1?"-"+a:""),dataFootnoteRef:!0,ariaDescribedBy:"footnote-label"},[yn("text",String(n))])])}var Yn=n(9930);function Zn(e,t){var n=t.referenceType,r="]";if("collapsed"===n?r+="[]":"full"===n&&(r+="["+(t.label||t.identifier)+"]"),"imageReference"===t.type)return yn("text","!["+t.alt+r);var o=Un(e,t),i=o[0];i&&"text"===i.type?i.value="["+i.value:o.unshift(yn("text","["));var a=o[o.length-1];return a&&"text"===a.type?a.value+=r:o.push(yn("text",r)),o}function Wn(e){var t=e.spread;return void 0===t||null===t?e.children.length>1:t}var Qn={blockquote:function(e,t){return e(t,"blockquote",Gn(Un(e,t),!0))},break:function(e,t){return[e(t,"br"),yn("text","\n")]},code:function(e,t){var n=t.value?t.value+"\n":"",r=t.lang&&t.lang.match(/^[^ \t]+(?=[ \t]|$)/),o={};r&&(o.className=["language-"+r]);var i=e(t,"code",o,[yn("text",n)]);return t.meta&&(i.data={meta:t.meta}),e(t.position,"pre",[i])},delete:function(e,t){return e(t,"del",Un(e,t))},emphasis:function(e,t){return e(t,"em",Un(e,t))},footnoteReference:zn,footnote:function(e,t){for(var n=e.footnoteById,r=1;r in n;)r++;var o=String(r);return n[o]={type:"footnoteDefinition",identifier:o,children:[{type:"paragraph",children:t.children}],position:t.position},zn(e,{type:"footnoteReference",identifier:o,position:t.position})},heading:function(e,t){return e(t,"h"+t.depth,Un(e,t))},html:function(e,t){return e.dangerous?e.augment(t,yn("raw",t.value)):null},imageReference:function(e,t){var n=e.definition(t.identifier);if(!n)return Zn(e,t);var r={src:Yn(n.url||""),alt:t.alt};return null!==n.title&&void 0!==n.title&&(r.title=n.title),e(t,"img",r)},image:function(e,t){var n={src:Yn(t.url),alt:t.alt};return null!==t.title&&void 0!==t.title&&(n.title=t.title),e(t,"img",n)},inlineCode:function(e,t){return e(t,"code",[yn("text",t.value.replace(/\r?\n|\r/g," "))])},linkReference:function(e,t){var n=e.definition(t.identifier);if(!n)return Zn(e,t);var r={href:Yn(n.url||"")};return null!==n.title&&void 0!==n.title&&(r.title=n.title),e(t,"a",r,Un(e,t))},link:function(e,t){var n={href:Yn(t.url)};return null!==t.title&&void 0!==t.title&&(n.title=t.title),e(t,"a",n,Un(e,t))},listItem:function(e,t,n){var r,o=Un(e,t),i=n?function(e){var t=e.spread,n=e.children,r=-1;for(;!t&&++r0&&r.children.unshift(yn("text"," 
")),r.children.unshift(e(null,"input",{type:"checkbox",checked:t.checked,disabled:!0})),a.className=["task-list-item"]);for(var s=-1;++s1?"-"+u:""),dataFootnoteBackref:!0,className:["data-footnote-backref"],ariaLabel:e.footnoteBackLabel},children:[{type:"text",value:"\u21a9"}]};u>1&&l.children.push({type:"element",tagName:"sup",children:[{type:"text",value:String(u)}]}),s.length>0&&s.push({type:"text",value:" "}),s.push(l)}var c=o[o.length-1];if(c&&"element"===c.type&&"p"===c.tagName){var f,p=c.children[c.children.length-1];p&&"text"===p.type?p.value+=" ":c.children.push({type:"text",value:" "}),(f=c.children).push.apply(f,s)}else o.push.apply(o,s);var h={type:"element",tagName:"li",properties:{id:e.clobberPrefix+"fn-"+a},children:Gn(o,!0)};r.position&&(h.position=r.position),n.push(h)}}return 0===n.length?null:{type:"element",tagName:"section",properties:{dataFootnotes:!0,className:["footnotes"]},children:[{type:"element",tagName:"h2",properties:{id:"footnote-label",className:["sr-only"]},children:[yn("text",e.footnoteLabel)]},{type:"text",value:"\n"},{type:"element",tagName:"ol",properties:{},children:Gn(n,!0)},{type:"text",value:"\n"}]}}(n);return o&&r.children.push(yn("text","\n"),o),Array.isArray(r)?{type:"root",children:r}:r}var $n=function(e,t){return e&&"run"in e?function(e,t){return function(n,r,o){e.run(Xn(n,t),r,(function(e){o(e)}))}}(e,t):function(e){return function(t){return Xn(t,e)}}(e||t)};var Jn=n(4808),er=n.n(Jn),tr=n(2925);function nr(e){if(e.allowedElements&&e.disallowedElements)throw new TypeError("Only one of `allowedElements` and `disallowedElements` should be defined");if(e.allowedElements||e.disallowedElements||e.allowElement)return function(t){Sn(t,"element",(function(t,n,r){var o,i=r;if(e.allowedElements?o=!e.allowedElements.includes(t.tagName):e.disallowedElements&&(o=e.disallowedElements.includes(t.tagName)),!o&&e.allowElement&&"number"===typeof n&&(o=!e.allowElement(t,n,i)),o&&"number"===typeof n){var a;if(e.unwrapDisallowed&&t.children)(a=i.children).splice.apply(a,[n,1].concat((0,h.Z)(t.children)));else i.children.splice(n,1);return n}}))}}var rr=["http","https","mailto","tel"];var or=n(8684);function ir(e){var t=e&&"object"===typeof e&&"text"===e.type?e.value||"":e;return"string"===typeof t&&""===t.replace(/[ \t\n\f\r]/g,"")}var ar=n(316),ur={classId:"classID",dataType:"datatype",itemId:"itemID",strokeDashArray:"strokeDasharray",strokeDashOffset:"strokeDashoffset",strokeLineCap:"strokeLinecap",strokeLineJoin:"strokeLinejoin",strokeMiterLimit:"strokeMiterlimit",typeOf:"typeof",xLinkActuate:"xlinkActuate",xLinkArcRole:"xlinkArcrole",xLinkHref:"xlinkHref",xLinkRole:"xlinkRole",xLinkShow:"xlinkShow",xLinkTitle:"xlinkTitle",xLinkType:"xlinkType",xmlnsXLink:"xmlnsXlink"},sr=n(1642),lr=n(6991),cr=n(821),fr={}.hasOwnProperty,pr=new Set(["table","thead","tbody","tfoot","tr"]);function hr(e,t){for(var n,r=[],o=-1;++o0?o.createElement(d,l,f):o.createElement(d,l)}function mr(e,t){for(var n=-1,r=0;++n for more info)")),delete vr[t]}var r=me().use(vn).use(e.remarkPlugins||[]).use($n,z(z({},e.remarkRehypeOptions),{},{allowDangerousHtml:!0})).use(e.rehypePlugins||[]).use(nr,e),i=new oe;"string"===typeof e.children?i.value=e.children:void 0!==e.children&&null!==e.children&&console.warn("[react-markdown] Warning: please pass a string as `children` (not: `".concat(e.children,"`)"));var a=r.runSync(r.parse(i),i);if("root"!==a.type)throw new TypeError("Expected a `root` node");var u=o.createElement(o.Fragment,{},hr({options:e,schema:tr.dy,listDepth:0},a));return 
e.className&&(u=o.createElement("div",{className:e.className},u)),u}yr.defaultProps={transformLinkUri:function(e){var t=(e||"").trim(),n=t.charAt(0);if("#"===n||"/"===n)return t;var r=t.indexOf(":");if(-1===r)return t;for(var o=-1;++oo||-1!==(o=t.indexOf("#"))&&r>o?t:"javascript:void(0)"}},yr.propTypes={children:Jn.string,className:Jn.string,allowElement:Jn.func,allowedElements:Jn.arrayOf(Jn.string),disallowedElements:Jn.arrayOf(Jn.string),unwrapDisallowed:Jn.bool,remarkPlugins:Jn.arrayOf(Jn.oneOfType([Jn.object,Jn.func,Jn.arrayOf(Jn.oneOfType([Jn.object,Jn.func]))])),rehypePlugins:Jn.arrayOf(Jn.oneOfType([Jn.object,Jn.func,Jn.arrayOf(Jn.oneOfType([Jn.object,Jn.func]))])),sourcePos:Jn.bool,rawSourcePos:Jn.bool,skipHtml:Jn.bool,includeElementIndex:Jn.bool,transformLinkUri:Jn.oneOfType([Jn.func,Jn.bool]),linkTarget:Jn.oneOfType([Jn.func,Jn.string]),transformImageUri:Jn.func,components:Jn.object};for(var Ar={tokenize:function(e,t,n){return function(t){return e.consume(t),r};function r(t){return 87===t||119===t?(e.consume(t),o):n(t)}function o(t){return 87===t||119===t?(e.consume(t),i):n(t)}function i(t){return 46===t?(e.consume(t),a):n(t)}function a(e){return null===e||je(e)?n(e):t(e)}},partial:!0},kr={tokenize:function(e,t,n){var r,o;return i;function i(t){return 38===t?e.check(Dr,u,a)(t):46===t||95===t?e.check(_r,u,a)(t):null===t||He(t)||Ge(t)||45!==t&&ze(t)?u(t):(e.consume(t),i)}function a(t){return 46===t?(o=r,r=void 0,e.consume(t),i):(95===t&&(r=!0),e.consume(t),i)}function u(e){return o||r?n(e):t(e)}},partial:!0},Cr={tokenize:function(e,t){var n=0;return r;function r(a){return 38===a?e.check(Dr,t,o)(a):(40===a&&n++,41===a?e.check(_r,i,o)(a):Rr(a)?t(a):Ir(a)?e.check(_r,t,o)(a):(e.consume(a),r))}function o(t){return e.consume(t),r}function i(e){return--n<0?t(e):o(e)}},partial:!0},_r={tokenize:function(e,t,n){return function(t){return e.consume(t),r};function r(o){return Ir(o)?(e.consume(o),r):Rr(o)?t(o):n(o)}},partial:!0},Dr={tokenize:function(e,t,n){return function(t){return e.consume(t),r};function r(t){return Re(t)?(e.consume(t),r):59===t?(e.consume(t),o):n(t)}function o(e){return Rr(e)?t(e):n(e)}},partial:!0},br={tokenize:function(e,t,n){var r=this;return function(t){if(87!==t&&119!==t||!Mr(r.previous)||Br(r.events))return n(t);return e.enter("literalAutolink"),e.enter("literalAutolinkWww"),e.check(Ar,e.attempt(kr,e.attempt(Cr,o),n),n)(t)};function o(n){return e.exit("literalAutolinkWww"),e.exit("literalAutolink"),t(n)}},previous:Mr},Nr={tokenize:function(e,t,n){var r=this;return function(t){if(72!==t&&104!==t||!wr(r.previous)||Br(r.events))return n(t);return e.enter("literalAutolink"),e.enter("literalAutolinkHttp"),e.consume(t),o};function o(t){return 84===t||116===t?(e.consume(t),i):n(t)}function i(t){return 84===t||116===t?(e.consume(t),a):n(t)}function a(t){return 80===t||112===t?(e.consume(t),u):n(t)}function u(t){return 83===t||115===t?(e.consume(t),s):s(t)}function s(t){return 58===t?(e.consume(t),l):n(t)}function l(t){return 47===t?(e.consume(t),c):n(t)}function c(t){return 47===t?(e.consume(t),f):n(t)}function f(t){return null===t||He(t)||Ge(t)||ze(t)?n(t):e.attempt(kr,e.attempt(Cr,p),n)(t)}function p(n){return e.exit("literalAutolinkHttp"),e.exit("literalAutolink"),t(n)}},previous:wr},Sr={tokenize:function(e,t,n){var r,o,i=this;return function(t){if(!Lr(t)||!Pr(i.previous)||Br(i.events))return n(t);return e.enter("literalAutolink"),e.enter("literalAutolinkEmail"),a(t)};function a(t){return Lr(t)?(e.consume(t),a):64===t?(e.consume(t),u):n(t)}function u(t){return 
46===t?e.check(_r,f,s)(t):45===t||95===t?e.check(_r,n,l)(t):we(t)?(!o&&Le(t)&&(o=!0),e.consume(t),u):f(t)}function s(t){return e.consume(t),r=!0,o=void 0,u}function l(t){return e.consume(t),c}function c(t){return 46===t?e.check(_r,n,s)(t):u(t)}function f(i){return r&&!o?(e.exit("literalAutolinkEmail"),e.exit("literalAutolink"),t(i)):n(i)}},previous:Pr},Or={},Fr={text:Or},xr=48;xr<123;)Or[xr]=Sr,58===++xr?xr=65:91===xr&&(xr=97);function Ir(e){return 33===e||34===e||39===e||41===e||42===e||44===e||46===e||58===e||59===e||60===e||63===e||95===e||126===e}function Rr(e){return null===e||60===e||Ue(e)}function Lr(e){return 43===e||45===e||46===e||95===e||we(e)}function Mr(e){return null===e||40===e||42===e||95===e||126===e||Ue(e)}function wr(e){return null===e||!Re(e)}function Pr(e){return 47!==e&&wr(e)}function Br(e){for(var t=e.length,n=!1;t--;){var r=e[t][1];if(("labelLink"===r.type||"labelImage"===r.type)&&!r._balanced){n=!0;break}if(r._gfmAutolinkLiteralWalkedInto){n=!1;break}}return e.length>0&&!n&&(e[e.length-1][1]._gfmAutolinkLiteralWalkedInto=!0),n}Or[43]=Sr,Or[45]=Sr,Or[46]=Sr,Or[95]=Sr,Or[72]=[Sr,Nr],Or[104]=[Sr,Nr],Or[87]=[Sr,br],Or[119]=[Sr,br];var Hr={tokenize:function(e,t,n){var r=this;return Ze(e,(function(e){var o=r.events[r.events.length-1];return o&&"gfmFootnoteDefinitionIndent"===o[1].type&&4===o[2].sliceSerialize(o[1],!0).length?t(e):n(e)}),"gfmFootnoteDefinitionIndent",5)},partial:!0};function Ur(){var e;return{document:K({},91,{tokenize:zr,continuation:{tokenize:Yr},exit:Zr}),text:(e={},K(e,91,{tokenize:Gr}),K(e,93,{add:"after",tokenize:jr,resolveTo:Kr}),e)}}function jr(e,t,n){for(var r,o=this,i=o.events.length,a=o.parser.gfmFootnotes||(o.parser.gfmFootnotes=[]);i--;){var u=o.events[i][1];if("labelImage"===u.type){r=u;break}if("gfmFootnoteCall"===u.type||"labelLink"===u.type||"label"===u.type||"image"===u.type||"link"===u.type)break}return function(i){if(!r||!r._balanced)return n(i);var u=vt(o.sliceSerialize({start:r.end,end:o.now()}));if(94!==u.charCodeAt(0)||!a.includes(u.slice(1)))return n(i);return e.enter("gfmFootnoteCallLabelMarker"),e.consume(i),e.exit("gfmFootnoteCallLabelMarker"),t(i)}}function Kr(e,t){for(var n=e.length;n--;)if("labelImage"===e[n][1].type&&"enter"===e[n][0]){e[n][1];break}e[n+1][1].type="data",e[n+3][1].type="gfmFootnoteCallLabelMarker";var r={type:"gfmFootnoteCall",start:Object.assign({},e[n+3][1].start),end:Object.assign({},e[e.length-1][1].end)},o={type:"gfmFootnoteCallMarker",start:Object.assign({},e[n+3][1].end),end:Object.assign({},e[n+3][1].end)};o.end.column++,o.end.offset++,o.end._bufferIndex++;var i={type:"gfmFootnoteCallString",start:Object.assign({},o.end),end:Object.assign({},e[e.length-1][1].start)},a={type:"chunkString",contentType:"string",start:Object.assign({},i.start),end:Object.assign({},i.end)},u=[e[n+1],e[n+2],["enter",r,t],e[n+3],e[n+4],["enter",o,t],["exit",o,t],["enter",i,t],["enter",a,t],["exit",a,t],["exit",i,t],e[e.length-2],e[e.length-1],["exit",r,t]];return e.splice.apply(e,[n,e.length-n+1].concat(u)),e}function Gr(e,t,n){var r,o=this,i=o.parser.gfmFootnotes||(o.parser.gfmFootnotes=[]),a=0;return function(t){return e.enter("gfmFootnoteCall"),e.enter("gfmFootnoteCallLabelMarker"),e.consume(t),e.exit("gfmFootnoteCallLabelMarker"),u};function u(t){return 94!==t?n(t):(e.enter("gfmFootnoteCallMarker"),e.consume(t),e.exit("gfmFootnoteCallMarker"),e.enter("gfmFootnoteCallString"),e.enter("chunkString").contentType="string",s)}function s(u){var c;return 
null===u||91===u||a++>999?n(u):93===u?r?(e.exit("chunkString"),c=e.exit("gfmFootnoteCallString"),i.includes(vt(o.sliceSerialize(c)))?function(n){return e.enter("gfmFootnoteCallLabelMarker"),e.consume(n),e.exit("gfmFootnoteCallLabelMarker"),e.exit("gfmFootnoteCall"),t}(u):n(u)):n(u):(e.consume(u),Ue(u)||(r=!0),92===u?l:s)}function l(t){return 91===t||92===t||93===t?(e.consume(t),a++,s):s(t)}}function zr(e,t,n){var r,o,i=this,a=i.parser.gfmFootnotes||(i.parser.gfmFootnotes=[]),u=0;return function(t){return e.enter("gfmFootnoteDefinition")._container=!0,e.enter("gfmFootnoteDefinitionLabel"),e.enter("gfmFootnoteDefinitionLabelMarker"),e.consume(t),e.exit("gfmFootnoteDefinitionLabelMarker"),s};function s(t){return 94===t?(e.enter("gfmFootnoteDefinitionMarker"),e.consume(t),e.exit("gfmFootnoteDefinitionMarker"),e.enter("gfmFootnoteDefinitionLabelString"),l):n(t)}function l(t){var a;return null===t||91===t||u>999?n(t):93===t?o?(a=e.exit("gfmFootnoteDefinitionLabelString"),r=vt(i.sliceSerialize(a)),e.enter("gfmFootnoteDefinitionLabelMarker"),e.consume(t),e.exit("gfmFootnoteDefinitionLabelMarker"),e.exit("gfmFootnoteDefinitionLabel"),p):n(t):je(t)?(e.enter("lineEnding"),e.consume(t),e.exit("lineEnding"),u++,l):(e.enter("chunkString").contentType="string",c(t))}function c(t){return null===t||je(t)||91===t||93===t||u>999?(e.exit("chunkString"),l(t)):(Ue(t)||(o=!0),u++,e.consume(t),92===t?f:c)}function f(t){return 91===t||92===t||93===t?(e.consume(t),u++,c):c(t)}function p(t){return 58===t?(e.enter("definitionMarker"),e.consume(t),e.exit("definitionMarker"),Ze(e,h,"gfmFootnoteDefinitionWhitespace")):n(t)}function h(e){return a.includes(r)||a.push(r),t(e)}}function Yr(e,t,n){return e.check(qe,t,e.attempt(Hr,t,n))}function Zr(e){e.exit("gfmFootnoteDefinition")}function Wr(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=e.singleTilde,n={tokenize:o,resolveAll:r};return null!==t&&void 0!==t||(t=!0),{text:K({},126,n),insideSpan:{null:[n]},attentionMarkers:{null:[126]}};function r(e,t){for(var n=-1;++n1?r(i):(e.consume(i),a++,u);if(a<2&&!t)return r(i);var l=e.exit("strikethroughSequenceTemporary"),c=Ut(i);return l._open=!c||2===c&&Boolean(s),l._close=!s||2===s&&Boolean(c),n(i)}}}var Qr={flow:{null:{tokenize:function(e,t,n){var r,o,i=this,a=[],u=0;return function(t){if(e.enter("table")._align=a,e.enter("tableHead"),e.enter("tableRow"),124===t)return s(t);return u++,e.enter("temporaryTableCellContent"),f(t)};function s(t){return e.enter("tableCellDivider"),e.consume(t),e.exit("tableCellDivider"),r=!0,l}function l(t){return null===t||je(t)?function(t){if(null===t)return n(t);e.exit("tableRow"),e.exit("tableHead");var r=i.interrupt;return i.interrupt=!0,e.attempt({tokenize:S,partial:!0},(function(t){return i.interrupt=r,e.enter("tableDelimiterRow"),h(t)}),(function(e){return i.interrupt=r,n(e)}))(t)}(t):Ke(t)?(e.enter("whitespace"),e.consume(t),c):(r&&(r=void 0,u++),124===t?s(t):(e.enter("temporaryTableCellContent"),f(t)))}function c(t){return Ke(t)?(e.consume(t),c):(e.exit("whitespace"),l(t))}function f(t){return null===t||124===t||Ue(t)?(e.exit("temporaryTableCellContent"),l(t)):(e.consume(t),92===t?p:f)}function p(t){return 92===t||124===t?(e.consume(t),f):f(t)}function h(t){return 
null===t||je(t)?g(t):Ke(t)?(e.enter("whitespace"),e.consume(t),d):45===t?(e.enter("tableDelimiterFiller"),e.consume(t),o=!0,a.push("none"),m):58===t?(e.enter("tableDelimiterAlignment"),e.consume(t),e.exit("tableDelimiterAlignment"),a.push("left"),E):124===t?(e.enter("tableCellDivider"),e.consume(t),e.exit("tableCellDivider"),h):n(t)}function d(t){return Ke(t)?(e.consume(t),d):(e.exit("whitespace"),h(t))}function m(t){return 45===t?(e.consume(t),m):(e.exit("tableDelimiterFiller"),58===t?(e.enter("tableDelimiterAlignment"),e.consume(t),e.exit("tableDelimiterAlignment"),a[a.length-1]="left"===a[a.length-1]?"center":"right",T):h(t))}function E(t){return 45===t?(e.enter("tableDelimiterFiller"),e.consume(t),o=!0,m):n(t)}function T(t){return null===t||je(t)?g(t):Ke(t)?(e.enter("whitespace"),e.consume(t),d):124===t?(e.enter("tableCellDivider"),e.consume(t),e.exit("tableCellDivider"),h):n(t)}function g(t){return e.exit("tableDelimiterRow"),o&&u===a.length?null===t?v(t):e.check(Vr,v,e.attempt({tokenize:S,partial:!0},Ze(e,y,"linePrefix",4),v))(t):n(t)}function v(n){return e.exit("table"),t(n)}function y(t){return e.enter("tableBody"),A(t)}function A(t){return e.enter("tableRow"),124===t?k(t):(e.enter("temporaryTableCellContent"),D(t))}function k(t){return e.enter("tableCellDivider"),e.consume(t),e.exit("tableCellDivider"),C}function C(t){return null===t||je(t)?function(t){if(e.exit("tableRow"),null===t)return N(t);return e.check(Vr,N,e.attempt({tokenize:S,partial:!0},Ze(e,A,"linePrefix",4),N))(t)}(t):Ke(t)?(e.enter("whitespace"),e.consume(t),_):124===t?k(t):(e.enter("temporaryTableCellContent"),D(t))}function _(t){return Ke(t)?(e.consume(t),_):(e.exit("whitespace"),C(t))}function D(t){return null===t||124===t||Ue(t)?(e.exit("temporaryTableCellContent"),C(t)):(e.consume(t),92===t?b:D)}function b(t){return 92===t||124===t?(e.consume(t),D):D(t)}function N(t){return e.exit("tableBody"),v(t)}function S(e,t,n){return function(t){return e.enter("lineEnding"),e.consume(t),e.exit("lineEnding"),Ze(e,r,"linePrefix")};function r(r){if(i.parser.lazy[i.now().line]||null===r||je(r))return n(r);var o=i.events[i.events.length-1];return!i.parser.constructs.disable.null.includes("codeIndented")&&o&&"linePrefix"===o[1].type&&o[2].sliceSerialize(o[1],!0).length>=4?n(r):(i._gfmTableDynamicInterruptHack=!0,e.check(i.parser.constructs.flow,(function(e){return i._gfmTableDynamicInterruptHack=!1,n(e)}),(function(e){return i._gfmTableDynamicInterruptHack=!1,t(e)}))(r))}}},resolve:function(e,t){var n,r,o,i,a,u,s,l=-1;for(;++l0?{type:"text",value:f}:void 0),!1!==f){var p;if(i!==n&&l.push({type:"text",value:e.value.slice(i,n)}),Array.isArray(f))(p=l).push.apply(p,(0,h.Z)(f));else f&&l.push(f);i=n+c[0].length}if(!r.global)break;c=r.exec(e.value)}if(void 0===n)l=[e],a--;else{var d;i?\]}]+$/.exec(e);if(i)for(e=e.slice(0,i.index),t=(o=i[0]).indexOf(")"),n=eo(e,"("),r=eo(e,")");-1!==t&&n>r;)e+=o.slice(0,t+1),t=(o=o.slice(t+1)).indexOf(")"),r++;return[e,o]}(n+r);if(!a[0])return!1;var u={type:"link",title:null,url:i+t+a[0],children:[{type:"text",value:t+a[0]}]};return a[1]?[u,{type:"text",value:a[1]}]:u}function fo(e,t,n,r){return!(!po(r,!0)||/[_-\d]$/.test(n))&&{type:"link",title:null,url:"mailto:"+t+"@"+n,children:[{type:"text",value:t+"@"+n}]}}function po(e,t){var n=e.input.charCodeAt(e.index-1);return(0===e.index||Ge(n)||ze(n))&&(!t||47!==n)}function ho(e){return e.label||!e.identifier?e.label||"":pn(e.identifier)}function mo(e){var t=e||{},n=t.now||{},r=t.lineShift||0,o=n.line||1,i=n.column||1;return{move:function(){var 
e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"",t=e.split(/\r?\n|\r/g),n=t[t.length-1];return o+=t.length-1,i=1===t.length?i+n.length:1+n.length+r,e},current:function(){return{now:{line:o,column:i},lineShift:r}},shift:function(e){r+=e}}}function Eo(e,t,n){var r=t.indexStack,o=e.children||[],i=mo(n),a=[],u=-1;for(r.push(-1);++u=m||(E+10?" ":"")),o.shift(4),i+=o.move(go(Eo(e,n,o.current()),(function(e,t,n){if(t)return(n?"":" ")+e;return e}))),a(),i},footnoteReference:e}};function e(e,t,n,r){var o=mo(r),i=o.move("[^"),a=n.enter("footnoteReference"),u=n.enter("reference");return i+=o.move(ko(n,ho(e),z(z({},o.current()),{},{before:i,after:"]"}))),u(),a(),i+=o.move("]")}}function bo(e,t,n){var r=t.indexStack,o=e.children||[],i=[],a=-1,u=n.before;r.push(-1);for(var s=mo(n);++a0&&("\r"===u||"\n"===u)&&"html"===l.type&&(i[i.length-1]=i[i.length-1].replace(/(\r?\n|\r)$/," "),u=" ",(s=mo(n)).move(i.join(""))),i.push(s.move(t.handle(l,e,t,z(z({},s.current()),{},{before:u,after:c})))),u=i[i.length-1].slice(-1)}return r.pop(),i.join("")}var No={canContainEols:["delete"],enter:{strikethrough:function(e){this.enter({type:"delete",children:[]},e)}},exit:{strikethrough:function(e){this.exit(e)}}},So={unsafe:[{character:"~",inConstruct:"phrasing"}],handlers:{delete:Oo}};function Oo(e,t,n,r){var o=mo(r),i=n.enter("emphasis"),a=o.move("~~");return a+=bo(e,n,z(z({},o.current()),{},{before:a,after:"~"})),a+=o.move("~~"),i(),a}function Fo(e,t,n){for(var r=e.value||"",o="`",i=-1;new RegExp("(^|[^`])"+o+"([^`]|$)").test(r);)o+="`";for(/[^ \r\n]/.test(r)&&(/^[ \r\n]/.test(r)&&/[ \r\n]$/.test(r)||/^`|`$/.test(r))&&(r=" "+r+" ");++i1&&void 0!==arguments[1]?arguments[1]:{},n=(t.align||[]).concat(),r=t.stringLength||Io,o=[],i=[],a=[],u=[],s=0,l=-1;++ls&&(s=e[l].length);++pu[p])&&(u[p]=d)}c.push(h)}i[l]=c,a[l]=f}var m=-1;if("object"===typeof n&&"length"in n)for(;++mu[m]&&(u[m]=k),g[m]=k),T[m]=C}i.splice(1,0,T),a.splice(1,0,g),l=-1;for(var _=[];++l-1?t.start:1)+(!1===n.options.incrementListMarker?0:t.children.indexOf(e))+i);var a=i.length+1;("tab"===o||"mixed"===o&&(t&&"list"===t.type&&t.spread||e.spread))&&(a=4*Math.ceil(a/4));var u=mo(r);u.move(i+" ".repeat(a-i.length)),u.shift(a);var s=n.enter("listItem"),l=go(Eo(e,n,u.current()),(function(e,t,n){if(t)return(n?"":" ".repeat(a))+e;return(n?i:i+" ".repeat(a-i.length))+e}));return s(),l}var Uo={exit:{taskListCheckValueChecked:Ko,taskListCheckValueUnchecked:Ko,paragraph:function(e){var t,n=this.stack[this.stack.length-2],r=this.stack[this.stack.length-1],o=n.children,i=r.children[0],a=-1;if(n&&"listItem"===n.type&&"boolean"===typeof n.checked&&i&&"text"===i.type){for(;++a0&&void 0!==arguments[0]?arguments[0]:{},t=this.data();function n(e,n){(t[e]?t[e]:t[e]=[]).push(n)}n("micromarkExtensions",Jr(e)),n("fromMarkdownExtensions",Go()),n("toMarkdownExtensions",zo(e))}var Zo=n(2186),Wo={}.hasOwnProperty;function Qo(e,t){var n=t&&e&&"object"===typeof e&&"element"===e.type&&e.properties&&Wo.call(e.properties,t)&&e.properties[t];return null!==n&&void 0!==n&&!1!==n}function Vo(e){var t=e&&"element"===e.type&&e.tagName.toLowerCase()||"",n=2===t.length&&104===t.charCodeAt(0)?t.charCodeAt(1):0;return n>48&&n<55?n-48:null}function qo(e){return"children"in e?$o(e):"value"in e?e.value:""}function Xo(e){return"text"===e.type?e.value:"children"in e?$o(e):""}function $o(e){for(var t=-1,n=[];++t1?n-1:0),o=1;o0&&void 
0!==arguments[0]?arguments[0]:{},n=t.properties,r=t.behaviour||t.behavior||"prepend",o=t.content||ai,i=t.group,a=ni(t.test);return"wrap"===r?e=l:"before"===r||"after"===r?e=s:(n||(n={ariaHidden:"true",tabIndex:-1}),e=u),function(t){Sn(t,"element",(function(t,n,r){if(Vo(t)&&Qo(t,"id")&&a(t,n,r))return e(t,n,r)}))};function u(e){return e.children["prepend"===r?"unshift":"push"](p(e,pe(!0,{},n),c(o,e))),[Dn]}function s(e,t,a){var u;if("number"===typeof t&&a){var s=p(e,pe(!0,{},n),c(o,e)),l="before"===r?[s,e]:[e,s];if(i){var d=f(i,e);d&&!Array.isArray(d)&&"element"===d.type&&(d.children=l,l=[d])}return(u=a.children).splice.apply(u,[t,1].concat((0,h.Z)(l))),[Dn,t+l.length]}}function l(e){return e.children=[p(e,pe(!0,{},n),e.children)],[Dn]}function c(e,t){var n=f(e,t);return Array.isArray(n)?n:[n]}function f(e,t){return"function"===typeof e?e(t):pe(!0,Array.isArray(e)?[]:{},e)}function p(e,t,n){return{type:"element",tagName:"a",properties:Object.assign({},t,{href:"#"+(e.properties||{}).id}),children:n}}}var si=n(3854),li=(0,n(7780).v)(tr.YP,"g",["altGlyph","altGlyphDef","altGlyphItem","animateColor","animateMotion","animateTransform","clipPath","feBlend","feColorMatrix","feComponentTransfer","feComposite","feConvolveMatrix","feDiffuseLighting","feDisplacementMap","feDistantLight","feDropShadow","feFlood","feFuncA","feFuncB","feFuncG","feFuncR","feGaussianBlur","feImage","feMerge","feMergeNode","feMorphology","feOffset","fePointLight","feSpecularLighting","feSpotLight","feTile","feTurbulence","foreignObject","glyphRef","linearGradient","radialGradient","solidColor","textArea","textPath"]),ci=n(6350);var fi={html:"http://www.w3.org/1999/xhtml",mathml:"http://www.w3.org/1998/Math/MathML",svg:"http://www.w3.org/2000/svg",xlink:"http://www.w3.org/1999/xlink",xml:"http://www.w3.org/XML/1998/namespace",xmlns:"http://www.w3.org/2000/xmlns/"},pi={}.hasOwnProperty,hi={"#document":mi,"#document-fragment":mi,"#text":function(e,t){return{type:"text",value:t.value}},"#comment":function(e,t){return{type:"comment",value:t.data}},"#documentType":function(){return{type:"doctype"}}};function di(e,t){var n,r=e.schema,o=pi.call(hi,t.nodeName)?hi[t.nodeName]:Ei;"tagName"in t&&(e.schema=t.namespaceURI===fi.svg?tr.YP:tr.dy),"childNodes"in t&&(n=function(e,t){var n=-1,r=[];for(;++n-1&&ee)return{line:t+1,column:e-(n[t-1]||0)+1,offset:e};return{line:void 0,column:void 0,offset:void 0}},toOffset:function(e){var t,r=e&&e.line,o=e&&e.column;return"number"!==typeof r||"number"!==typeof o||Number.isNaN(r)||Number.isNaN(o)||!(r-1 in n)||(t=(n[r-2]||0)+o-1||0),t>-1&&t0?e.call(t,a,u,s):e.call(t,a,u)}function Si(e,t,n,r,o){var i,a=(0,ar.s)(r.schema,t);void 0===n||null===n||"number"===typeof n&&Number.isNaN(n)||!1===n&&(r.vue||r.vdom||r.hyperscript)||!n&&a.boolean&&(r.vue||r.vdom||r.hyperscript)||(Array.isArray(n)&&(n=a.commaSeparated?(0,lr.P)(n):(0,sr.P)(n)),a.boolean&&r.hyperscript&&(n=""),"style"===a.property&&"string"===typeof n&&(r.react||r.vue||r.vdom)&&(n=function(e,t){var n={};try{cr(e,(function(e,t){"-ms-"===e.slice(0,4)&&(e="ms-"+e.slice(4)),n[e.replace(/-([a-z])/g,(function(e,t){return t.toUpperCase()}))]=t}))}catch(r){throw r.message=t+"[style]"+r.message.slice("undefined".length),r}return n}(n,o)),r.vue?"style"!==a.property&&(i="attrs"):a.mustUseProperty||(r.vdom?"style"!==a.property&&(i="attributes"):r.hyperscript&&(i="attrs")),i?e[i]=Object.assign(e[i]||{},K({},a.attribute,n)):a.space&&r.react?e[Ai[a.property]||a.property]=n:e[a.attribute]=n)}function Oi(e){return"context"in e&&"cleanup"in e}var 
Fi={}.hasOwnProperty;function xi(e,t){var n=t||{};function r(t){var n=r.invalid,o=r.handlers;if(t&&Fi.call(t,e)&&(n=Fi.call(o,t[e])?o[t[e]]:r.unknown),n)return n.apply(this,arguments)}return r.handlers=n.handlers||{},r.invalid=n.invalid,r.unknown=n.unknown,r}var Ii={}.hasOwnProperty,Ri=xi("type",{handlers:{root:function(e,t){var n={nodeName:"#document",mode:(e.data||{}).quirksMode?"quirks":"no-quirks",childNodes:[]};return n.childNodes=Li(e.children,n,t),Mi(e,n)},element:function(e,t){var n=t.space;return bi((function(n,r){var o,i,a,u,s,l=[];for(a in r)Ii.call(r,a)&&!1!==r[a]&&((o=(0,ar.s)(t,a)).boolean&&!r[a]||(i={name:a,value:!0===r[a]?"":String(r[a])},o.space&&"html"!==o.space&&"svg"!==o.space&&((u=a.indexOf(":"))<0?i.prefix="":(i.name=a.slice(u+1),i.prefix=a.slice(0,u)),i.namespace=fi[o.space]),l.push(i)));"html"===t.space&&"svg"===e.tagName&&(t=tr.YP);(s=Mi(e,{nodeName:n,tagName:n,attrs:l,namespaceURI:fi[t.space],childNodes:[],parentNode:void 0})).childNodes=Li(e.children,s,t),"template"===n&&(s.content=function(e,t){var n={nodeName:"#document-fragment",childNodes:[]};return n.childNodes=Li(e.children,n,t),Mi(e,n)}(e.content,t));return s}),Object.assign({},e,{children:[]}),{space:n})},text:function(e){return Mi(e,{nodeName:"#text",value:e.value,parentNode:void 0})},comment:function(e){return Mi(e,{nodeName:"#comment",data:e.value,parentNode:void 0})},doctype:function(e){return Mi(e,{nodeName:"#documentType",name:"html",publicId:"",systemId:"",parentNode:void 0})}}});function Li(e,t,n){var r,o=-1,i=[];if(e)for(;++o1&&void 0!==arguments[1]?arguments[1]:{};return vi(r)?(n=r,t={}):(n=r.file,t=r),di({schema:"svg"===t.space?tr.YP:tr.dy,file:n,verbose:t.verbose,location:!1},e)}(function(e){var t="root"===e.type?e.children[0]:e;return Boolean(t&&("doctype"===t.type||"element"===t.type&&"html"===t.tagName))}(t)?function(){var e=f.treeAdapter.createDocument();if(f._bootstrap(e,void 0),!(i=f.tokenizer))throw new Error("Expected `tokenizer`");return a=i.preprocessor,s=i.__mixins[0],u=s.posTracker,p(t),e}():function(){var e={nodeName:"template",tagName:"template",attrs:[],namespaceURI:fi.html,childNodes:[]},n={nodeName:"documentmock",tagName:"documentmock",attrs:[],namespaceURI:fi.html,childNodes:[]},r={nodeName:"#document-fragment",childNodes:[]};if(f._bootstrap(n,e),f._pushTmplInsertionMode("IN_TEMPLATE_MODE"),f._initTokenizerForFragmentParsing(),f._insertFakeRootElement(),f._resetInsertionMode(),f._findFormInFragmentContext(),!(i=f.tokenizer))throw new Error("Expected `tokenizer`");return a=i.preprocessor,s=i.__mixins[0],u=s.posTracker,p(t),f._adoptNodes(n.childNodes[0],r),r}(),n);return o&&Sn(h,"comment",(function(e,t,n){var r=e;if(r.value.stitch&&null!==n&&null!==t)return n.children[t]=r.value.stitch,t})),"root"!==t.type&&"root"===h.type&&1===h.children.length?h.children[0]:h;function d(e){var t=-1;if(e)for(;++t0&&void 0!==arguments[0]?arguments[0]:{};return function(t,n){return Hi(t,n,e)}}var Yi=function e(){for(var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],n=arguments.length>1?arguments[1]:void 0,r=arguments.length>2?arguments[2]:void 0,o=n;o1){var o=e.children[0];if(o&&"code"===o.tagName&&"number"===typeof n){var i=function(){for(var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],t=arguments.length>1?arguments[1]:void 0;t>-1;){if(!e[--t])return;if(e[t]&&e[t].value&&""!==e[t].value.replace(/(\n|\s)/g,"")||"text"!==e[t].type){if(!/^rehype:/.test(e[t].value)||"comment"!==e[t].type)return;return e[t]}}}(r.children,n);if(i){var 
a=Zi(i);Object.keys(a).length>0&&(e.properties=z(z({},e.properties),{"data-type":"rehyp"}),o.properties=Wi(o.properties,a,t.properties))}}}if(/^(em|strong|b|a|i|p|pre|kbd|blockquote|h(1|2|3|4|5|6)|code|table|img|del|ul|ol)$/.test(e.tagName)&&r&&Array.isArray(r.children)&&"number"===typeof n){var u=Yi(r.children,n);if(u){var s=Zi(u);Object.keys(s).length>0&&(e.properties=Wi(e.properties,s,t.properties))}}}))}},qi=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=e.openDelimiter,n=void 0===t?"rehype:ignore:start":t,r=e.closeDelimiter,o=void 0===r?"rehype:ignore:end":r;return function(e){Sn(e,(function(e,t,r){if("element"===e.type||"root"===e.type){var i=!1;e.children=e.children.filter((function(e){return"comment"===e.type&&e.value.trim()===n?(i=!0,!1):"comment"===e.type&&e.value.trim()===o?(i=!1,!1):!i}))}}))}},Xi={}.hasOwnProperty,$i=n(3871),Ji=n(6018),ea=n(4124);function ta(){ta=function(e,t){return new n(e,void 0,t)};var e=RegExp.prototype,t=new WeakMap;function n(e,r,o){var i=new RegExp(e,r);return t.set(i,o||t.get(e)),ra(i,n.prototype)}function r(e,n){var r=t.get(n);return Object.keys(r).reduce((function(t,n){return t[n]=e[r[n]],t}),Object.create(null))}return na(n,RegExp),n.prototype.exec=function(t){var n=e.exec.call(this,t);return n&&(n.groups=r(n,this)),n},n.prototype[Symbol.replace]=function(n,o){if("string"==typeof o){var i=t.get(this);return e[Symbol.replace].call(this,n,o.replace(/\$<([^>]+)>/g,(function(e,t){return"$"+i[t]})))}if("function"==typeof o){var a=this;return e[Symbol.replace].call(this,n,(function(){var e=arguments;return"object"!=typeof e[e.length-1]&&(e=[].slice.call(e)).push(r(e,a)),o.apply(this,e)}))}return e[Symbol.replace].call(this,n,o)},ta.apply(this,arguments)}function na(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),Object.defineProperty(e,"prototype",{writable:!1}),t&&ra(e,t)}function ra(e,t){return ra=Object.setPrototypeOf||function(e,t){return e.__proto__=t,e},ra(e,t)}function oa(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=new Array(t);n=e.length?{done:!0}:{done:!1,value:e[r++]}}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}var aa=function(e){return function(t){return void 0===t&&(t={}),function(e){Sn(e,"element",n)};function n(n,r,o){if(o&&"pre"===o.tagName&&"code"===n.tagName){var i=n.data&&n.data.meta?n.data.meta:"";n.properties.className?"boolean"==typeof n.properties.className?n.properties.className=[]:Array.isArray(n.properties.className)||(n.properties.className=[n.properties.className]):n.properties.className=[],n.properties.className.push("code-highlight");var a,u,s=function(e){for(var t,n=ia(e.properties.className);!(t=n()).done;){var r=t.value;if("language-"===r.slice(0,9))return r.slice(9).toLowerCase()}return null}(n);if(s)try{a=e.highlight(qo(n),s),o.properties.className=(o.properties.className||[]).concat("language-"+s)}catch(E){if(!t.ignoreMissing||!/Unknown language/.test(E.message))throw E;a=n}else a=n;a.children=(u=1,function e(t){return t.reduce((function(t,n){if("text"===n.type){var r=n.value,o=(r.match(/\n/g)||"").length;if(0===o)n.position={start:{line:u,column:0},end:{line:u,column:0}},t.push(n);else for(var i,a=r.split("\n"),s=ia(a.entries());!(i=s()).done;){var 
l=i.value,c=l[0],f=l[1];t.push({type:"text",value:c===a.length-1?f:f+"\n",position:{start:{line:u+c},end:{line:u+c}}})}return u+=o,t}if(Object.prototype.hasOwnProperty.call(n,"children")){var p=u;return n.children=e(n.children),t.push(n),n.position={start:{line:p,column:0},end:{line:u,column:0}},t}return t.push(n),t}),[])})(a.children),a.position=a.children.length>0?{start:{line:a.children[0].position.start.line,column:0},end:{line:a.children[a.children.length-1].position.end.line,column:0}}:{start:{line:0,column:0},end:{line:0,column:0}};for(var l,c=function(e){var t=/{([\d,-]+)}/,n=e.split(",").map((function(e){return e.trim()})).join();if(t.test(n)){var r=t.exec(n)[1],o=$i(r);return function(e){return o.includes(e+1)}}return function(){return!1}}(i),f=function(e){var t=ta(/showLineNumbers=([0-9]+)/i,{lines:1});if(t.test(e)){var n=t.exec(e);return Number(n.groups.lines)}return 1}(i),p=function(e){for(var t=new Array(e),n=0;n0&&0===l.length)return null}var c={};for(s in t)Xi.call(t,s)&&(c[s]="children"===s?l:t[s]);return c}(e)}(a,(function(e){return e.position.start.line<=n+1&&e.position.end.line>=n+1}));r.children=o.children,!i.toLowerCase().includes("showLineNumbers".toLowerCase())&&!t.showLineNumbers||h.some((function(e){return i.toLowerCase().includes(e)}))||(r.properties.line=[(n+f).toString()],r.properties.className.push("line-number")),c(n)&&r.properties.className.push("highlight-line"),"diff"===s&&"-"===qo(r).substring(0,1)?r.properties.className.push("deleted"):"diff"===s&&"+"===qo(r).substring(0,1)&&r.properties.className.push("inserted")},m=ia(p.entries());!(l=m()).done;)d();p.length>0&&""===qo(p[p.length-1]).trim()&&p.pop(),n.children=p}}}},ua=(aa(Ji.$),aa(ea.$)),sa="A-Za-z\xc0-\xd6\xd8-\xf6\xf8-\u02b8\u0300-\u0590\u0800-\u1fff\u200e\u2c00-\ufb1c\ufe00-\ufe6f\ufefd-\uffff",la=new RegExp("^[^"+sa+"]*["+"\u0591-\u07ff\ufb1d-\ufdfd\ufe70-\ufefc]"),ca=new RegExp("^[^\u0591-\u07ff\ufb1d-\ufdfd\ufe70-\ufefc]*["+sa+"]");var fa=ni();function pa(e,t){var n,r,o=e.schema,i=e.language,a=e.direction,u=e.editableOrEditingHost;if(fa(t)&&t.properties){var s=t.properties.xmlLang||t.properties.lang,l=t.properties.type||"text",c=da(t);void 0!==s&&null!==s&&(e.language=String(s),r=!0),o&&"html"===o.space?("true"===t.properties.contentEditable&&(e.editableOrEditingHost=!0,r=!0),ti(t,"svg")&&(e.schema=tr.YP,r=!0),"rtl"===c?n=c:"ltr"===c||"auto"!==c&&ti(t,"html")||"auto"!==c&&ti(t,"input")&&"tel"===l?n="ltr":("auto"===c||ti(t,"bdi"))&&(ti(t,"textarea")?n=ha(qo(t)):!ti(t,"input")||"email"!==l&&"search"!==l&&"tel"!==l&&"text"!==l?Sn(t,(function(e){if("text"===e.type)return(n=ha(e.value))?bn:null;if(e!==t&&(ti(e,["bdi","script","style","textare"])||da(e)))return Dn})):n=t.properties.value?ha(t.properties.value):"ltr"),n&&(e.direction=n,r=!0)):e.editableOrEditingHost&&(e.editableOrEditingHost=!1,r=!0)}return r?function(){e.schema=o,e.language=i,e.direction=a,e.editableOrEditingHost=u}:ma}function ha(e){var t=function(e){var t=String(e||"");return la.test(t)?"rtl":ca.test(t)?"ltr":"neutral"}(e);return"neutral"===t?void 0:t}function da(e){var t=fa(e)&&e.properties&&"string"===typeof e.properties.dir?e.properties.dir.toLowerCase():void 0;return"auto"===t||"ltr"===t||"rtl"===t?t:void 0}function ma(){}var Ea={}.hasOwnProperty,Ta=xi("nestingOperator",{unknown:function(e){throw new Error("Unexpected nesting `"+e.nestingOperator+"`")},invalid:function(e,t,n,r,o){if(r||null===n)throw new Error("topScan is supposed to be called from the root node");if(!o.iterator)throw new Error("Expected 
`iterator`");o.iterator(e,t,n,r,o),o.shallow||va(e,t,n,r,o)},handlers:{null:va,">":ya,"+":function(e,t,n,r,o){if(!r||null===n)return;Aa(e,r,o,n+1,!0)},"~":function(e,t,n,r,o){if(!r||null===n)return;Aa(e,r,o,n+1)}}});function ga(e,t,n,r,o){Ta(e,t,n,r,o)}function va(e,t,n,r,o){var i=o.iterator;o.iterator=function e(t,n,r,o,a){if(!i)throw new Error("Expected `iterator`");if(a.iterator=i,i(t,n,r,o,a),a.iterator=e,a.one&&a.found)return;ya(t,n,r,o,a)},ya(e,t,n,r,o)}function ya(e,t,n,r,o){(function(e){return Array.isArray(e.children)})(t)&&0!==t.children.length&&Aa(e,t,o)}function Aa(e,t,n,r,o){var i=n.index?function(e,t){var r=u,o=Ea.call(l,e.tagName)?l[e.tagName]:0;d(e.tagName),c.push((function(){n.elementIndex=r,n.typeIndex=o,n.elementCount=u,n.typeCount=l[e.tagName],h(e,t)}))}:h,a=t.children,u=0,s=-1,l={},c=[];if(void 0!==r&&null!==r||(r=0),!(r>=a.length)){if(n.index)for(;++s0;return o.shallow=i,o.one=a,o.scopeElements=u,l},lang:function(e,t,n,r,o){return""!==o.language&&void 0!==o.language&&null!==o.language&&Ca(o.language,(0,lr.Q)(e.value)).length>0},"last-child":function(e,t,n,r,o){return Ia(o,e),Boolean(o.elementCount&&o.elementIndex===o.elementCount-1)},"last-of-type":function(e,t,n,r,o){return Ia(o,e),"number"===typeof o.typeIndex&&"number"===typeof o.typeCount&&o.typeIndex===o.typeCount-1},matches:Na,not:function(e,t,n,r,o){return!Na(e,t,n,r,o)},"nth-child":function(e,t,n,r,o){return Ia(o,e),"number"===typeof o.elementIndex&&e.value(o.elementIndex)},"nth-last-child":function(e,t,n,r,o){return Ia(o,e),Boolean("number"===typeof o.elementCount&&"number"===typeof o.elementIndex&&e.value(o.elementCount-o.elementIndex-1))},"nth-of-type":function(e,t,n,r,o){return Ia(o,e),"number"===typeof o.typeIndex&&e.value(o.typeIndex)},"nth-last-of-type":function(e,t,n,r,o){return Ia(o,e),"number"===typeof o.typeCount&&"number"===typeof o.typeIndex&&e.value(o.typeCount-1-o.typeIndex)},"only-child":function(e,t,n,r,o){return Ia(o,e),1===o.elementCount},"only-of-type":function(e,t,n,r,o){return Ia(o,e),1===o.typeCount},optional:function(e,t){return!Oa(e,t)},"read-only":function(e,t,n,r,o){return!Fa(e,t,n,r,o)},"read-write":Fa,required:Oa,root:function(e,t,n,r,o){return Boolean((!r||"root"===r.type)&&o.schema&&("html"===o.schema.space||"svg"===o.schema.space)&&ti(t,["html","svg"]))},scope:function(e,t,n,r,o){return Boolean(ti(t)&&o.scopeElements&&o.scopeElements.includes(t))}}});function ba(e,t,n,r,o){for(var i=e.pseudos,a=-1;++a","+","~");var Qa=function e(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"";return t.forEach((function(t){"text"===t.type?n+=t.value:"element"===t.type&&t.children&&Array.isArray(t.children)&&(n+=e(t.children))})),n},Va=function(e){var t=e||{},n=t.selector,r=t.rewrite;return function(e){if(r&&"function"===typeof r)if(n&&"string"===typeof n){var t=Wa(n,e);t&&t.length>0&&Sn(e,t,(function(e,t,n){r(e,t,n)}))}else Sn(e,(function(e,t,n){r(e,t,n)}))}},qa={type:"element",tagName:"svg",properties:{className:"octicon octicon-link",viewBox:"0 0 16 16",version:"1.1",width:"16",height:"16",ariaHidden:"true"},children:[{type:"element",tagName:"path",children:[],properties:{fillRule:"evenodd",d:"M7.775 3.275a.75.75 0 001.06 1.06l1.25-1.25a2 2 0 112.83 2.83l-2.5 2.5a2 2 0 01-2.83 0 .75.75 0 00-1.06 1.06 3.5 3.5 0 004.95 0l2.5-2.5a3.5 3.5 0 00-4.95-4.95l-1.25 1.25zm-4.69 9.64a2 2 0 010-2.83l2.5-2.5a2 2 0 012.83 0 .75.75 0 001.06-1.06 3.5 3.5 0 00-4.95 0l-2.5 2.5a3.5 3.5 0 004.95 4.95l1.25-1.25a.75.75 0 00-1.06-1.06l-1.25 
1.25a2 2 0 01-2.83 0z"}}]},Xa=n(6871),$a=n.n(Xa);var Ja=function(e){return void 0===e&&(e={}),function(e){Sn(e,(function(e){"element"===e.type&&"code"===e.tagName&&e.data&&e.data.meta&&(e.properties=f({},e.properties,{"data-meta":String(e.data.meta)}))}))}},eu=["prefixCls","className","source","style","disableCopy","onScroll","onMouseOver","pluginsFilter","rehypeRewrite","warpperElement"],tu=o.forwardRef((function(e,t){var n=e.prefixCls,r=void 0===n?"wmde-markdown wmde-markdown-color":n,i=e.className,a=e.source,u=e.style,s=e.disableCopy,l=void 0!==s&&s,c=e.onScroll,d=e.onMouseOver,m=e.pluginsFilter,E=e.rehypeRewrite,T=e.warpperElement,g=void 0===T?{}:T,v=p(e,eu),y=o.createRef();(0,o.useImperativeHandle)(t,(function(){return f({},e,{mdp:y})}),[y,e]);var k=(r||"")+" "+(i||""),C=[Ja,[ua,{ignoreMissing:!0}],zi,ei,ui,qi,[Va,{rewrite:function(e,t,n){if("element"===e.type&&n&&"root"===n.type&&/h(1|2|3|4|5|6)/.test(e.tagName)){var r=e.children&&e.children[0];r&&r.properties&&"true"===r.properties.ariaHidden&&(r.properties=f({class:"anchor"},r.properties),r.children=[qa])}if("element"===e.type&&"pre"===e.tagName&&!l){var o=Qa(e.children);e.children.push((void 0===(i=o)&&(i=""),{type:"element",tagName:"div",properties:{onClick:function(e){var t=e.currentTarget||e.target;t.classList.add("active"),$a()(t.dataset.code,(function(){setTimeout((function(){t.classList.remove("active")}),2e3)}))},"data-code":i,class:"copied"},children:[{type:"element",tagName:"svg",properties:{className:"octicon-copy",ariaHidden:"true",viewBox:"0 0 16 16",fill:"currentColor",height:12,width:12},children:[{type:"element",tagName:"path",properties:{fillRule:"evenodd",d:"M0 6.75C0 5.784.784 5 1.75 5h1.5a.75.75 0 010 1.5h-1.5a.25.25 0 00-.25.25v7.5c0 .138.112.25.25.25h7.5a.25.25 0 00.25-.25v-1.5a.75.75 0 011.5 0v1.5A1.75 1.75 0 019.25 16h-7.5A1.75 1.75 0 010 14.25v-7.5z"},children:[]},{type:"element",tagName:"path",properties:{fillRule:"evenodd",d:"M5 1.75C5 .784 5.784 0 6.75 0h7.5C15.216 0 16 .784 16 1.75v7.5A1.75 1.75 0 0114.25 11h-7.5A1.75 1.75 0 015 9.25v-7.5zm1.75-.25a.25.25 0 00-.25.25v7.5c0 .138.112.25.25.25h7.5a.25.25 0 00.25-.25v-7.5a.25.25 0 00-.25-.25h-7.5z"},children:[]}]},{type:"element",tagName:"svg",properties:{className:"octicon-check",ariaHidden:"true",viewBox:"0 0 16 16",fill:"currentColor",height:12,width:12},children:[{type:"element",tagName:"path",properties:{fillRule:"evenodd",d:"M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z"},children:[]}]}]}))}var i;E&&E(e,t,n)}}],[Vi,{properties:"attr"}]].concat((0,h.Z)(v.rehypePlugins||[])),_={allowElement:function(e,t,n){return 
v.allowElement?v.allowElement(e,t,n):/^[A-Za-z0-9]+$/.test(e.tagName)}},D=[].concat((0,h.Z)(v.remarkPlugins||[]),[Yo]);return(0,A.jsx)("div",f({ref:y,onScroll:c,onMouseOver:d},g,{className:k,style:u,children:(0,A.jsx)(yr,f({},v,_,{rehypePlugins:m?m("rehype",C):C,remarkPlugins:m?m("remark",D):D,children:a||""}))}))})),nu=["prefixCls","className","style","keyCode","onMouseDown","onMouseUp"],ru=[{keycode:27,name:["esc"]},{keycode:112,name:["F1"]},{keycode:113,name:["F2"]},{keycode:114,name:["F3"]},{keycode:115,name:["F4"]},{keycode:116,name:["F5"]},{keycode:117,name:["F6"]},{keycode:118,name:["F7"]},{keycode:119,name:["F8"]},{keycode:120,name:["F9"]},{keycode:121,name:["F10"]},{keycode:122,name:["F11"]},{keycode:123,name:["F12"]},{keycode:-1,name:["\u3007"]},{keycode:192,name:["~","`"]},{keycode:49,name:["!","1"]},{keycode:50,name:["@","2"]},{keycode:51,name:["#","3"]},{keycode:52,name:["$","4"]},{keycode:53,name:["%","5"]},{keycode:54,name:["^","6"]},{keycode:55,name:["&","7"]},{keycode:56,name:["*","8"]},{keycode:57,name:["(","9"]},{keycode:48,name:[")","0"]},{keycode:189,name:["\uff3f","-"]},{keycode:187,name:["\uff0b",": "]},{keycode:8,name:["delete"]},{keycode:9,name:["tab"]},{keycode:81,name:["Q"]},{keycode:87,name:["W"]},{keycode:69,name:["E"]},{keycode:82,name:["R"]},{keycode:84,name:["T"]},{keycode:89,name:["Y"]},{keycode:85,name:["U"]},{keycode:73,name:["I"]},{keycode:79,name:["O"]},{keycode:80,name:["P"]},{keycode:219,name:["{","["]},{keycode:221,name:["}","]"]},{keycode:220,name:["|","\\"]},{keycode:20,name:["","CapsLock"]},{keycode:65,name:["A"]},{keycode:83,name:["S"]},{keycode:68,name:["D"]},{keycode:70,name:["F"]},{keycode:71,name:["G"]},{keycode:72,name:["H"]},{keycode:74,name:["J"]},{keycode:75,name:["K"]},{keycode:76,name:["L"]},{keycode:186,name:[":",";"]},{keycode:222,name:['"',"'"]},{keycode:13,name:["enter","return"]},{keycode:16,name:["\u21e7"]},{keycode:90,name:["Z"]},{keycode:88,name:["X"]},{keycode:67,name:["C"]},{keycode:86,name:["V"]},{keycode:66,name:["B"]},{keycode:78,name:["N"]},{keycode:77,name:["M"]},{keycode:188,name:["<",","]},{keycode:190,name:[">","."]},{keycode:191,name:["?","/"]},{keycode:16,name:["\u21e7"]},{keycode:-1,name:["fn"]},{keycode:17,name:["control"]},{keycode:18,name:["alt","option"]},{keycode:91,name:["command"]},{keycode:32,name:[""]},{keycode:91,name:["command"]},{keycode:18,name:["alt","option"]},{keycode:37,name:["\u25c0"]},{keycode:38,name:["\u25b2"]},{keycode:39,name:["\u25b6"]},{keycode:40,name:["\u25bc"]}];function ou(e){var t=e.prefixCls,n=e.className,r=e.style,o=e.keyCode,i=void 0===o?[]:o,a=e.onMouseDown,u=e.onMouseUp,s=p(e,nu);return(0,A.jsx)("div",{className:(t||"")+" "+(n||""),style:r,children:(0,A.jsx)("ul",{children:ru.map((function(e,t){var n=e.name.map((function(e,t){return(0,A.jsx)("span",{children:e},""+t)}));return(0,A.jsx)("li",f({onMouseDown:function(t){return a&&a(t,e)},onMouseUp:function(t){return u&&u(t,e)},className:i.indexOf(e.keycode)>-1?"pressed":"","data-key":e.keycode},s,{children:n}),t)}))})})}ou.defaultProps={prefixCls:"w-mac-keyboard"},ou.propTypes={prefixCls:er().string,className:er().string,keyCode:er().arrayOf(er().number),onMouseDown:er().func,onMouseUp:er().func};n(144);var iu="Footer_footer__6AZaJ";function au(e){var t=e.name,n=e.href,r=e.year,o=e.children;return(0,A.jsxs)("div",{className:iu,children:[o,(0,A.jsxs)("div",{children:["Licensed under MIT. 
(Yes it\xb4s free and",(0,A.jsx)("a",{href:"https://github.com/jaywcjlove/hotkeys",children:" open-sourced"}),")"]}),(0,A.jsxs)("div",{children:["\xa9",(0,A.jsx)("a",{target:"_blank",rel:"noopener noreferrer",href:n,children:t}),r]})]})}var uu="index_tools__z-HKb",su="index_version__-ytTx",lu="index_keyCodeInfo__cHMZ+",cu="index_header__jwPmf",fu="index_title__X9GPx",pu="index_info__QmMpI",hu="index_github__CJgBc",du="# Hotkeys\n\n\x3c!--dividing--\x3e\n\n[![CDN jsdelivr](https://data.jsdelivr.com/v1/package/npm/hotkeys-js/badge?style=rounded)](https://www.jsdelivr.com/package/npm/hotkeys-js)\n[![](https://img.shields.io/npm/dm/hotkeys-js?logo=npm)](https://www.npmjs.com/package/hotkeys-js)\n[![](https://img.shields.io/github/stars/jaywcjlove/hotkeys.svg)](https://github.com/jaywcjlove/hotkeys/stargazers)\n![no dependencies](http://jaywcjlove.github.io/sb/status/no-dependencies.svg)\n[![GitHub Actions CI](https://github.com/jaywcjlove/hotkeys/actions/workflows/ci.yml/badge.svg)](https://github.com/jaywcjlove/hotkeys/actions/workflows/ci.yml)\n[![Coverage Status](https://coveralls.io/repos/github/jaywcjlove/hotkeys/badge.svg?branch=master)](https://coveralls.io/github/jaywcjlove/hotkeys?branch=master)\n[![jaywcjlove/hotkeys](https://jaywcjlove.github.io/sb/lang/chinese.svg)](https://github.com/jaywcjlove/hotkeys/blob/master/README-zh.md)\n[![jaywcjlove/hotkeys](https://jaywcjlove.github.io/sb/ico/gitee.svg)](https://gitee.com/jaywcjlove/hotkeys)\n\nHotKeys.js is an input capture library with some very special features, it is easy to pick up and use, has a reasonable footprint ([~3kb](https://bundlephobia.com/result?p=hotkeys-js)) (gzipped: 1.73kb), and has no dependencies. It should not interfere with any JavaScript libraries or frameworks. Official document [demo preview](http://jaywcjlove.github.io/hotkeys). 
[More examples](https://github.com/jaywcjlove/hotkeys/issues?q=label%3ADemo+).\n\n```bash\n\u256d\u2508\u2508\u256e \u256d\u2508\u2508\u256e \u256d\u2508\u2508\u256e\n\u2506 \u251c\u2508\u2508..\u2508\u2508\u2508\u2508\u2508.\u2506 \u2514\u2508\u256e\u2506 \u251c\u2508\u2508..\u2508\u2508\u2508\u2508\u2508..\u2508\u2508.\u2508\u2508..\u2508\u2508\u2508\u2508\u2508.\n\u2506 \u2506\u2506 \u25a1 \u2506\u2506 \u2508\u2524\u2506 < \u2506 -__\u2518\u2506 \u2506 \u2506\u2506__ \u2508\u2508\u2524\n\u2570\u2508\u2508\u2534\u2508\u2508\u256f\u2570\u2508\u2508\u2508\u2508\u2508\u256f\u2570\u2508\u2508\u2508\u2508\u256f\u2570\u2508\u2508\u2534\u2508\u2508\u256f\u2570\u2508\u2508\u2508\u2508\u2508\u256f\u2570\u2508\u2508\u2508 \u2506\u2570\u2508\u2508\u2508\u2508\u2508\u256f\n \u2570\u2508\u2508\u2508\u2508\u2508\u256f\n```\n\n## Usage\n\nYou will need `Node.js` installed on your system.\n\n```bash\nnpm install hotkeys-js --save\n```\n\n```js\nimport hotkeys from 'hotkeys-js';\n\nhotkeys('f5', function(event, handler){\n // Prevent the default refresh event under WINDOWS system\n event.preventDefault()\n alert('you pressed F5!')\n});\n```\n\nOr manually download and link **hotkeys.js** in your HTML, It can also be downloaded via [UNPKG](https://unpkg.com/hotkeys-js/dist/):\n\nCDN: [UNPKG](https://unpkg.com/hotkeys-js/dist/) | [jsDelivr](https://cdn.jsdelivr.net/npm/hotkeys-js@3.7.3/) | [Githack](https://raw.githack.com/jaywcjlove/hotkeys/master/dist/hotkeys.min.js) | [Statically](https://cdn.statically.io/gh/jaywcjlove/hotkeys/master/dist/hotkeys.min.js) | [bundle.run](https://bundle.run/hotkeys-js@3.7.3)\n\n```html\n\n * ```\n *\n * Elements by their ID are made available in browsers on the `window` object.\n * Using a prefix prevents this from being a problem.\n * @property {string} [footnoteLabel='Footnotes']\n * Label to use for the footnotes section.\n * Affects screen reader users.\n * Change it if you’re authoring in a different language.\n * @property {string} [footnoteBackLabel='Back to content']\n * Label to use from backreferences back to their footnote call.\n * Affects screen reader users.\n * Change it if you’re authoring in a different language.\n * @property {Handlers} [handlers]\n * Object mapping mdast nodes to functions handling them\n * @property {Array} [passThrough]\n * List of custom mdast node types to pass through (keep) in hast\n * @property {Handler} [unknownHandler]\n * Handler for all unknown nodes.\n *\n * @typedef {Record} Handlers\n * Map of node types to handlers\n * @typedef {HFunctionProps & HFunctionNoProps & HFields} H\n * Handle context\n */\n\nimport {u} from 'unist-builder'\nimport {visit} from 'unist-util-visit'\nimport {pointStart, pointEnd} from 'unist-util-position'\nimport {generated} from 'unist-util-generated'\nimport {definitions} from 'mdast-util-definitions'\nimport {one} from './traverse.js'\nimport {footer} from './footer.js'\nimport {handlers} from './handlers/index.js'\n\nconst own = {}.hasOwnProperty\n\n/**\n * Factory to transform.\n * @param {MdastNode} tree mdast node\n * @param {Options} [options] Configuration\n * @returns {H} `h` function\n */\nfunction factory(tree, options) {\n const settings = options || {}\n const dangerous = settings.allowDangerousHtml || false\n /** @type {Record} */\n const footnoteById = {}\n\n h.dangerous = dangerous\n h.clobberPrefix =\n settings.clobberPrefix === undefined || settings.clobberPrefix === null\n ? 
'user-content-'\n : settings.clobberPrefix\n h.footnoteLabel = settings.footnoteLabel || 'Footnotes'\n h.footnoteBackLabel = settings.footnoteBackLabel || 'Back to content'\n h.definition = definitions(tree)\n h.footnoteById = footnoteById\n /** @type {Array} */\n h.footnoteOrder = []\n /** @type {Record} */\n h.footnoteCounts = {}\n h.augment = augment\n h.handlers = {...handlers, ...settings.handlers}\n h.unknownHandler = settings.unknownHandler\n h.passThrough = settings.passThrough\n\n visit(tree, 'footnoteDefinition', (definition) => {\n const id = String(definition.identifier).toUpperCase()\n\n // Mimick CM behavior of link definitions.\n // See: .\n if (!own.call(footnoteById, id)) {\n footnoteById[id] = definition\n }\n })\n\n // @ts-expect-error Hush, it’s fine!\n return h\n\n /**\n * Finalise the created `right`, a hast node, from `left`, an mdast node.\n * @param {(NodeWithData|PositionLike)?} left\n * @param {Content} right\n * @returns {Content}\n */\n function augment(left, right) {\n // Handle `data.hName`, `data.hProperties, `data.hChildren`.\n if (left && 'data' in left && left.data) {\n /** @type {Data} */\n const data = left.data\n\n if (data.hName) {\n if (right.type !== 'element') {\n right = {\n type: 'element',\n tagName: '',\n properties: {},\n children: []\n }\n }\n\n right.tagName = data.hName\n }\n\n if (right.type === 'element' && data.hProperties) {\n right.properties = {...right.properties, ...data.hProperties}\n }\n\n if ('children' in right && right.children && data.hChildren) {\n right.children = data.hChildren\n }\n }\n\n if (left) {\n const ctx = 'type' in left ? left : {position: left}\n\n if (!generated(ctx)) {\n right.position = {start: pointStart(ctx), end: pointEnd(ctx)}\n }\n }\n\n return right\n }\n\n /**\n * Create an element for `node`.\n *\n * @type {HFunctionProps}\n */\n function h(node, tagName, props, children) {\n if (Array.isArray(props)) {\n children = props\n props = {}\n }\n\n // @ts-expect-error augmenting an element yields an element.\n return augment(node, {\n type: 'element',\n tagName,\n properties: props || {},\n children: children || []\n })\n }\n}\n\n/**\n * Transform `tree` (an mdast node) to a hast node.\n *\n * @param {MdastNode} tree mdast node\n * @param {Options} [options] Configuration\n * @returns {HastNode|null|undefined} hast node\n */\nexport function toHast(tree, options) {\n const h = factory(tree, options)\n const node = one(h, tree, null)\n const foot = footer(h)\n\n if (foot) {\n // @ts-expect-error If there’s a footer, there were definitions, meaning block\n // content.\n // So assume `node` is a parent node.\n node.children.push(u('text', '\\n'), foot)\n }\n\n return Array.isArray(node) ? 
{type: 'root', children: node} : node\n}\n\nexport {handlers as defaultHandlers} from './handlers/index.js'\n","/**\n * @typedef {Object} PointLike\n * @property {number} [line]\n * @property {number} [column]\n * @property {number} [offset]\n *\n * @typedef {Object} PositionLike\n * @property {PointLike} [start]\n * @property {PointLike} [end]\n *\n * @typedef {Object} NodeLike\n * @property {PositionLike} [position]\n */\n\n/**\n * Check if `node` is *generated*.\n *\n * @param {NodeLike} [node]\n * @returns {boolean}\n */\nexport function generated(node) {\n return (\n !node ||\n !node.position ||\n !node.position.start ||\n !node.position.start.line ||\n !node.position.start.column ||\n !node.position.end ||\n !node.position.end.line ||\n !node.position.end.column\n )\n}\n","/**\n * @typedef {import('mdast').BlockContent} BlockContent\n * @typedef {import('mdast').FootnoteDefinition} FootnoteDefinition\n * @typedef {import('hast').Element} Element\n * @typedef {import('hast').ElementContent} ElementContent\n * @typedef {import('./index.js').H} H\n */\n\nimport {sanitizeUri} from 'micromark-util-sanitize-uri'\nimport {u} from 'unist-builder'\nimport {all} from './traverse.js'\nimport {wrap} from './wrap.js'\n\n/**\n * @param {H} h\n */\nexport function footer(h) {\n let index = -1\n /** @type {Array} */\n const listItems = []\n\n while (++index < h.footnoteOrder.length) {\n const def = h.footnoteById[h.footnoteOrder[index].toUpperCase()]\n\n if (!def) {\n continue\n }\n\n const content = all(h, def)\n const id = String(def.identifier)\n const safeId = sanitizeUri(id.toLowerCase())\n let referenceIndex = 0\n /** @type {Array} */\n const backReferences = []\n\n while (++referenceIndex <= h.footnoteCounts[id]) {\n /** @type {Element} */\n const backReference = {\n type: 'element',\n tagName: 'a',\n properties: {\n href:\n '#' +\n h.clobberPrefix +\n 'fnref-' +\n safeId +\n (referenceIndex > 1 ? 
'-' + referenceIndex : ''),\n dataFootnoteBackref: true,\n className: ['data-footnote-backref'],\n ariaLabel: h.footnoteBackLabel\n },\n children: [{type: 'text', value: '↩'}]\n }\n\n if (referenceIndex > 1) {\n backReference.children.push({\n type: 'element',\n tagName: 'sup',\n children: [{type: 'text', value: String(referenceIndex)}]\n })\n }\n\n if (backReferences.length > 0) {\n backReferences.push({type: 'text', value: ' '})\n }\n\n backReferences.push(backReference)\n }\n\n const tail = content[content.length - 1]\n\n if (tail && tail.type === 'element' && tail.tagName === 'p') {\n const tailTail = tail.children[tail.children.length - 1]\n if (tailTail && tailTail.type === 'text') {\n tailTail.value += ' '\n } else {\n tail.children.push({type: 'text', value: ' '})\n }\n\n tail.children.push(...backReferences)\n } else {\n content.push(...backReferences)\n }\n\n /** @type {Element} */\n const listItem = {\n type: 'element',\n tagName: 'li',\n properties: {id: h.clobberPrefix + 'fn-' + safeId},\n children: wrap(content, true)\n }\n\n if (def.position) {\n listItem.position = def.position\n }\n\n listItems.push(listItem)\n }\n\n if (listItems.length === 0) {\n return null\n }\n\n return {\n type: 'element',\n tagName: 'section',\n properties: {dataFootnotes: true, className: ['footnotes']},\n children: [\n {\n type: 'element',\n tagName: 'h2',\n properties: {id: 'footnote-label', className: ['sr-only']},\n children: [u('text', h.footnoteLabel)]\n },\n {type: 'text', value: '\\n'},\n {\n type: 'element',\n tagName: 'ol',\n properties: {},\n children: wrap(listItems, true)\n },\n {type: 'text', value: '\\n'}\n ]\n }\n}\n","/**\n * @typedef {import('hast').Root} HastRoot\n * @typedef {import('mdast').Root} MdastRoot\n * @typedef {import('mdast-util-to-hast').Options} Options\n * @typedef {import('unified').Processor} Processor\n *\n * @typedef {import('mdast-util-to-hast')} DoNotTouchAsThisImportIncludesRawInTree\n */\n\nimport {toHast} from 'mdast-util-to-hast'\n\n// Note: the `` overload doesn’t seem to work :'(\n\n/**\n * Plugin that turns markdown into HTML to support rehype.\n *\n * * If a destination processor is given, that processor runs with a new HTML\n * (hast) tree (bridge-mode).\n * As the given processor runs with a hast tree, and rehype plugins support\n * hast, that means rehype plugins can be used with the given processor.\n * The hast tree is discarded in the end.\n * It’s highly unlikely that you want to do this.\n * * The common case is to not pass a destination processor, in which case the\n * current processor continues running with a new HTML (hast) tree\n * (mutate-mode).\n * As the current processor continues with a hast tree, and rehype plugins\n * support hast, that means rehype plugins can be used after\n * `remark-rehype`.\n * It’s likely that this is what you want to do.\n *\n * @param destination\n * Optional unified processor.\n * @param options\n * Options passed to `mdast-util-to-hast`.\n */\nconst remarkRehype =\n /** @type {(import('unified').Plugin<[Processor, Options?]|[null|undefined, Options?]|[Options]|[], MdastRoot>)} */\n (\n function (destination, options) {\n return destination && 'run' in destination\n ? 
bridge(destination, options)\n : mutate(destination || options)\n }\n )\n\nexport default remarkRehype\n\n/**\n * Bridge-mode.\n * Runs the destination with the new hast tree.\n *\n * @type {import('unified').Plugin<[Processor, Options?], MdastRoot>}\n */\nfunction bridge(destination, options) {\n return (node, file, next) => {\n destination.run(toHast(node, options), file, (error) => {\n next(error)\n })\n }\n}\n\n/**\n * Mutate-mode.\n * Further plugins run on the hast tree.\n *\n * @type {import('unified').Plugin<[Options?]|void[], MdastRoot, HastRoot>}\n */\nfunction mutate(options) {\n // @ts-expect-error: assume a corresponding node is returned by `toHast`.\n return (node) => toHast(node, options)\n}\n","import {visit} from 'unist-util-visit'\n\n/**\n * @typedef {import('unist').Node} Node\n * @typedef {import('hast').Root} Root\n * @typedef {import('hast').Element} Element\n *\n * @callback AllowElement\n * @param {Element} element\n * @param {number} index\n * @param {Element|Root} parent\n * @returns {boolean|undefined}\n *\n * @typedef Options\n * @property {Array} [allowedElements]\n * @property {Array} [disallowedElements=[]]\n * @property {AllowElement} [allowElement]\n * @property {boolean} [unwrapDisallowed=false]\n */\n\n/**\n * @type {import('unified').Plugin<[Options], Root>}\n */\nexport default function rehypeFilter(options) {\n if (options.allowedElements && options.disallowedElements) {\n throw new TypeError(\n 'Only one of `allowedElements` and `disallowedElements` should be defined'\n )\n }\n\n if (\n options.allowedElements ||\n options.disallowedElements ||\n options.allowElement\n ) {\n return (tree) => {\n visit(tree, 'element', (node, index, parent_) => {\n const parent = /** @type {Element|Root} */ (parent_)\n /** @type {boolean|undefined} */\n let remove\n\n if (options.allowedElements) {\n remove = !options.allowedElements.includes(node.tagName)\n } else if (options.disallowedElements) {\n remove = options.disallowedElements.includes(node.tagName)\n }\n\n if (!remove && options.allowElement && typeof index === 'number') {\n remove = !options.allowElement(node, index, parent)\n }\n\n if (remove && typeof index === 'number') {\n if (options.unwrapDisallowed && node.children) {\n parent.children.splice(index, 1, ...node.children)\n } else {\n parent.children.splice(index, 1)\n }\n\n return index\n }\n\n return undefined\n })\n }\n }\n}\n","const protocols = ['http', 'https', 'mailto', 'tel']\n\n/**\n * @param {string} uri\n * @returns {string}\n */\nexport function uriTransformer(uri) {\n const url = (uri || '').trim()\n const first = url.charAt(0)\n\n if (first === '#' || first === '/') {\n return url\n }\n\n const colon = url.indexOf(':')\n if (colon === -1) {\n return url\n }\n\n let index = -1\n\n while (++index < protocols.length) {\n const protocol = protocols[index]\n\n if (\n colon === protocol.length &&\n url.slice(0, protocol.length).toLowerCase() === protocol\n ) {\n return url\n }\n }\n\n index = url.indexOf('?')\n if (index !== -1 && colon > index) {\n return url\n }\n\n index = url.indexOf('#')\n if (index !== -1 && colon > index) {\n return url\n }\n\n // eslint-disable-next-line no-script-url\n return 'javascript:void(0)'\n}\n","/**\n * @param {unknown} thing\n * @returns {boolean}\n */\nexport function whitespace(thing) {\n /** @type {string} */\n var value =\n // @ts-ignore looks like a node.\n thing && typeof thing === 'object' && thing.type === 'text'\n ? 
// @ts-ignore looks like a text.\n thing.value || ''\n : thing\n\n // HTML whitespace expression.\n // See .\n return typeof value === 'string' && value.replace(/[ \\t\\n\\f\\r]/g, '') === ''\n}\n","export const hastToReact = {\n classId: 'classID',\n dataType: 'datatype',\n itemId: 'itemID',\n strokeDashArray: 'strokeDasharray',\n strokeDashOffset: 'strokeDashoffset',\n strokeLineCap: 'strokeLinecap',\n strokeLineJoin: 'strokeLinejoin',\n strokeMiterLimit: 'strokeMiterlimit',\n typeOf: 'typeof',\n xLinkActuate: 'xlinkActuate',\n xLinkArcRole: 'xlinkArcrole',\n xLinkHref: 'xlinkHref',\n xLinkRole: 'xlinkRole',\n xLinkShow: 'xlinkShow',\n xLinkTitle: 'xlinkTitle',\n xLinkType: 'xlinkType',\n xmlnsXLink: 'xmlnsXlink'\n}\n","/**\n * @template T\n * @typedef {import('react').ComponentType} ComponentType\n */\n\n/**\n * @template T\n * @typedef {import('react').ComponentPropsWithoutRef} ComponentPropsWithoutRef\n */\n\n/**\n * @typedef {import('react').ReactNode} ReactNode\n * @typedef {import('unist').Position} Position\n * @typedef {import('hast').Element} Element\n * @typedef {import('hast').ElementContent} ElementContent\n * @typedef {import('hast').Root} Root\n * @typedef {import('hast').Text} Text\n * @typedef {import('hast').Comment} Comment\n * @typedef {import('hast').DocType} Doctype\n * @typedef {import('property-information').Info} Info\n * @typedef {import('property-information').Schema} Schema\n * @typedef {import('./complex-types').ReactMarkdownProps} ReactMarkdownProps\n *\n * @typedef Raw\n * @property {'raw'} type\n * @property {string} value\n *\n * @typedef Context\n * @property {Options} options\n * @property {Schema} schema\n * @property {number} listDepth\n *\n * @callback TransformLink\n * @param {string} href\n * @param {Array} children\n * @param {string?} title\n * @returns {string}\n *\n * @callback TransformImage\n * @param {string} src\n * @param {string} alt\n * @param {string?} title\n * @returns {string}\n *\n * @typedef {import('react').HTMLAttributeAnchorTarget} TransformLinkTargetType\n *\n * @callback TransformLinkTarget\n * @param {string} href\n * @param {Array} children\n * @param {string?} title\n * @returns {TransformLinkTargetType|undefined}\n *\n * @typedef {keyof JSX.IntrinsicElements} ReactMarkdownNames\n *\n * To do: is `data-sourcepos` typeable?\n *\n * @typedef {ComponentPropsWithoutRef<'code'> & ReactMarkdownProps & {inline?: boolean}} CodeProps\n * @typedef {ComponentPropsWithoutRef<'h1'> & ReactMarkdownProps & {level: number}} HeadingProps\n * @typedef {ComponentPropsWithoutRef<'li'> & ReactMarkdownProps & {checked: boolean|null, index: number, ordered: boolean}} LiProps\n * @typedef {ComponentPropsWithoutRef<'ol'> & ReactMarkdownProps & {depth: number, ordered: true}} OrderedListProps\n * @typedef {ComponentPropsWithoutRef<'table'> & ReactMarkdownProps & {style?: Record, isHeader: boolean}} TableCellProps\n * @typedef {ComponentPropsWithoutRef<'tr'> & ReactMarkdownProps & {isHeader: boolean}} TableRowProps\n * @typedef {ComponentPropsWithoutRef<'ul'> & ReactMarkdownProps & {depth: number, ordered: false}} UnorderedListProps\n *\n * @typedef {ComponentType} CodeComponent\n * @typedef {ComponentType} HeadingComponent\n * @typedef {ComponentType} LiComponent\n * @typedef {ComponentType} OrderedListComponent\n * @typedef {ComponentType} TableCellComponent\n * @typedef {ComponentType} TableRowComponent\n * @typedef {ComponentType} UnorderedListComponent\n *\n * @typedef SpecialComponents\n * @property {CodeComponent|ReactMarkdownNames} code\n * 
@property {HeadingComponent|ReactMarkdownNames} h1\n * @property {HeadingComponent|ReactMarkdownNames} h2\n * @property {HeadingComponent|ReactMarkdownNames} h3\n * @property {HeadingComponent|ReactMarkdownNames} h4\n * @property {HeadingComponent|ReactMarkdownNames} h5\n * @property {HeadingComponent|ReactMarkdownNames} h6\n * @property {LiComponent|ReactMarkdownNames} li\n * @property {OrderedListComponent|ReactMarkdownNames} ol\n * @property {TableCellComponent|ReactMarkdownNames} td\n * @property {TableCellComponent|ReactMarkdownNames} th\n * @property {TableRowComponent|ReactMarkdownNames} tr\n * @property {UnorderedListComponent|ReactMarkdownNames} ul\n *\n * @typedef {Partial & SpecialComponents>} Components\n *\n * @typedef Options\n * @property {boolean} [sourcePos=false]\n * @property {boolean} [rawSourcePos=false]\n * @property {boolean} [skipHtml=false]\n * @property {boolean} [includeElementIndex=false]\n * @property {null|false|TransformLink} [transformLinkUri]\n * @property {TransformImage} [transformImageUri]\n * @property {TransformLinkTargetType|TransformLinkTarget} [linkTarget]\n * @property {Components} [components]\n */\n\nimport React from 'react'\nimport ReactIs from 'react-is'\nimport {whitespace} from 'hast-util-whitespace'\nimport {svg, find, hastToReact} from 'property-information'\nimport {stringify as spaces} from 'space-separated-tokens'\nimport {stringify as commas} from 'comma-separated-tokens'\nimport style from 'style-to-object'\n\nconst own = {}.hasOwnProperty\n\n// The table-related elements that must not contain whitespace text according\n// to React.\nconst tableElements = new Set(['table', 'thead', 'tbody', 'tfoot', 'tr'])\n\n/**\n * @param {Context} context\n * @param {Element|Root} node\n */\nexport function childrenToReact(context, node) {\n /** @type {Array} */\n const children = []\n let childIndex = -1\n /** @type {Comment|Doctype|Element|Raw|Text} */\n let child\n\n while (++childIndex < node.children.length) {\n child = node.children[childIndex]\n\n if (child.type === 'element') {\n children.push(toReact(context, child, childIndex, node))\n } else if (child.type === 'text') {\n // Currently, a warning is triggered by react for *any* white space in\n // tables.\n // So we drop it.\n // See: .\n // See: .\n // See: .\n // See: .\n if (\n node.type !== 'element' ||\n !tableElements.has(node.tagName) ||\n !whitespace(child)\n ) {\n children.push(child.value)\n }\n } else if (child.type === 'raw' && !context.options.skipHtml) {\n // Default behavior is to show (encoded) HTML.\n children.push(child.value)\n }\n }\n\n return children\n}\n\n/**\n * @param {Context} context\n * @param {Element} node\n * @param {number} index\n * @param {Element|Root} parent\n */\nfunction toReact(context, node, index, parent) {\n const options = context.options\n const parentSchema = context.schema\n /** @type {ReactMarkdownNames} */\n // @ts-expect-error assume a known HTML/SVG element.\n const name = node.tagName\n /** @type {Record} */\n const properties = {}\n let schema = parentSchema\n /** @type {string} */\n let property\n\n if (parentSchema.space === 'html' && name === 'svg') {\n schema = svg\n context.schema = schema\n }\n\n if (node.properties) {\n for (property in node.properties) {\n if (own.call(node.properties, property)) {\n addProperty(properties, property, node.properties[property], context)\n }\n }\n }\n\n if (name === 'ol' || name === 'ul') {\n context.listDepth++\n }\n\n const children = childrenToReact(context, node)\n\n if (name === 'ol' || name 
=== 'ul') {\n context.listDepth--\n }\n\n // Restore parent schema.\n context.schema = parentSchema\n\n // Nodes created by plugins do not have positional info, in which case we use\n // an object that matches the position interface.\n const position = node.position || {\n start: {line: null, column: null, offset: null},\n end: {line: null, column: null, offset: null}\n }\n const component =\n options.components && own.call(options.components, name)\n ? options.components[name]\n : name\n const basic = typeof component === 'string' || component === React.Fragment\n\n if (!ReactIs.isValidElementType(component)) {\n throw new TypeError(\n `Component for name \\`${name}\\` not defined or is not renderable`\n )\n }\n\n properties.key = [\n name,\n position.start.line,\n position.start.column,\n index\n ].join('-')\n\n if (name === 'a' && options.linkTarget) {\n properties.target =\n typeof options.linkTarget === 'function'\n ? options.linkTarget(\n String(properties.href || ''),\n node.children,\n typeof properties.title === 'string' ? properties.title : null\n )\n : options.linkTarget\n }\n\n if (name === 'a' && options.transformLinkUri) {\n properties.href = options.transformLinkUri(\n String(properties.href || ''),\n node.children,\n typeof properties.title === 'string' ? properties.title : null\n )\n }\n\n if (\n !basic &&\n name === 'code' &&\n parent.type === 'element' &&\n parent.tagName !== 'pre'\n ) {\n properties.inline = true\n }\n\n if (\n !basic &&\n (name === 'h1' ||\n name === 'h2' ||\n name === 'h3' ||\n name === 'h4' ||\n name === 'h5' ||\n name === 'h6')\n ) {\n properties.level = Number.parseInt(name.charAt(1), 10)\n }\n\n if (name === 'img' && options.transformImageUri) {\n properties.src = options.transformImageUri(\n String(properties.src || ''),\n String(properties.alt || ''),\n typeof properties.title === 'string' ? properties.title : null\n )\n }\n\n if (!basic && name === 'li' && parent.type === 'element') {\n const input = getInputElement(node)\n properties.checked =\n input && input.properties ? Boolean(input.properties.checked) : null\n properties.index = getElementsBeforeCount(parent, node)\n properties.ordered = parent.tagName === 'ol'\n }\n\n if (!basic && (name === 'ol' || name === 'ul')) {\n properties.ordered = name === 'ol'\n properties.depth = context.listDepth\n }\n\n if (name === 'td' || name === 'th') {\n if (properties.align) {\n if (!properties.style) properties.style = {}\n // @ts-expect-error assume `style` is an object\n properties.style.textAlign = properties.align\n delete properties.align\n }\n\n if (!basic) {\n properties.isHeader = name === 'th'\n }\n }\n\n if (!basic && name === 'tr' && parent.type === 'element') {\n properties.isHeader = Boolean(parent.tagName === 'thead')\n }\n\n // If `sourcePos` is given, pass source information (line/column info from markdown source).\n if (options.sourcePos) {\n properties['data-sourcepos'] = flattenPosition(position)\n }\n\n if (!basic && options.rawSourcePos) {\n properties.sourcePosition = node.position\n }\n\n // If `includeElementIndex` is given, pass node index info to components.\n if (!basic && options.includeElementIndex) {\n properties.index = getElementsBeforeCount(parent, node)\n properties.siblingCount = getElementsBeforeCount(parent)\n }\n\n if (!basic) {\n properties.node = node\n }\n\n // Ensure no React warnings are emitted for void elements w/ children.\n return children.length > 0\n ? 
React.createElement(component, properties, children)\n : React.createElement(component, properties)\n}\n\n/**\n * @param {Element|Root} node\n * @returns {Element?}\n */\nfunction getInputElement(node) {\n let index = -1\n\n while (++index < node.children.length) {\n const child = node.children[index]\n\n if (child.type === 'element' && child.tagName === 'input') {\n return child\n }\n }\n\n return null\n}\n\n/**\n * @param {Element|Root} parent\n * @param {Element} [node]\n * @returns {number}\n */\nfunction getElementsBeforeCount(parent, node) {\n let index = -1\n let count = 0\n\n while (++index < parent.children.length) {\n if (parent.children[index] === node) break\n if (parent.children[index].type === 'element') count++\n }\n\n return count\n}\n\n/**\n * @param {Record} props\n * @param {string} prop\n * @param {unknown} value\n * @param {Context} ctx\n */\nfunction addProperty(props, prop, value, ctx) {\n const info = find(ctx.schema, prop)\n let result = value\n\n // Ignore nullish and `NaN` values.\n // eslint-disable-next-line no-self-compare\n if (result === null || result === undefined || result !== result) {\n return\n }\n\n // Accept `array`.\n // Most props are space-separated.\n if (Array.isArray(result)) {\n result = info.commaSeparated ? commas(result) : spaces(result)\n }\n\n if (info.property === 'style' && typeof result === 'string') {\n result = parseStyle(result)\n }\n\n if (info.space && info.property) {\n props[\n own.call(hastToReact, info.property)\n ? hastToReact[info.property]\n : info.property\n ] = result\n } else if (info.attribute) {\n props[info.attribute] = result\n }\n}\n\n/**\n * @param {string} value\n * @returns {Record}\n */\nfunction parseStyle(value) {\n /** @type {Record} */\n const result = {}\n\n try {\n style(value, iterator)\n } catch {\n // Silent.\n }\n\n return result\n\n /**\n * @param {string} name\n * @param {string} v\n */\n function iterator(name, v) {\n const k = name.slice(0, 4) === '-ms-' ? 
`ms-${name.slice(4)}` : name\n result[k.replace(/-([a-z])/g, styleReplacer)] = v\n }\n}\n\n/**\n * @param {unknown} _\n * @param {string} $1\n */\nfunction styleReplacer(_, $1) {\n return $1.toUpperCase()\n}\n\n/**\n * @param {Position|{start: {line: null, column: null, offset: null}, end: {line: null, column: null, offset: null}}} pos\n * @returns {string}\n */\nfunction flattenPosition(pos) {\n return [\n pos.start.line,\n ':',\n pos.start.column,\n '-',\n pos.end.line,\n ':',\n pos.end.column\n ]\n .map((d) => String(d))\n .join('')\n}\n","/**\n * @typedef {import('react').ReactNode} ReactNode\n * @typedef {import('react').ReactElement<{}>} ReactElement\n * @typedef {import('unified').PluggableList} PluggableList\n * @typedef {import('hast').Root} Root\n * @typedef {import('./rehype-filter.js').Options} FilterOptions\n * @typedef {import('./ast-to-react.js').Options} TransformOptions\n *\n * @typedef CoreOptions\n * @property {string} children\n *\n * @typedef PluginOptions\n * @property {PluggableList} [remarkPlugins=[]]\n * @property {PluggableList} [rehypePlugins=[]]\n * @property {import('remark-rehype').Options | undefined} [remarkRehypeOptions={}]\n *\n * @typedef LayoutOptions\n * @property {string} [className]\n *\n * @typedef {CoreOptions & PluginOptions & LayoutOptions & FilterOptions & TransformOptions} ReactMarkdownOptions\n *\n * @typedef Deprecation\n * @property {string} id\n * @property {string} [to]\n */\n\nimport React from 'react'\nimport {VFile} from 'vfile'\nimport {unified} from 'unified'\nimport remarkParse from 'remark-parse'\nimport remarkRehype from 'remark-rehype'\nimport PropTypes from 'prop-types'\nimport {html} from 'property-information'\nimport rehypeFilter from './rehype-filter.js'\nimport {uriTransformer} from './uri-transformer.js'\nimport {childrenToReact} from './ast-to-react.js'\n\nconst own = {}.hasOwnProperty\nconst changelog =\n 'https://github.com/remarkjs/react-markdown/blob/main/changelog.md'\n\n/** @type {Record} */\nconst deprecated = {\n plugins: {to: 'plugins', id: 'change-plugins-to-remarkplugins'},\n renderers: {to: 'components', id: 'change-renderers-to-components'},\n astPlugins: {id: 'remove-buggy-html-in-markdown-parser'},\n allowDangerousHtml: {id: 'remove-buggy-html-in-markdown-parser'},\n escapeHtml: {id: 'remove-buggy-html-in-markdown-parser'},\n source: {to: 'children', id: 'change-source-to-children'},\n allowNode: {\n to: 'allowElement',\n id: 'replace-allownode-allowedtypes-and-disallowedtypes'\n },\n allowedTypes: {\n to: 'allowedElements',\n id: 'replace-allownode-allowedtypes-and-disallowedtypes'\n },\n disallowedTypes: {\n to: 'disallowedElements',\n id: 'replace-allownode-allowedtypes-and-disallowedtypes'\n },\n includeNodeIndex: {\n to: 'includeElementIndex',\n id: 'change-includenodeindex-to-includeelementindex'\n }\n}\n\n/**\n * React component to render markdown.\n *\n * @param {ReactMarkdownOptions} options\n * @returns {ReactElement}\n */\nexport function ReactMarkdown(options) {\n for (const key in deprecated) {\n if (own.call(deprecated, key) && own.call(options, key)) {\n const deprecation = deprecated[key]\n console.warn(\n `[react-markdown] Warning: please ${\n deprecation.to ? 
`use \\`${deprecation.to}\\` instead of` : 'remove'\n } \\`${key}\\` (see <${changelog}#${deprecation.id}> for more info)`\n )\n delete deprecated[key]\n }\n }\n\n const processor = unified()\n .use(remarkParse)\n .use(options.remarkPlugins || [])\n .use(remarkRehype, {\n ...options.remarkRehypeOptions,\n allowDangerousHtml: true\n })\n .use(options.rehypePlugins || [])\n .use(rehypeFilter, options)\n\n const file = new VFile()\n\n if (typeof options.children === 'string') {\n file.value = options.children\n } else if (options.children !== undefined && options.children !== null) {\n console.warn(\n `[react-markdown] Warning: please pass a string as \\`children\\` (not: \\`${options.children}\\`)`\n )\n }\n\n const hastNode = processor.runSync(processor.parse(file), file)\n\n if (hastNode.type !== 'root') {\n throw new TypeError('Expected a `root` node')\n }\n\n /** @type {ReactElement} */\n let result = React.createElement(\n React.Fragment,\n {},\n childrenToReact({options, schema: html, listDepth: 0}, hastNode)\n )\n\n if (options.className) {\n result = React.createElement('div', {className: options.className}, result)\n }\n\n return result\n}\n\nReactMarkdown.defaultProps = {transformLinkUri: uriTransformer}\n\nReactMarkdown.propTypes = {\n // Core options:\n children: PropTypes.string,\n // Layout options:\n className: PropTypes.string,\n // Filter options:\n allowElement: PropTypes.func,\n allowedElements: PropTypes.arrayOf(PropTypes.string),\n disallowedElements: PropTypes.arrayOf(PropTypes.string),\n unwrapDisallowed: PropTypes.bool,\n // Plugin options:\n remarkPlugins: PropTypes.arrayOf(\n PropTypes.oneOfType([\n PropTypes.object,\n PropTypes.func,\n PropTypes.arrayOf(PropTypes.oneOfType([PropTypes.object, PropTypes.func]))\n ])\n ),\n rehypePlugins: PropTypes.arrayOf(\n PropTypes.oneOfType([\n PropTypes.object,\n PropTypes.func,\n PropTypes.arrayOf(PropTypes.oneOfType([PropTypes.object, PropTypes.func]))\n ])\n ),\n // Transform options:\n sourcePos: PropTypes.bool,\n rawSourcePos: PropTypes.bool,\n skipHtml: PropTypes.bool,\n includeElementIndex: PropTypes.bool,\n transformLinkUri: PropTypes.oneOfType([PropTypes.func, PropTypes.bool]),\n linkTarget: PropTypes.oneOfType([PropTypes.func, PropTypes.string]),\n transformImageUri: PropTypes.func,\n components: PropTypes.object\n}\n","/**\n * @typedef {import('micromark-util-types').Extension} Extension\n * @typedef {import('micromark-util-types').ConstructRecord} ConstructRecord\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n * @typedef {import('micromark-util-types').Previous} Previous\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Event} Event\n * @typedef {import('micromark-util-types').Code} Code\n */\nimport {\n asciiAlpha,\n asciiAlphanumeric,\n asciiControl,\n asciiDigit,\n markdownLineEndingOrSpace,\n markdownLineEnding,\n unicodePunctuation,\n unicodeWhitespace\n} from 'micromark-util-character'\nconst www = {\n tokenize: tokenizeWww,\n partial: true\n}\nconst domain = {\n tokenize: tokenizeDomain,\n partial: true\n}\nconst path = {\n tokenize: tokenizePath,\n partial: true\n}\nconst punctuation = {\n tokenize: tokenizePunctuation,\n partial: true\n}\nconst namedCharacterReference = {\n tokenize: tokenizeNamedCharacterReference,\n partial: true\n}\nconst wwwAutolink = {\n tokenize: tokenizeWwwAutolink,\n previous: previousWww\n}\nconst httpAutolink = {\n tokenize: tokenizeHttpAutolink,\n previous: previousHttp\n}\nconst emailAutolink = {\n tokenize: 
tokenizeEmailAutolink,\n previous: previousEmail\n}\n/** @type {ConstructRecord} */\n\nconst text = {}\n/** @type {Extension} */\n\nexport const gfmAutolinkLiteral = {\n text\n}\nlet code = 48 // Add alphanumerics.\n\nwhile (code < 123) {\n text[code] = emailAutolink\n code++\n if (code === 58) code = 65\n else if (code === 91) code = 97\n}\n\ntext[43] = emailAutolink\ntext[45] = emailAutolink\ntext[46] = emailAutolink\ntext[95] = emailAutolink\ntext[72] = [emailAutolink, httpAutolink]\ntext[104] = [emailAutolink, httpAutolink]\ntext[87] = [emailAutolink, wwwAutolink]\ntext[119] = [emailAutolink, wwwAutolink]\n/** @type {Tokenizer} */\n\nfunction tokenizeEmailAutolink(effects, ok, nok) {\n const self = this\n /** @type {boolean} */\n\n let hasDot\n /** @type {boolean|undefined} */\n\n let hasDigitInLastSegment\n return start\n /** @type {State} */\n\n function start(code) {\n if (\n !gfmAtext(code) ||\n !previousEmail(self.previous) ||\n previousUnbalanced(self.events)\n ) {\n return nok(code)\n }\n\n effects.enter('literalAutolink')\n effects.enter('literalAutolinkEmail')\n return atext(code)\n }\n /** @type {State} */\n\n function atext(code) {\n if (gfmAtext(code)) {\n effects.consume(code)\n return atext\n }\n\n if (code === 64) {\n effects.consume(code)\n return label\n }\n\n return nok(code)\n }\n /** @type {State} */\n\n function label(code) {\n if (code === 46) {\n return effects.check(punctuation, done, dotContinuation)(code)\n }\n\n if (code === 45 || code === 95) {\n return effects.check(punctuation, nok, dashOrUnderscoreContinuation)(code)\n }\n\n if (asciiAlphanumeric(code)) {\n if (!hasDigitInLastSegment && asciiDigit(code)) {\n hasDigitInLastSegment = true\n }\n\n effects.consume(code)\n return label\n }\n\n return done(code)\n }\n /** @type {State} */\n\n function dotContinuation(code) {\n effects.consume(code)\n hasDot = true\n hasDigitInLastSegment = undefined\n return label\n }\n /** @type {State} */\n\n function dashOrUnderscoreContinuation(code) {\n effects.consume(code)\n return afterDashOrUnderscore\n }\n /** @type {State} */\n\n function afterDashOrUnderscore(code) {\n if (code === 46) {\n return effects.check(punctuation, nok, dotContinuation)(code)\n }\n\n return label(code)\n }\n /** @type {State} */\n\n function done(code) {\n if (hasDot && !hasDigitInLastSegment) {\n effects.exit('literalAutolinkEmail')\n effects.exit('literalAutolink')\n return ok(code)\n }\n\n return nok(code)\n }\n}\n/** @type {Tokenizer} */\n\nfunction tokenizeWwwAutolink(effects, ok, nok) {\n const self = this\n return start\n /** @type {State} */\n\n function start(code) {\n if (\n (code !== 87 && code !== 119) ||\n !previousWww(self.previous) ||\n previousUnbalanced(self.events)\n ) {\n return nok(code)\n }\n\n effects.enter('literalAutolink')\n effects.enter('literalAutolinkWww') // For `www.` we check instead of attempt, because when it matches, GH\n // treats it as part of a domain (yes, it says a valid domain must come\n // after `www.`, but that’s not how it’s implemented by them).\n\n return effects.check(\n www,\n effects.attempt(domain, effects.attempt(path, done), nok),\n nok\n )(code)\n }\n /** @type {State} */\n\n function done(code) {\n effects.exit('literalAutolinkWww')\n effects.exit('literalAutolink')\n return ok(code)\n }\n}\n/** @type {Tokenizer} */\n\nfunction tokenizeHttpAutolink(effects, ok, nok) {\n const self = this\n return start\n /** @type {State} */\n\n function start(code) {\n if (\n (code !== 72 && code !== 104) ||\n !previousHttp(self.previous) ||\n 
previousUnbalanced(self.events)\n ) {\n return nok(code)\n }\n\n effects.enter('literalAutolink')\n effects.enter('literalAutolinkHttp')\n effects.consume(code)\n return t1\n }\n /** @type {State} */\n\n function t1(code) {\n if (code === 84 || code === 116) {\n effects.consume(code)\n return t2\n }\n\n return nok(code)\n }\n /** @type {State} */\n\n function t2(code) {\n if (code === 84 || code === 116) {\n effects.consume(code)\n return p\n }\n\n return nok(code)\n }\n /** @type {State} */\n\n function p(code) {\n if (code === 80 || code === 112) {\n effects.consume(code)\n return s\n }\n\n return nok(code)\n }\n /** @type {State} */\n\n function s(code) {\n if (code === 83 || code === 115) {\n effects.consume(code)\n return colon\n }\n\n return colon(code)\n }\n /** @type {State} */\n\n function colon(code) {\n if (code === 58) {\n effects.consume(code)\n return slash1\n }\n\n return nok(code)\n }\n /** @type {State} */\n\n function slash1(code) {\n if (code === 47) {\n effects.consume(code)\n return slash2\n }\n\n return nok(code)\n }\n /** @type {State} */\n\n function slash2(code) {\n if (code === 47) {\n effects.consume(code)\n return after\n }\n\n return nok(code)\n }\n /** @type {State} */\n\n function after(code) {\n return code === null ||\n asciiControl(code) ||\n unicodeWhitespace(code) ||\n unicodePunctuation(code)\n ? nok(code)\n : effects.attempt(domain, effects.attempt(path, done), nok)(code)\n }\n /** @type {State} */\n\n function done(code) {\n effects.exit('literalAutolinkHttp')\n effects.exit('literalAutolink')\n return ok(code)\n }\n}\n/** @type {Tokenizer} */\n\nfunction tokenizeWww(effects, ok, nok) {\n return start\n /** @type {State} */\n\n function start(code) {\n effects.consume(code)\n return w2\n }\n /** @type {State} */\n\n function w2(code) {\n if (code === 87 || code === 119) {\n effects.consume(code)\n return w3\n }\n\n return nok(code)\n }\n /** @type {State} */\n\n function w3(code) {\n if (code === 87 || code === 119) {\n effects.consume(code)\n return dot\n }\n\n return nok(code)\n }\n /** @type {State} */\n\n function dot(code) {\n if (code === 46) {\n effects.consume(code)\n return after\n }\n\n return nok(code)\n }\n /** @type {State} */\n\n function after(code) {\n return code === null || markdownLineEnding(code) ? 
nok(code) : ok(code)\n }\n}\n/** @type {Tokenizer} */\n\nfunction tokenizeDomain(effects, ok, nok) {\n /** @type {boolean|undefined} */\n let hasUnderscoreInLastSegment\n /** @type {boolean|undefined} */\n\n let hasUnderscoreInLastLastSegment\n return domain\n /** @type {State} */\n\n function domain(code) {\n if (code === 38) {\n return effects.check(\n namedCharacterReference,\n done,\n punctuationContinuation\n )(code)\n }\n\n if (code === 46 || code === 95) {\n return effects.check(punctuation, done, punctuationContinuation)(code)\n } // GH documents that only alphanumerics (other than `-`, `.`, and `_`) can\n // occur, which sounds like ASCII only, but they also support `www.點看.com`,\n // so that’s Unicode.\n // Instead of some new production for Unicode alphanumerics, markdown\n // already has that for Unicode punctuation and whitespace, so use those.\n\n if (\n code === null ||\n asciiControl(code) ||\n unicodeWhitespace(code) ||\n (code !== 45 && unicodePunctuation(code))\n ) {\n return done(code)\n }\n\n effects.consume(code)\n return domain\n }\n /** @type {State} */\n\n function punctuationContinuation(code) {\n if (code === 46) {\n hasUnderscoreInLastLastSegment = hasUnderscoreInLastSegment\n hasUnderscoreInLastSegment = undefined\n effects.consume(code)\n return domain\n }\n\n if (code === 95) hasUnderscoreInLastSegment = true\n effects.consume(code)\n return domain\n }\n /** @type {State} */\n\n function done(code) {\n if (!hasUnderscoreInLastLastSegment && !hasUnderscoreInLastSegment) {\n return ok(code)\n }\n\n return nok(code)\n }\n}\n/** @type {Tokenizer} */\n\nfunction tokenizePath(effects, ok) {\n let balance = 0\n return inPath\n /** @type {State} */\n\n function inPath(code) {\n if (code === 38) {\n return effects.check(\n namedCharacterReference,\n ok,\n continuedPunctuation\n )(code)\n }\n\n if (code === 40) {\n balance++\n }\n\n if (code === 41) {\n return effects.check(\n punctuation,\n parenAtPathEnd,\n continuedPunctuation\n )(code)\n }\n\n if (pathEnd(code)) {\n return ok(code)\n }\n\n if (trailingPunctuation(code)) {\n return effects.check(punctuation, ok, continuedPunctuation)(code)\n }\n\n effects.consume(code)\n return inPath\n }\n /** @type {State} */\n\n function continuedPunctuation(code) {\n effects.consume(code)\n return inPath\n }\n /** @type {State} */\n\n function parenAtPathEnd(code) {\n balance--\n return balance < 0 ? ok(code) : continuedPunctuation(code)\n }\n}\n/** @type {Tokenizer} */\n\nfunction tokenizeNamedCharacterReference(effects, ok, nok) {\n return start\n /** @type {State} */\n\n function start(code) {\n effects.consume(code)\n return inside\n }\n /** @type {State} */\n\n function inside(code) {\n if (asciiAlpha(code)) {\n effects.consume(code)\n return inside\n }\n\n if (code === 59) {\n effects.consume(code)\n return after\n }\n\n return nok(code)\n }\n /** @type {State} */\n\n function after(code) {\n // If the named character reference is followed by the end of the path, it’s\n // not continued punctuation.\n return pathEnd(code) ? ok(code) : nok(code)\n }\n}\n/** @type {Tokenizer} */\n\nfunction tokenizePunctuation(effects, ok, nok) {\n return start\n /** @type {State} */\n\n function start(code) {\n effects.consume(code)\n return after\n }\n /** @type {State} */\n\n function after(code) {\n // Check the next.\n if (trailingPunctuation(code)) {\n effects.consume(code)\n return after\n } // If the punctuation marker is followed by the end of the path, it’s not\n // continued punctuation.\n\n return pathEnd(code) ? 
ok(code) : nok(code)\n }\n}\n/**\n * @param {Code} code\n * @returns {boolean}\n */\n\nfunction trailingPunctuation(code) {\n return (\n code === 33 ||\n code === 34 ||\n code === 39 ||\n code === 41 ||\n code === 42 ||\n code === 44 ||\n code === 46 ||\n code === 58 ||\n code === 59 ||\n code === 60 ||\n code === 63 ||\n code === 95 ||\n code === 126\n )\n}\n/**\n * @param {Code} code\n * @returns {boolean}\n */\n\nfunction pathEnd(code) {\n return code === null || code === 60 || markdownLineEndingOrSpace(code)\n}\n/**\n * @param {Code} code\n * @returns {boolean}\n */\n\nfunction gfmAtext(code) {\n return (\n code === 43 ||\n code === 45 ||\n code === 46 ||\n code === 95 ||\n asciiAlphanumeric(code)\n )\n}\n/** @type {Previous} */\n\nfunction previousWww(code) {\n return (\n code === null ||\n code === 40 ||\n code === 42 ||\n code === 95 ||\n code === 126 ||\n markdownLineEndingOrSpace(code)\n )\n}\n/** @type {Previous} */\n\nfunction previousHttp(code) {\n return code === null || !asciiAlpha(code)\n}\n/** @type {Previous} */\n\nfunction previousEmail(code) {\n return code !== 47 && previousHttp(code)\n}\n/**\n * @param {Array} events\n * @returns {boolean}\n */\n\nfunction previousUnbalanced(events) {\n let index = events.length\n let result = false\n\n while (index--) {\n const token = events[index][1]\n\n if (\n (token.type === 'labelLink' || token.type === 'labelImage') &&\n !token._balanced\n ) {\n result = true\n break\n } // @ts-expect-error If we’ve seen this token, and it was marked as not\n // having any unbalanced bracket before it, we can exit.\n\n if (token._gfmAutolinkLiteralWalkedInto) {\n result = false\n break\n }\n }\n\n if (events.length > 0 && !result) {\n // @ts-expect-error Mark the last token as “walked into” w/o finding\n // anything.\n events[events.length - 1][1]._gfmAutolinkLiteralWalkedInto = true\n }\n\n return result\n}\n","/**\n * @typedef {import('micromark-util-types').Extension} Extension\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n * @typedef {import('micromark-util-types').Exiter} Exiter\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Event} Event\n */\nimport {blankLine} from 'micromark-core-commonmark'\nimport {factorySpace} from 'micromark-factory-space'\nimport {\n markdownLineEnding,\n markdownLineEndingOrSpace\n} from 'micromark-util-character'\nimport {normalizeIdentifier} from 'micromark-util-normalize-identifier'\nconst indent = {\n tokenize: tokenizeIndent,\n partial: true\n}\n/**\n * @returns {Extension}\n */\n\nexport function gfmFootnote() {\n /** @type {Extension} */\n return {\n document: {\n [91]: {\n tokenize: tokenizeDefinitionStart,\n continuation: {\n tokenize: tokenizeDefinitionContinuation\n },\n exit: gfmFootnoteDefinitionEnd\n }\n },\n text: {\n [91]: {\n tokenize: tokenizeGfmFootnoteCall\n },\n [93]: {\n add: 'after',\n tokenize: tokenizePotentialGfmFootnoteCall,\n resolveTo: resolveToPotentialGfmFootnoteCall\n }\n }\n }\n}\n/** @type {Tokenizer} */\n\nfunction tokenizePotentialGfmFootnoteCall(effects, ok, nok) {\n const self = this\n let index = self.events.length\n /** @type {Array} */\n // @ts-expect-error It’s fine!\n\n const defined = self.parser.gfmFootnotes || (self.parser.gfmFootnotes = [])\n /** @type {Token} */\n\n let labelStart // Find an opening.\n\n while (index--) {\n const token = self.events[index][1]\n\n if 
(token.type === 'labelImage') {\n labelStart = token\n break\n } // Exit if we’ve walked far enough.\n\n if (\n token.type === 'gfmFootnoteCall' ||\n token.type === 'labelLink' ||\n token.type === 'label' ||\n token.type === 'image' ||\n token.type === 'link'\n ) {\n break\n }\n }\n\n return start\n /** @type {State} */\n\n function start(code) {\n if (!labelStart || !labelStart._balanced) {\n return nok(code)\n }\n\n const id = normalizeIdentifier(\n self.sliceSerialize({\n start: labelStart.end,\n end: self.now()\n })\n )\n\n if (id.charCodeAt(0) !== 94 || !defined.includes(id.slice(1))) {\n return nok(code)\n }\n\n effects.enter('gfmFootnoteCallLabelMarker')\n effects.consume(code)\n effects.exit('gfmFootnoteCallLabelMarker')\n return ok(code)\n }\n}\n/** @type {Resolver} */\n\nfunction resolveToPotentialGfmFootnoteCall(events, context) {\n let index = events.length\n /** @type {Token|undefined} */\n\n let labelStart // Find an opening.\n\n while (index--) {\n if (\n events[index][1].type === 'labelImage' &&\n events[index][0] === 'enter'\n ) {\n labelStart = events[index][1]\n break\n }\n }\n\n // Change the `labelImageMarker` to a `data`.\n events[index + 1][1].type = 'data'\n events[index + 3][1].type = 'gfmFootnoteCallLabelMarker' // The whole (without `!`):\n\n const call = {\n type: 'gfmFootnoteCall',\n start: Object.assign({}, events[index + 3][1].start),\n end: Object.assign({}, events[events.length - 1][1].end)\n } // The `^` marker\n\n const marker = {\n type: 'gfmFootnoteCallMarker',\n start: Object.assign({}, events[index + 3][1].end),\n end: Object.assign({}, events[index + 3][1].end)\n } // Increment the end 1 character.\n\n marker.end.column++\n marker.end.offset++\n marker.end._bufferIndex++\n const string = {\n type: 'gfmFootnoteCallString',\n start: Object.assign({}, marker.end),\n end: Object.assign({}, events[events.length - 1][1].start)\n }\n const chunk = {\n type: 'chunkString',\n contentType: 'string',\n start: Object.assign({}, string.start),\n end: Object.assign({}, string.end)\n }\n /** @type {Array} */\n\n const replacement = [\n // Take the `labelImageMarker` (now `data`, the `!`)\n events[index + 1],\n events[index + 2],\n ['enter', call, context], // The `[`\n events[index + 3],\n events[index + 4], // The `^`.\n ['enter', marker, context],\n ['exit', marker, context], // Everything in between.\n ['enter', string, context],\n ['enter', chunk, context],\n ['exit', chunk, context],\n ['exit', string, context], // The ending (`]`, properly parsed and labelled).\n events[events.length - 2],\n events[events.length - 1],\n ['exit', call, context]\n ]\n events.splice(index, events.length - index + 1, ...replacement)\n return events\n}\n/** @type {Tokenizer} */\n\nfunction tokenizeGfmFootnoteCall(effects, ok, nok) {\n const self = this\n /** @type {Array} */\n // @ts-expect-error It’s fine!\n\n const defined = self.parser.gfmFootnotes || (self.parser.gfmFootnotes = [])\n let size = 0\n /** @type {boolean} */\n\n let data\n return start\n /** @type {State} */\n\n function start(code) {\n effects.enter('gfmFootnoteCall')\n effects.enter('gfmFootnoteCallLabelMarker')\n effects.consume(code)\n effects.exit('gfmFootnoteCallLabelMarker')\n return callStart\n }\n /** @type {State} */\n\n function callStart(code) {\n if (code !== 94) return nok(code)\n effects.enter('gfmFootnoteCallMarker')\n effects.consume(code)\n effects.exit('gfmFootnoteCallMarker')\n effects.enter('gfmFootnoteCallString')\n effects.enter('chunkString').contentType = 'string'\n return callData\n }\n 
/** @type {State} */\n\n function callData(code) {\n /** @type {Token} */\n let token\n\n if (code === null || code === 91 || size++ > 999) {\n return nok(code)\n }\n\n if (code === 93) {\n if (!data) {\n return nok(code)\n }\n\n effects.exit('chunkString')\n token = effects.exit('gfmFootnoteCallString')\n return defined.includes(normalizeIdentifier(self.sliceSerialize(token)))\n ? end(code)\n : nok(code)\n }\n\n effects.consume(code)\n\n if (!markdownLineEndingOrSpace(code)) {\n data = true\n }\n\n return code === 92 ? callEscape : callData\n }\n /** @type {State} */\n\n function callEscape(code) {\n if (code === 91 || code === 92 || code === 93) {\n effects.consume(code)\n size++\n return callData\n }\n\n return callData(code)\n }\n /** @type {State} */\n\n function end(code) {\n effects.enter('gfmFootnoteCallLabelMarker')\n effects.consume(code)\n effects.exit('gfmFootnoteCallLabelMarker')\n effects.exit('gfmFootnoteCall')\n return ok\n }\n}\n/** @type {Tokenizer} */\n\nfunction tokenizeDefinitionStart(effects, ok, nok) {\n const self = this\n /** @type {Array} */\n // @ts-expect-error It’s fine!\n\n const defined = self.parser.gfmFootnotes || (self.parser.gfmFootnotes = [])\n /** @type {string} */\n\n let identifier\n let size = 0\n /** @type {boolean|undefined} */\n\n let data\n return start\n /** @type {State} */\n\n function start(code) {\n effects.enter('gfmFootnoteDefinition')._container = true\n effects.enter('gfmFootnoteDefinitionLabel')\n effects.enter('gfmFootnoteDefinitionLabelMarker')\n effects.consume(code)\n effects.exit('gfmFootnoteDefinitionLabelMarker')\n return labelStart\n }\n /** @type {State} */\n\n function labelStart(code) {\n if (code === 94) {\n effects.enter('gfmFootnoteDefinitionMarker')\n effects.consume(code)\n effects.exit('gfmFootnoteDefinitionMarker')\n effects.enter('gfmFootnoteDefinitionLabelString')\n return atBreak\n }\n\n return nok(code)\n }\n /** @type {State} */\n\n function atBreak(code) {\n /** @type {Token} */\n let token\n\n if (code === null || code === 91 || size > 999) {\n return nok(code)\n }\n\n if (code === 93) {\n if (!data) {\n return nok(code)\n }\n\n token = effects.exit('gfmFootnoteDefinitionLabelString')\n identifier = normalizeIdentifier(self.sliceSerialize(token))\n effects.enter('gfmFootnoteDefinitionLabelMarker')\n effects.consume(code)\n effects.exit('gfmFootnoteDefinitionLabelMarker')\n effects.exit('gfmFootnoteDefinitionLabel')\n return labelAfter\n }\n\n if (markdownLineEnding(code)) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n size++\n return atBreak\n }\n\n effects.enter('chunkString').contentType = 'string'\n return label(code)\n }\n /** @type {State} */\n\n function label(code) {\n if (\n code === null ||\n markdownLineEnding(code) ||\n code === 91 ||\n code === 93 ||\n size > 999\n ) {\n effects.exit('chunkString')\n return atBreak(code)\n }\n\n if (!markdownLineEndingOrSpace(code)) {\n data = true\n }\n\n size++\n effects.consume(code)\n return code === 92 ? 
labelEscape : label\n }\n /** @type {State} */\n\n function labelEscape(code) {\n if (code === 91 || code === 92 || code === 93) {\n effects.consume(code)\n size++\n return label\n }\n\n return label(code)\n }\n /** @type {State} */\n\n function labelAfter(code) {\n if (code === 58) {\n effects.enter('definitionMarker')\n effects.consume(code)\n effects.exit('definitionMarker') // Any whitespace after the marker is eaten, forming indented code\n // is not possible.\n // No space is also fine, just like a block quote marker.\n\n return factorySpace(effects, done, 'gfmFootnoteDefinitionWhitespace')\n }\n\n return nok(code)\n }\n /** @type {State} */\n\n function done(code) {\n if (!defined.includes(identifier)) {\n defined.push(identifier)\n }\n\n return ok(code)\n }\n}\n/** @type {Tokenizer} */\n\nfunction tokenizeDefinitionContinuation(effects, ok, nok) {\n // Either a blank line, which is okay, or an indented thing.\n return effects.check(blankLine, ok, effects.attempt(indent, ok, nok))\n}\n/** @type {Exiter} */\n\nfunction gfmFootnoteDefinitionEnd(effects) {\n effects.exit('gfmFootnoteDefinition')\n}\n/** @type {Tokenizer} */\n\nfunction tokenizeIndent(effects, ok, nok) {\n const self = this\n return factorySpace(\n effects,\n afterPrefix,\n 'gfmFootnoteDefinitionIndent',\n 4 + 1\n )\n /** @type {State} */\n\n function afterPrefix(code) {\n const tail = self.events[self.events.length - 1]\n return tail &&\n tail[1].type === 'gfmFootnoteDefinitionIndent' &&\n tail[2].sliceSerialize(tail[1], true).length === 4\n ? ok(code)\n : nok(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Extension} Extension\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').Event} Event\n */\n\n/**\n * @typedef Options\n * Configuration (optional).\n * @property {boolean} [singleTilde=true]\n * Whether to support strikethrough with a single tilde (`boolean`, default:\n * `true`).\n * Single tildes work on github.com, but are technically prohibited by the\n * GFM spec.\n */\nimport {splice} from 'micromark-util-chunked'\nimport {classifyCharacter} from 'micromark-util-classify-character'\nimport {resolveAll} from 'micromark-util-resolve-all'\n\n/**\n * @param {Options} [options]\n * @returns {Extension}\n */\nexport function gfmStrikethrough(options = {}) {\n let single = options.singleTilde\n const tokenizer = {\n tokenize: tokenizeStrikethrough,\n resolveAll: resolveAllStrikethrough\n }\n\n if (single === null || single === undefined) {\n single = true\n }\n\n return {\n text: {\n [126]: tokenizer\n },\n insideSpan: {\n null: [tokenizer]\n },\n attentionMarkers: {\n null: [126]\n }\n }\n /**\n * Take events and resolve strikethrough.\n *\n * @type {Resolver}\n */\n\n function resolveAllStrikethrough(events, context) {\n let index = -1 // Walk through all events.\n\n while (++index < events.length) {\n // Find a token that can close.\n if (\n events[index][0] === 'enter' &&\n events[index][1].type === 'strikethroughSequenceTemporary' &&\n events[index][1]._close\n ) {\n let open = index // Now walk back to find an opener.\n\n while (open--) {\n // Find a token that can open the closer.\n if (\n events[open][0] === 'exit' &&\n events[open][1].type === 'strikethroughSequenceTemporary' &&\n events[open][1]._open && // If the sizes are the same:\n 
events[index][1].end.offset - events[index][1].start.offset ===\n events[open][1].end.offset - events[open][1].start.offset\n ) {\n events[index][1].type = 'strikethroughSequence'\n events[open][1].type = 'strikethroughSequence'\n const strikethrough = {\n type: 'strikethrough',\n start: Object.assign({}, events[open][1].start),\n end: Object.assign({}, events[index][1].end)\n }\n const text = {\n type: 'strikethroughText',\n start: Object.assign({}, events[open][1].end),\n end: Object.assign({}, events[index][1].start)\n } // Opening.\n\n const nextEvents = [\n ['enter', strikethrough, context],\n ['enter', events[open][1], context],\n ['exit', events[open][1], context],\n ['enter', text, context]\n ] // Between.\n\n splice(\n nextEvents,\n nextEvents.length,\n 0,\n resolveAll(\n context.parser.constructs.insideSpan.null,\n events.slice(open + 1, index),\n context\n )\n ) // Closing.\n\n splice(nextEvents, nextEvents.length, 0, [\n ['exit', text, context],\n ['enter', events[index][1], context],\n ['exit', events[index][1], context],\n ['exit', strikethrough, context]\n ])\n splice(events, open - 1, index - open + 3, nextEvents)\n index = open + nextEvents.length - 2\n break\n }\n }\n }\n }\n\n index = -1\n\n while (++index < events.length) {\n if (events[index][1].type === 'strikethroughSequenceTemporary') {\n events[index][1].type = 'data'\n }\n }\n\n return events\n }\n /** @type {Tokenizer} */\n\n function tokenizeStrikethrough(effects, ok, nok) {\n const previous = this.previous\n const events = this.events\n let size = 0\n return start\n /** @type {State} */\n\n function start(code) {\n if (\n previous === 126 &&\n events[events.length - 1][1].type !== 'characterEscape'\n ) {\n return nok(code)\n }\n\n effects.enter('strikethroughSequenceTemporary')\n return more(code)\n }\n /** @type {State} */\n\n function more(code) {\n const before = classifyCharacter(previous)\n\n if (code === 126) {\n // If this is the third marker, exit.\n if (size > 1) return nok(code)\n effects.consume(code)\n size++\n return more\n }\n\n if (size < 2 && !single) return nok(code)\n const token = effects.exit('strikethroughSequenceTemporary')\n const after = classifyCharacter(code)\n token._open = !after || (after === 2 && Boolean(before))\n token._close = !before || (before === 2 && Boolean(after))\n return ok(code)\n }\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Extension} Extension\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n */\n\n/**\n * @typedef {'left'|'center'|'right'|'none'} Align\n */\nimport {factorySpace} from 'micromark-factory-space'\nimport {\n markdownLineEnding,\n markdownLineEndingOrSpace,\n markdownSpace\n} from 'micromark-util-character'\n\n/** @type {Extension} */\nexport const gfmTable = {\n flow: {\n null: {\n tokenize: tokenizeTable,\n resolve: resolveTable\n }\n }\n}\nconst nextPrefixedOrBlank = {\n tokenize: tokenizeNextPrefixedOrBlank,\n partial: true\n}\n/** @type {Resolver} */\n\nfunction resolveTable(events, context) {\n let index = -1\n /** @type {boolean|undefined} */\n\n let inHead\n /** @type {boolean|undefined} */\n\n let inDelimiterRow\n /** @type {boolean|undefined} */\n\n let inRow\n /** @type {number|undefined} */\n\n let contentStart\n /** @type {number|undefined} */\n\n let contentEnd\n /** @type {number|undefined} */\n\n let cellStart\n /** @type 
{boolean|undefined} */\n\n let seenCellInRow\n\n while (++index < events.length) {\n const token = events[index][1]\n\n if (inRow) {\n if (token.type === 'temporaryTableCellContent') {\n contentStart = contentStart || index\n contentEnd = index\n }\n\n if (\n // Combine separate content parts into one.\n (token.type === 'tableCellDivider' || token.type === 'tableRow') &&\n contentEnd\n ) {\n const content = {\n type: 'tableContent',\n start: events[contentStart][1].start,\n end: events[contentEnd][1].end\n }\n /** @type {Token} */\n\n const text = {\n type: 'chunkText',\n start: content.start,\n end: content.end,\n // @ts-expect-error It’s fine.\n contentType: 'text'\n }\n events.splice(\n contentStart,\n contentEnd - contentStart + 1,\n ['enter', content, context],\n ['enter', text, context],\n ['exit', text, context],\n ['exit', content, context]\n )\n index -= contentEnd - contentStart - 3\n contentStart = undefined\n contentEnd = undefined\n }\n }\n\n if (\n events[index][0] === 'exit' &&\n cellStart !== undefined &&\n cellStart + (seenCellInRow ? 0 : 1) < index &&\n (token.type === 'tableCellDivider' ||\n (token.type === 'tableRow' &&\n (cellStart + 3 < index ||\n events[cellStart][1].type !== 'whitespace')))\n ) {\n const cell = {\n type: inDelimiterRow\n ? 'tableDelimiter'\n : inHead\n ? 'tableHeader'\n : 'tableData',\n start: events[cellStart][1].start,\n end: events[index][1].end\n }\n events.splice(index + (token.type === 'tableCellDivider' ? 1 : 0), 0, [\n 'exit',\n cell,\n context\n ])\n events.splice(cellStart, 0, ['enter', cell, context])\n index += 2\n cellStart = index + 1\n seenCellInRow = true\n }\n\n if (token.type === 'tableRow') {\n inRow = events[index][0] === 'enter'\n\n if (inRow) {\n cellStart = index + 1\n seenCellInRow = false\n }\n }\n\n if (token.type === 'tableDelimiterRow') {\n inDelimiterRow = events[index][0] === 'enter'\n\n if (inDelimiterRow) {\n cellStart = index + 1\n seenCellInRow = false\n }\n }\n\n if (token.type === 'tableHead') {\n inHead = events[index][0] === 'enter'\n }\n }\n\n return events\n}\n/** @type {Tokenizer} */\n\nfunction tokenizeTable(effects, ok, nok) {\n const self = this\n /** @type {Array} */\n\n const align = []\n let tableHeaderCount = 0\n /** @type {boolean|undefined} */\n\n let seenDelimiter\n /** @type {boolean|undefined} */\n\n let hasDash\n return start\n /** @type {State} */\n\n function start(code) {\n // @ts-expect-error Custom.\n effects.enter('table')._align = align\n effects.enter('tableHead')\n effects.enter('tableRow') // If we start with a pipe, we open a cell marker.\n\n if (code === 124) {\n return cellDividerHead(code)\n }\n\n tableHeaderCount++\n effects.enter('temporaryTableCellContent') // Can’t be space or eols at the start of a construct, so we’re in a cell.\n\n return inCellContentHead(code)\n }\n /** @type {State} */\n\n function cellDividerHead(code) {\n effects.enter('tableCellDivider')\n effects.consume(code)\n effects.exit('tableCellDivider')\n seenDelimiter = true\n return cellBreakHead\n }\n /** @type {State} */\n\n function cellBreakHead(code) {\n if (code === null || markdownLineEnding(code)) {\n return atRowEndHead(code)\n }\n\n if (markdownSpace(code)) {\n effects.enter('whitespace')\n effects.consume(code)\n return inWhitespaceHead\n }\n\n if (seenDelimiter) {\n seenDelimiter = undefined\n tableHeaderCount++\n }\n\n if (code === 124) {\n return cellDividerHead(code)\n } // Anything else is cell content.\n\n effects.enter('temporaryTableCellContent')\n return inCellContentHead(code)\n }\n /** 
@type {State} */\n\n function inWhitespaceHead(code) {\n if (markdownSpace(code)) {\n effects.consume(code)\n return inWhitespaceHead\n }\n\n effects.exit('whitespace')\n return cellBreakHead(code)\n }\n /** @type {State} */\n\n function inCellContentHead(code) {\n // EOF, whitespace, pipe\n if (code === null || code === 124 || markdownLineEndingOrSpace(code)) {\n effects.exit('temporaryTableCellContent')\n return cellBreakHead(code)\n }\n\n effects.consume(code)\n return code === 92 ? inCellContentEscapeHead : inCellContentHead\n }\n /** @type {State} */\n\n function inCellContentEscapeHead(code) {\n if (code === 92 || code === 124) {\n effects.consume(code)\n return inCellContentHead\n } // Anything else.\n\n return inCellContentHead(code)\n }\n /** @type {State} */\n\n function atRowEndHead(code) {\n if (code === null) {\n return nok(code)\n }\n\n effects.exit('tableRow')\n effects.exit('tableHead')\n const originalInterrupt = self.interrupt\n self.interrupt = true\n return effects.attempt(\n {\n tokenize: tokenizeRowEnd,\n partial: true\n },\n function (code) {\n self.interrupt = originalInterrupt\n effects.enter('tableDelimiterRow')\n return atDelimiterRowBreak(code)\n },\n function (code) {\n self.interrupt = originalInterrupt\n return nok(code)\n }\n )(code)\n }\n /** @type {State} */\n\n function atDelimiterRowBreak(code) {\n if (code === null || markdownLineEnding(code)) {\n return rowEndDelimiter(code)\n }\n\n if (markdownSpace(code)) {\n effects.enter('whitespace')\n effects.consume(code)\n return inWhitespaceDelimiter\n }\n\n if (code === 45) {\n effects.enter('tableDelimiterFiller')\n effects.consume(code)\n hasDash = true\n align.push('none')\n return inFillerDelimiter\n }\n\n if (code === 58) {\n effects.enter('tableDelimiterAlignment')\n effects.consume(code)\n effects.exit('tableDelimiterAlignment')\n align.push('left')\n return afterLeftAlignment\n } // If we start with a pipe, we open a cell marker.\n\n if (code === 124) {\n effects.enter('tableCellDivider')\n effects.consume(code)\n effects.exit('tableCellDivider')\n return atDelimiterRowBreak\n }\n\n return nok(code)\n }\n /** @type {State} */\n\n function inWhitespaceDelimiter(code) {\n if (markdownSpace(code)) {\n effects.consume(code)\n return inWhitespaceDelimiter\n }\n\n effects.exit('whitespace')\n return atDelimiterRowBreak(code)\n }\n /** @type {State} */\n\n function inFillerDelimiter(code) {\n if (code === 45) {\n effects.consume(code)\n return inFillerDelimiter\n }\n\n effects.exit('tableDelimiterFiller')\n\n if (code === 58) {\n effects.enter('tableDelimiterAlignment')\n effects.consume(code)\n effects.exit('tableDelimiterAlignment')\n align[align.length - 1] =\n align[align.length - 1] === 'left' ? 
'center' : 'right'\n return afterRightAlignment\n }\n\n return atDelimiterRowBreak(code)\n }\n /** @type {State} */\n\n function afterLeftAlignment(code) {\n if (code === 45) {\n effects.enter('tableDelimiterFiller')\n effects.consume(code)\n hasDash = true\n return inFillerDelimiter\n } // Anything else is not ok.\n\n return nok(code)\n }\n /** @type {State} */\n\n function afterRightAlignment(code) {\n if (code === null || markdownLineEnding(code)) {\n return rowEndDelimiter(code)\n }\n\n if (markdownSpace(code)) {\n effects.enter('whitespace')\n effects.consume(code)\n return inWhitespaceDelimiter\n } // `|`\n\n if (code === 124) {\n effects.enter('tableCellDivider')\n effects.consume(code)\n effects.exit('tableCellDivider')\n return atDelimiterRowBreak\n }\n\n return nok(code)\n }\n /** @type {State} */\n\n function rowEndDelimiter(code) {\n effects.exit('tableDelimiterRow') // Exit if there was no dash at all, or if the header cell count is not the\n // delimiter cell count.\n\n if (!hasDash || tableHeaderCount !== align.length) {\n return nok(code)\n }\n\n if (code === null) {\n return tableClose(code)\n }\n\n return effects.check(\n nextPrefixedOrBlank,\n tableClose,\n effects.attempt(\n {\n tokenize: tokenizeRowEnd,\n partial: true\n },\n factorySpace(effects, bodyStart, 'linePrefix', 4),\n tableClose\n )\n )(code)\n }\n /** @type {State} */\n\n function tableClose(code) {\n effects.exit('table')\n return ok(code)\n }\n /** @type {State} */\n\n function bodyStart(code) {\n effects.enter('tableBody')\n return rowStartBody(code)\n }\n /** @type {State} */\n\n function rowStartBody(code) {\n effects.enter('tableRow') // If we start with a pipe, we open a cell marker.\n\n if (code === 124) {\n return cellDividerBody(code)\n }\n\n effects.enter('temporaryTableCellContent') // Can’t be space or eols at the start of a construct, so we’re in a cell.\n\n return inCellContentBody(code)\n }\n /** @type {State} */\n\n function cellDividerBody(code) {\n effects.enter('tableCellDivider')\n effects.consume(code)\n effects.exit('tableCellDivider')\n return cellBreakBody\n }\n /** @type {State} */\n\n function cellBreakBody(code) {\n if (code === null || markdownLineEnding(code)) {\n return atRowEndBody(code)\n }\n\n if (markdownSpace(code)) {\n effects.enter('whitespace')\n effects.consume(code)\n return inWhitespaceBody\n } // `|`\n\n if (code === 124) {\n return cellDividerBody(code)\n } // Anything else is cell content.\n\n effects.enter('temporaryTableCellContent')\n return inCellContentBody(code)\n }\n /** @type {State} */\n\n function inWhitespaceBody(code) {\n if (markdownSpace(code)) {\n effects.consume(code)\n return inWhitespaceBody\n }\n\n effects.exit('whitespace')\n return cellBreakBody(code)\n }\n /** @type {State} */\n\n function inCellContentBody(code) {\n // EOF, whitespace, pipe\n if (code === null || code === 124 || markdownLineEndingOrSpace(code)) {\n effects.exit('temporaryTableCellContent')\n return cellBreakBody(code)\n }\n\n effects.consume(code)\n return code === 92 ? 
inCellContentEscapeBody : inCellContentBody\n }\n /** @type {State} */\n\n function inCellContentEscapeBody(code) {\n if (code === 92 || code === 124) {\n effects.consume(code)\n return inCellContentBody\n } // Anything else.\n\n return inCellContentBody(code)\n }\n /** @type {State} */\n\n function atRowEndBody(code) {\n effects.exit('tableRow')\n\n if (code === null) {\n return tableBodyClose(code)\n }\n\n return effects.check(\n nextPrefixedOrBlank,\n tableBodyClose,\n effects.attempt(\n {\n tokenize: tokenizeRowEnd,\n partial: true\n },\n factorySpace(effects, rowStartBody, 'linePrefix', 4),\n tableBodyClose\n )\n )(code)\n }\n /** @type {State} */\n\n function tableBodyClose(code) {\n effects.exit('tableBody')\n return tableClose(code)\n }\n /** @type {Tokenizer} */\n\n function tokenizeRowEnd(effects, ok, nok) {\n return start\n /** @type {State} */\n\n function start(code) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return factorySpace(effects, prefixed, 'linePrefix')\n }\n /** @type {State} */\n\n function prefixed(code) {\n // Blank or interrupting line.\n if (\n self.parser.lazy[self.now().line] ||\n code === null ||\n markdownLineEnding(code)\n ) {\n return nok(code)\n }\n\n const tail = self.events[self.events.length - 1] // Indented code can interrupt delimiter and body rows.\n\n if (\n !self.parser.constructs.disable.null.includes('codeIndented') &&\n tail &&\n tail[1].type === 'linePrefix' &&\n tail[2].sliceSerialize(tail[1], true).length >= 4\n ) {\n return nok(code)\n }\n\n self._gfmTableDynamicInterruptHack = true\n return effects.check(\n self.parser.constructs.flow,\n function (code) {\n self._gfmTableDynamicInterruptHack = false\n return nok(code)\n },\n function (code) {\n self._gfmTableDynamicInterruptHack = false\n return ok(code)\n }\n )(code)\n }\n }\n}\n/** @type {Tokenizer} */\n\nfunction tokenizeNextPrefixedOrBlank(effects, ok, nok) {\n let size = 0\n return start\n /** @type {State} */\n\n function start(code) {\n // This is a check, so we don’t care about tokens, but we open a bogus one\n // so we’re valid.\n effects.enter('check') // EOL.\n\n effects.consume(code)\n return whitespace\n }\n /** @type {State} */\n\n function whitespace(code) {\n if (code === -1 || code === 32) {\n effects.consume(code)\n size++\n return size === 4 ? 
ok : whitespace\n } // EOF or whitespace\n\n if (code === null || markdownLineEndingOrSpace(code)) {\n return ok(code)\n } // Anything else.\n\n return nok(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Extension} Extension\n * @typedef {import('micromark-util-types').ConstructRecord} ConstructRecord\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n * @typedef {import('micromark-util-types').Previous} Previous\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Event} Event\n * @typedef {import('micromark-util-types').Code} Code\n */\nimport {factorySpace} from 'micromark-factory-space'\nimport {\n markdownLineEndingOrSpace,\n markdownLineEnding\n} from 'micromark-util-character'\nconst tasklistCheck = {\n tokenize: tokenizeTasklistCheck\n}\nexport const gfmTaskListItem = {\n text: {\n [91]: tasklistCheck\n }\n}\n/** @type {Tokenizer} */\n\nfunction tokenizeTasklistCheck(effects, ok, nok) {\n const self = this\n return open\n /** @type {State} */\n\n function open(code) {\n if (\n // Exit if there’s stuff before.\n self.previous !== null || // Exit if not in the first content that is the first child of a list\n // item.\n !self._gfmTasklistFirstContentOfListItem\n ) {\n return nok(code)\n }\n\n effects.enter('taskListCheck')\n effects.enter('taskListCheckMarker')\n effects.consume(code)\n effects.exit('taskListCheckMarker')\n return inside\n }\n /** @type {State} */\n\n function inside(code) {\n // To match how GH works in comments, use `markdownSpace` (`[ \\t]`) instead\n // of `markdownLineEndingOrSpace` (`[ \\t\\r\\n]`).\n if (markdownLineEndingOrSpace(code)) {\n effects.enter('taskListCheckValueUnchecked')\n effects.consume(code)\n effects.exit('taskListCheckValueUnchecked')\n return close\n }\n\n if (code === 88 || code === 120) {\n effects.enter('taskListCheckValueChecked')\n effects.consume(code)\n effects.exit('taskListCheckValueChecked')\n return close\n }\n\n return nok(code)\n }\n /** @type {State} */\n\n function close(code) {\n if (code === 93) {\n effects.enter('taskListCheckMarker')\n effects.consume(code)\n effects.exit('taskListCheckMarker')\n effects.exit('taskListCheck')\n return effects.check(\n {\n tokenize: spaceThenNonSpace\n },\n ok,\n nok\n )\n }\n\n return nok(code)\n }\n}\n/** @type {Tokenizer} */\n\nfunction spaceThenNonSpace(effects, ok, nok) {\n const self = this\n return factorySpace(effects, after, 'whitespace')\n /** @type {State} */\n\n function after(code) {\n const tail = self.events[self.events.length - 1]\n return (\n // We either found spaces…\n ((tail && tail[1].type === 'whitespace') || // …or it was followed by a line ending, in which case, there has to be\n // non-whitespace after that line ending, because otherwise we’d get an\n // EOF as the content is closed with blank lines.\n markdownLineEnding(code)) &&\n code !== null\n ? 
ok(code)\n : nok(code)\n )\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Extension} Extension\n * @typedef {import('micromark-util-types').HtmlExtension} HtmlExtension\n * @typedef {import('micromark-extension-gfm-strikethrough').Options} Options\n * @typedef {import('micromark-extension-gfm-footnote').HtmlOptions} HtmlOptions\n */\n\nimport {\n combineExtensions,\n combineHtmlExtensions\n} from 'micromark-util-combine-extensions'\nimport {\n gfmAutolinkLiteral,\n gfmAutolinkLiteralHtml\n} from 'micromark-extension-gfm-autolink-literal'\nimport {gfmFootnote, gfmFootnoteHtml} from 'micromark-extension-gfm-footnote'\nimport {\n gfmStrikethrough,\n gfmStrikethroughHtml\n} from 'micromark-extension-gfm-strikethrough'\nimport {gfmTable, gfmTableHtml} from 'micromark-extension-gfm-table'\nimport {gfmTagfilterHtml} from 'micromark-extension-gfm-tagfilter'\nimport {\n gfmTaskListItem,\n gfmTaskListItemHtml\n} from 'micromark-extension-gfm-task-list-item'\n\n/**\n * Support GFM or markdown on github.com.\n *\n * @param {Options} [options]\n * @returns {Extension}\n */\nexport function gfm(options) {\n return combineExtensions([\n gfmAutolinkLiteral,\n gfmFootnote(),\n gfmStrikethrough(options),\n gfmTable,\n gfmTaskListItem\n ])\n}\n\n/**\n * Support to compile GFM to HTML.\n *\n * @param {HtmlOptions} [options]\n * @returns {HtmlExtension}\n */\nexport function gfmHtml(options) {\n return combineHtmlExtensions([\n gfmAutolinkLiteralHtml,\n gfmFootnoteHtml(options),\n gfmStrikethroughHtml,\n gfmTableHtml,\n gfmTagfilterHtml,\n gfmTaskListItemHtml\n ])\n}\n","/**\n * Count how often a character (or substring) is used in a string.\n *\n * @param {string} value\n * Value to search in.\n * @param {string} character\n * Character (or substring) to look for.\n * @return {number}\n * Number of times `character` occurred in `value`.\n */\nexport function ccount(value, character) {\n const source = String(value)\n\n if (typeof character !== 'string') {\n throw new TypeError('Expected character')\n }\n\n let count = 0\n let index = source.indexOf(character)\n\n while (index !== -1) {\n count++\n index = source.indexOf(character, index + character.length)\n }\n\n return count\n}\n","/**\n * @typedef Options Configuration.\n * @property {Test} [ignore] `unist-util-is` test used to assert parents\n *\n * @typedef {import('mdast').Root} Root\n * @typedef {import('mdast').Content} Content\n * @typedef {import('mdast').PhrasingContent} PhrasingContent\n * @typedef {import('mdast').Text} Text\n * @typedef {Content|Root} Node\n * @typedef {Extract} Parent\n *\n * @typedef {import('unist-util-visit-parents').Test} Test\n * @typedef {import('unist-util-visit-parents').VisitorResult} VisitorResult\n *\n * @typedef RegExpMatchObject\n * @property {number} index\n * @property {string} input\n *\n * @typedef {string|RegExp} Find\n * @typedef {string|ReplaceFunction} Replace\n *\n * @typedef {[Find, Replace]} FindAndReplaceTuple\n * @typedef {Object.} FindAndReplaceSchema\n * @typedef {Array.} FindAndReplaceList\n *\n * @typedef {[RegExp, ReplaceFunction]} Pair\n * @typedef {Array.} Pairs\n */\n\n/**\n * @callback ReplaceFunction\n * @param {...any} parameters\n * @returns {Array.|PhrasingContent|string|false|undefined|null}\n */\n\nimport escape from 'escape-string-regexp'\nimport {visitParents} from 'unist-util-visit-parents'\nimport {convert} from 'unist-util-is'\n\nconst own = {}.hasOwnProperty\n\n/**\n * @param tree mdast tree\n * @param find Value to find and remove. 
When `string`, escaped and made into a global `RegExp`\n * @param [replace] Value to insert.\n * * When `string`, turned into a Text node.\n * * When `Function`, called with the results of calling `RegExp.exec` as\n * arguments, in which case it can return a single or a list of `Node`,\n * a `string` (which is wrapped in a `Text` node), or `false` to not replace\n * @param [options] Configuration.\n */\nexport const findAndReplace =\n /**\n * @type {(\n * ((tree: Node, find: Find, replace?: Replace, options?: Options) => Node) &\n * ((tree: Node, schema: FindAndReplaceSchema|FindAndReplaceList, options?: Options) => Node)\n * )}\n **/\n (\n /**\n * @param {Node} tree\n * @param {Find|FindAndReplaceSchema|FindAndReplaceList} find\n * @param {Replace|Options} [replace]\n * @param {Options} [options]\n */\n function (tree, find, replace, options) {\n /** @type {Options|undefined} */\n let settings\n /** @type {FindAndReplaceSchema|FindAndReplaceList} */\n let schema\n\n if (typeof find === 'string' || find instanceof RegExp) {\n // @ts-expect-error don’t expect options twice.\n schema = [[find, replace]]\n settings = options\n } else {\n schema = find\n // @ts-expect-error don’t expect replace twice.\n settings = replace\n }\n\n if (!settings) {\n settings = {}\n }\n\n const ignored = convert(settings.ignore || [])\n const pairs = toPairs(schema)\n let pairIndex = -1\n\n while (++pairIndex < pairs.length) {\n visitParents(tree, 'text', visitor)\n }\n\n return tree\n\n /** @type {import('unist-util-visit-parents').Visitor} */\n function visitor(node, parents) {\n let index = -1\n /** @type {Parent|undefined} */\n let grandparent\n\n while (++index < parents.length) {\n const parent = /** @type {Parent} */ (parents[index])\n\n if (\n ignored(\n parent,\n // @ts-expect-error mdast vs. unist parent.\n grandparent ? grandparent.children.indexOf(parent) : undefined,\n grandparent\n )\n ) {\n return\n }\n\n grandparent = parent\n }\n\n if (grandparent) {\n return handler(node, grandparent)\n }\n }\n\n /**\n * @param {Text} node\n * @param {Parent} parent\n * @returns {VisitorResult}\n */\n function handler(node, parent) {\n const find = pairs[pairIndex][0]\n const replace = pairs[pairIndex][1]\n let start = 0\n // @ts-expect-error: TS is wrong, some of these children can be text.\n let index = parent.children.indexOf(node)\n /** @type {Array.} */\n let nodes = []\n /** @type {number|undefined} */\n let position\n\n find.lastIndex = 0\n\n let match = find.exec(node.value)\n\n while (match) {\n position = match.index\n // @ts-expect-error this is perfectly fine, typescript.\n let value = replace(...match, {\n index: match.index,\n input: match.input\n })\n\n if (typeof value === 'string') {\n value = value.length > 0 ? 
{type: 'text', value} : undefined\n }\n\n if (value !== false) {\n if (start !== position) {\n nodes.push({\n type: 'text',\n value: node.value.slice(start, position)\n })\n }\n\n if (Array.isArray(value)) {\n nodes.push(...value)\n } else if (value) {\n nodes.push(value)\n }\n\n start = position + match[0].length\n }\n\n if (!find.global) {\n break\n }\n\n match = find.exec(node.value)\n }\n\n if (position === undefined) {\n nodes = [node]\n index--\n } else {\n if (start < node.value.length) {\n nodes.push({type: 'text', value: node.value.slice(start)})\n }\n\n parent.children.splice(index, 1, ...nodes)\n }\n\n return index + nodes.length + 1\n }\n }\n )\n\n/**\n * @param {FindAndReplaceSchema|FindAndReplaceList} schema\n * @returns {Pairs}\n */\nfunction toPairs(schema) {\n /** @type {Pairs} */\n const result = []\n\n if (typeof schema !== 'object') {\n throw new TypeError('Expected array or object as schema')\n }\n\n if (Array.isArray(schema)) {\n let index = -1\n\n while (++index < schema.length) {\n result.push([\n toExpression(schema[index][0]),\n toFunction(schema[index][1])\n ])\n }\n } else {\n /** @type {string} */\n let key\n\n for (key in schema) {\n if (own.call(schema, key)) {\n result.push([toExpression(key), toFunction(schema[key])])\n }\n }\n }\n\n return result\n}\n\n/**\n * @param {Find} find\n * @returns {RegExp}\n */\nfunction toExpression(find) {\n return typeof find === 'string' ? new RegExp(escape(find), 'g') : find\n}\n\n/**\n * @param {Replace} replace\n * @returns {ReplaceFunction}\n */\nfunction toFunction(replace) {\n return typeof replace === 'function' ? replace : () => replace\n}\n","export default function escapeStringRegexp(string) {\n\tif (typeof string !== 'string') {\n\t\tthrow new TypeError('Expected a string');\n\t}\n\n\t// Escape characters with special meaning either inside or outside character sets.\n\t// Use a simple backslash escape when it’s always valid, and a `\\xnn` escape when the simpler form would be disallowed by Unicode patterns’ stricter grammar.\n\treturn string\n\t\t.replace(/[|\\\\{}()[\\]^$+*?.]/g, '\\\\$&')\n\t\t.replace(/-/g, '\\\\x2d');\n}\n","/**\n * @typedef {import('mdast').Link} Link\n * @typedef {import('mdast-util-from-markdown').Extension} FromMarkdownExtension\n * @typedef {import('mdast-util-from-markdown').Transform} FromMarkdownTransform\n * @typedef {import('mdast-util-from-markdown').Handle} FromMarkdownHandle\n * @typedef {import('mdast-util-to-markdown/lib/types.js').Options} ToMarkdownExtension\n * @typedef {import('mdast-util-find-and-replace').ReplaceFunction} ReplaceFunction\n * @typedef {import('mdast-util-find-and-replace').RegExpMatchObject} RegExpMatchObject\n * @typedef {import('mdast-util-find-and-replace').PhrasingContent} PhrasingContent\n */\n\nimport {ccount} from 'ccount'\nimport {findAndReplace} from 'mdast-util-find-and-replace'\nimport {unicodePunctuation, unicodeWhitespace} from 'micromark-util-character'\n\nconst inConstruct = 'phrasing'\nconst notInConstruct = ['autolink', 'link', 'image', 'label']\n\n/** @type {FromMarkdownExtension} */\nexport const gfmAutolinkLiteralFromMarkdown = {\n transforms: [transformGfmAutolinkLiterals],\n enter: {\n literalAutolink: enterLiteralAutolink,\n literalAutolinkEmail: enterLiteralAutolinkValue,\n literalAutolinkHttp: enterLiteralAutolinkValue,\n literalAutolinkWww: enterLiteralAutolinkValue\n },\n exit: {\n literalAutolink: exitLiteralAutolink,\n literalAutolinkEmail: exitLiteralAutolinkEmail,\n literalAutolinkHttp: exitLiteralAutolinkHttp,\n 
literalAutolinkWww: exitLiteralAutolinkWww\n }\n}\n\n/** @type {ToMarkdownExtension} */\nexport const gfmAutolinkLiteralToMarkdown = {\n unsafe: [\n {\n character: '@',\n before: '[+\\\\-.\\\\w]',\n after: '[\\\\-.\\\\w]',\n inConstruct,\n notInConstruct\n },\n {\n character: '.',\n before: '[Ww]',\n after: '[\\\\-.\\\\w]',\n inConstruct,\n notInConstruct\n },\n {character: ':', before: '[ps]', after: '\\\\/', inConstruct, notInConstruct}\n ]\n}\n\n/** @type {FromMarkdownHandle} */\nfunction enterLiteralAutolink(token) {\n this.enter({type: 'link', title: null, url: '', children: []}, token)\n}\n\n/** @type {FromMarkdownHandle} */\nfunction enterLiteralAutolinkValue(token) {\n this.config.enter.autolinkProtocol.call(this, token)\n}\n\n/** @type {FromMarkdownHandle} */\nfunction exitLiteralAutolinkHttp(token) {\n this.config.exit.autolinkProtocol.call(this, token)\n}\n\n/** @type {FromMarkdownHandle} */\nfunction exitLiteralAutolinkWww(token) {\n this.config.exit.data.call(this, token)\n const node = /** @type {Link} */ (this.stack[this.stack.length - 1])\n node.url = 'http://' + this.sliceSerialize(token)\n}\n\n/** @type {FromMarkdownHandle} */\nfunction exitLiteralAutolinkEmail(token) {\n this.config.exit.autolinkEmail.call(this, token)\n}\n\n/** @type {FromMarkdownHandle} */\nfunction exitLiteralAutolink(token) {\n this.exit(token)\n}\n\n/** @type {FromMarkdownTransform} */\nfunction transformGfmAutolinkLiterals(tree) {\n findAndReplace(\n tree,\n [\n [/(https?:\\/\\/|www(?=\\.))([-.\\w]+)([^ \\t\\r\\n]*)/gi, findUrl],\n [/([-.\\w+]+)@([-\\w]+(?:\\.[-\\w]+)+)/g, findEmail]\n ],\n {ignore: ['link', 'linkReference']}\n )\n}\n\n/**\n * @type {ReplaceFunction}\n * @param {string} _\n * @param {string} protocol\n * @param {string} domain\n * @param {string} path\n * @param {RegExpMatchObject} match\n */\n// eslint-disable-next-line max-params\nfunction findUrl(_, protocol, domain, path, match) {\n let prefix = ''\n\n // Not an expected previous character.\n if (!previous(match)) {\n return false\n }\n\n // Treat `www` as part of the domain.\n if (/^w/i.test(protocol)) {\n domain = protocol + domain\n protocol = ''\n prefix = 'http://'\n }\n\n if (!isCorrectDomain(domain)) {\n return false\n }\n\n const parts = splitUrl(domain + path)\n\n if (!parts[0]) return false\n\n /** @type {PhrasingContent} */\n const result = {\n type: 'link',\n title: null,\n url: prefix + protocol + parts[0],\n children: [{type: 'text', value: protocol + parts[0]}]\n }\n\n if (parts[1]) {\n return [result, {type: 'text', value: parts[1]}]\n }\n\n return result\n}\n\n/**\n * @type {ReplaceFunction}\n * @param {string} _\n * @param {string} atext\n * @param {string} label\n * @param {RegExpMatchObject} match\n */\nfunction findEmail(_, atext, label, match) {\n if (\n // Not an expected previous character.\n !previous(match, true) ||\n // Label ends in not allowed character.\n /[_-\\d]$/.test(label)\n ) {\n return false\n }\n\n return {\n type: 'link',\n title: null,\n url: 'mailto:' + atext + '@' + label,\n children: [{type: 'text', value: atext + '@' + label}]\n }\n}\n\n/**\n * @param {string} domain\n * @returns {boolean}\n */\nfunction isCorrectDomain(domain) {\n const parts = domain.split('.')\n\n if (\n parts.length < 2 ||\n (parts[parts.length - 1] &&\n (/_/.test(parts[parts.length - 1]) ||\n !/[a-zA-Z\\d]/.test(parts[parts.length - 1]))) ||\n (parts[parts.length - 2] &&\n (/_/.test(parts[parts.length - 2]) ||\n !/[a-zA-Z\\d]/.test(parts[parts.length - 2])))\n ) {\n return false\n }\n\n return true\n}\n\n/**\n * 
@param {string} url\n * @returns {[string, string|undefined]}\n */\nfunction splitUrl(url) {\n const trailExec = /[!\"&'),.:;<>?\\]}]+$/.exec(url)\n /** @type {number} */\n let closingParenIndex\n /** @type {number} */\n let openingParens\n /** @type {number} */\n let closingParens\n /** @type {string|undefined} */\n let trail\n\n if (trailExec) {\n url = url.slice(0, trailExec.index)\n trail = trailExec[0]\n closingParenIndex = trail.indexOf(')')\n openingParens = ccount(url, '(')\n closingParens = ccount(url, ')')\n\n while (closingParenIndex !== -1 && openingParens > closingParens) {\n url += trail.slice(0, closingParenIndex + 1)\n trail = trail.slice(closingParenIndex + 1)\n closingParenIndex = trail.indexOf(')')\n closingParens++\n }\n }\n\n return [url, trail]\n}\n\n/**\n * @param {RegExpMatchObject} match\n * @param {boolean} [email=false]\n * @returns {boolean}\n */\nfunction previous(match, email) {\n const code = match.input.charCodeAt(match.index - 1)\n\n return (\n (match.index === 0 ||\n unicodeWhitespace(code) ||\n unicodePunctuation(code)) &&\n (!email || code !== 47)\n )\n}\n","/**\n * @typedef {import('mdast').Association} Association\n */\n\nimport {decodeString} from 'micromark-util-decode-string'\n\n/**\n * The `label` of an association is the string value: character escapes and\n * references work, and casing is intact.\n * The `identifier` is used to match one association to another: controversially,\n * character escapes and references don’t work in this matching: `©` does\n * not match `©`, and `\\+` does not match `+`.\n * But casing is ignored (and whitespace) is trimmed and collapsed: ` A\\nb`\n * matches `a b`.\n * So, we do prefer the label when figuring out how we’re going to serialize:\n * it has whitespace, casing, and we can ignore most useless character escapes\n * and all character references.\n *\n * @param {Association} node\n * @returns {string}\n */\nexport function association(node) {\n if (node.label || !node.identifier) {\n return node.label || ''\n }\n\n return decodeString(node.identifier)\n}\n","/**\n * @typedef {import('unist').Point} Point\n * @typedef {import('../types.js').TrackFields} TrackFields\n */\n\n/**\n * Functions to track output positions.\n * This info isn’t used yet but suchs functionality allows line wrapping,\n * and theoretically source maps (though, is there practical use in that?).\n *\n * @param {TrackFields} options_\n */\nexport function track(options_) {\n // Defaults are used to prevent crashes when older utilities somehow activate\n // this code.\n /* c8 ignore next 5 */\n const options = options_ || {}\n const now = options.now || {}\n let lineShift = options.lineShift || 0\n let line = now.line || 1\n let column = now.column || 1\n\n return {move, current, shift}\n\n /**\n * Get the current tracked info.\n *\n * @returns {{now: Point, lineShift: number}}\n */\n function current() {\n return {now: {line, column}, lineShift}\n }\n\n /**\n * Define an increased line shift (the typical indent for lines).\n *\n * @param {number} value\n */\n function shift(value) {\n lineShift += value\n }\n\n /**\n * Move past a string.\n *\n * @param {string} value\n * @returns {string}\n */\n function move(value = '') {\n const chunks = value.split(/\\r?\\n|\\r/g)\n const tail = chunks[chunks.length - 1]\n line += chunks.length - 1\n column =\n chunks.length === 1 ? 
column + tail.length : 1 + tail.length + lineShift\n return value\n }\n}\n","/**\n * @typedef {import('../types.js').Node} Node\n * @typedef {import('../types.js').Parent} Parent\n * @typedef {import('../types.js').Join} Join\n * @typedef {import('../types.js').Context} Context\n * @typedef {import('../types.js').TrackFields} TrackFields\n */\n\nimport {track} from './track.js'\n\n/**\n * @param {Parent} parent\n * @param {Context} context\n * @param {TrackFields} safeOptions\n * @returns {string}\n */\nexport function containerFlow(parent, context, safeOptions) {\n const indexStack = context.indexStack\n const children = parent.children || []\n const tracker = track(safeOptions)\n /** @type {Array} */\n const results = []\n let index = -1\n\n indexStack.push(-1)\n\n while (++index < children.length) {\n const child = children[index]\n\n indexStack[indexStack.length - 1] = index\n\n results.push(\n tracker.move(\n context.handle(child, parent, context, {\n before: '\\n',\n after: '\\n',\n ...tracker.current()\n })\n )\n )\n\n if (child.type !== 'list') {\n context.bulletLastUsed = undefined\n }\n\n if (index < children.length - 1) {\n results.push(tracker.move(between(child, children[index + 1])))\n }\n }\n\n indexStack.pop()\n\n return results.join('')\n\n /**\n * @param {Node} left\n * @param {Node} right\n * @returns {string}\n */\n function between(left, right) {\n let index = context.join.length\n\n while (index--) {\n const result = context.join[index](left, right, parent, context)\n\n if (result === true || result === 1) {\n break\n }\n\n if (typeof result === 'number') {\n return '\\n'.repeat(1 + result)\n }\n\n if (result === false) {\n return '\\n\\n\\n\\n'\n }\n }\n\n return '\\n\\n'\n }\n}\n","/**\n * @callback Map\n * @param {string} value\n * @param {number} line\n * @param {boolean} blank\n * @returns {string}\n */\n\nconst eol = /\\r?\\n|\\r/g\n\n/**\n * @param {string} value\n * @param {Map} map\n * @returns {string}\n */\nexport function indentLines(value, map) {\n /** @type {Array} */\n const result = []\n let start = 0\n let line = 0\n /** @type {RegExpExecArray|null} */\n let match\n\n while ((match = eol.exec(value))) {\n one(value.slice(start, match.index))\n result.push(match[0])\n start = match.index + match[0].length\n line++\n }\n\n one(value.slice(start))\n\n return result.join('')\n\n /**\n * @param {string} value\n */\n function one(value) {\n result.push(map(value, line, !value))\n }\n}\n","/**\n * @typedef {import('../types.js').Unsafe} Unsafe\n */\n\n/**\n * @param {Unsafe} pattern\n * @returns {RegExp}\n */\nexport function patternCompile(pattern) {\n if (!pattern._compiled) {\n const before =\n (pattern.atBreak ? '[\\\\r\\\\n][\\\\t ]*' : '') +\n (pattern.before ? '(?:' + pattern.before + ')' : '')\n\n pattern._compiled = new RegExp(\n (before ? '(' + before + ')' : '') +\n (/[|\\\\{}()[\\]^$+*?.-]/.test(pattern.character) ? '\\\\' : '') +\n pattern.character +\n (pattern.after ? 
'(?:' + pattern.after + ')' : ''),\n 'g'\n )\n }\n\n return pattern._compiled\n}\n","/**\n * @typedef {import('../types.js').Unsafe} Unsafe\n */\n\n/**\n * @param {Array} stack\n * @param {Unsafe} pattern\n * @returns {boolean}\n */\nexport function patternInScope(stack, pattern) {\n return (\n listInScope(stack, pattern.inConstruct, true) &&\n !listInScope(stack, pattern.notInConstruct, false)\n )\n}\n\n/**\n * @param {Array} stack\n * @param {Unsafe['inConstruct']} list\n * @param {boolean} none\n * @returns {boolean}\n */\nfunction listInScope(stack, list, none) {\n if (!list) {\n return none\n }\n\n if (typeof list === 'string') {\n list = [list]\n }\n\n let index = -1\n\n while (++index < list.length) {\n if (stack.includes(list[index])) {\n return true\n }\n }\n\n return false\n}\n","/**\n * @typedef {import('../types.js').Context} Context\n * @typedef {import('../types.js').SafeOptions} SafeOptions\n */\n\nimport {patternCompile} from './pattern-compile.js'\nimport {patternInScope} from './pattern-in-scope.js'\n\n/**\n * @param {Context} context\n * @param {string|null|undefined} input\n * @param {SafeOptions & {encode?: Array}} config\n * @returns {string}\n */\nexport function safe(context, input, config) {\n const value = (config.before || '') + (input || '') + (config.after || '')\n /** @type {Array} */\n const positions = []\n /** @type {Array} */\n const result = []\n /** @type {Record} */\n const infos = {}\n let index = -1\n\n while (++index < context.unsafe.length) {\n const pattern = context.unsafe[index]\n\n if (!patternInScope(context.stack, pattern)) {\n continue\n }\n\n const expression = patternCompile(pattern)\n /** @type {RegExpExecArray|null} */\n let match\n\n while ((match = expression.exec(value))) {\n const before = 'before' in pattern || Boolean(pattern.atBreak)\n const after = 'after' in pattern\n const position = match.index + (before ? match[1].length : 0)\n\n if (positions.includes(position)) {\n if (infos[position].before && !before) {\n infos[position].before = false\n }\n\n if (infos[position].after && !after) {\n infos[position].after = false\n }\n } else {\n positions.push(position)\n infos[position] = {before, after}\n }\n }\n }\n\n positions.sort(numerical)\n\n let start = config.before ? config.before.length : 0\n const end = value.length - (config.after ? 
config.after.length : 0)\n index = -1\n\n while (++index < positions.length) {\n const position = positions[index]\n\n // Character before or after matched:\n if (position < start || position >= end) {\n continue\n }\n\n // If this character is supposed to be escaped because it has a condition on\n // the next character, and the next character is definitly being escaped,\n // then skip this escape.\n if (\n (position + 1 < end &&\n positions[index + 1] === position + 1 &&\n infos[position].after &&\n !infos[position + 1].before &&\n !infos[position + 1].after) ||\n (positions[index - 1] === position - 1 &&\n infos[position].before &&\n !infos[position - 1].before &&\n !infos[position - 1].after)\n ) {\n continue\n }\n\n if (start !== position) {\n // If we have to use a character reference, an ampersand would be more\n // correct, but as backslashes only care about punctuation, either will\n // do the trick\n result.push(escapeBackslashes(value.slice(start, position), '\\\\'))\n }\n\n start = position\n\n if (\n /[!-/:-@[-`{-~]/.test(value.charAt(position)) &&\n (!config.encode || !config.encode.includes(value.charAt(position)))\n ) {\n // Character escape.\n result.push('\\\\')\n } else {\n // Character reference.\n result.push(\n '&#x' + value.charCodeAt(position).toString(16).toUpperCase() + ';'\n )\n start++\n }\n }\n\n result.push(escapeBackslashes(value.slice(start, end), config.after))\n\n return result.join('')\n}\n\n/**\n * @param {number} a\n * @param {number} b\n * @returns {number}\n */\nfunction numerical(a, b) {\n return a - b\n}\n\n/**\n * @param {string} value\n * @param {string} after\n * @returns {string}\n */\nfunction escapeBackslashes(value, after) {\n const expression = /\\\\(?=[!-/:-@[-`{-~])/g\n /** @type {Array} */\n const positions = []\n /** @type {Array} */\n const results = []\n const whole = value + after\n let index = -1\n let start = 0\n /** @type {RegExpExecArray|null} */\n let match\n\n while ((match = expression.exec(whole))) {\n positions.push(match.index)\n }\n\n while (++index < positions.length) {\n if (start !== positions[index]) {\n results.push(value.slice(start, positions[index]))\n }\n\n results.push('\\\\')\n start = positions[index]\n }\n\n results.push(value.slice(start))\n\n return results.join('')\n}\n","/**\n * @typedef {import('mdast').FootnoteReference} FootnoteReference\n * @typedef {import('mdast').FootnoteDefinition} FootnoteDefinition\n * @typedef {import('mdast-util-from-markdown').Extension} FromMarkdownExtension\n * @typedef {import('mdast-util-from-markdown').Handle} FromMarkdownHandle\n * @typedef {import('mdast-util-to-markdown').Options} ToMarkdownExtension\n * @typedef {import('mdast-util-to-markdown').Handle} ToMarkdownHandle\n * @typedef {import('mdast-util-to-markdown').Map} Map\n */\n\nimport {normalizeIdentifier} from 'micromark-util-normalize-identifier'\nimport {association} from 'mdast-util-to-markdown/lib/util/association.js'\nimport {containerFlow} from 'mdast-util-to-markdown/lib/util/container-flow.js'\nimport {indentLines} from 'mdast-util-to-markdown/lib/util/indent-lines.js'\nimport {safe} from 'mdast-util-to-markdown/lib/util/safe.js'\nimport {track} from 'mdast-util-to-markdown/lib/util/track.js'\n\n/**\n * @returns {FromMarkdownExtension}\n */\nexport function gfmFootnoteFromMarkdown() {\n return {\n enter: {\n gfmFootnoteDefinition: enterFootnoteDefinition,\n gfmFootnoteDefinitionLabelString: enterFootnoteDefinitionLabelString,\n gfmFootnoteCall: enterFootnoteCall,\n gfmFootnoteCallString: 
enterFootnoteCallString\n },\n exit: {\n gfmFootnoteDefinition: exitFootnoteDefinition,\n gfmFootnoteDefinitionLabelString: exitFootnoteDefinitionLabelString,\n gfmFootnoteCall: exitFootnoteCall,\n gfmFootnoteCallString: exitFootnoteCallString\n }\n }\n\n /** @type {FromMarkdownHandle} */\n function enterFootnoteDefinition(token) {\n this.enter(\n {type: 'footnoteDefinition', identifier: '', label: '', children: []},\n token\n )\n }\n\n /** @type {FromMarkdownHandle} */\n function enterFootnoteDefinitionLabelString() {\n this.buffer()\n }\n\n /** @type {FromMarkdownHandle} */\n function exitFootnoteDefinitionLabelString(token) {\n const label = this.resume()\n const node = /** @type {FootnoteDefinition} */ (\n this.stack[this.stack.length - 1]\n )\n node.label = label\n node.identifier = normalizeIdentifier(\n this.sliceSerialize(token)\n ).toLowerCase()\n }\n\n /** @type {FromMarkdownHandle} */\n function exitFootnoteDefinition(token) {\n this.exit(token)\n }\n\n /** @type {FromMarkdownHandle} */\n function enterFootnoteCall(token) {\n this.enter({type: 'footnoteReference', identifier: '', label: ''}, token)\n }\n\n /** @type {FromMarkdownHandle} */\n function enterFootnoteCallString() {\n this.buffer()\n }\n\n /** @type {FromMarkdownHandle} */\n function exitFootnoteCallString(token) {\n const label = this.resume()\n const node = /** @type {FootnoteDefinition} */ (\n this.stack[this.stack.length - 1]\n )\n node.label = label\n node.identifier = normalizeIdentifier(\n this.sliceSerialize(token)\n ).toLowerCase()\n }\n\n /** @type {FromMarkdownHandle} */\n function exitFootnoteCall(token) {\n this.exit(token)\n }\n}\n\n/**\n * @returns {ToMarkdownExtension}\n */\nexport function gfmFootnoteToMarkdown() {\n footnoteReference.peek = footnoteReferencePeek\n\n return {\n // This is on by default already.\n unsafe: [{character: '[', inConstruct: ['phrasing', 'label', 'reference']}],\n handlers: {footnoteDefinition, footnoteReference}\n }\n\n /**\n * @type {ToMarkdownHandle}\n * @param {FootnoteReference} node\n */\n function footnoteReference(node, _, context, safeOptions) {\n const tracker = track(safeOptions)\n let value = tracker.move('[^')\n const exit = context.enter('footnoteReference')\n const subexit = context.enter('reference')\n value += tracker.move(\n safe(context, association(node), {\n ...tracker.current(),\n before: value,\n after: ']'\n })\n )\n subexit()\n exit()\n value += tracker.move(']')\n return value\n }\n\n /** @type {ToMarkdownHandle} */\n function footnoteReferencePeek() {\n return '['\n }\n\n /**\n * @type {ToMarkdownHandle}\n * @param {FootnoteDefinition} node\n */\n function footnoteDefinition(node, _, context, safeOptions) {\n const tracker = track(safeOptions)\n let value = tracker.move('[^')\n const exit = context.enter('footnoteDefinition')\n const subexit = context.enter('label')\n value += tracker.move(\n safe(context, association(node), {\n ...tracker.current(),\n before: value,\n after: ']'\n })\n )\n subexit()\n value += tracker.move(\n ']:' + (node.children && node.children.length > 0 ? ' ' : '')\n )\n tracker.shift(4)\n value += tracker.move(\n indentLines(containerFlow(node, context, tracker.current()), map)\n )\n exit()\n\n return value\n\n /** @type {Map} */\n function map(line, index, blank) {\n if (index) {\n return (blank ? 
'' : ' ') + line\n }\n\n return line\n }\n }\n}\n","/**\n * @typedef {import('../types.js').Node} Node\n * @typedef {import('../types.js').Parent} Parent\n * @typedef {import('../types.js').SafeOptions} SafeOptions\n * @typedef {import('../types.js').Context} Context\n */\n\nimport {track} from './track.js'\n\n/**\n * @param {Parent} parent\n * @param {Context} context\n * @param {SafeOptions} safeOptions\n * @returns {string}\n */\nexport function containerPhrasing(parent, context, safeOptions) {\n const indexStack = context.indexStack\n const children = parent.children || []\n /** @type {Array} */\n const results = []\n let index = -1\n let before = safeOptions.before\n\n indexStack.push(-1)\n let tracker = track(safeOptions)\n\n while (++index < children.length) {\n const child = children[index]\n /** @type {string} */\n let after\n\n indexStack[indexStack.length - 1] = index\n\n if (index + 1 < children.length) {\n // @ts-expect-error: hush, it’s actually a `zwitch`.\n let handle = context.handle.handlers[children[index + 1].type]\n if (handle && handle.peek) handle = handle.peek\n after = handle\n ? handle(children[index + 1], parent, context, {\n before: '',\n after: '',\n ...tracker.current()\n }).charAt(0)\n : ''\n } else {\n after = safeOptions.after\n }\n\n // In some cases, html (text) can be found in phrasing right after an eol.\n // When we’d serialize that, in most cases that would be seen as html\n // (flow).\n // As we can’t escape or so to prevent it from happening, we take a somewhat\n // reasonable approach: replace that eol with a space.\n // See: \n if (\n results.length > 0 &&\n (before === '\\r' || before === '\\n') &&\n child.type === 'html'\n ) {\n results[results.length - 1] = results[results.length - 1].replace(\n /(\\r?\\n|\\r)$/,\n ' '\n )\n before = ' '\n\n // To do: does this work to reset tracker?\n tracker = track(safeOptions)\n tracker.move(results.join(''))\n }\n\n results.push(\n tracker.move(\n context.handle(child, parent, context, {\n ...tracker.current(),\n before,\n after\n })\n )\n )\n\n before = results[results.length - 1].slice(-1)\n }\n\n indexStack.pop()\n\n return results.join('')\n}\n","/**\n * @typedef {import('mdast').Delete} Delete\n * @typedef {import('mdast-util-from-markdown').Extension} FromMarkdownExtension\n * @typedef {import('mdast-util-from-markdown').Handle} FromMarkdownHandle\n * @typedef {import('mdast-util-to-markdown').Options} ToMarkdownExtension\n * @typedef {import('mdast-util-to-markdown').Handle} ToMarkdownHandle\n */\n\nimport {containerPhrasing} from 'mdast-util-to-markdown/lib/util/container-phrasing.js'\nimport {track} from 'mdast-util-to-markdown/lib/util/track.js'\n\n/** @type {FromMarkdownExtension} */\nexport const gfmStrikethroughFromMarkdown = {\n canContainEols: ['delete'],\n enter: {strikethrough: enterStrikethrough},\n exit: {strikethrough: exitStrikethrough}\n}\n\n/** @type {ToMarkdownExtension} */\nexport const gfmStrikethroughToMarkdown = {\n unsafe: [{character: '~', inConstruct: 'phrasing'}],\n handlers: {delete: handleDelete}\n}\n\nhandleDelete.peek = peekDelete\n\n/** @type {FromMarkdownHandle} */\nfunction enterStrikethrough(token) {\n this.enter({type: 'delete', children: []}, token)\n}\n\n/** @type {FromMarkdownHandle} */\nfunction exitStrikethrough(token) {\n this.exit(token)\n}\n\n/**\n * @type {ToMarkdownHandle}\n * @param {Delete} node\n */\nfunction handleDelete(node, _, context, safeOptions) {\n const tracker = track(safeOptions)\n const exit = context.enter('emphasis')\n let value = 
tracker.move('~~')\n value += containerPhrasing(node, context, {\n ...tracker.current(),\n before: value,\n after: '~'\n })\n value += tracker.move('~~')\n exit()\n return value\n}\n\n/** @type {ToMarkdownHandle} */\nfunction peekDelete() {\n return '~'\n}\n","/**\n * @typedef {import('mdast').InlineCode} InlineCode\n * @typedef {import('../types.js').Handle} Handle\n */\n\nimport {patternCompile} from '../util/pattern-compile.js'\n\ninlineCode.peek = inlineCodePeek\n\n/**\n * @type {Handle}\n * @param {InlineCode} node\n */\nexport function inlineCode(node, _, context) {\n let value = node.value || ''\n let sequence = '`'\n let index = -1\n\n // If there is a single grave accent on its own in the code, use a fence of\n // two.\n // If there are two in a row, use one.\n while (new RegExp('(^|[^`])' + sequence + '([^`]|$)').test(value)) {\n sequence += '`'\n }\n\n // If this is not just spaces or eols (tabs don’t count), and either the\n // first or last character are a space, eol, or tick, then pad with spaces.\n if (\n /[^ \\r\\n]/.test(value) &&\n ((/^[ \\r\\n]/.test(value) && /[ \\r\\n]$/.test(value)) || /^`|`$/.test(value))\n ) {\n value = ' ' + value + ' '\n }\n\n // We have a potential problem: certain characters after eols could result in\n // blocks being seen.\n // For example, if someone injected the string `'\\n# b'`, then that would\n // result in an ATX heading.\n // We can’t escape characters in `inlineCode`, but because eols are\n // transformed to spaces when going from markdown to HTML anyway, we can swap\n // them out.\n while (++index < context.unsafe.length) {\n const pattern = context.unsafe[index]\n const expression = patternCompile(pattern)\n /** @type {RegExpExecArray|null} */\n let match\n\n // Only look for `atBreak`s.\n // Btw: note that `atBreak` patterns will always start the regex at LF or\n // CR.\n if (!pattern.atBreak) continue\n\n while ((match = expression.exec(value))) {\n let position = match.index\n\n // Support CRLF (patterns only look for one of the characters).\n if (\n value.charCodeAt(position) === 10 /* `\\n` */ &&\n value.charCodeAt(position - 1) === 13 /* `\\r` */\n ) {\n position--\n }\n\n value = value.slice(0, position) + ' ' + value.slice(match.index + 1)\n }\n }\n\n return sequence + value + sequence\n}\n\n/**\n * @type {Handle}\n */\nfunction inlineCodePeek() {\n return '`'\n}\n","/**\n * @typedef Options\n * Configuration (optional).\n * @property {string|null|Array} [align]\n * One style for all columns, or styles for their respective columns.\n * Each style is either `'l'` (left), `'r'` (right), or `'c'` (center).\n * Other values are treated as `''`, which doesn’t place the colon in the\n * alignment row but does align left.\n * *Only the lowercased first character is used, so `Right` is fine.*\n * @property {boolean} [padding=true]\n * Whether to add a space of padding between delimiters and cells.\n *\n * When `true`, there is padding:\n *\n * ```markdown\n * | Alpha | B |\n * | ----- | ----- |\n * | C | Delta |\n * ```\n *\n * When `false`, there is no padding:\n *\n * ```markdown\n * |Alpha|B |\n * |-----|-----|\n * |C |Delta|\n * ```\n * @property {boolean} [delimiterStart=true]\n * Whether to begin each row with the delimiter.\n *\n * > 👉 **Note**: please don’t use this: it could create fragile structures\n * > that aren’t understandable to some markdown parsers.\n *\n * When `true`, there are starting delimiters:\n *\n * ```markdown\n * | Alpha | B |\n * | ----- | ----- |\n * | C | Delta |\n * ```\n *\n * When `false`, there 
are no starting delimiters:\n *\n * ```markdown\n * Alpha | B |\n * ----- | ----- |\n * C | Delta |\n * ```\n * @property {boolean} [delimiterEnd=true]\n * Whether to end each row with the delimiter.\n *\n * > 👉 **Note**: please don’t use this: it could create fragile structures\n * > that aren’t understandable to some markdown parsers.\n *\n * When `true`, there are ending delimiters:\n *\n * ```markdown\n * | Alpha | B |\n * | ----- | ----- |\n * | C | Delta |\n * ```\n *\n * When `false`, there are no ending delimiters:\n *\n * ```markdown\n * | Alpha | B\n * | ----- | -----\n * | C | Delta\n * ```\n * @property {boolean} [alignDelimiters=true]\n * Whether to align the delimiters.\n * By default, they are aligned:\n *\n * ```markdown\n * | Alpha | B |\n * | ----- | ----- |\n * | C | Delta |\n * ```\n *\n * Pass `false` to make them staggered:\n *\n * ```markdown\n * | Alpha | B |\n * | - | - |\n * | C | Delta |\n * ```\n * @property {(value: string) => number} [stringLength]\n * Function to detect the length of table cell content.\n * This is used when aligning the delimiters (`|`) between table cells.\n * Full-width characters and emoji mess up delimiter alignment when viewing\n * the markdown source.\n * To fix this, you can pass this function, which receives the cell content\n * and returns its “visible” size.\n * Note that what is and isn’t visible depends on where the text is displayed.\n *\n * Without such a function, the following:\n *\n * ```js\n * markdownTable([\n * ['Alpha', 'Bravo'],\n * ['中文', 'Charlie'],\n * ['👩‍❤️‍👩', 'Delta']\n * ])\n * ```\n *\n * Yields:\n *\n * ```markdown\n * | Alpha | Bravo |\n * | - | - |\n * | 中文 | Charlie |\n * | 👩‍❤️‍👩 | Delta |\n * ```\n *\n * With [`string-width`](https://github.com/sindresorhus/string-width):\n *\n * ```js\n * import stringWidth from 'string-width'\n *\n * markdownTable(\n * [\n * ['Alpha', 'Bravo'],\n * ['中文', 'Charlie'],\n * ['👩‍❤️‍👩', 'Delta']\n * ],\n * {stringLength: stringWidth}\n * )\n * ```\n *\n * Yields:\n *\n * ```markdown\n * | Alpha | Bravo |\n * | ----- | ------- |\n * | 中文 | Charlie |\n * | 👩‍❤️‍👩 | Delta |\n * ```\n */\n\n/**\n * @typedef {Options} MarkdownTableOptions\n * @todo\n * Remove next major.\n */\n\n/**\n * Generate a markdown ([GFM](https://docs.github.com/en/github/writing-on-github/working-with-advanced-formatting/organizing-information-with-tables)) table..\n *\n * @param {Array>} table\n * Table data (matrix of strings).\n * @param {Options} [options]\n * Configuration (optional).\n * @returns {string}\n */\nexport function markdownTable(table, options = {}) {\n const align = (options.align || []).concat()\n const stringLength = options.stringLength || defaultStringLength\n /** @type {Array} Character codes as symbols for alignment per column. */\n const alignments = []\n /** @type {Array>} Cells per row. */\n const cellMatrix = []\n /** @type {Array>} Sizes of each cell per row. 
*/\n const sizeMatrix = []\n /** @type {Array} */\n const longestCellByColumn = []\n let mostCellsPerRow = 0\n let rowIndex = -1\n\n // This is a superfluous loop if we don’t align delimiters, but otherwise we’d\n // do superfluous work when aligning, so optimize for aligning.\n while (++rowIndex < table.length) {\n /** @type {Array} */\n const row = []\n /** @type {Array} */\n const sizes = []\n let columnIndex = -1\n\n if (table[rowIndex].length > mostCellsPerRow) {\n mostCellsPerRow = table[rowIndex].length\n }\n\n while (++columnIndex < table[rowIndex].length) {\n const cell = serialize(table[rowIndex][columnIndex])\n\n if (options.alignDelimiters !== false) {\n const size = stringLength(cell)\n sizes[columnIndex] = size\n\n if (\n longestCellByColumn[columnIndex] === undefined ||\n size > longestCellByColumn[columnIndex]\n ) {\n longestCellByColumn[columnIndex] = size\n }\n }\n\n row.push(cell)\n }\n\n cellMatrix[rowIndex] = row\n sizeMatrix[rowIndex] = sizes\n }\n\n // Figure out which alignments to use.\n let columnIndex = -1\n\n if (typeof align === 'object' && 'length' in align) {\n while (++columnIndex < mostCellsPerRow) {\n alignments[columnIndex] = toAlignment(align[columnIndex])\n }\n } else {\n const code = toAlignment(align)\n\n while (++columnIndex < mostCellsPerRow) {\n alignments[columnIndex] = code\n }\n }\n\n // Inject the alignment row.\n columnIndex = -1\n /** @type {Array} */\n const row = []\n /** @type {Array} */\n const sizes = []\n\n while (++columnIndex < mostCellsPerRow) {\n const code = alignments[columnIndex]\n let before = ''\n let after = ''\n\n if (code === 99 /* `c` */) {\n before = ':'\n after = ':'\n } else if (code === 108 /* `l` */) {\n before = ':'\n } else if (code === 114 /* `r` */) {\n after = ':'\n }\n\n // There *must* be at least one hyphen-minus in each alignment cell.\n let size =\n options.alignDelimiters === false\n ? 
1\n : Math.max(\n 1,\n longestCellByColumn[columnIndex] - before.length - after.length\n )\n\n const cell = before + '-'.repeat(size) + after\n\n if (options.alignDelimiters !== false) {\n size = before.length + size + after.length\n\n if (size > longestCellByColumn[columnIndex]) {\n longestCellByColumn[columnIndex] = size\n }\n\n sizes[columnIndex] = size\n }\n\n row[columnIndex] = cell\n }\n\n // Inject the alignment row.\n cellMatrix.splice(1, 0, row)\n sizeMatrix.splice(1, 0, sizes)\n\n rowIndex = -1\n /** @type {Array} */\n const lines = []\n\n while (++rowIndex < cellMatrix.length) {\n const row = cellMatrix[rowIndex]\n const sizes = sizeMatrix[rowIndex]\n columnIndex = -1\n /** @type {Array} */\n const line = []\n\n while (++columnIndex < mostCellsPerRow) {\n const cell = row[columnIndex] || ''\n let before = ''\n let after = ''\n\n if (options.alignDelimiters !== false) {\n const size =\n longestCellByColumn[columnIndex] - (sizes[columnIndex] || 0)\n const code = alignments[columnIndex]\n\n if (code === 114 /* `r` */) {\n before = ' '.repeat(size)\n } else if (code === 99 /* `c` */) {\n if (size % 2) {\n before = ' '.repeat(size / 2 + 0.5)\n after = ' '.repeat(size / 2 - 0.5)\n } else {\n before = ' '.repeat(size / 2)\n after = before\n }\n } else {\n after = ' '.repeat(size)\n }\n }\n\n if (options.delimiterStart !== false && !columnIndex) {\n line.push('|')\n }\n\n if (\n options.padding !== false &&\n // Don’t add the opening space if we’re not aligning and the cell is\n // empty: there will be a closing space.\n !(options.alignDelimiters === false && cell === '') &&\n (options.delimiterStart !== false || columnIndex)\n ) {\n line.push(' ')\n }\n\n if (options.alignDelimiters !== false) {\n line.push(before)\n }\n\n line.push(cell)\n\n if (options.alignDelimiters !== false) {\n line.push(after)\n }\n\n if (options.padding !== false) {\n line.push(' ')\n }\n\n if (\n options.delimiterEnd !== false ||\n columnIndex !== mostCellsPerRow - 1\n ) {\n line.push('|')\n }\n }\n\n lines.push(\n options.delimiterEnd === false\n ? line.join('').replace(/ +$/, '')\n : line.join('')\n )\n }\n\n return lines.join('\\n')\n}\n\n/**\n * @param {string|null|undefined} [value]\n * @returns {string}\n */\nfunction serialize(value) {\n return value === null || value === undefined ? '' : String(value)\n}\n\n/**\n * @param {string} value\n * @returns {number}\n */\nfunction defaultStringLength(value) {\n return value.length\n}\n\n/**\n * @param {string|null|undefined} value\n * @returns {number}\n */\nfunction toAlignment(value) {\n const code = typeof value === 'string' ? value.codePointAt(0) : 0\n\n return code === 67 /* `C` */ || code === 99 /* `c` */\n ? 99 /* `c` */\n : code === 76 /* `L` */ || code === 108 /* `l` */\n ? 108 /* `l` */\n : code === 82 /* `R` */ || code === 114 /* `r` */\n ? 
114 /* `r` */\n : 0\n}\n","/**\n * @typedef {import('mdast').AlignType} AlignType\n * @typedef {import('mdast').Table} Table\n * @typedef {import('mdast').TableRow} TableRow\n * @typedef {import('mdast').TableCell} TableCell\n * @typedef {import('mdast').InlineCode} InlineCode\n * @typedef {import('markdown-table').MarkdownTableOptions} MarkdownTableOptions\n * @typedef {import('mdast-util-from-markdown').Extension} FromMarkdownExtension\n * @typedef {import('mdast-util-from-markdown').Handle} FromMarkdownHandle\n * @typedef {import('mdast-util-to-markdown').Options} ToMarkdownExtension\n * @typedef {import('mdast-util-to-markdown').Handle} ToMarkdownHandle\n * @typedef {import('mdast-util-to-markdown').Context} ToMarkdownContext\n * @typedef {import('mdast-util-to-markdown').SafeOptions} SafeOptions\n *\n * @typedef Options\n * @property {boolean} [tableCellPadding=true]\n * @property {boolean} [tablePipeAlign=true]\n * @property {MarkdownTableOptions['stringLength']} [stringLength]\n */\n\nimport {containerPhrasing} from 'mdast-util-to-markdown/lib/util/container-phrasing.js'\nimport {inlineCode} from 'mdast-util-to-markdown/lib/handle/inline-code.js'\nimport {markdownTable} from 'markdown-table'\n\n/** @type {FromMarkdownExtension} */\nexport const gfmTableFromMarkdown = {\n enter: {\n table: enterTable,\n tableData: enterCell,\n tableHeader: enterCell,\n tableRow: enterRow\n },\n exit: {\n codeText: exitCodeText,\n table: exitTable,\n tableData: exit,\n tableHeader: exit,\n tableRow: exit\n }\n}\n\n/** @type {FromMarkdownHandle} */\nfunction enterTable(token) {\n /** @type {Array<'left'|'right'|'center'|'none'>} */\n // @ts-expect-error: `align` is custom.\n const align = token._align\n this.enter(\n {\n type: 'table',\n align: align.map((d) => (d === 'none' ? null : d)),\n children: []\n },\n token\n )\n this.setData('inTable', true)\n}\n\n/** @type {FromMarkdownHandle} */\nfunction exitTable(token) {\n this.exit(token)\n this.setData('inTable')\n}\n\n/** @type {FromMarkdownHandle} */\nfunction enterRow(token) {\n this.enter({type: 'tableRow', children: []}, token)\n}\n\n/** @type {FromMarkdownHandle} */\nfunction exit(token) {\n this.exit(token)\n}\n\n/** @type {FromMarkdownHandle} */\nfunction enterCell(token) {\n this.enter({type: 'tableCell', children: []}, token)\n}\n\n// Overwrite the default code text data handler to unescape escaped pipes when\n// they are in tables.\n/** @type {FromMarkdownHandle} */\nfunction exitCodeText(token) {\n let value = this.resume()\n\n if (this.getData('inTable')) {\n value = value.replace(/\\\\([\\\\|])/g, replace)\n }\n\n const node = /** @type {InlineCode} */ (this.stack[this.stack.length - 1])\n node.value = value\n this.exit(token)\n}\n\n/**\n * @param {string} $0\n * @param {string} $1\n * @returns {string}\n */\nfunction replace($0, $1) {\n // Pipes work, backslashes don’t (but can’t escape pipes).\n return $1 === '|' ? $1 : $0\n}\n\n/**\n * @param {Options} [options]\n * @returns {ToMarkdownExtension}\n */\nexport function gfmTableToMarkdown(options) {\n const settings = options || {}\n const padding = settings.tableCellPadding\n const alignDelimiters = settings.tablePipeAlign\n const stringLength = settings.stringLength\n const around = padding ? 
' ' : '|'\n\n return {\n unsafe: [\n {character: '\\r', inConstruct: 'tableCell'},\n {character: '\\n', inConstruct: 'tableCell'},\n // A pipe, when followed by a tab or space (padding), or a dash or colon\n // (unpadded delimiter row), could result in a table.\n {atBreak: true, character: '|', after: '[\\t :-]'},\n // A pipe in a cell must be encoded.\n {character: '|', inConstruct: 'tableCell'},\n // A colon must be followed by a dash, in which case it could start a\n // delimiter row.\n {atBreak: true, character: ':', after: '-'},\n // A delimiter row can also start with a dash, when followed by more\n // dashes, a colon, or a pipe.\n // This is a stricter version than the built in check for lists, thematic\n // breaks, and setex heading underlines though:\n // \n {atBreak: true, character: '-', after: '[:|-]'}\n ],\n handlers: {\n table: handleTable,\n tableRow: handleTableRow,\n tableCell: handleTableCell,\n inlineCode: inlineCodeWithTable\n }\n }\n\n /**\n * @type {ToMarkdownHandle}\n * @param {Table} node\n */\n function handleTable(node, _, context, safeOptions) {\n return serializeData(\n handleTableAsData(node, context, safeOptions),\n // @ts-expect-error: fixed in `markdown-table@3.0.1`.\n node.align\n )\n }\n\n /**\n * This function isn’t really used normally, because we handle rows at the\n * table level.\n * But, if someone passes in a table row, this ensures we make somewhat sense.\n *\n * @type {ToMarkdownHandle}\n * @param {TableRow} node\n */\n function handleTableRow(node, _, context, safeOptions) {\n const row = handleTableRowAsData(node, context, safeOptions)\n // `markdown-table` will always add an align row\n const value = serializeData([row])\n return value.slice(0, value.indexOf('\\n'))\n }\n\n /**\n * @type {ToMarkdownHandle}\n * @param {TableCell} node\n */\n function handleTableCell(node, _, context, safeOptions) {\n const exit = context.enter('tableCell')\n const subexit = context.enter('phrasing')\n const value = containerPhrasing(node, context, {\n ...safeOptions,\n before: around,\n after: around\n })\n subexit()\n exit()\n return value\n }\n\n /**\n * @param {Array>} matrix\n * @param {Array} [align]\n */\n function serializeData(matrix, align) {\n return markdownTable(matrix, {\n align,\n alignDelimiters,\n padding,\n stringLength\n })\n }\n\n /**\n * @param {Table} node\n * @param {ToMarkdownContext} context\n * @param {SafeOptions} safeOptions\n */\n function handleTableAsData(node, context, safeOptions) {\n const children = node.children\n let index = -1\n /** @type {Array>} */\n const result = []\n const subexit = context.enter('table')\n\n while (++index < children.length) {\n result[index] = handleTableRowAsData(\n children[index],\n context,\n safeOptions\n )\n }\n\n subexit()\n\n return result\n }\n\n /**\n * @param {TableRow} node\n * @param {ToMarkdownContext} context\n * @param {SafeOptions} safeOptions\n */\n function handleTableRowAsData(node, context, safeOptions) {\n const children = node.children\n let index = -1\n /** @type {Array} */\n const result = []\n const subexit = context.enter('tableRow')\n\n while (++index < children.length) {\n // Note: the positional info as used here is incorrect.\n // Making it correct would be impossible due to aligning cells?\n // And it would need copy/pasting `markdown-table` into this project.\n result[index] = handleTableCell(\n children[index],\n node,\n context,\n safeOptions\n )\n }\n\n subexit()\n\n return result\n }\n\n /**\n * @type {ToMarkdownHandle}\n * @param {InlineCode} node\n */\n function 
inlineCodeWithTable(node, parent, context) {\n let value = inlineCode(node, parent, context)\n\n if (context.stack.includes('tableCell')) {\n value = value.replace(/\\|/g, '\\\\$&')\n }\n\n return value\n }\n}\n","/**\n * @typedef {import('mdast').ListItem} ListItem\n * @typedef {import('mdast').List} List\n * @typedef {import('../util/indent-lines.js').Map} Map\n * @typedef {import('../types.js').Options} Options\n * @typedef {import('../types.js').Handle} Handle\n */\n\nimport {checkBullet} from '../util/check-bullet.js'\nimport {checkListItemIndent} from '../util/check-list-item-indent.js'\nimport {containerFlow} from '../util/container-flow.js'\nimport {indentLines} from '../util/indent-lines.js'\nimport {track} from '../util/track.js'\n\n/**\n * @type {Handle}\n * @param {ListItem} node\n */\nexport function listItem(node, parent, context, safeOptions) {\n const listItemIndent = checkListItemIndent(context)\n let bullet = context.bulletCurrent || checkBullet(context)\n\n // Add the marker value for ordered lists.\n if (parent && parent.type === 'list' && parent.ordered) {\n bullet =\n (typeof parent.start === 'number' && parent.start > -1\n ? parent.start\n : 1) +\n (context.options.incrementListMarker === false\n ? 0\n : parent.children.indexOf(node)) +\n bullet\n }\n\n let size = bullet.length + 1\n\n if (\n listItemIndent === 'tab' ||\n (listItemIndent === 'mixed' &&\n ((parent && parent.type === 'list' && parent.spread) || node.spread))\n ) {\n size = Math.ceil(size / 4) * 4\n }\n\n const tracker = track(safeOptions)\n tracker.move(bullet + ' '.repeat(size - bullet.length))\n tracker.shift(size)\n const exit = context.enter('listItem')\n const value = indentLines(\n containerFlow(node, context, tracker.current()),\n map\n )\n exit()\n\n return value\n\n /** @type {Map} */\n function map(line, index, blank) {\n if (index) {\n return (blank ? '' : ' '.repeat(size)) + line\n }\n\n return (blank ? 
bullet : bullet + ' '.repeat(size - bullet.length)) + line\n }\n}\n","/**\n * @typedef {import('../types.js').Context} Context\n * @typedef {import('../types.js').Options} Options\n */\n\n/**\n * @param {Context} context\n * @returns {Exclude}\n */\nexport function checkListItemIndent(context) {\n const style = context.options.listItemIndent || 'tab'\n\n // To do: remove in a major.\n // @ts-expect-error: deprecated.\n if (style === 1 || style === '1') {\n return 'one'\n }\n\n if (style !== 'tab' && style !== 'one' && style !== 'mixed') {\n throw new Error(\n 'Cannot serialize items with `' +\n style +\n '` for `options.listItemIndent`, expected `tab`, `one`, or `mixed`'\n )\n }\n\n return style\n}\n","/**\n * @typedef {import('../types.js').Context} Context\n * @typedef {import('../types.js').Options} Options\n */\n\n/**\n * @param {Context} context\n * @returns {Exclude}\n */\nexport function checkBullet(context) {\n const marker = context.options.bullet || '*'\n\n if (marker !== '*' && marker !== '+' && marker !== '-') {\n throw new Error(\n 'Cannot serialize items with `' +\n marker +\n '` for `options.bullet`, expected `*`, `+`, or `-`'\n )\n }\n\n return marker\n}\n","/**\n * @typedef {Extract} Parent\n * @typedef {import('mdast').ListItem} ListItem\n * @typedef {import('mdast').Paragraph} Paragraph\n * @typedef {import('mdast').BlockContent} BlockContent\n * @typedef {import('mdast-util-from-markdown').Extension} FromMarkdownExtension\n * @typedef {import('mdast-util-from-markdown').Handle} FromMarkdownHandle\n * @typedef {import('mdast-util-to-markdown').Options} ToMarkdownExtension\n * @typedef {import('mdast-util-to-markdown').Handle} ToMarkdownHandle\n */\n\nimport {listItem} from 'mdast-util-to-markdown/lib/handle/list-item.js'\nimport {track} from 'mdast-util-to-markdown/lib/util/track.js'\n\n/** @type {FromMarkdownExtension} */\nexport const gfmTaskListItemFromMarkdown = {\n exit: {\n taskListCheckValueChecked: exitCheck,\n taskListCheckValueUnchecked: exitCheck,\n paragraph: exitParagraphWithTaskListItem\n }\n}\n\n/** @type {ToMarkdownExtension} */\nexport const gfmTaskListItemToMarkdown = {\n unsafe: [{atBreak: true, character: '-', after: '[:|-]'}],\n handlers: {listItem: listItemWithTaskListItem}\n}\n\n/** @type {FromMarkdownHandle} */\nfunction exitCheck(token) {\n const node = /** @type {ListItem} */ (this.stack[this.stack.length - 2])\n // We’re always in a paragraph, in a list item.\n node.checked = token.type === 'taskListCheckValueChecked'\n}\n\n/** @type {FromMarkdownHandle} */\nfunction exitParagraphWithTaskListItem(token) {\n const parent = /** @type {Parent} */ (this.stack[this.stack.length - 2])\n const node = /** @type {Paragraph} */ (this.stack[this.stack.length - 1])\n const siblings = parent.children\n const head = node.children[0]\n let index = -1\n /** @type {Paragraph|undefined} */\n let firstParaghraph\n\n if (\n parent &&\n parent.type === 'listItem' &&\n typeof parent.checked === 'boolean' &&\n head &&\n head.type === 'text'\n ) {\n while (++index < siblings.length) {\n const sibling = siblings[index]\n if (sibling.type === 'paragraph') {\n firstParaghraph = sibling\n break\n }\n }\n\n if (firstParaghraph === node) {\n // Must start with a space or a tab.\n head.value = head.value.slice(1)\n\n if (head.value.length === 0) {\n node.children.shift()\n } else if (\n node.position &&\n head.position &&\n typeof head.position.start.offset === 'number'\n ) {\n head.position.start.column++\n head.position.start.offset++\n node.position.start = 
Object.assign({}, head.position.start)\n }\n }\n }\n\n this.exit(token)\n}\n\n/**\n * @type {ToMarkdownHandle}\n * @param {ListItem} node\n */\nfunction listItemWithTaskListItem(node, parent, context, safeOptions) {\n const head = node.children[0]\n const checkable =\n typeof node.checked === 'boolean' && head && head.type === 'paragraph'\n const checkbox = '[' + (node.checked ? 'x' : ' ') + '] '\n const tracker = track(safeOptions)\n\n if (checkable) {\n tracker.move(checkbox)\n }\n\n let value = listItem(node, parent, context, {\n ...safeOptions,\n ...tracker.current()\n })\n\n if (checkable) {\n value = value.replace(/^(?:[*+-]|\\d+\\.)([\\r\\n]| {1,3})/, check)\n }\n\n return value\n\n /**\n * @param {string} $0\n * @returns {string}\n */\n function check($0) {\n return $0 + checkbox\n }\n}\n","/**\n * @typedef {import('mdast-util-from-markdown').Extension} FromMarkdownExtension\n * @typedef {import('mdast-util-to-markdown').Options} ToMarkdownExtension\n *\n * @typedef {import('mdast-util-gfm-table').Options} Options\n */\n\nimport {\n gfmAutolinkLiteralFromMarkdown,\n gfmAutolinkLiteralToMarkdown\n} from 'mdast-util-gfm-autolink-literal'\nimport {\n gfmFootnoteFromMarkdown,\n gfmFootnoteToMarkdown\n} from 'mdast-util-gfm-footnote'\nimport {\n gfmStrikethroughFromMarkdown,\n gfmStrikethroughToMarkdown\n} from 'mdast-util-gfm-strikethrough'\nimport {gfmTableFromMarkdown, gfmTableToMarkdown} from 'mdast-util-gfm-table'\nimport {\n gfmTaskListItemFromMarkdown,\n gfmTaskListItemToMarkdown\n} from 'mdast-util-gfm-task-list-item'\n\n/**\n * @returns {Array}\n */\nexport function gfmFromMarkdown() {\n return [\n gfmAutolinkLiteralFromMarkdown,\n gfmFootnoteFromMarkdown(),\n gfmStrikethroughFromMarkdown,\n gfmTableFromMarkdown,\n gfmTaskListItemFromMarkdown\n ]\n}\n\n/**\n * @param {Options} [options]\n * @returns {ToMarkdownExtension}\n */\nexport function gfmToMarkdown(options) {\n return {\n extensions: [\n gfmAutolinkLiteralToMarkdown,\n gfmFootnoteToMarkdown(),\n gfmStrikethroughToMarkdown,\n gfmTableToMarkdown(options),\n gfmTaskListItemToMarkdown\n ]\n }\n}\n","/**\n * @typedef {import('mdast').Root} Root\n * @typedef {import('micromark-extension-gfm').Options & import('mdast-util-gfm').Options} Options\n */\n\nimport {gfm} from 'micromark-extension-gfm'\nimport {gfmFromMarkdown, gfmToMarkdown} from 'mdast-util-gfm'\n\n/**\n * Plugin to support GFM (autolink literals, footnotes, strikethrough, tables, tasklists).\n *\n * @type {import('unified').Plugin<[Options?]|void[], Root>}\n */\nexport default function remarkGfm(options = {}) {\n const data = this.data()\n\n add('micromarkExtensions', gfm(options))\n add('fromMarkdownExtensions', gfmFromMarkdown())\n add('toMarkdownExtensions', gfmToMarkdown(options))\n\n /**\n * @param {string} field\n * @param {unknown} value\n */\n function add(field, value) {\n const list = /** @type {unknown[]} */ (\n // Other extensions\n /* c8 ignore next 2 */\n data[field] ? 
data[field] : (data[field] = [])\n )\n\n list.push(value)\n }\n}\n","var own = {}.hasOwnProperty\n\n/**\n * Check if `node` has a set `name` property.\n *\n * @param {unknown} node\n * @param {string} name\n * @returns {boolean}\n */\nexport function hasProperty(node, name) {\n /** @type {unknown} */\n var value =\n name &&\n node &&\n typeof node === 'object' &&\n // @ts-ignore Looks like a node.\n node.type === 'element' &&\n // @ts-ignore Looks like an element.\n node.properties &&\n // @ts-ignore Looks like an element.\n own.call(node.properties, name) &&\n // @ts-ignore Looks like an element.\n node.properties[name]\n\n return value !== null && value !== undefined && value !== false\n}\n","/**\n * @typedef {import('hast').Parent} Parent\n * @typedef {import('hast').Root} Root\n * @typedef {Root|Parent['children'][number]} Node\n */\n\n/**\n * Rank of a heading: H1 -> 1, H2 -> 2, etc.\n *\n * @param {Node} node\n * @returns {number|null}\n */\nexport function headingRank(node) {\n var name =\n (node && node.type === 'element' && node.tagName.toLowerCase()) || ''\n var code =\n name.length === 2 && name.charCodeAt(0) === 104 /* `h` */\n ? name.charCodeAt(1)\n : 0\n return code > 48 /* `0` */ && code < 55 /* `7` */ ? code - 48 /* `0` */ : null\n}\n","/**\n * @fileoverview\n * Get the plain-text value of a hast node.\n * @longdescription\n * ## Use\n *\n * ```js\n * import {h} from 'hastscript'\n * import {toString} from 'hast-util-to-string'\n *\n * toString(h('p', 'Alpha'))\n * //=> 'Alpha'\n * toString(h('div', [h('b', 'Bold'), ' and ', h('i', 'italic'), '.']))\n * //=> 'Bold and italic.'\n * ```\n *\n * ## API\n *\n * ### `toString(node)`\n *\n * Transform a node to a string.\n */\n\n/**\n * @typedef {import('hast').Root} Root\n * @typedef {import('hast').Element} Element\n * @typedef {Root|Root['children'][number]} Node\n */\n\n/**\n * Get the plain-text value of a hast node.\n *\n * @param {Node} node\n * @returns {string}\n */\nexport function toString(node) {\n // “The concatenation of data of all the Text node descendants of the context\n // object, in tree order.”\n if ('children' in node) {\n return all(node)\n }\n\n // “Context object’s data.”\n return 'value' in node ? node.value : ''\n}\n\n/**\n * @param {Node} node\n * @returns {string}\n */\nfunction one(node) {\n if (node.type === 'text') {\n return node.value\n }\n\n return 'children' in node ? 
all(node) : ''\n}\n\n/**\n * @param {Root|Element} node\n * @returns {string}\n */\nfunction all(node) {\n let index = -1\n /** @type {string[]} */\n const result = []\n\n while (++index < node.children.length) {\n result[index] = one(node.children[index])\n }\n\n return result.join('')\n}\n","/**\n * @typedef {import('hast').Root} Root\n */\n\nimport Slugger from 'github-slugger'\nimport {hasProperty} from 'hast-util-has-property'\nimport {headingRank} from 'hast-util-heading-rank'\nimport {toString} from 'hast-util-to-string'\nimport {visit} from 'unist-util-visit'\n\nconst slugs = new Slugger()\n\n/**\n * Plugin to add `id`s to headings.\n *\n * @type {import('unified').Plugin, Root>}\n */\nexport default function rehypeSlug() {\n return (tree) => {\n slugs.reset()\n\n visit(tree, 'element', (node) => {\n if (headingRank(node) && node.properties && !hasProperty(node, 'id')) {\n node.properties.id = slugs.slug(toString(node))\n }\n })\n }\n}\n","/**\n * @typedef {import('unist').Node} Node\n * @typedef {import('unist').Parent} Parent\n * @typedef {import('hast').Element} Element\n *\n * @typedef {string} TagName\n * @typedef {null|undefined|TagName|TestFunctionAnything|Array.} Test\n */\n\n/**\n * @template {Element} T\n * @typedef {null|undefined|T['tagName']|TestFunctionPredicate|Array.>} PredicateTest\n */\n\n/**\n * Check if an element passes a test\n *\n * @callback TestFunctionAnything\n * @param {Element} element\n * @param {number|null|undefined} [index]\n * @param {Parent|null|undefined} [parent]\n * @returns {boolean|void}\n */\n\n/**\n * Check if an element passes a certain node test\n *\n * @template {Element} X\n * @callback TestFunctionPredicate\n * @param {Element} element\n * @param {number|null|undefined} [index]\n * @param {Parent|null|undefined} [parent]\n * @returns {element is X}\n */\n\n/**\n * Check if a node is an element and passes a certain node test\n *\n * @callback AssertAnything\n * @param {unknown} [node]\n * @param {number|null|undefined} [index]\n * @param {Parent|null|undefined} [parent]\n * @returns {boolean}\n */\n\n/**\n * Check if a node is an element and passes a certain node test\n *\n * @template {Element} Y\n * @callback AssertPredicate\n * @param {unknown} [node]\n * @param {number|null|undefined} [index]\n * @param {Parent|null|undefined} [parent]\n * @returns {node is Y}\n */\n\n// Check if `node` is an `element` and whether it passes the given test.\nexport const isElement =\n /**\n * Check if a node is an element and passes a test.\n * When a `parent` node is known the `index` of node should also be given.\n *\n * @type {(\n * (() => false) &\n * ((node: unknown, test?: PredicateTest, index?: number, parent?: Parent, context?: unknown) => node is T) &\n * ((node: unknown, test: Test, index?: number, parent?: Parent, context?: unknown) => boolean)\n * )}\n */\n (\n /**\n * Check if a node passes a test.\n * When a `parent` node is known the `index` of node should also be given.\n *\n * @param {unknown} [node] Node to check\n * @param {Test} [test] When nullish, checks if `node` is a `Node`.\n * When `string`, works like passing `function (node) {return node.type === test}`.\n * When `function` checks if function passed the node is true.\n * When `array`, checks any one of the subtests pass.\n * @param {number} [index] Position of `node` in `parent`\n * @param {Parent} [parent] Parent of `node`\n * @param {unknown} [context] Context object to invoke `test` with\n * @returns {boolean} Whether test passed and `node` is an `Element` (object with 
`type` set to `element` and `tagName` set to a non-empty string).\n */\n // eslint-disable-next-line max-params\n function (node, test, index, parent, context) {\n const check = convertElement(test)\n\n if (\n index !== undefined &&\n index !== null &&\n (typeof index !== 'number' ||\n index < 0 ||\n index === Number.POSITIVE_INFINITY)\n ) {\n throw new Error('Expected positive finite index for child node')\n }\n\n if (\n parent !== undefined &&\n parent !== null &&\n (!parent.type || !parent.children)\n ) {\n throw new Error('Expected parent node')\n }\n\n // @ts-expect-error Looks like a node.\n if (!node || !node.type || typeof node.type !== 'string') {\n return false\n }\n\n if (\n (parent === undefined || parent === null) !==\n (index === undefined || index === null)\n ) {\n throw new Error('Expected both parent and index')\n }\n\n return check.call(context, node, index, parent)\n }\n )\n\nexport const convertElement =\n /**\n * @type {(\n * ((test: T['tagName']|TestFunctionPredicate) => AssertPredicate) &\n * ((test?: Test) => AssertAnything)\n * )}\n */\n (\n /**\n * Generate an assertion from a check.\n * @param {Test} [test]\n * When nullish, checks if `node` is a `Node`.\n * When `string`, works like passing `function (node) {return node.type === test}`.\n * When `function` checks if function passed the node is true.\n * When `object`, checks that all keys in test are in node, and that they have (strictly) equal values.\n * When `array`, checks any one of the subtests pass.\n * @returns {AssertAnything}\n */\n function (test) {\n if (test === undefined || test === null) {\n return element\n }\n\n if (typeof test === 'string') {\n return tagNameFactory(test)\n }\n\n if (typeof test === 'object') {\n return anyFactory(test)\n }\n\n if (typeof test === 'function') {\n return castFactory(test)\n }\n\n throw new Error('Expected function, string, or array as test')\n }\n )\n\n/**\n * @param {Array.} tests\n * @returns {AssertAnything}\n */\nfunction anyFactory(tests) {\n /** @type {Array.} */\n const checks = []\n let index = -1\n\n while (++index < tests.length) {\n checks[index] = convertElement(tests[index])\n }\n\n return castFactory(any)\n\n /**\n * @this {unknown}\n * @param {unknown[]} parameters\n * @returns {boolean}\n */\n function any(...parameters) {\n let index = -1\n\n while (++index < checks.length) {\n if (checks[index].call(this, ...parameters)) {\n return true\n }\n }\n\n return false\n }\n}\n\n/**\n * Utility to convert a string into a function which checks a given node’s tag\n * name for said string.\n *\n * @param {TagName} check\n * @returns {AssertAnything}\n */\nfunction tagNameFactory(check) {\n return tagName\n\n /**\n * @param {unknown} node\n * @returns {boolean}\n */\n function tagName(node) {\n return element(node) && node.tagName === check\n }\n}\n\n/**\n * @param {TestFunctionAnything} check\n * @returns {AssertAnything}\n */\nfunction castFactory(check) {\n return assertion\n\n /**\n * @this {unknown}\n * @param {unknown} node\n * @param {Array.} parameters\n * @returns {boolean}\n */\n function assertion(node, ...parameters) {\n // @ts-expect-error: fine.\n return element(node) && Boolean(check.call(this, node, ...parameters))\n }\n}\n\n/**\n * Utility to return true if this is an element.\n * @param {unknown} node\n * @returns {node is Element}\n */\nfunction element(node) {\n return Boolean(\n node &&\n typeof node === 'object' &&\n // @ts-expect-error Looks like a node.\n node.type === 'element' &&\n // @ts-expect-error Looks like an element.\n 
typeof node.tagName === 'string'\n )\n}\n","/**\n * @typedef {import('hast').Root} Root\n * @typedef {import('hast').Parent} Parent\n * @typedef {import('hast').Element} Element\n * @typedef {Element['children'][number]} ElementChild\n * @typedef {import('hast').Properties} Properties\n * @typedef {import('hast-util-is-element').Test} Test\n *\n * @typedef {'prepend'|'append'|'wrap'|'before'|'after'} Behavior\n *\n * @callback Build\n * @param {Element} node\n * @returns {ElementChild|ElementChild[]}\n *\n * @typedef Options\n * Configuration.\n * @property {Behavior} [behavior='prepend']\n * How to create links.\n * @property {Behavior} [behaviour]\n * Please use `behavior` instead\n * @property {Properties} [properties]\n * Extra properties to set on the link when injecting.\n * Defaults to `{ariaHidden: true, tabIndex: -1}` when `'prepend'` or\n * `'append'`.\n * @property {ElementChild|ElementChild[]|Build} [content={type: 'element', tagName: 'span', properties: {className: ['icon', 'icon-link']}, children: []}]\n * hast nodes to insert in the link.\n * @property {ElementChild|ElementChild[]|Build} [group]\n * hast node to wrap the heading and link with, if `behavior` is `'before'` or\n * `'after'`.\n * There is no default.\n * @property {Test} [test]\n * Test to define which heading elements are linked.\n * Any test that can be given to `hast-util-is-element` is supported.\n * The default (no test) is to link all headings.\n * Can be used to link only h1-h3, or for example all except h1.\n */\n\nimport extend from 'extend'\nimport {hasProperty} from 'hast-util-has-property'\nimport {headingRank} from 'hast-util-heading-rank'\nimport {convertElement} from 'hast-util-is-element'\nimport {visit, SKIP} from 'unist-util-visit'\n\n/** @type {Element} */\nconst contentDefaults = {\n type: 'element',\n tagName: 'span',\n properties: {className: ['icon', 'icon-link']},\n children: []\n}\n\n/**\n * Plugin to automatically add links to headings (h1-h6).\n *\n * @type {import('unified').Plugin<[Options?]|void[], Root>}\n */\nexport default function rehypeAutolinkHeadings(options = {}) {\n let props = options.properties\n const behavior = options.behaviour || options.behavior || 'prepend'\n const content = options.content || contentDefaults\n const group = options.group\n const is = convertElement(options.test)\n\n /** @type {import('unist-util-visit/complex-types').Visitor} */\n let method\n\n if (behavior === 'wrap') {\n method = wrap\n } else if (behavior === 'before' || behavior === 'after') {\n method = around\n } else {\n if (!props) {\n props = {ariaHidden: 'true', tabIndex: -1}\n }\n\n method = inject\n }\n\n return (tree) => {\n visit(tree, 'element', (node, index, parent) => {\n if (\n headingRank(node) &&\n hasProperty(node, 'id') &&\n is(node, index, parent)\n ) {\n return method(node, index, parent)\n }\n })\n }\n\n /** @type {import('unist-util-visit/complex-types').Visitor} */\n function inject(node) {\n node.children[behavior === 'prepend' ? 'unshift' : 'push'](\n create(node, extend(true, {}, props), toChildren(content, node))\n )\n\n return [SKIP]\n }\n\n /** @type {import('unist-util-visit/complex-types').Visitor} */\n function around(node, index, parent) {\n // Uncommon.\n /* c8 ignore next */\n if (typeof index !== 'number' || !parent) return\n\n const link = create(\n node,\n extend(true, {}, props),\n toChildren(content, node)\n )\n let nodes = behavior === 'before' ? 
[link, node] : [node, link]\n\n if (group) {\n const grouping = toNode(group, node)\n\n if (grouping && !Array.isArray(grouping) && grouping.type === 'element') {\n grouping.children = nodes\n nodes = [grouping]\n }\n }\n\n parent.children.splice(index, 1, ...nodes)\n\n return [SKIP, index + nodes.length]\n }\n\n /** @type {import('unist-util-visit/complex-types').Visitor} */\n function wrap(node) {\n node.children = [create(node, extend(true, {}, props), node.children)]\n return [SKIP]\n }\n\n /**\n * @param {ElementChild|ElementChild[]|Build} value\n * @param {Element} node\n * @returns {ElementChild[]}\n */\n function toChildren(value, node) {\n const result = toNode(value, node)\n return Array.isArray(result) ? result : [result]\n }\n\n /**\n * @param {ElementChild|ElementChild[]|Build} value\n * @param {Element} node\n * @returns {ElementChild|ElementChild[]}\n */\n function toNode(value, node) {\n if (typeof value === 'function') return value(node)\n return extend(true, Array.isArray(value) ? [] : {}, value)\n }\n\n /**\n * @param {Element} node\n * @param {Properties} props\n * @param {ElementChild[]} children\n * @returns {Element}\n */\n function create(node, props, children) {\n return {\n type: 'element',\n tagName: 'a',\n properties: Object.assign({}, props, {\n // Fix hast types and make them required.\n /* c8 ignore next */\n href: '#' + (node.properties || {}).id\n }),\n children\n }\n }\n}\n","/**\n * @typedef {import('./core.js').HChild} Child Acceptable child value\n * @typedef {import('./core.js').HProperties} Properties Acceptable properties value.\n *\n * @typedef {import('./jsx-classic').Element} s.JSX.Element\n * @typedef {import('./jsx-classic').IntrinsicAttributes} s.JSX.IntrinsicAttributes\n * @typedef {import('./jsx-classic').IntrinsicElements} s.JSX.IntrinsicElements\n * @typedef {import('./jsx-classic').ElementChildrenAttribute} s.JSX.ElementChildrenAttribute\n */\n\nimport {svg} from 'property-information'\nimport {core} from './core.js'\nimport {svgCaseSensitiveTagNames} from './svg-case-sensitive-tag-names.js'\n\nexport const s = core(svg, 'g', svgCaseSensitiveTagNames)\n","export const svgCaseSensitiveTagNames = [\n 'altGlyph',\n 'altGlyphDef',\n 'altGlyphItem',\n 'animateColor',\n 'animateMotion',\n 'animateTransform',\n 'clipPath',\n 'feBlend',\n 'feColorMatrix',\n 'feComponentTransfer',\n 'feComposite',\n 'feConvolveMatrix',\n 'feDiffuseLighting',\n 'feDisplacementMap',\n 'feDistantLight',\n 'feDropShadow',\n 'feFlood',\n 'feFuncA',\n 'feFuncB',\n 'feFuncG',\n 'feFuncR',\n 'feGaussianBlur',\n 'feImage',\n 'feMerge',\n 'feMergeNode',\n 'feMorphology',\n 'feOffset',\n 'fePointLight',\n 'feSpecularLighting',\n 'feSpotLight',\n 'feTile',\n 'feTurbulence',\n 'foreignObject',\n 'glyphRef',\n 'linearGradient',\n 'radialGradient',\n 'solidColor',\n 'textArea',\n 'textPath'\n]\n","/**\n * Map of web namespaces.\n *\n * @type {Record}\n */\nexport const webNamespaces = {\n html: 'http://www.w3.org/1999/xhtml',\n mathml: 'http://www.w3.org/1998/Math/MathML',\n svg: 'http://www.w3.org/2000/svg',\n xlink: 'http://www.w3.org/1999/xlink',\n xml: 'http://www.w3.org/XML/1998/namespace',\n xmlns: 'http://www.w3.org/2000/xmlns/'\n}\n","/**\n * @typedef {import('vfile').VFile} VFile\n * @typedef {import('property-information').Schema} Schema\n * @typedef {import('unist').Position} Position\n * @typedef {import('unist').Point} Point\n * @typedef {import('hast').Parent} Parent\n * @typedef {import('hast').Element} Element\n * @typedef {import('hast').Root} Root\n * @typedef 
{import('hast').Text} Text\n * @typedef {import('hast').Comment} Comment\n * @typedef {import('hast').DocType} Doctype\n * @typedef {Parent['children'][number]} Child\n * @typedef {Element['children'][number]} ElementChild\n * @typedef {Child|Root} Node\n * @typedef {import('parse5').Document} P5Document\n * @typedef {import('parse5').DocumentType} P5Doctype\n * @typedef {import('parse5').CommentNode} P5Comment\n * @typedef {import('parse5').TextNode} P5Text\n * @typedef {import('parse5').Element} P5Element\n * @typedef {import('parse5').ElementLocation} P5ElementLocation\n * @typedef {import('parse5').Location} P5Location\n * @typedef {import('parse5').Attribute} P5Attribute\n * @typedef {import('parse5').Node} P5Node\n *\n * @typedef {'html'|'svg'} Space\n *\n * @callback Handler\n * @param {Context} ctx\n * @param {P5Node} node\n * @param {Array.} [children]\n * @returns {Node}\n *\n * @typedef Options\n * @property {Space} [space='html'] Whether the root of the tree is in the `'html'` or `'svg'` space. If an element in with the SVG namespace is found in `ast`, `fromParse5` automatically switches to the SVG space when entering the element, and switches back when leaving\n * @property {VFile} [file] `VFile`, used to add positional information to nodes. If given, the file should have the original HTML source as its contents\n * @property {boolean} [verbose=false] Whether to add extra positional information about starting tags, closing tags, and attributes to elements. Note: not used without `file`\n *\n * @typedef Context\n * @property {Schema} schema\n * @property {VFile|undefined} file\n * @property {boolean|undefined} verbose\n * @property {boolean} location\n */\n\nimport {h, s} from 'hastscript'\nimport {html, svg, find} from 'property-information'\nimport {location} from 'vfile-location'\nimport {webNamespaces} from 'web-namespaces'\n\nconst own = {}.hasOwnProperty\n\n// Handlers.\nconst map = {\n '#document': root,\n '#document-fragment': root,\n '#text': text,\n '#comment': comment,\n '#documentType': doctype\n}\n\n/**\n * Transform Parse5’s AST to a hast tree.\n *\n * @param {P5Node} ast\n * @param {Options|VFile} [options]\n */\nexport function fromParse5(ast, options = {}) {\n /** @type {Options} */\n let settings\n /** @type {VFile|undefined} */\n let file\n\n if (isFile(options)) {\n file = options\n settings = {}\n } else {\n file = options.file\n settings = options\n }\n\n return transform(\n {\n schema: settings.space === 'svg' ? svg : html,\n file,\n verbose: settings.verbose,\n location: false\n },\n ast\n )\n}\n\n/**\n * Transform children.\n *\n * @param {Context} ctx\n * @param {P5Node} ast\n * @returns {Node}\n */\nfunction transform(ctx, ast) {\n const schema = ctx.schema\n /** @type {Handler} */\n // @ts-expect-error: index is fine.\n const fn = own.call(map, ast.nodeName) ? map[ast.nodeName] : element\n /** @type {Array.|undefined} */\n let children\n\n // Element.\n if ('tagName' in ast) {\n ctx.schema = ast.namespaceURI === webNamespaces.svg ? 
svg : html\n }\n\n if ('childNodes' in ast) {\n children = nodes(ctx, ast.childNodes)\n }\n\n const result = fn(ctx, ast, children)\n\n if ('sourceCodeLocation' in ast && ast.sourceCodeLocation && ctx.file) {\n // @ts-expect-error It’s fine.\n const position = createLocation(ctx, result, ast.sourceCodeLocation)\n\n if (position) {\n ctx.location = true\n result.position = position\n }\n }\n\n ctx.schema = schema\n\n return result\n}\n\n/**\n * Transform children.\n *\n * @param {Context} ctx\n * @param {Array.} children\n * @returns {Array.}\n */\nfunction nodes(ctx, children) {\n let index = -1\n /** @type {Array.} */\n const result = []\n\n while (++index < children.length) {\n // @ts-expect-error Assume no roots in children.\n result[index] = transform(ctx, children[index])\n }\n\n return result\n}\n\n/**\n * Transform a document.\n * Stores `ast.quirksMode` in `node.data.quirksMode`.\n *\n * @type {Handler}\n * @param {P5Document} ast\n * @param {Array.} children\n * @returns {Root}\n */\nfunction root(ctx, ast, children) {\n /** @type {Root} */\n const result = {\n type: 'root',\n children,\n data: {quirksMode: ast.mode === 'quirks' || ast.mode === 'limited-quirks'}\n }\n\n if (ctx.file && ctx.location) {\n const doc = String(ctx.file)\n const loc = location(doc)\n result.position = {\n start: loc.toPoint(0),\n end: loc.toPoint(doc.length)\n }\n }\n\n return result\n}\n\n/**\n * Transform a doctype.\n *\n * @type {Handler}\n * @returns {Doctype}\n */\nfunction doctype() {\n // @ts-expect-error Types are out of date.\n return {type: 'doctype'}\n}\n\n/**\n * Transform a text.\n *\n * @type {Handler}\n * @param {P5Text} ast\n * @returns {Text}\n */\nfunction text(_, ast) {\n return {type: 'text', value: ast.value}\n}\n\n/**\n * Transform a comment.\n *\n * @type {Handler}\n * @param {P5Comment} ast\n * @returns {Comment}\n */\nfunction comment(_, ast) {\n return {type: 'comment', value: ast.data}\n}\n\n/**\n * Transform an element.\n *\n * @type {Handler}\n * @param {P5Element} ast\n * @param {Array.} children\n * @returns {Element}\n */\nfunction element(ctx, ast, children) {\n const fn = ctx.schema.space === 'svg' ? s : h\n let index = -1\n /** @type {Object.} */\n const props = {}\n\n while (++index < ast.attrs.length) {\n const attribute = ast.attrs[index]\n props[(attribute.prefix ? 
attribute.prefix + ':' : '') + attribute.name] =\n attribute.value\n }\n\n const result = fn(ast.tagName, props, children)\n\n if (result.tagName === 'template' && 'content' in ast) {\n const pos = ast.sourceCodeLocation\n const startTag = pos && pos.startTag && position(pos.startTag)\n const endTag = pos && pos.endTag && position(pos.endTag)\n\n /** @type {Root} */\n // @ts-expect-error Types are wrong.\n const content = transform(ctx, ast.content)\n\n if (startTag && endTag && ctx.file) {\n content.position = {start: startTag.end, end: endTag.start}\n }\n\n result.content = content\n }\n\n return result\n}\n\n/**\n * Create clean positional information.\n *\n * @param {Context} ctx\n * @param {Node} node\n * @param {P5ElementLocation} location\n * @returns {Position|null}\n */\nfunction createLocation(ctx, node, location) {\n const result = position(location)\n\n if (node.type === 'element') {\n const tail = node.children[node.children.length - 1]\n\n // Bug for unclosed with children.\n // See: .\n if (\n result &&\n !location.endTag &&\n tail &&\n tail.position &&\n tail.position.end\n ) {\n result.end = Object.assign({}, tail.position.end)\n }\n\n if (ctx.verbose) {\n /** @type {Object.} */\n const props = {}\n /** @type {string} */\n let key\n\n for (key in location.attrs) {\n if (own.call(location.attrs, key)) {\n props[find(ctx.schema, key).property] = position(location.attrs[key])\n }\n }\n\n node.data = {\n position: {\n opening: position(location.startTag),\n closing: location.endTag ? position(location.endTag) : null,\n properties: props\n }\n }\n }\n }\n\n return result\n}\n\n/**\n * @param {P5Location} loc\n * @returns {Position|null}\n */\nfunction position(loc) {\n const start = point({\n line: loc.startLine,\n column: loc.startCol,\n offset: loc.startOffset\n })\n const end = point({\n line: loc.endLine,\n column: loc.endCol,\n offset: loc.endOffset\n })\n // @ts-expect-error `null` is fine.\n return start || end ? {start, end} : null\n}\n\n/**\n * @param {Point} point\n * @returns {Point|null}\n */\nfunction point(point) {\n return point.line && point.column ? 
point : null\n}\n\n/**\n * @param {VFile|Options} value\n * @returns {value is VFile}\n */\nfunction isFile(value) {\n return 'messages' in value\n}\n","/**\n * @typedef {import('unist').Point} Point\n * @typedef {import('vfile').VFile} VFile\n *\n * @typedef {Pick} PositionalPoint\n * @typedef {Required} FullPoint\n * @typedef {NonNullable} Offset\n */\n\n/**\n * Get transform functions for the given `document`.\n *\n * @param {string|Uint8Array|VFile} file\n */\nexport function location(file) {\n var value = String(file)\n /** @type {Array.} */\n var indices = []\n var search = /\\r?\\n|\\r/g\n\n while (search.test(value)) {\n indices.push(search.lastIndex)\n }\n\n indices.push(value.length + 1)\n\n return {toPoint, toOffset}\n\n /**\n * Get the line and column-based `point` for `offset` in the bound indices.\n * Returns a point with `undefined` values when given invalid or out of bounds\n * input.\n *\n * @param {Offset} offset\n * @returns {FullPoint}\n */\n function toPoint(offset) {\n var index = -1\n\n if (offset > -1 && offset < indices[indices.length - 1]) {\n while (++index < indices.length) {\n if (indices[index] > offset) {\n return {\n line: index + 1,\n column: offset - (indices[index - 1] || 0) + 1,\n offset\n }\n }\n }\n }\n\n return {line: undefined, column: undefined, offset: undefined}\n }\n\n /**\n * Get the `offset` for a line and column-based `point` in the bound indices.\n * Returns `-1` when given invalid or out of bounds input.\n *\n * @param {PositionalPoint} point\n * @returns {Offset}\n */\n function toOffset(point) {\n var line = point && point.line\n var column = point && point.column\n /** @type {number} */\n var offset\n\n if (\n typeof line === 'number' &&\n typeof column === 'number' &&\n !Number.isNaN(line) &&\n !Number.isNaN(column) &&\n line - 1 in indices\n ) {\n offset = (indices[line - 2] || 0) + column - 1 || 0\n }\n\n return offset > -1 && offset < indices[indices.length - 1] ? 
offset : -1\n }\n}\n","/**\n * @typedef {import('hast').Element} Element\n * @typedef {import('hast').Root} Root\n * @typedef {import('hast').Text} Text\n *\n * @typedef {import('unist-util-is').AssertPredicate} AssertElement\n * @typedef {import('unist-util-is').AssertPredicate} AssertText\n * @typedef {import('unist-util-is').AssertPredicate} AssertRoot\n *\n * @callback CreateElementLike\n * @param {string} name\n * @param {any} attributes\n * @param {Array.} [children]\n * @returns {any}\n *\n * @typedef Context\n * @property {html|svg} schema\n * @property {string|null} prefix\n * @property {number} key\n * @property {boolean} react\n * @property {boolean} vue\n * @property {boolean} vdom\n * @property {boolean} hyperscript\n *\n * @typedef Options\n * @property {string|null} [prefix]\n * @property {'html'|'svg'} [space]\n */\n\nimport {html, svg, find, hastToReact} from 'property-information'\nimport {stringify as spaces} from 'space-separated-tokens'\nimport {stringify as commas} from 'comma-separated-tokens'\nimport style from 'style-to-object'\nimport {webNamespaces} from 'web-namespaces'\nimport {convert} from 'unist-util-is'\n\nconst ns = /** @type {Record} */ (webNamespaces)\nconst toReact = /** @type {Record} */ (hastToReact)\n\nconst own = {}.hasOwnProperty\n\n/** @type {AssertRoot} */\n// @ts-expect-error it’s correct.\nconst root = convert('root')\n/** @type {AssertElement} */\n// @ts-expect-error it’s correct.\nconst element = convert('element')\n/** @type {AssertText} */\n// @ts-expect-error it’s correct.\nconst text = convert('text')\n\n/**\n * @template {CreateElementLike} H\n * @param {H} h\n * @param {Element|Root} tree\n * @param {string|boolean|Options} [options]\n * @returns {ReturnType}\n */\nexport function toH(h, tree, options) {\n if (typeof h !== 'function') {\n throw new TypeError('h is not a function')\n }\n\n const r = react(h)\n const v = vue(h)\n const vd = vdom(h)\n /** @type {string|boolean|null|undefined} */\n let prefix\n /** @type {Element} */\n let node\n\n if (typeof options === 'string' || typeof options === 'boolean') {\n prefix = options\n options = {}\n } else {\n if (!options) options = {}\n prefix = options.prefix\n }\n\n if (root(tree)) {\n // @ts-expect-error Allow `doctypes` in there, we’ll filter them out later.\n node =\n tree.children.length === 1 && element(tree.children[0])\n ? tree.children[0]\n : {\n type: 'element',\n tagName: 'div',\n properties: {},\n children: tree.children\n }\n } else if (element(tree)) {\n node = tree\n } else {\n throw new Error(\n // @ts-expect-error runtime.\n 'Expected root or element, not `' + ((tree && tree.type) || tree) + '`'\n )\n }\n\n return transform(h, node, {\n schema: options.space === 'svg' ? svg : html,\n prefix:\n prefix === undefined || prefix === null\n ? r || v || vd\n ? 'h-'\n : null\n : typeof prefix === 'string'\n ? prefix\n : prefix\n ? 
'h-'\n : null,\n key: 0,\n react: r,\n vue: v,\n vdom: vd,\n hyperscript: hyperscript(h)\n })\n}\n\n/**\n * Transform a hast node through a hyperscript interface to *anything*!\n *\n * @template {CreateElementLike} H\n * @param {H} h\n * @param {Element} node\n * @param {Context} ctx\n */\nfunction transform(h, node, ctx) {\n const parentSchema = ctx.schema\n let schema = parentSchema\n let name = node.tagName\n /** @type {Record} */\n const attributes = {}\n /** @type {Array.|string>} */\n const nodes = []\n let index = -1\n /** @type {string} */\n let key\n\n if (parentSchema.space === 'html' && name.toLowerCase() === 'svg') {\n schema = svg\n ctx.schema = schema\n }\n\n for (key in node.properties) {\n if (node.properties && own.call(node.properties, key)) {\n addAttribute(attributes, key, node.properties[key], ctx, name)\n }\n }\n\n if (ctx.vdom) {\n if (schema.space === 'html') {\n name = name.toUpperCase()\n } else if (schema.space) {\n attributes.namespace = ns[schema.space]\n }\n }\n\n if (ctx.prefix) {\n ctx.key++\n attributes.key = ctx.prefix + ctx.key\n }\n\n if (node.children) {\n while (++index < node.children.length) {\n const value = node.children[index]\n\n if (element(value)) {\n nodes.push(transform(h, value, ctx))\n } else if (text(value)) {\n nodes.push(value.value)\n }\n }\n }\n\n // Restore parent schema.\n ctx.schema = parentSchema\n\n // Ensure no React warnings are triggered for void elements having children\n // passed in.\n return nodes.length > 0\n ? h.call(node, name, attributes, nodes)\n : h.call(node, name, attributes)\n}\n\n/**\n * @param {Record} props\n * @param {string} prop\n * @param {unknown} value\n * @param {Context} ctx\n * @param {string} name\n */\n// eslint-disable-next-line complexity, max-params\nfunction addAttribute(props, prop, value, ctx, name) {\n const info = find(ctx.schema, prop)\n /** @type {string|undefined} */\n let subprop\n\n // Ignore nullish and `NaN` values.\n // Ignore `false` and falsey known booleans for hyperlike DSLs.\n if (\n value === undefined ||\n value === null ||\n (typeof value === 'number' && Number.isNaN(value)) ||\n (value === false && (ctx.vue || ctx.vdom || ctx.hyperscript)) ||\n (!value && info.boolean && (ctx.vue || ctx.vdom || ctx.hyperscript))\n ) {\n return\n }\n\n if (Array.isArray(value)) {\n // Accept `array`.\n // Most props are space-separated.\n value = info.commaSeparated ? 
commas(value) : spaces(value)\n }\n\n // Treat `true` and truthy known booleans.\n if (info.boolean && ctx.hyperscript) {\n value = ''\n }\n\n // VDOM, Vue, and React accept `style` as object.\n if (\n info.property === 'style' &&\n typeof value === 'string' &&\n (ctx.react || ctx.vue || ctx.vdom)\n ) {\n value = parseStyle(value, name)\n }\n\n if (ctx.vue) {\n if (info.property !== 'style') subprop = 'attrs'\n } else if (!info.mustUseProperty) {\n if (ctx.vdom) {\n if (info.property !== 'style') subprop = 'attributes'\n } else if (ctx.hyperscript) {\n subprop = 'attrs'\n }\n }\n\n if (subprop) {\n props[subprop] = Object.assign(props[subprop] || {}, {\n [info.attribute]: value\n })\n } else if (info.space && ctx.react) {\n props[toReact[info.property] || info.property] = value\n } else {\n props[info.attribute] = value\n }\n}\n\n/**\n * Check if `h` is `react.createElement`.\n *\n * @param {CreateElementLike} h\n * @returns {boolean}\n */\nfunction react(h) {\n /** @type {unknown} */\n const node = h('div', {})\n return Boolean(\n node &&\n // @ts-expect-error Looks like a React node.\n ('_owner' in node || '_store' in node) &&\n // @ts-expect-error Looks like a React node.\n (node.key === undefined || node.key === null)\n )\n}\n\n/**\n * Check if `h` is `hyperscript`.\n *\n * @param {CreateElementLike} h\n * @returns {boolean}\n */\nfunction hyperscript(h) {\n return 'context' in h && 'cleanup' in h\n}\n\n/**\n * Check if `h` is `virtual-dom/h`.\n *\n * @param {CreateElementLike} h\n * @returns {boolean}\n */\nfunction vdom(h) {\n /** @type {unknown} */\n const node = h('div', {})\n // @ts-expect-error Looks like a vnode.\n return node.type === 'VirtualNode'\n}\n\n/**\n * Check if `h` is Vue.\n *\n * @param {CreateElementLike} h\n * @returns {boolean}\n */\nfunction vue(h) {\n /** @type {unknown} */\n const node = h('div', {})\n // @ts-expect-error Looks like a Vue node.\n return Boolean(node && node.context && node.context._isVue)\n}\n\n/**\n * @param {string} value\n * @param {string} tagName\n * @returns {Record}\n */\nfunction parseStyle(value, tagName) {\n /** @type {Record} */\n const result = {}\n\n try {\n style(value, (name, value) => {\n if (name.slice(0, 4) === '-ms-') name = 'ms-' + name.slice(4)\n\n result[\n name.replace(\n /-([a-z])/g,\n /**\n * @param {string} _\n * @param {string} $1\n * @returns {string}\n */ (_, $1) => $1.toUpperCase()\n )\n ] = value\n })\n } catch (error) {\n error.message =\n tagName + '[style]' + error.message.slice('undefined'.length)\n throw error\n }\n\n return result\n}\n","var own = {}.hasOwnProperty\n\n/**\n * @callback Handler\n * @param {...unknown} value\n * @return {unknown}\n *\n * @typedef {Record} Handlers\n *\n * @typedef {Object} Options\n * @property {Handler} [unknown]\n * @property {Handler} [invalid]\n * @property {Handlers} [handlers]\n */\n\n/**\n * Handle values based on a property.\n *\n * @param {string} key\n * @param {Options} [options]\n */\nexport function zwitch(key, options) {\n var settings = options || {}\n\n /**\n * Handle one value.\n * Based on the bound `key`, a respective handler will be called.\n * If `value` is not an object, or doesn’t have a `key` property, the special\n * “invalid” handler will be called.\n * If `value` has an unknown `key`, the special “unknown” handler will be\n * called.\n *\n * All arguments, and the context object, are passed through to the handler,\n * and it’s result is returned.\n *\n * @param {...unknown} [value]\n * @this {unknown}\n * @returns {unknown}\n * @property {Handler} 
invalid\n * @property {Handler} unknown\n * @property {Handlers} handlers\n */\n function one(value) {\n var fn = one.invalid\n var handlers = one.handlers\n\n if (value && own.call(value, key)) {\n fn = own.call(handlers, value[key]) ? handlers[value[key]] : one.unknown\n }\n\n if (fn) {\n return fn.apply(this, arguments)\n }\n }\n\n one.handlers = settings.handlers || {}\n one.invalid = settings.invalid\n one.unknown = settings.unknown\n\n return one\n}\n","/**\n * @typedef {import('parse5').Node} P5Node\n * @typedef {import('parse5').Document} P5Document\n * @typedef {import('parse5').DocumentFragment} P5Fragment\n * @typedef {import('parse5').DocumentType} P5Doctype\n * @typedef {import('parse5').CommentNode} P5Comment\n * @typedef {import('parse5').TextNode} P5Text\n * @typedef {import('parse5').Element} P5Element\n * @typedef {import('parse5').Attribute} P5Attribute\n * @typedef {import('parse5').ParentNode} P5Parent\n * @typedef {Exclude} P5Child\n * @typedef {import('property-information').Schema} Schema\n * @typedef {import('property-information').Info} Info\n * @typedef {'html'|'svg'} Space\n * @typedef {import('hast').Parent} Parent\n * @typedef {import('hast').Root} Root\n * @typedef {import('hast').DocType} Doctype\n * @typedef {import('hast').Element} Element\n * @typedef {import('hast').Text} Text\n * @typedef {import('hast').Comment} Comment\n * @typedef {Parent['children'][number]} Child\n * @typedef {Child|Root} Node\n *\n * @callback Handle\n * @param {Node} node\n * @param {Schema} schema\n * @returns {P5Node}\n */\n\nimport {html, svg, find} from 'property-information'\nimport {toH} from 'hast-to-hyperscript'\nimport {webNamespaces} from 'web-namespaces'\nimport {zwitch} from 'zwitch'\n\nvar own = {}.hasOwnProperty\n\nvar one = zwitch('type', {handlers: {root, element, text, comment, doctype}})\n\n/**\n * Transform a tree from hast to Parse5’s AST.\n *\n * @param {Node} tree\n * @param {Space} [space='html']\n * @returns {P5Node}\n */\nexport function toParse5(tree, space) {\n // @ts-ignore Types are wrong.\n return one(tree, space === 'svg' ? svg : html)\n}\n\n/**\n * @type {Handle}\n * @param {Root} node\n * @returns {P5Document}\n */\nfunction root(node, schema) {\n /** @type {P5Document} */\n var p5 = {\n nodeName: '#document',\n mode: (node.data || {}).quirksMode ? 
'quirks' : 'no-quirks',\n childNodes: []\n }\n // @ts-ignore Assume correct children.\n p5.childNodes = all(node.children, p5, schema)\n return patch(node, p5)\n}\n\n/**\n * @type {Handle}\n * @param {Root} node\n * @returns {P5Fragment}\n */\nfunction fragment(node, schema) {\n /** @type {P5Fragment} */\n var p5 = {nodeName: '#document-fragment', childNodes: []}\n // @ts-ignore Assume correct children.\n p5.childNodes = all(node.children, p5, schema)\n return patch(node, p5)\n}\n\n/**\n * @type {Handle}\n * @param {Doctype} node\n * @returns {P5Doctype}\n */\nfunction doctype(node) {\n return patch(node, {\n nodeName: '#documentType',\n name: 'html',\n publicId: '',\n systemId: '',\n parentNode: undefined\n })\n}\n\n/**\n * @type {Handle}\n * @param {Text} node\n * @returns {P5Text}\n */\nfunction text(node) {\n return patch(node, {\n nodeName: '#text',\n value: node.value,\n parentNode: undefined\n })\n}\n\n/**\n * @type {Handle}\n * @param {Comment} node\n * @returns {P5Comment}\n */\nfunction comment(node) {\n return patch(node, {\n nodeName: '#comment',\n data: node.value,\n parentNode: undefined\n })\n}\n\n/**\n * @type {Handle}\n * @param {Element} node\n * @returns {P5Element}\n */\nfunction element(node, schema) {\n /** @type {Space} */\n // @ts-ignore Assume space.\n var space = schema.space\n return toH(h, Object.assign({}, node, {children: []}), {space})\n\n /**\n * @param {string} name\n * @param {Object.} attrs\n */\n function h(name, attrs) {\n /** @type {Array.} */\n var values = []\n /** @type {Info} */\n var info\n /** @type {P5Attribute} */\n var value\n /** @type {string} */\n var key\n /** @type {number} */\n var index\n /** @type {P5Element} */\n var p5\n\n for (key in attrs) {\n if (!own.call(attrs, key) || attrs[key] === false) {\n continue\n }\n\n info = find(schema, key)\n\n if (info.boolean && !attrs[key]) {\n continue\n }\n\n value = {name: key, value: attrs[key] === true ? 
'' : String(attrs[key])}\n\n if (info.space && info.space !== 'html' && info.space !== 'svg') {\n index = key.indexOf(':')\n\n if (index < 0) {\n value.prefix = ''\n } else {\n value.name = key.slice(index + 1)\n value.prefix = key.slice(0, index)\n }\n\n value.namespace = webNamespaces[info.space]\n }\n\n values.push(value)\n }\n\n if (schema.space === 'html' && node.tagName === 'svg') schema = svg\n\n p5 = patch(node, {\n nodeName: name,\n tagName: name,\n attrs: values,\n namespaceURI: webNamespaces[schema.space],\n childNodes: [],\n parentNode: undefined\n })\n\n // @ts-ignore Assume correct children.\n p5.childNodes = all(node.children, p5, schema)\n\n // @ts-ignore Types are wrong.\n if (name === 'template') p5.content = fragment(node.content, schema)\n\n return p5\n }\n}\n\n/**\n * @param {Array.} children\n * @param {P5Parent} p5\n * @param {Schema} schema\n * @returns {Array.}\n */\nfunction all(children, p5, schema) {\n var index = -1\n /** @type {Array.} */\n var result = []\n /** @type {P5Child} */\n var child\n\n if (children) {\n while (++index < children.length) {\n // @ts-ignore Assume child.\n child = one(children[index], schema)\n\n // @ts-ignore types are wrong.\n child.parentNode = p5\n\n result.push(child)\n }\n }\n\n return result\n}\n\n/**\n * Patch specific properties.\n *\n * @template {P5Node} T\n * @param {Node} node\n * @param {T} p5\n * @returns {T}\n */\nfunction patch(node, p5) {\n var position = node.position\n\n if (position && position.start && position.end) {\n // @ts-ignore Types are wrong.\n p5.sourceCodeLocation = {\n startLine: position.start.line,\n startCol: position.start.column,\n startOffset: position.start.offset,\n endLine: position.end.line,\n endCol: position.end.column,\n endOffset: position.end.offset\n }\n }\n\n return p5\n}\n","/**\n * List of HTML void tag names.\n *\n * @type {Array}\n */\nexport const htmlVoidElements = [\n 'area',\n 'base',\n 'basefont',\n 'bgsound',\n 'br',\n 'col',\n 'command',\n 'embed',\n 'frame',\n 'hr',\n 'image',\n 'img',\n 'input',\n 'isindex',\n 'keygen',\n 'link',\n 'menuitem',\n 'meta',\n 'nextid',\n 'param',\n 'source',\n 'track',\n 'wbr'\n]\n","/**\n * @typedef {import('vfile').VFile} VFile\n * @typedef {import('parse5').Document} P5Document\n * @typedef {import('parse5').DocumentFragment} P5Fragment\n * @typedef {Omit} P5Element\n * @typedef {import('parse5').Attribute} P5Attribute\n * @typedef {Omit & {startOffset: number|undefined, endOffset: number|undefined}} P5Location\n * @typedef {import('parse5').ParserOptions} P5ParserOptions\n * @typedef {import('hast').Root} Root\n * @typedef {import('hast').DocType} Doctype\n * @typedef {import('hast').Element} Element\n * @typedef {import('hast').Text} Text\n * @typedef {import('hast').Comment} Comment\n * @typedef {import('hast').Content} Content\n * @typedef {Root|Content} Node\n * @typedef {import('../complex-types').Raw} Raw\n *\n * @typedef {Omit & {value: {stitch: Node}}} Stitch\n *\n * @typedef Options\n * @property {Array.} [passThrough]\n * List of custom hast node types to pass through (keep) in hast.\n * If the passed through nodes have children, those children are expected to\n * be hast and will be handled.\n *\n * @typedef HiddenTokenizer\n * @property {Array.} __mixins\n * Way too simple, but works for us.\n * @property {HiddenPreprocessor} preprocessor\n * @property {(value: string) => void} write\n * @property {() => number} _consume\n * @property {Array.} tokenQueue\n * @property {string} state\n * @property {string} returnState\n * 
@property {number} charRefCode\n * @property {Array.} tempBuff\n * @property {Function} _flushCodePointsConsumedAsCharacterReference\n * @property {string} lastStartTagName\n * @property {number} consumedAfterSnapshot\n * @property {boolean} active\n * @property {HiddenToken|undefined} currentCharacterToken\n * @property {HiddenToken|undefined} currentToken\n * @property {unknown} currentAttr\n * @property {Function} NAMED_CHARACTER_REFERENCE_STATE\n * @property {Function} NUMERIC_CHARACTER_REFERENCE_END_STATE\n *\n * @typedef {Object. & {location: P5Location}} HiddenToken\n *\n * @typedef HiddenPreprocessor\n * @property {string|undefined} html\n * @property {number} pos\n * @property {number} lastGapPos\n * @property {number} lastCharPos\n * @property {Array.} gapStack\n * @property {boolean} skipNextNewLine\n * @property {boolean} lastChunkWritten\n * @property {boolean} endOfChunkHit\n *\n * @typedef HiddenLocationTracker\n * @property {P5Location|undefined} currentAttrLocation\n * @property {P5Location} ctLoc\n * @property {HiddenPosTracker} posTracker\n *\n * @typedef HiddenPosTracker\n * @property {boolean} isEol\n * @property {number} lineStartPos\n * @property {number} droppedBufferSize\n * @property {number} offset\n * @property {number} col\n * @property {number} line\n */\n\n// @ts-expect-error: untyped.\nimport Parser from 'parse5/lib/parser/index.js'\nimport {pointStart, pointEnd} from 'unist-util-position'\nimport {visit} from 'unist-util-visit'\nimport {fromParse5} from 'hast-util-from-parse5'\nimport {toParse5} from 'hast-util-to-parse5'\nimport {htmlVoidElements} from 'html-void-elements'\nimport {webNamespaces} from 'web-namespaces'\nimport {zwitch} from 'zwitch'\n\nconst inTemplateMode = 'IN_TEMPLATE_MODE'\nconst dataState = 'DATA_STATE'\nconst characterToken = 'CHARACTER_TOKEN'\nconst startTagToken = 'START_TAG_TOKEN'\nconst endTagToken = 'END_TAG_TOKEN'\nconst commentToken = 'COMMENT_TOKEN'\nconst doctypeToken = 'DOCTYPE_TOKEN'\n\n/** @type {P5ParserOptions} */\nconst parseOptions = {sourceCodeLocationInfo: true, scriptingEnabled: false}\n\n/**\n * Given a hast tree and an optional vfile (for positional info), return a new\n * parsed-again hast tree.\n *\n * @param tree\n * Original hast tree.\n * @param file\n * Virtual file for positional info, optional.\n * @param options\n * Configuration.\n */\nexport const raw =\n /**\n * @type {(\n * ((tree: Node, file: VFile|undefined, options?: Options) => Node) &\n * ((tree: Node, options?: Options) => Node)\n * )}\n */\n (\n /**\n * @param {Node} tree\n * @param {VFile} [file]\n * @param {Options} [options]\n */\n function (tree, file, options) {\n let index = -1\n const parser = new Parser(parseOptions)\n const one = zwitch('type', {\n // @ts-expect-error: hush.\n handlers: {root, element, text, comment, doctype, raw: handleRaw},\n // @ts-expect-error: hush.\n unknown\n })\n /** @type {boolean|undefined} */\n let stitches\n /** @type {HiddenTokenizer|undefined} */\n let tokenizer\n /** @type {HiddenPreprocessor|undefined} */\n let preprocessor\n /** @type {HiddenPosTracker|undefined} */\n let posTracker\n /** @type {HiddenLocationTracker|undefined} */\n let locationTracker\n\n if (isOptions(file)) {\n options = file\n file = undefined\n }\n\n if (options && options.passThrough) {\n while (++index < options.passThrough.length) {\n // @ts-expect-error: hush.\n one.handlers[options.passThrough[index]] = stitch\n }\n }\n\n const result = fromParse5(\n documentMode(tree) ? 
document() : fragment(),\n file\n )\n\n if (stitches) {\n visit(result, 'comment', (node, index, parent) => {\n const stitch = /** @type {Stitch} */ (/** @type {unknown} */ (node))\n if (stitch.value.stitch && parent !== null && index !== null) {\n // @ts-expect-error: assume the stitch is allowed.\n parent.children[index] = stitch.value.stitch\n return index\n }\n })\n }\n\n // Unpack if possible and when not given a `root`.\n if (\n tree.type !== 'root' &&\n result.type === 'root' &&\n result.children.length === 1\n ) {\n return result.children[0]\n }\n\n return result\n\n /**\n * @returns {P5Fragment}\n */\n function fragment() {\n /** @type {P5Element} */\n const context = {\n nodeName: 'template',\n tagName: 'template',\n attrs: [],\n namespaceURI: webNamespaces.html,\n childNodes: []\n }\n /** @type {P5Element} */\n const mock = {\n nodeName: 'documentmock',\n tagName: 'documentmock',\n attrs: [],\n namespaceURI: webNamespaces.html,\n childNodes: []\n }\n /** @type {P5Fragment} */\n const doc = {nodeName: '#document-fragment', childNodes: []}\n\n parser._bootstrap(mock, context)\n parser._pushTmplInsertionMode(inTemplateMode)\n parser._initTokenizerForFragmentParsing()\n parser._insertFakeRootElement()\n parser._resetInsertionMode()\n parser._findFormInFragmentContext()\n\n tokenizer = parser.tokenizer\n /* c8 ignore next */\n if (!tokenizer) throw new Error('Expected `tokenizer`')\n preprocessor = tokenizer.preprocessor\n locationTracker = tokenizer.__mixins[0]\n posTracker = locationTracker.posTracker\n\n one(tree)\n\n parser._adoptNodes(mock.childNodes[0], doc)\n\n return doc\n }\n\n /**\n * @returns {P5Document}\n */\n function document() {\n /** @type {P5Document} */\n const doc = parser.treeAdapter.createDocument()\n\n parser._bootstrap(doc, undefined)\n tokenizer = parser.tokenizer\n /* c8 ignore next */\n if (!tokenizer) throw new Error('Expected `tokenizer`')\n preprocessor = tokenizer.preprocessor\n locationTracker = tokenizer.__mixins[0]\n posTracker = locationTracker.posTracker\n\n one(tree)\n\n return doc\n }\n\n /**\n * @param {Content[]} nodes\n * @returns {void}\n */\n function all(nodes) {\n let index = -1\n\n /* istanbul ignore else - invalid nodes, see rehypejs/rehype-raw#7. 
*/\n if (nodes) {\n while (++index < nodes.length) {\n one(nodes[index])\n }\n }\n }\n\n /**\n * @param {Root} node\n * @returns {void}\n */\n function root(node) {\n all(node.children)\n }\n\n /**\n * @param {Element} node\n * @returns {void}\n */\n function element(node) {\n resetTokenizer()\n parser._processToken(startTag(node), webNamespaces.html)\n\n all(node.children)\n\n if (!htmlVoidElements.includes(node.tagName)) {\n resetTokenizer()\n parser._processToken(endTag(node))\n }\n }\n\n /**\n * @param {Text} node\n * @returns {void}\n */\n function text(node) {\n resetTokenizer()\n parser._processToken({\n type: characterToken,\n chars: node.value,\n location: createParse5Location(node)\n })\n }\n\n /**\n * @param {Doctype} node\n * @returns {void}\n */\n function doctype(node) {\n resetTokenizer()\n parser._processToken({\n type: doctypeToken,\n name: 'html',\n forceQuirks: false,\n publicId: '',\n systemId: '',\n location: createParse5Location(node)\n })\n }\n\n /**\n * @param {Comment|Stitch} node\n * @returns {void}\n */\n function comment(node) {\n resetTokenizer()\n parser._processToken({\n type: commentToken,\n data: node.value,\n location: createParse5Location(node)\n })\n }\n\n /**\n * @param {Raw} node\n * @returns {void}\n */\n function handleRaw(node) {\n const start = pointStart(node)\n const line = start.line || 1\n const column = start.column || 1\n const offset = start.offset || 0\n\n /* c8 ignore next 4 */\n if (!preprocessor) throw new Error('Expected `preprocessor`')\n if (!tokenizer) throw new Error('Expected `tokenizer`')\n if (!posTracker) throw new Error('Expected `posTracker`')\n if (!locationTracker) throw new Error('Expected `locationTracker`')\n\n // Reset preprocessor:\n // See: .\n preprocessor.html = undefined\n preprocessor.pos = -1\n preprocessor.lastGapPos = -1\n preprocessor.lastCharPos = -1\n preprocessor.gapStack = []\n preprocessor.skipNextNewLine = false\n preprocessor.lastChunkWritten = false\n preprocessor.endOfChunkHit = false\n\n // Reset preprocessor mixin:\n // See: .\n posTracker.isEol = false\n posTracker.lineStartPos = -column + 1 // Looks weird, but ensures we get correct positional info.\n posTracker.droppedBufferSize = offset\n posTracker.offset = 0\n posTracker.col = 1\n posTracker.line = line\n\n // Reset location tracker:\n // See: .\n locationTracker.currentAttrLocation = undefined\n locationTracker.ctLoc = createParse5Location(node)\n\n // See the code for `parse` and `parseFragment`:\n // See: .\n tokenizer.write(node.value)\n parser._runParsingLoop(null)\n\n // Character references hang, so if we ended there, we need to flush\n // those too.\n // We reset the preprocessor as if the document ends here.\n // Then one single call to the relevant state does the trick, parse5\n // consumes the whole token.\n if (\n tokenizer.state === 'NAMED_CHARACTER_REFERENCE_STATE' ||\n tokenizer.state === 'NUMERIC_CHARACTER_REFERENCE_END_STATE'\n ) {\n preprocessor.lastChunkWritten = true\n tokenizer[tokenizer.state](tokenizer._consume())\n }\n\n // Process final characters if they’re still there after hibernating.\n // Similar to:\n // See: .\n const token = tokenizer.currentCharacterToken\n\n if (token) {\n token.location.endLine = posTracker.line\n token.location.endCol = posTracker.col + 1\n token.location.endOffset = posTracker.offset + 1\n parser._processToken(token)\n }\n }\n\n /**\n * @param {Node} node\n */\n function stitch(node) {\n stitches = true\n\n /** @type {Node} */\n let clone\n\n // Recurse, because to somewhat handle `[]` 
(where `[]` denotes the\n // passed through node).\n if ('children' in node) {\n clone = {\n ...node,\n children: raw(\n {type: 'root', children: node.children},\n file,\n options\n // @ts-expect-error Assume a given parent yields a parent.\n ).children\n }\n } else {\n clone = {...node}\n }\n\n // Hack: `value` is supposed to be a string, but as none of the tools\n // (`parse5` or `hast-util-from-parse5`) looks at it, we can pass nodes\n // through.\n comment({type: 'comment', value: {stitch: clone}})\n }\n\n function resetTokenizer() {\n /* c8 ignore next */\n if (!tokenizer) throw new Error('Expected `tokenizer`')\n\n // Reset tokenizer:\n // See: .\n // Especially putting it back in the `data` state is useful: some elements,\n // like textareas and iframes, change the state.\n // See GH-7.\n // But also if broken HTML is in `raw`, and then a correct element is given.\n // See GH-11.\n tokenizer.tokenQueue = []\n tokenizer.state = dataState\n tokenizer.returnState = ''\n tokenizer.charRefCode = -1\n tokenizer.tempBuff = []\n tokenizer.lastStartTagName = ''\n tokenizer.consumedAfterSnapshot = -1\n tokenizer.active = false\n tokenizer.currentCharacterToken = undefined\n tokenizer.currentToken = undefined\n tokenizer.currentAttr = undefined\n }\n }\n )\n/**\n * @param {Element} node\n * @returns {HiddenToken}\n */\nfunction startTag(node) {\n /** @type {P5Location} */\n const location = Object.assign(createParse5Location(node))\n // @ts-expect-error extra positional info.\n location.startTag = Object.assign({}, location)\n\n // Untyped token.\n return {\n type: startTagToken,\n tagName: node.tagName,\n selfClosing: false,\n attrs: attributes(node),\n location\n }\n}\n\n/**\n * @param {Element} node\n * @returns {Array.}\n */\nfunction attributes(node) {\n return toParse5({\n tagName: node.tagName,\n type: 'element',\n properties: node.properties,\n children: []\n // @ts-expect-error Assume element.\n }).attrs\n}\n\n/**\n * @param {Element} node\n * @returns {HiddenToken}\n */\nfunction endTag(node) {\n /** @type {P5Location} */\n const location = Object.assign(createParse5Location(node))\n // @ts-expect-error extra positional info.\n location.startTag = Object.assign({}, location)\n\n // Untyped token.\n return {\n type: endTagToken,\n tagName: node.tagName,\n attrs: [],\n location\n }\n}\n\n/**\n * @param {Node} node\n */\nfunction unknown(node) {\n throw new Error('Cannot compile `' + node.type + '` node')\n}\n\n/**\n * @param {Node} node\n * @returns {boolean}\n */\nfunction documentMode(node) {\n const head = node.type === 'root' ? 
node.children[0] : node\n return Boolean(\n head &&\n (head.type === 'doctype' ||\n (head.type === 'element' && head.tagName === 'html'))\n )\n}\n\n/**\n * @param {Node|Stitch} node\n * @returns {P5Location}\n */\nfunction createParse5Location(node) {\n const start = pointStart(node)\n const end = pointEnd(node)\n\n return {\n startLine: start.line,\n startCol: start.column,\n startOffset: start.offset,\n endLine: end.line,\n endCol: end.column,\n endOffset: end.offset\n }\n}\n\n/**\n * @param {VFile|Options|undefined} value\n * @return {value is Options}\n */\nfunction isOptions(value) {\n return Boolean(value && !('message' in value && 'messages' in value))\n}\n","/**\n * @typedef {import('hast').Root} Root\n * @typedef {import('hast-util-raw').Options} Options\n * @typedef {import('hast-util-raw')} DoNotTouchAsThisImportIncludesRawInTree\n */\n\nimport {raw} from 'hast-util-raw'\n\n/**\n * Plugin to parse the tree again (and raw nodes).\n * Keeping positional info OK. 🙌\n *\n * @type {import('unified').Plugin<[Options?] | Array, Root>}\n */\nexport default function rehypeRaw(options = {}) {\n return (tree, file) => {\n // Assume that when a root was given, it’s also returned.\n const result = /** @type {Root} */ (raw(tree, file, options))\n return result\n }\n}\n","import { Element, Comment, Literal, ElementContent, RootContent, Properties } from 'hast';\nimport { RehypeAttrsOptions } from './';\n\nexport const getURLParameters = (url: string): Record =>\n(url.match(/([^?=&]+)(=([^&]*))/g) || []).reduce(\n (a: Record, v: string) => (\n (a[v.slice(0, v.indexOf('='))] = v.slice(v.indexOf('=') + 1)), a\n ),\n {},\n);\n\nexport const prevChild = (data: Literal[] = [], index: number): Comment | undefined => {\n let i = index;\n while (i > -1) {\n i--;\n if (!data[i]) return\n if ((data[i] && data[i].value && (data[i].value as string).replace(/(\\n|\\s)/g, '') !== '') || data[i].type !== 'text') {\n if (!/^rehype:/.test(data[i].value as string) || (data[i].type as string) !== 'comment') return;\n return data[i] as unknown as Comment;\n }\n }\n return;\n}\n\nexport const nextChild = (data: RootContent[] | ElementContent[] = [], index: number, tagName?: string): ElementContent | undefined => {\n let i = index;\n while (i < data.length) {\n i++;\n if (tagName) {\n const element = data[i] as Literal & Element;\n if (element && element.value && (element.value as string).replace(/(\\n|\\s)/g, '') !== '' || data[i] && (data[i].type as string) === 'element') {\n return element.tagName === tagName ? 
element : undefined\n }\n } else {\n const element = data[i] as ElementContent & Literal;\n if (!element || (element.type !== 'text' && (element.type as string) !== 'comment') || (element.type === 'text' && (element.value as string).replace(/(\\n|\\s)/g, '') !== '')) return;\n if ((element.type as string) === 'comment') {\n if (!/^rehype:/.test(element.value as string)) return;\n const nextNode = nextChild(data, i, 'pre')\n if (nextNode) return;\n return element;\n }\n }\n }\n return\n}\n\n/**\n * 获取代码注视的位置\n * @param data 数据\n * @param index 当前数据所在的位置\n * @returns 返回 当前参数数据 Object,`{}`\n */\nexport const getCommentObject = ({ value = '' }: Comment): Properties => {\n const param = getURLParameters(value.replace(/^rehype:/, ''));\n Object.keys(param).forEach((keyName: string) => {\n if (param[keyName] === 'true') {\n param[keyName] = true;\n }\n if (param[keyName] === 'false') {\n param[keyName] = false;\n }\n if (typeof param[keyName] === 'string' && !/^0/.test(param[keyName] as string) && !isNaN(+param[keyName])) {\n param[keyName] = +param[keyName];\n }\n })\n return param;\n}\n\nexport type DataConfig = {\n 'data-config': Properties\n}\n\nexport const propertiesHandle = (defaultAttrs?: Properties | null, attrs?: Properties, type?: RehypeAttrsOptions['properties']): Properties | DataConfig => {\n if (type === 'string') {\n return { ...defaultAttrs, 'data-config': JSON.stringify({ ...attrs, rehyp: true })}\n } else if (type === 'attr') {\n return { ...defaultAttrs, ...attrs}\n }\n return { ...defaultAttrs, 'data-config': { ...attrs, rehyp: true }}\n}","import { Plugin } from 'unified';\nimport { Root, Element, Comment, Properties, Literal } from 'hast';\nimport { visit } from 'unist-util-visit';\nimport { propertiesHandle, nextChild, prevChild, getCommentObject } from './utils';\n\nexport type RehypeAttrsOptions = {\n /**\n * ## `data`\n * \n * ```markdown\n * text\n * \n * ```\n * \n * ⇣⇣⇣⇣⇣⇣\n * \n * ```html\n *

text

\n * ```\n * \n * ## `string`\n * \n * ```markdown\n * text\n * \n * ```\n * \n * ⇣⇣⇣⇣⇣⇣\n * \n * ```html\n *

text

\n * ```\n * \n * ## attr\n * \n * ```markdown\n * text\n * \n * ```\n * ⇣⇣⇣⇣⇣⇣\n * ```html\n *

text

\n * ```\n */\n properties: 'data' | 'string' | 'attr';\n}\n\nconst defaultOptions: RehypeAttrsOptions = {\n properties: 'data',\n}\n\nconst rehypeAttrs: Plugin<[RehypeAttrsOptions?], Root> = (options) => {\n const opts = { ...defaultOptions, ...options }\n return (tree) => {\n visit(tree, 'element', (node, index, parent) => {\n if (node.tagName === 'pre' && node && Array.isArray(node.children) && parent && Array.isArray(parent.children) && parent.children.length > 1) {\n const firstChild = node.children[0] as Element;\n if (firstChild && firstChild.tagName === 'code' && typeof index === 'number') {\n const child = prevChild(parent.children as Literal[], index);\n if (child) {\n const attr = getCommentObject(child);\n if (Object.keys(attr).length > 0) {\n node.properties = { ...node.properties, ...{ 'data-type': 'rehyp' } }\n firstChild.properties = propertiesHandle(firstChild.properties, attr, opts.properties) as Properties\n }\n }\n }\n }\n\n if (/^(em|strong|b|a|i|p|pre|kbd|blockquote|h(1|2|3|4|5|6)|code|table|img|del|ul|ol)$/.test(node.tagName) && parent && Array.isArray(parent.children) && typeof index === 'number') {\n const child = nextChild(parent.children, index)\n if (child) {\n const attr = getCommentObject(child as Comment)\n if (Object.keys(attr).length > 0) {\n node.properties = propertiesHandle(node.properties, attr, opts.properties) as Properties\n }\n }\n }\n });\n }\n}\n\n\nexport default rehypeAttrs\n","import { Plugin } from 'unified';\nimport { Root, RootContent } from 'hast';\nimport { visit } from 'unist-util-visit';\n\nexport type RehypeIgnoreOptions = {\n /**\n * Character to use for opening delimiter, by default `rehype:ignore:start`\n */\n openDelimiter?: string;\n /**\n * Character to use for closing delimiter, by default `rehype:ignore:end`\n */\n closeDelimiter?: string;\n}\n\nconst rehypeIgnore: Plugin<[RehypeIgnoreOptions?], Root> = (options = {}) => {\n const { openDelimiter = 'rehype:ignore:start', closeDelimiter = 'rehype:ignore:end' } = options;\n return (tree) => {\n visit(tree, (node: Root | RootContent, index, parent) => {\n if (node.type === 'element' || node.type === 'root') {\n // const start = node.children.findIndex((item) => item.type === 'comment' && item.value === openDelimiter);\n // const end = node.children.findIndex((item) => item.type === 'comment' && item.value === closeDelimiter);\n // if (start > -1 && end > -1) {\n // node.children = node.children.filter((_, idx) => idx < start || idx > end);\n // }\n let start = false;\n node.children = node.children.filter((item) => {\n if (item.type === 'comment' && item.value.trim() === openDelimiter) {\n start = true;\n return false\n }\n if (item.type === 'comment' && item.value.trim() === closeDelimiter) {\n start = false;\n return false\n }\n \n return !start;\n })\n }\n });\n }\n}\n\nexport default rehypeIgnore;\n","/**\n * @typedef {import('unist').Node} Node\n * @typedef {import('unist').Parent} Parent\n * @typedef {import('unist-util-is').Test} Test\n */\n\n/**\n * Options for unist util filter\n *\n * @typedef {Object} FilterOptions\n * @property {boolean} [cascade=true] Whether to drop parent nodes if they had children, but all their children were filtered out.\n */\n\nimport {convert} from 'unist-util-is'\n\nconst own = {}.hasOwnProperty\n\n/**\n * Create a new tree consisting of copies of all nodes that pass test.\n * The tree is walked in preorder (NLR), visiting the node itself, then its head, etc.\n *\n * @param tree Tree to filter.\n * @param options Configuration (optional).\n * 
@param test is-compatible test (such as a type).\n * @returns Given `tree` or `null` if it didn’t pass `test`.\n */\nexport const filter =\n /**\n * @type {(\n * ((node: Tree, options: FilterOptions, test: Check) => import('./complex-types').Matches) &\n * ((node: Tree, test: Check) => import('./complex-types').Matches) &\n * ((node: Tree, options?: FilterOptions) => Tree)\n * )}\n */\n (\n /**\n * @param {Node} tree\n * @param {FilterOptions} options\n * @param {Test} test\n * @returns {Node|null}\n */\n function (tree, options, test) {\n const is = convert(test || options)\n const cascade =\n options.cascade === undefined || options.cascade === null\n ? true\n : options.cascade\n\n return preorder(tree)\n\n /**\n * @param {Node} node\n * @param {number|undefined} [index]\n * @param {Parent|undefined} [parent]\n * @returns {Node|null}\n */\n function preorder(node, index, parent) {\n /** @type {Array.} */\n const children = []\n /** @type {number} */\n let childIndex\n /** @type {Node} */\n let result\n /** @type {string} */\n let key\n\n if (!is(node, index, parent)) return null\n\n // @ts-expect-error: Looks like a parent.\n if (node.children) {\n childIndex = -1\n\n // @ts-expect-error Looks like a parent.\n while (++childIndex < node.children.length) {\n // @ts-expect-error Looks like a parent.\n result = preorder(node.children[childIndex], childIndex, node)\n\n if (result) {\n children.push(result)\n }\n }\n\n // @ts-expect-error Looks like a parent.\n if (cascade && node.children.length > 0 && children.length === 0)\n return null\n }\n\n // Create a shallow clone, using the new children.\n /** @type {typeof node} */\n // @ts-expect-error all the fields will be copied over.\n const next = {}\n\n for (key in node) {\n if (own.call(node, key)) {\n // @ts-expect-error: Looks like a record.\n next[key] = key === 'children' ? children : node[key]\n }\n }\n\n return next\n }\n }\n )\n","/**\n * @typedef {import('hast').Element} Element\n * @typedef {import('hast').Root} Root\n * @typedef Options options\n * Configuration.\n * @property {boolean} [showLineNumbers]\n * Set `showLineNumbers` to `true` to always display line number\n * @property {boolean} [ignoreMissing]\n * Set `ignoreMissing` to `true` to ignore unsupported languages and line highlighting when no language is specified\n */\n\nimport { visit } from 'unist-util-visit'\nimport { toString } from 'hast-util-to-string'\nimport { filter } from 'unist-util-filter'\nimport rangeParser from 'parse-numeric-range'\n\n/**\n * @param {Element} node\n * @return {string|null}\n */\nconst getLanguage = (node) => {\n const className = node.properties.className\n //@ts-ignore\n for (const classListItem of className) {\n if (classListItem.slice(0, 9) === 'language-') {\n return classListItem.slice(9).toLowerCase()\n }\n }\n return null\n}\n\n/**\n * Create a closure that determines if we have to highlight the given index\n *\n * @param {string} meta\n * @return { (index:number) => boolean }\n */\nconst calculateLinesToHighlight = (meta) => {\n const RE = /{([\\d,-]+)}/\n // Remove space between {} e.g. 
{1, 3}\n const parsedMeta = meta\n .split(',')\n .map((str) => str.trim())\n .join()\n if (RE.test(parsedMeta)) {\n const strlineNumbers = RE.exec(parsedMeta)[1]\n const lineNumbers = rangeParser(strlineNumbers)\n return (index) => lineNumbers.includes(index + 1)\n } else {\n return () => false\n }\n}\n\n/**\n * Check if we want to start the line numbering from a given number or 1\n * showLineNumbers=5, will start the numbering from 5\n * @param {string} meta\n * @returns {number}\n */\nconst calculateStartingLine = (meta) => {\n const RE = /showLineNumbers=(?\\d+)/i\n // pick the line number after = using a named capturing group\n if (RE.test(meta)) {\n const {\n groups: { lines },\n } = RE.exec(meta)\n return Number(lines)\n }\n return 1\n}\n\n/**\n * Create container AST for node lines\n *\n * @param {number} number\n * @return {Element[]}\n */\nconst createLineNodes = (number) => {\n const a = new Array(number)\n for (let i = 0; i < number; i++) {\n a[i] = {\n type: 'element',\n tagName: 'span',\n properties: { className: [] },\n children: [],\n }\n }\n return a\n}\n\n/**\n * Split multiline text nodes into individual nodes with positioning\n * Add a node start and end line position information for each text node\n *\n * @return { (ast:Element['children']) => Element['children'] }\n *\n */\nconst addNodePositionClosure = () => {\n let startLineNum = 1\n /**\n * @param {Element['children']} ast\n * @return {Element['children']}\n */\n const addNodePosition = (ast) => {\n return ast.reduce((result, node) => {\n if (node.type === 'text') {\n const value = /** @type {string} */ (node.value)\n const numLines = (value.match(/\\n/g) || '').length\n if (numLines === 0) {\n node.position = {\n // column: 0 is to make the ts compiler happy but we do not use this field\n start: { line: startLineNum, column: 0 },\n end: { line: startLineNum, column: 0 },\n }\n result.push(node)\n } else {\n const lines = value.split('\\n')\n for (const [i, line] of lines.entries()) {\n result.push({\n type: 'text',\n value: i === lines.length - 1 ? line : line + '\\n',\n position: {\n start: { line: startLineNum + i },\n end: { line: startLineNum + i },\n },\n })\n }\n }\n startLineNum = startLineNum + numLines\n\n return result\n }\n\n if (Object.prototype.hasOwnProperty.call(node, 'children')) {\n const initialLineNum = startLineNum\n // @ts-ignore\n node.children = addNodePosition(node.children, startLineNum)\n result.push(node)\n node.position = {\n start: { line: initialLineNum, column: 0 },\n end: { line: startLineNum, column: 0 },\n }\n return result\n }\n\n result.push(node)\n return result\n }, [])\n }\n return addNodePosition\n}\n\n/**\n * Rehype prism plugin generator that highlights code blocks with refractor (prismjs)\n *\n * Pass in your own refractor object with the required languages registered:\n * https://github.com/wooorm/refractor#refractorregistersyntax\n *\n * @param {import('refractor/lib/core').Refractor} refractor\n * @return {import('unified').Plugin<[Options?], Root>}\n */\nconst rehypePrismGenerator = (refractor) => {\n return (options = {}) => {\n return (tree) => {\n visit(tree, 'element', visitor)\n }\n\n /**\n * @param {Element} node\n * @param {number} index\n * @param {Element} parent\n */\n function visitor(node, index, parent) {\n if (!parent || parent.tagName !== 'pre' || node.tagName !== 'code') {\n return\n }\n\n let meta = node.data && node.data.meta ? 
/** @type {string} */ (node.data.meta) : ''\n // Coerce className to array\n if (node.properties.className) {\n if (typeof node.properties.className === 'boolean') {\n node.properties.className = []\n } else if (!Array.isArray(node.properties.className)) {\n node.properties.className = [node.properties.className]\n }\n } else {\n node.properties.className = []\n }\n node.properties.className.push('code-highlight')\n const lang = getLanguage(node)\n\n /** @type {Element} */\n let refractorRoot\n\n // Syntax highlight\n if (lang) {\n try {\n // @ts-ignore\n refractorRoot = refractor.highlight(toString(node), lang)\n // @ts-ignore className is already an array\n parent.properties.className = (parent.properties.className || []).concat(\n 'language-' + lang\n )\n } catch (err) {\n if (options.ignoreMissing && /Unknown language/.test(err.message)) {\n refractorRoot = node\n } else {\n throw err\n }\n }\n } else {\n refractorRoot = node\n }\n\n refractorRoot.children = addNodePositionClosure()(refractorRoot.children)\n\n // Add position info to root\n if (refractorRoot.children.length > 0) {\n refractorRoot.position = {\n start: { line: refractorRoot.children[0].position.start.line, column: 0 },\n end: {\n line: refractorRoot.children[refractorRoot.children.length - 1].position.end.line,\n column: 0,\n },\n }\n } else {\n refractorRoot.position = {\n start: { line: 0, column: 0 },\n end: { line: 0, column: 0 },\n }\n }\n\n const shouldHighlightLine = calculateLinesToHighlight(meta)\n const startingLineNumber = calculateStartingLine(meta)\n const codeLineArray = createLineNodes(refractorRoot.position.end.line)\n\n const falseShowLineNumbersStr = [\n 'showlinenumbers=false',\n 'showlinenumbers=\"false\"',\n 'showlinenumbers={false}',\n ]\n for (const [i, line] of codeLineArray.entries()) {\n // Default class name for each line\n line.properties.className = ['code-line']\n\n // Syntax highlight\n const treeExtract = filter(\n refractorRoot,\n (node) => node.position.start.line <= i + 1 && node.position.end.line >= i + 1\n )\n line.children = treeExtract.children\n\n // Line number\n if (\n (meta.toLowerCase().includes('showLineNumbers'.toLowerCase()) ||\n options.showLineNumbers) &&\n !falseShowLineNumbersStr.some((str) => meta.toLowerCase().includes(str))\n ) {\n line.properties.line = [(i + startingLineNumber).toString()]\n line.properties.className.push('line-number')\n }\n\n // Line highlight\n if (shouldHighlightLine(i)) {\n line.properties.className.push('highlight-line')\n }\n\n // Diff classes\n if (lang === 'diff' && toString(line).substring(0, 1) === '-') {\n line.properties.className.push('deleted')\n } else if (lang === 'diff' && toString(line).substring(0, 1) === '+') {\n line.properties.className.push('inserted')\n }\n }\n\n // Remove possible trailing line when splitting by \\n which results in empty array\n if (\n codeLineArray.length > 0 &&\n toString(codeLineArray[codeLineArray.length - 1]).trim() === ''\n ) {\n codeLineArray.pop()\n }\n\n node.children = codeLineArray\n }\n }\n}\n\nexport default rehypePrismGenerator\n","import { refractor as refractorAll } from 'refractor/lib/all.js'\nimport rehypePrismGenerator from './generator.js'\n\n/**\n * Rehype prism plugin that highlights code blocks with refractor (prismjs)\n * This supports all the languages and should be used on the server side.\n *\n * Consider using rehypePrismCommon or rehypePrismGenerator to generate a plugin\n * that supports your required languages.\n */\nconst rehypePrismAll = 
rehypePrismGenerator(refractorAll)\n\nexport default rehypePrismAll\n","import { refractor as refractorCommon } from 'refractor/lib/common.js'\nimport rehypePrismGenerator from './generator.js'\n\n/**\n * Rehype prism plugin that highlights code blocks with refractor (prismjs)\n * Supported languages: https://github.com/wooorm/refractor#data\n *\n * Consider using rehypePrismGenerator to generate a plugin\n * that supports your required languages.\n */\nconst rehypePrismCommon = rehypePrismGenerator(refractorCommon)\n\nexport default rehypePrismCommon\n","const rtlRange = '\\u0591-\\u07FF\\uFB1D-\\uFDFD\\uFE70-\\uFEFC'\nconst ltrRange =\n 'A-Za-z\\u00C0-\\u00D6\\u00D8-\\u00F6' +\n '\\u00F8-\\u02B8\\u0300-\\u0590\\u0800-\\u1FFF\\u200E\\u2C00-\\uFB1C' +\n '\\uFE00-\\uFE6F\\uFEFD-\\uFFFF'\n\n/* eslint-disable no-misleading-character-class */\nconst rtl = new RegExp('^[^' + ltrRange + ']*[' + rtlRange + ']')\nconst ltr = new RegExp('^[^' + rtlRange + ']*[' + ltrRange + ']')\n/* eslint-enable no-misleading-character-class */\n\n/**\n * Detect the direction of text: left-to-right, right-to-left, or neutral\n *\n * @param {string} value\n * @returns {'rtl'|'ltr'|'neutral'}\n */\nexport function direction(value) {\n const source = String(value || '')\n return rtl.test(source) ? 'rtl' : ltr.test(source) ? 'ltr' : 'neutral'\n}\n","/**\n * @typedef {import('./types.js').Node} Node\n * @typedef {import('./types.js').Element} Element\n * @typedef {import('./types.js').Parent} Parent\n * @typedef {import('hast-util-is-element').AssertPredicate} IsElement\n */\n\nimport {convertElement} from 'hast-util-is-element'\n\n/**\n * @param {Node} node\n * @returns {node is Parent}\n */\nexport function parent(node) {\n // @ts-expect-error: hush.\n return Array.isArray(node.children)\n}\n\n/** @type {IsElement} */\n// @ts-expect-error it works.\nexport const element = convertElement()\n","/**\n * @typedef {import('./types.js').SelectState} SelectState\n * @typedef {import('./types.js').HastNode} HastNode\n * @typedef {import('./types.js').ElementChild} ElementChild\n * @typedef {import('./types.js').Direction} Direction\n * @typedef {import('unist-util-visit').Visitor} Visitor\n */\n\nimport {direction} from 'direction'\nimport {isElement} from 'hast-util-is-element'\nimport {toString} from 'hast-util-to-string'\nimport {svg} from 'property-information'\nimport {visit, EXIT, SKIP} from 'unist-util-visit'\nimport {element} from './util.js'\n\n/**\n * @param {SelectState} state\n * @param {HastNode} node\n * @returns {() => void}\n */\n// eslint-disable-next-line complexity\nexport function enterState(state, node) {\n const schema = state.schema\n const language = state.language\n const currentDirection = state.direction\n const editableOrEditingHost = state.editableOrEditingHost\n /** @type {Direction|undefined} */\n let dirInferred\n /** @type {boolean|undefined} */\n let found\n\n if (element(node) && node.properties) {\n const lang = node.properties.xmlLang || node.properties.lang\n const type = node.properties.type || 'text'\n const dir = dirProperty(node)\n\n if (lang !== undefined && lang !== null) {\n state.language = String(lang)\n found = true\n }\n\n if (schema && schema.space === 'html') {\n if (node.properties.contentEditable === 'true') {\n state.editableOrEditingHost = true\n found = true\n }\n\n if (isElement(node, 'svg')) {\n state.schema = svg\n found = true\n }\n\n // See: .\n // Explicit `[dir=rtl]`.\n if (dir === 'rtl') {\n dirInferred = dir\n } else if (\n // Explicit `[dir=ltr]`.\n dir 
=== 'ltr' ||\n // HTML with an invalid or no `[dir]`.\n (dir !== 'auto' && isElement(node, 'html')) ||\n // `input[type=tel]` with an invalid or no `[dir]`.\n (dir !== 'auto' && isElement(node, 'input') && type === 'tel')\n ) {\n dirInferred = 'ltr'\n // `[dir=auto]` or `bdi` with an invalid or no `[dir]`.\n } else if (dir === 'auto' || isElement(node, 'bdi')) {\n if (isElement(node, 'textarea')) {\n // Check contents of `