diff --git a/components/ttcmd/index.tsx b/components/ttcmd/index.tsx
index 252101e..4e97bfe 100644
--- a/components/ttcmd/index.tsx
+++ b/components/ttcmd/index.tsx
@@ -16,6 +16,7 @@ import React, {
 import { sanitize } from "isomorphic-dompurify";
 
 import { MDSkeletonLoader } from "../loader";
+import { Token } from "@/types";
 
 export const TTCMD: FC<
   { body: string; escapeTOC?: (tokens: Token[]) => boolean }
@@ -64,7 +65,9 @@ export const TTCMD: FC<
   );
 };
 
-export const TTCMDRenderer: FC<{ tokens: Token[] }> = ({ tokens }) => {
+export const TTCMDRenderer: FC<{ tokens: Token[] }> = (
+  { tokens },
+) => {
   const tada = useMemo(
     () => (
       <>
@@ -90,9 +93,7 @@ export const TTCMDRenderer: FC<{ tokens: Token[] }> = ({ tokens }) => {
 function renderer(tokens: Token[]) {
   const usedIds: string[] = [];
-  return tokens.map((t) => (
-    <div key={t.uuid}>
-      {render(t, usedIds)}
-    </div>
-  ));
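+  // Tokens now carry their own render function (registered in TokenRenderers),
+  // so this map no longer dispatches through the local render() helper.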
+  return tokens.map((t) => (
+    <div key={t.uuid}>{t.render(t)}</div>
+  ));
 }
 
 function render(token: Token, usedIds: string[]) {
@@ -175,7 +176,7 @@ function render(token: Token, usedIds: string[]) {
     }
     case "inline-code":
       return (
-        <code …>
+        <code …>
           {token.content}
         </code>
       );
diff --git a/lib/tcmd/TokenIdentifiers.ts b/lib/tcmd/TokenIdentifiers.ts
deleted file mode 100644
index c4b025a..0000000
--- a/lib/tcmd/TokenIdentifiers.ts
+++ /dev/null
@@ -1,411 +0,0 @@
-import { parse } from "path";
-
-type TokenIdentifier = {
-  rx: RegExp;
-  parse: (s: string) => Token;
-  search?: (s: string, start: number, end: number) => {
-    start: number;
-    end: number;
-    text: string;
-    lastIndex: number;
-  };
-};
-
-export const TokenIdentifiers = new Map<
-  string,
-  TokenIdentifier
->();
-
-// TokenIdentifiers.set("p", {
-//   rx: /\n{2,}((?:.|\n)*?)\n{2,}/g,
-//   parse(s) {
-//     const [_, content] = s.match(new RegExp(this.rx, ""))!;
-//
-//     return {
-//       // content,
-//       content,
-//       raw: s,
-//       metadata: {},
-//       type: "p",
-//       uuid: crypto.randomUUID(),
-//     };
-//   },
-// });
-const rendersContentOnly = true;
-const rendersChildrenOnly = true;
-TokenIdentifiers.set("grid", {
-  search(s, start, end) {
-    const rx = /(?<…
…
-  rx: /…>>/g,
-  parse(s) {
-    const [_, title, content] = s.match(new RegExp(this.rx, ""))!;
-
-    return {
-      // content,
-      content,
-      raw: s,
-      metadata: { title },
-      type: "popover",
-      uuid: crypto.randomUUID(),
-      rendersContentOnly,
-    };
-  },
-});
-TokenIdentifiers.set("accordion", {
-  rx: /\[accordion(\s.*?)?]\n+((?:.|\n)*?)\n+\[\/accordion\]/g,
-  parse(s) {
-    const [_, title, content] = s.match(new RegExp(this.rx, ""))!;
-
-    return {
-      // content,
-      content,
-      raw: s,
-      metadata: { title },
-      type: "accordion",
-      uuid: crypto.randomUUID(),
-    };
-  },
-});
-TokenIdentifiers.set("p", {
-  // rx: /(?<=\n)\n?([\s\S]*?)\n(?=\n)/g,
-  rx: /(?<=\n\n)([\s\S]*?)(?=\n\n)/g,
-  parse(s) {
-    // const [_, content] = s.match(new RegExp(this.rx, ""))!;
-
-    return {
-      // content,
-      content: s,
-      raw: s,
-      metadata: {},
-      type: "p",
-      uuid: crypto.randomUUID(),
-    };
-  },
-});
-
-TokenIdentifiers.set("hr", {
-  rx: /^-{3,}$/gm,
-  parse(s) {
-    return {
-      content: s,
-      raw: s,
-      metadata: {},
-      type: "hr",
-      uuid: crypto.randomUUID(),
-      rendersContentOnly,
-    };
-  },
-});
-
-TokenIdentifiers.set("comment", {
-  rx: /<!--[\s\S]*?-->/g,
-  parse(s) {
-    return {
-      content: "",
-      metadata: { comment: s },
-      raw: "",
-      type: "comment",
-      uuid: crypto.randomUUID(),
-      rendersContentOnly,
-    };
-  },
-});
-
-TokenIdentifiers.set("frontmatter", {
-  rx: /^---([\s\S]*?)---/g,
-  parse(s) {
-    return {
-      content: "",
-      metadata: {
-        frontmatterString: s.match(this.rx)?.at(0) || "",
-      },
-      raw: "",
-      type: "frontmatter",
-      uuid: "frontmatter",
-    };
-  },
-});
-
-function findMatchingClosedParenthesis(
-  str: string,
-  openRegex: RegExp,
-  closedRegex: RegExp,
-): number | null {
-  let openings = 0;
-  let closings = 0;
-
-  openRegex = new RegExp(openRegex, "g");
-  closedRegex = new RegExp(closedRegex, "g");
-
-  let lastOpeningSuccessIndex = 0;
-  let lastClosingSuccessIndex = 0;
-
-  do {
-    const openingMatch = openRegex.exec(str);
-    const closingMatch = closedRegex.exec(str);
-
-    if ((openingMatch && !closingMatch)) {
-      throw Error("Things have gone horribly wrong");
-    }
-
-    // if ((!openingMatch && closingMatch) || (!openingMatch && !closingMatch)) break;
-
-    if (
-      openingMatch && closingMatch && openingMatch.index < closingMatch.index
-    ) {
-      openings++;
-      lastOpeningSuccessIndex = openingMatch.index + openingMatch[0].length;
-      closedRegex.lastIndex = lastClosingSuccessIndex;
-    } else if (
-      (!openingMatch && closingMatch) ||
-      (openingMatch && closingMatch &&
-        openingMatch.index > closingMatch.index)
-    ) {
-      closings++;
-      lastClosingSuccessIndex = closingMatch.index + closingMatch[0].length;
-      openRegex.lastIndex = lastOpeningSuccessIndex;
-    } else {
-      return closingMatch?.index ?? null;
-    }
-  } while (openings > closings);
-
-  return closedRegex.lastIndex;
-}
-
-interface SearchResult {
-  start: number;
-  end: number;
-  text: string;
-  lastIndex: number;
-}
-
-function search(
-  s: string,
-  start: number,
-  end: number,
-  openRx: RegExp,
-  closeRx: RegExp,
-): SearchResult {
-  const oldEnd = end;
-
-  const newEnd = findMatchingClosedParenthesis(
-    s,
-    // s.substring(0, end - start),
-    openRx,
-    closeRx,
-  );
-
-  if (newEnd === null) throw Error("There was an issue finding a closing tag");
-
-  end = newEnd + start;
-
-  return {
-    start,
-    end,
-    text: s.substring(0, newEnd),
-    lastIndex: oldEnd === end ? end : start + s.match(openRx)![0].length,
-  };
-}
diff --git a/lib/tcmd/TokenIdentifiers.tsx b/lib/tcmd/TokenIdentifiers.tsx
index 339b325..6e82568 100644
--- a/lib/tcmd/TokenIdentifiers.tsx
+++ b/lib/tcmd/TokenIdentifiers.tsx
@@ -4,6 +4,11 @@ import {
   TokenAttributes,
   TokenRenderer,
 } from "@/types";
+import { sanitize } from "isomorphic-dompurify";
+import Link from "next/link";
+import { Fragment } from "react";
+import { Poppable } from "../poppables/components/poppable";
+import { Accordion, AccordionContent } from "../accordion";
 
 type SearchFunction = (s: string, start: number, end: number) => {
   start: number;
@@ -23,9 +28,9 @@ type TokenIdentifierMap = Map<
   TokenIdentifier
 >;
 
-export const TokenIdentifiers = new Map<
+export const TokenRenderers = new Map<
   string,
-  TokenIdentifier
+  TokenRenderer
 >();
 
 type IdentifierRegistration = (
@@ -91,6 +96,7 @@ export function buildIdentifierMap(): [
           }
         : undefined,
     });
+    TokenRenderers.set(type, renderFunction);
   }
 
   return [TokenIdentifiers, registerIdentifier];
@@ -99,6 +105,15 @@ export function buildIdentifierMap(): [
 export const buildOnlyDefaultElements = () => {
   const [TokenIdentifiers, registerIdentifier] = buildIdentifierMap();
 
+  TokenRenderers.set("text", (t) => {
+    debugger;
+    return (
+      <span …>
+        {t.content.replaceAll("\\n", "\n")}
+      </span>
+    );
+  });
+
   const rendersContentOnly = true;
   const rendersChildrenOnly = true;
 
@@ -120,8 +135,22 @@
         rendersChildrenOnly,
       };
     },
-    (t) => {
-      return <>{t.raw}</>;
+    (token) => {
+      const { content, children, metadata, uuid } = token;
+      return (
+        <div …>
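+          {/* each child token renders into its own cell of the grid */}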
+          {children?.map((c, i) => (
+            <div key={uuid + i}>
+              {c.render(c)}
+            </div>
+          ))}
+        </div>
+      );
     },
     /(?<…
…
     (s) => {
       const rx = /\[{2}(!?)\s*?\n+([\s\S]*)\n+\]{2}/;
       const match = s.match(rx);
-      if (!match) debugger;
-      const [_, isBlock, content] = match ||
-        ["", "", s];
+      const [_, isBlock, content] = match || ["", "", s];
 
       return {
         content: content.trim(),
@@ -148,8 +175,20 @@
         rendersChildrenOnly,
       };
     },
-    (t) => {
-      return <>{t.raw}</>;
+    (token) => {
+      const { children, metadata, uuid } = token;
+      return (
+        <div …>
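+          {/* card bodies are split into child tokens upstream (rendersChildrenOnly) */}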
+          {children?.map((e) => (
+            <Fragment key={e.uuid}>
+              {e.render(e)}
+            </Fragment>
+          ))}
+        </div>
+      );
     },
     /\[\[/g,
     /\]\]/g,
   );
 
@@ -165,8 +204,12 @@
       uuid: crypto.randomUUID(),
       rendersContentOnly,
     };
-  }, (t) => {
-    return <>{t.raw}</>;
+  }, (token) => {
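+    // rendersContentOnly: code blocks emit their raw content; no child tokens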
+    return (
+      <pre …>
+        {token.content}
+      </pre>
+    );
   });
 
   // list
@@ -187,8 +230,29 @@
       rendersChildrenOnly,
     };
   },
-    (t) => {
-      return <>{t.raw}</>;
+    (token) => {
+      const { children, metadata, uuid } = token;
+      return (
+        <>
+          <ul …>
+            {children?.map((c) => (
+              <Fragment key={c.uuid}>{c.render(c)}</Fragment>
+            ))}
+          </ul>
+        </>
+      );
+    },
+  );
 
@@ -209,8 +273,20 @@
       uuid: crypto.randomUUID(),
     };
   },
-    (t) => {
-      return <>{t.raw}</>;
+    (token) => {
+      const { children, metadata, uuid } = token;
+      return (
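+        // each child of the list item renders inline, in order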
+        <li …>
+          {children?.map((c) => (
+            <Fragment key={c.uuid}>
+              {(c.render(c))}
+            </Fragment>
+          ))}
+        </li>
+      );
+    },
+  );
 
@@ -226,8 +302,21 @@
       uuid: crypto.randomUUID(),
       rendersContentOnly,
     };
-  }, (t) => {
-    return <>{t.raw}</>;
+  }, (token) => {
+    return (
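+      // rendersContentOnly: token.content is emitted directly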
+      <div …>
+        {token.content}
+      </div>
+    );
   });
 
   // image
@@ -244,8 +333,23 @@
       uuid: crypto.randomUUID(),
       rendersContentOnly,
     };
-  }, (t) => {
-    return <>{t.raw}</>;
+  }, (token) => {
+    const { metadata } = token;
+    metadata.src = metadata.src as string;
+    if (metadata.src.startsWith("<")) {
+      return (
+        …
+      );
+    }
+    // eslint-disable-next-line @next/next/no-img-element
+    return <img src={metadata.src} alt={token.content} />;
+  });
 
   // anchor
@@ -271,8 +375,17 @@
       uuid: crypto.randomUUID(),
       rendersContentOnly,
     };
-  }, (t) => {
-    return <>{t.raw}</>;
+  }, (token) => {
+    const { metadata } = token;
+    return (
+      <Link href={metadata.href} …>
+        {token.content}
+      </Link>
+    );
+  });
 
   // inline-code
@@ -289,8 +402,12 @@
       uuid: crypto.randomUUID(),
       rendersContentOnly,
     };
   },
-    (t) => {
-      return <>{t.raw}</>;
+    (token) => {
+      return (
+        <code …>
+          {token.content}
+        </code>
+      );
+    },
   );
 
@@ -304,8 +421,12 @@
       uuid: crypto.randomUUID(),
       rendersContentOnly,
     };
-  }, (t) => {
-    return <>{t.raw}</>;
+  }, (token) => {
+    return (
+      <strong …>
+        {token.content}
+      </strong>
+    );
+  });
 
   // italic
@@ -318,8 +439,12 @@
       uuid: crypto.randomUUID(),
       rendersContentOnly,
     };
-  }, (t) => {
-    return <>{t.raw}</>;
+  }, (token) => {
+    return (
+      <em …>
+        {token.content}
+      </em>
+    );
+  });
 
   // popover
@@ -333,8 +458,23 @@
       uuid: crypto.randomUUID(),
       rendersContentOnly,
    };
-  }, (t) => {
-    return <>{t.raw}</>;
+  }, (token) => {
+    const { children, metadata, uuid } = token;
+    return (
+      <Poppable
+        content={children?.map((c) => (
+          <Fragment key={c.uuid}>{c.render(c)}</Fragment>
+        )) ||
+          metadata.content}
+        preferredAlign="centered"
+        preferredEdge="bottom"
+        className="cursor-pointer mx-2"
+      >
+        <span …>
+          {metadata.title}
+        </span>
+      </Poppable>
+    );
+  });
 
   registerIdentifier(
@@ -350,8 +490,23 @@
       uuid: crypto.randomUUID(),
     };
   },
-    (t) => {
-      return <>{t.raw}</>;
+    (token) => {
+      const { children, metadata, uuid } = token;
+      return (
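+        // Accordion and AccordionContent come from ../accordion (imported above)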
+        <div …>
+          <Accordion …>
+            <AccordionContent>
+              {children?.map((e, i) => (
+                <Fragment key={uuid + i}>
+                  {e.render(e)}
+                </Fragment>
+              ))}
+            </AccordionContent>
+          </Accordion>
+        </div>
+      );
+    },
+  );
 
@@ -362,8 +517,23 @@
       metadata: {},
       uuid: crypto.randomUUID(),
     };
-  }, (t) => {
-    return <>{t.raw}</>;
+  }, (token) => {
+    const { children, uuid } = token;
+
+    debugger;
+
+    return (
+      <div …>
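+        {/* paragraphs render their inline children in sequence */}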
+        {children?.map((e) => {
+          console.log(e);
+          return (
+            <Fragment key={e.uuid}>
+              {e.render(e)}
+            </Fragment>
+          );
+        })}
+      </div>
+    );
+  });
 
   registerIdentifier("hr", /^-{3,}$/gm, (s, rx) => {
@@ -374,8 +544,8 @@
       uuid: crypto.randomUUID(),
       rendersContentOnly,
     };
-  }, (t) => {
-    return <>{t.raw}</>;
+  }, (token) => {
+    return <hr />;
   });
 
   registerIdentifier("comment", /<!--[\s\S]*?-->/g, (s, rx) => {
@@ -386,8 +556,8 @@
       uuid: crypto.randomUUID(),
       rendersContentOnly,
     };
-  }, (t) => {
-    return <>{t.raw}</>;
+  }, (token) => {
+    return <></>;
   });
 
   registerIdentifier("frontmatter", /^---([\s\S]*?)---/g, (s, rx) => {
@@ -399,8 +569,8 @@
       raw: "",
       uuid: "frontmatter",
     };
-  }, (t) => {
-    return <>{t.raw}</>;
+  }, (token) => {
+    return <>{token.raw}</>;
   });
 
   registerIdentifier("table", /^\|\s[\s\S]*?\|(?=(\n\n)|$)/g, (s, rx) => {
diff --git a/lib/tcmd/index.ts b/lib/tcmd/index.ts
index 794df84..9e93631 100644
--- a/lib/tcmd/index.ts
+++ b/lib/tcmd/index.ts
@@ -1,7 +1,8 @@
 "use client";
 
+import { FrontMatter, Token, TokenMarker } from "@/types";
 import { zipArrays } from "../zip";
-import { TokenIdentifiers } from "./TokenIdentifiers";
+import { buildOnlyDefaultElements, TokenRenderers } from "./TokenIdentifiers";
 
 export const createElements = (body: string): Token[] => {
   const tokens = tokenize(body);
@@ -15,7 +16,9 @@ const tokenize = (body: string) => {
     tokenizedBody.push(thing);
   };
 
-  for (const [type, token] of TokenIdentifiers.entries()) {
+  const ti = buildOnlyDefaultElements();
+
+  for (const [type, token] of ti.entries()) {
     const rx = new RegExp(token.rx);
     let match;
     while ((match = rx.exec(body)) !== null) {
@@ -114,13 +117,13 @@ function isAcceptableChild(parentType: string, childType: string): boolean {
   return acceptableChildren ? acceptableChildren.includes(childType) : true;
 }
 
+// Occasionally a P block starts at exactly the same point as another block (a
+// side effect of the P regex needing to exclude preceding line breaks while
+// those line breaks are also its only clear delineation), so we drop those P
+// blocks here; the parent search then never has to decide whether a P block
+// is valid. Rendering is unaffected, since each block supplies its own
+// container element.
 function filterOverlappingPBlocks(blocks: TokenMarker[]): TokenMarker[] {
   return blocks.filter((block) => {
     if (block.type !== "p") {
-      return true; // Keep blocks that are not 'p' type
+      return true;
     }
 
-    // Filter out 'p' blocks that overlap with any other block
     for (const otherBlock of blocks) {
       if (
         otherBlock !== block &&
         (
           (otherBlock.start === block.start && otherBlock.end > block.end) ||
           (otherBlock.end === block.end && otherBlock.start < block.start)
         )
       ) {
-        return false; // Overlapping 'p' block found, filter it out
+        return false;
       }
     }
 
-    return true; // Keep 'p' block if it doesn't overlap with any other block
+    return true;
   });
 }
 
@@ -157,7 +160,8 @@ const contentToChildren = (token: Token) => {
       raw: c,
       type: token.rendersChildrenOnly ? "p" : "text",
       uuid: crypto.randomUUID(),
-      rendersContentOnly: true,
+      rendersContentOnly: token.rendersChildrenOnly ? false : true,
+      render: TokenRenderers.get(token.rendersChildrenOnly ? "p" : "text")!,
       children: token.rendersChildrenOnly && c.replaceAll("\n", "")
         ? [
           {
@@ -166,6 +170,8 @@ const contentToChildren = (token: Token) => {
             raw: c,
             type: "text",
             uuid: crypto.randomUUID(),
+            render: TokenRenderers.get("text")!,
+            rendersContentOnly: true,
           },
         ]
         : undefined,
@@ -238,6 +244,7 @@ function processChunks(chunks: Token[][]) {
         metadata: { initialDepth: currentChunk[0].metadata.initialDepth },
         uuid: crypto.randomUUID(),
         children: currentChunk,
+        render: TokenRenderers.get("list")!,
       });
       mergedChunks.push(currentChunk);
       break;
diff --git a/lib/tcmd/tokenizeParagraph.ts b/lib/tcmd/tokenizeParagraph.ts
index d0c1b3d..1a9f5c1 100644
--- a/lib/tcmd/tokenizeParagraph.ts
+++ b/lib/tcmd/tokenizeParagraph.ts
@@ -42,3 +42,48 @@ const paragraphTokens: {
     },
   },
 ];
+
+TokenIdentifiers.set("table", {
+  rx: /^\|\s[\s\S]*?\|(?=(\n\n)|$)/g,
+  parse(s) {
+    // split the table into sections on separator rows, then each section
+    // into rows, then each row into cells
+    const rowSections = s.split(/-/gm).map((s) =>
+      s.split("\n").map((r) => r.split(/\s?\|\s?/g))
+    );
+
+    let headerRows: string[][] = [],
+      bodyRows: string[][] = [],
+      footerRows: string[][] = [];
+
+    switch (rowSections.length) {
+      case 1:
+        bodyRows = rowSections[0];
+        break;
+      case 2:
+        headerRows = rowSections[0];
+        bodyRows = rowSections[1];
+        break;
+      case 3:
+        headerRows = rowSections[0];
+        bodyRows = rowSections[1];
+        footerRows = rowSections[2];
+        break;
+    }
+
+    const maxColumns = Math.max(
+      ...[...headerRows, ...bodyRows, ...footerRows].map((r) => r.length),
+    );
+
+    return {
+      content: s,
+      raw: s,
+      metadata: {
+        headerRows: headerRows.join(" | "),
+        bodyRows: bodyRows.join(" | "),
+        footerRows: footerRows.join(" | "),
+        columns: maxColumns.toString(),
+      },
+      type: "table",
+      uuid: crypto.randomUUID(),
+    };
+  },
+});
diff --git a/md/help articles/test.md b/md/help articles/test.md
index 9918c1d..593986b 100644
--- a/md/help articles/test.md
+++ b/md/help articles/test.md
@@ -1,40 +1,4 @@
-[][][]
-[[
-
-```
-[][][]
-
-This will make three columns, just like how this is laid out right now.
-
-Each element will get its own cell in the grid.
-
-So each of these paragraphs will end up in a separate column.
-
-/[]
-```
-
-]]
-
-[[
-```
-[][]
-
-This will make two columns
-
-[[
-Each column can use a different element
-]]
-
-/[]
-```
-]]
-
-[[
-This card will end up in the third column...
-]]
-
-[[
-... but since there isn't enough for this one, it will automatically get moved to the next row.
-]]
-
-/[]
\ No newline at end of file
+| test | Table | header |
+-------------------------
+| test | table | row |
+| look | another |
\ No newline at end of file
diff --git a/types.d.ts b/types.d.ts
index a04a730..142510f 100644
--- a/types.d.ts
+++ b/types.d.ts
@@ -1,3 +1,5 @@
+import { ReactNode } from "react";
+
 type InlineToken = {
   type:
     | "text"
@@ -39,8 +41,7 @@ type SingleLineToken = {
   cfg?: SingleLineCfg;
   uuid: string;
 };
-type Token = {
-  type: string;
+type IdentifiedToken = {
   metadata: Record<string, string>;
   children?: Token[];
   uuid: string;
@@ -50,6 +51,15 @@ type IdentifiedToken = {
   rendersContentOnly?: boolean;
 };
 
+type TokenRenderer = (t: Token) => ReactNode;
+
+type TokenAttributes = {
+  type: string;
+  render: TokenRenderer;
+};
+
+type Token = IdentifiedToken & TokenAttributes;
+
 type TokenMarker = {
   start: number;
   end: number;