diff --git a/components/ttcmd/index.tsx b/components/ttcmd/index.tsx
index 4e97bfe..08b3dad 100644
--- a/components/ttcmd/index.tsx
+++ b/components/ttcmd/index.tsx
@@ -1,20 +1,8 @@
 "use client";
 
-import { Accordion, AccordionContent } from "@/lib/accordion";
-import { Poppable } from "@/lib/poppables/components/poppable";
 import { createElements } from "@/lib/tcmd";
-import Link from "next/link";
-import React, {
-  FC,
-  Fragment,
-  Suspense,
-  use,
-  useEffect,
-  useMemo,
-  useState,
-} from "react";
+import React, { FC, Suspense, useEffect, useMemo, useState } from "react";
 
-import { sanitize } from "isomorphic-dompurify";
 import { MDSkeletonLoader } from "../loader";
 import { Token } from "@/types";
 
@@ -95,203 +83,3 @@ function renderer(tokens: Token[]) {
   const usedIds: string[] = [];
   return tokens.map((t) => <div key={t.uuid}>{t.render(t)}</div>);
 }
-
-function render(token: Token, usedIds: string[]) {
-  switch (token.type) {
-    case "heading":
-      return (
-        <h1 id={generateId(token.content, usedIds)}>
-          {token.content}
-        </h1>
-      );
-    case "grid":
-      return (
-        <div className={`grid grid-cols-${token.metadata.columns}`}>
-          {token.children?.map((c, i) => (
-            <div key={i}>
-              {render(c, usedIds)}
-            </div>
-          ))}
-        </div>
-      );
-    case "code":
-      return (
-        <pre>
-          {token.content}
-        </pre>
-      );
-    case "card":
-      return (
-        <div>
-          {token.children?.map((e) => (
-            <Fragment key={e.uuid}>
-              {render(e, usedIds)}
-            </Fragment>
-          ))}
-        </div>
-      );
-    case "anchor":
-      return (
-        <Link href={token.metadata.href as string}>
-          {token.content}
-        </Link>
-      );
-    case "image": {
-      token.metadata.src = token.metadata.src as string;
-      if (token.metadata.src.startsWith("<svg")) {
-        return (
-          <span
-            dangerouslySetInnerHTML={{ __html: sanitize(token.metadata.src) }}
-          >
-          </span>
-        );
-      }
-      // eslint-disable-next-line @next/next/no-img-element
-      return <img src={token.metadata.src} alt={token.content} />;
-    }
-    case "inline-code":
-      return (
-        <code>
-          {token.content}
-        </code>
-      );
-    case "popover":
-      return (
-        <Poppable
-          content={token.children?.map((c) => render(c, usedIds)) ||
-            token.content}
-          preferredAlign="centered"
-          preferredEdge="bottom"
-          className="cursor-pointer mx-2"
-        >
-          <span>
-            {token.metadata.title}
-          </span>
-        </Poppable>
-      );
-    case "text":
-      return (
-        <span>
-          {token.content.replaceAll("\\n", "\n")}
-        </span>
-      );
-    case "p":
-      return (
-        <p>
-          {token.children?.map((e, i) => (
-            <Fragment key={i}>
-              {render(e, usedIds)}
-            </Fragment>
-          ))}
-        </p>
-      );
-    case "accordion":
-      return (
-        <div>
-          <Accordion title={token.metadata.title}>
-            <AccordionContent>
-              {token.children?.map((e, i) => (
-                <Fragment key={i}>
-                  {render(e, usedIds)}
-                </Fragment>
-              ))}
-            </AccordionContent>
-          </Accordion>
-        </div>
-      );
-    case "bold":
-      return (
-        <b>
-          {token.content}
-        </b>
-      );
-    case "italic":
-      return (
-        <i>
-          {token.content}
-        </i>
-      );
-    case "list":
-      const items = token.children || [];
-      return (
-        <>
-          {items.map((c) => render(c, usedIds))}
-        </>
-      );
-    case "list-item":
-      // This probably doesn't need to exist, but I'm leaving it anyway
-      return (
-        <li>
-          {token.children?.map((c) => render(c, usedIds))}
-        </li>
-      );
-    case "hr":
-      return <hr />;
-    case "comment":
-      return <></>;
-    default:
-      return (
-        <div>
-          Block or paragraph missing implementation: {token.type}
-        </div>
-      );
-  }
-}
-
-function generateId(t: string, usedIds: string[]) {
-  let id = t.toLowerCase().replace(/[^a-z\s]/ig, "").trim().replaceAll(
-    " ",
-    "-",
-  );
-  let idNum = 1;
-  while (usedIds.includes(id)) {
-    id = id.replace(/-[0-9]+$/g, "");
-    id += "-" + idNum;
-    idNum++;
-  }
-  return id;
-}
diff --git a/lib/tcmd/tokenizeBlock.ts b/lib/tcmd/tokenizeBlock.ts
deleted file mode 100644
index 6495818..0000000
--- a/lib/tcmd/tokenizeBlock.ts
+++ /dev/null
@@ -1,60 +0,0 @@
-export const tokenizeBlock = (paragraph: string) => {
-  for (const block of blockTokens) {
-    const openTest = block.rx.test(paragraph),
-      closeTest = block.closeRx.test(paragraph);
-
-    if (closeTest) return block.create(paragraph).type;
-    if (!openTest) continue;
-    return block.create(paragraph);
-  }
-};
-
-const blockTokens: {
-  rx: RegExp;
-  closeRx: RegExp;
-  create: (line: string) => BlockToken;
-}[] = [
-  // this indicates that this is a grid block, all paragraphs within this block will be placed in a number of columns that match the number of sets of brackets are in this line
-  {
-    rx: /^(\[\]){2,}/g,
-    closeRx: /\/\[\]/,
-    create(line) {
-      return {
-        type: "grid",
-        metadata: {
-          columns: line.match(/\[\]/g)?.length,
-        },
-        children: [],
-        closed: false,
-        uuid: crypto.randomUUID(),
-      };
-    },
-  },
-  {
-    rx: /^(\[\[)/,
-    closeRx: /\]\]/,
-    create() {
-      return {
-        type: "card",
-        metadata: {},
-        children: [],
-        closed: false,
-        uuid: crypto.randomUUID(),
-      };
-    },
-  },
-  {
-    rx: /^\[accordion\s?([a-z\s]*)\]/i,
-    closeRx: /^\[\/accordion\]/,
-    create(line) {
-      const title = line.match(this.rx)?.at(1);
-      return {
-        type: "accordion",
-        metadata: { title },
-        children: [],
-        closed: false,
-        uuid: crypto.randomUUID(),
-      };
-    },
-  },
-];
diff --git a/lib/tcmd/tokenizeInline.ts b/lib/tcmd/tokenizeInline.ts
deleted file mode 100644
index 0ee2f38..0000000
--- a/lib/tcmd/tokenizeInline.ts
+++ /dev/null
@@ -1,182 +0,0 @@
-import { zipArrays } from "../zip";
-
-export const tokenizeInline = (line: string) => {
-  line = line.trim();
-  const originalLine = line;
-  const insertMarker = "\u{03A9}";
-  const tokens: InlineTokenInsert[] = [];
-
-  for (const token of inlineTokens) {
-    const rx = new RegExp(token.rx);
-    let match;
-    while ((match = rx.exec(line)) !== null) {
-      const tokenStart = match.index;
-      const tokenEnd = match.index + match[0].length;
-
-      const wrappingToken = tokens.find((t) =>
-        t.start < tokenStart && t.end > tokenStart
-      );
-      if (wrappingToken) continue;
-
-      let wrappedToken;
-      while (
-        (wrappedToken = tokens.findIndex((t) =>
-          t.start > tokenStart && t.start < tokenEnd
-        )) !== -1
-      ) {
-        tokens.splice(wrappedToken, 1);
-      }
-
-      token.create(match, tokenStart, tokenEnd, tokens);
-    }
-  }
-
-  if (tokens.length) {
-    for (const insert of tokens) {
-      line = line.slice(0, insert.start) +
-        "".padStart(insert.end - insert.start, insertMarker) +
-        line.slice(insert.end, line.length);
-    }
-
-    return zipArrays(
-      line.split(new RegExp(insertMarker + "{2,}")).map((t): InlineToken => ({
-        content: t,
-        type: "text",
-        uuid: crypto.randomUUID(),
-      })),
-      tokens,
-    ).filter((t) => t.content);
-  }
-  return originalLine;
-};
-
-const joiner = "<><>";
-export const inlineTokens: {
-  rx: RegExp;
-  create: (
-    content: RegExpExecArray,
-    start: number,
-    end: number,
-    tokens: InlineTokenInsert[],
-  ) => void;
-  replace: (line: string) => string;
-}[] = [
-  {
-    rx: /\s?`(.*?)`[^a-z0-9`]\s?/gi,
-    create(content, start, end, tokens) {
-      tokens.push({
-        content: this.replace(content[0]),
-        type: "inline-code",
-        end,
-        start,
-        uuid: crypto.randomUUID(),
-      });
-    },
-    replace(l) {
-      return l.replace(this.rx, (...all) => all[1]);
-    },
-  },
-  {
-    rx: /(\*\*)(.*?)(\*\*)/g,
-    create(content, start, end, tokens) {
-      tokens.push({
-        content: this.replace(content[0]),
-        type: "bold",
-        end,
-        start,
-        uuid: crypto.randomUUID(),
-      });
-    },
-    replace(l) {
-      return l.replace(this.rx, (_, __, val) => val);
-    },
-  },
-  {
-    rx: /(?<!\*)\*([^*]+)\*(?!\*)/g,
-    create(content, start, end, tokens) {
-      tokens.push({
-        content: this.replace(content[0]),
-        type: "italic",
-        end,
-        start,
-        uuid: crypto.randomUUID(),
-      });
-    },
-    replace(l) {
-      return l.replace(this.rx, (...all) => all[1]);
-    },
-  },
-  {
-    rx: /(?<!!)\[(.*?)\]\((.*?)\)/g,
-    create(content, start, end, tokens) {
-      let [_, label, href] = content;
-      const anchorStyles = [
-        { rx: /^button\s/i, type: "button" },
-      ];
-
-      const style = anchorStyles.find((s) => s.rx.test(label));
-
-      if (style) label = label.replace(style.rx, "");
-
-      tokens.push({
-        content: label,
-        type: "anchor",
-        data: {
-          href,
-          style,
-        },
-        start,
-        end,
-        uuid: crypto.randomUUID(),
-      });
-    },
-    replace(l) {
-      return l.replace(this.rx, (_, label, href) => [label, href].join(joiner));
-      // return l
-    },
-  },
-  {
-    rx: /!\[(.*?)\]\((.*?)\)/g,
-    create(content, start, end, tokens) {
-      const [_, alt, src] = content;
-      tokens.push({
-        content: alt,
-        end,
-        start,
-        type: "image",
-        data: {
-          src,
-        },
-        uuid: crypto.randomUUID(),
-      });
-    },
-    replace(l) {
-      return l;
-    },
-  },
-  {
-    rx: /\^\[(.*?)\]<<(.*?)>>/gm,
-    create(content, start, end, tokens) {
-      const [_, text, popover] = content;
-      tokens.push({
-        content: text,
-        end,
-        start,
-        type: "popover",
-        data: {
-          popover: tokenizeInline(popover),
-        },
-        uuid: crypto.randomUUID(),
-      });
-    },
-    replace(l) {
-      return l;
-    },
-  },
-];
diff --git a/lib/tcmd/tokenizeLine.ts b/lib/tcmd/tokenizeLine.ts
deleted file mode 100644
index af73f7e..0000000
--- a/lib/tcmd/tokenizeLine.ts
+++ /dev/null
@@ -1,103 +0,0 @@
-import { tokenizeInline } from "./tokenizeInline";
-
-export const tokenizeLine = (
-  line: string,
-  previous?: SingleLineToken,
-): SingleLineToken => {
-  for (const token of singleLineTokens) {
-    if (!token.rx.test(line)) continue;
-
-    const t = token.create(line);
-
-    if (t.type === "h2") {
-    }
-
-    t.line = tokenizeInline(line.replace(token.replaceRx, ""));
-    return t;
-  }
-
-  if (previous?.mends) {
-    previous.raw += " " + line;
-    previous.line = tokenizeInline(previous.raw.replace(previous.cfg!.rx, ""));
-    return previous;
-  }
-
-  return {
-    line: tokenizeInline(line),
-    type: "text",
-    raw: line,
-    uuid: crypto.randomUUID(),
-  };
-};
-
-export const singleLineTokens: SingleLineCfg[] = [
-  {
-    rx: /^#\s/,
-    create(line) {
-      return ({
-        type: "h1",
-        line,
-        raw: line,
-        cfg: this,
-        uuid: crypto.randomUUID(),
-      });
-    },
-    replaceRx: /^#\s/,
-  },
-  {
-    rx: /^##\s/,
-    create(line) {
-      return ({
-        type: "h2",
-        line,
-        raw: line,
-        cfg: this,
-        uuid: crypto.randomUUID(),
-      });
-    },
-    replaceRx: /^##\s/,
-  },
-  {
-    rx: /^###\s/,
-    create(line) {
-      return ({
-        type: "h3",
-        line,
-        raw: line,
-        cfg: this,
-        uuid: crypto.randomUUID(),
-      });
-    },
-    replaceRx: /^###\s/,
-  },
-  {
-    rx: /^-\s/,
-    create(line) {
-      return ({
-        type: "list1",
-        line,
-        raw: line,
-        mends: true,
-        cfg: this,
-        uuid: crypto.randomUUID(),
-      });
-    },
-    replaceRx: /^-\s/,
-    shouldMendNextLine: true,
-  },
-  {
-    rx: /^[\t\s]{2}-\s/,
-    create(line) {
-      return ({
-        type: "list2",
-        line,
-        raw: line,
-        mends: true,
-        cfg: this,
-        uuid: crypto.randomUUID(),
-      });
-    },
-    replaceRx: /^[\t\s]{2}-\s/,
-    shouldMendNextLine: true,
-  },
-];
diff --git a/lib/tcmd/tokenizeParagraph.ts b/lib/tcmd/tokenizeParagraph.ts
deleted file mode 100644
index 1a9f5c1..0000000
--- a/lib/tcmd/tokenizeParagraph.ts
+++ /dev/null
@@ -1,89 +0,0 @@
-export const tokenizeParagraph = (paragraph: string) => {
-  for (const pgraph of paragraphTokens) {
-    const openTest = pgraph.rx.test(paragraph),
-      closeTest = pgraph.closeRx.test(paragraph);
-    if (openTest && closeTest) {
-      const p = pgraph.create(paragraph);
-      p.closed = true;
-      return p;
-    }
-    if (closeTest) return pgraph.create(paragraph).content;
-
-    if (openTest) {
-      return pgraph.create(paragraph);
-    }
-  }
-};
-
-const paragraphTokens: {
-  rx: RegExp;
-  closeRx: RegExp;
-  create: (line: string) => ParagraphToken;
-}[] = [
-  {
-    rx: /\n```/g,
-    closeRx: /\n```/g,
-    create(line) {
-      return {
-        type: "code",
-        metadata: {
-          // language: line.split("\n").at(0)!.replace(this.rx, ""),
-        },
-        closed: false,
-        content: [{
-          line: line.match(/```(.*?)\n```/g)?.at(1) || line,
-          type: "text",
-          raw: line,
-          uuid: crypto.randomUUID(),
-        }],
-        allowsInline: false,
-        uuid: crypto.randomUUID(),
-      };
-    },
-  },
-];
-
-TokenIdentifiers.set("table", {
-  rx: /^\|\s[\s\S]*?\|(?=(\n\n)|$)/g,
-  parse(s) {
-    const rowSections = s.split(/-/gm).map((s) =>
-      s.split("\n").map((r) => r.split(/\s?\|\s?/g))
-    );
-
-    let headerRows: string[][] = [],
-      bodyRows: string[][] = [],
-      footerRows: string[][] = [];
-
-    switch (rowSections.length) {
-      case 1:
-        bodyRows = rowSections[0];
-        break;
-      case 2:
-        headerRows = rowSections[0];
-        bodyRows = rowSections[1];
-        break;
-      case 3:
-        headerRows = rowSections[0];
-        bodyRows = rowSections[1];
-        footerRows = rowSections[3];
-        break;
-    }
-
-    const maxColumns = Math.max(
-      ...[...headerRows, ...bodyRows, ...footerRows].map((r) => r.length),
-    );
-
-    return {
-      content: s,
-      raw: s,
-      metadata: {
-        headerRows: headerRows.join(" | "),
-        bodyRows: bodyRows.join(" | "),
-        footerRows: footerRows.join(" | "),
-        columns: maxColumns.toString(),
-      },
-      type: "table",
-      uuid: crypto.randomUUID(),
-    };
-  },
-});