ttcMD: Removes unused token logic

2024-03-15 08:25:26 -06:00
parent a2f50b1fe9
commit 1cc514d83a
5 changed files with 1 addition and 647 deletions

View File

@@ -1,60 +0,0 @@
export const tokenizeBlock = (paragraph: string) => {
for (const block of blockTokens) {
const openTest = block.rx.test(paragraph),
closeTest = block.closeRx.test(paragraph);
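// a close match returns only the type string, signalling the caller to close the open block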
if (closeTest) return block.create(paragraph).type;
if (!openTest) continue;
return block.create(paragraph);
}
};
const blockTokens: {
rx: RegExp;
closeRx: RegExp;
create: (line: string) => BlockToken;
}[] = [
// grid block: every paragraph inside it is laid out into as many columns as there are bracket pairs on this line
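// e.g. "[][][]" opens a three-column grid; a "/[]" line closes it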
{
// no /g flag: .test() on a global regex keeps lastIndex state between calls
rx: /^(\[\]){2,}/,
closeRx: /\/\[\]/,
create(line) {
return {
type: "grid",
metadata: {
columns: line.match(/\[\]/g)?.length,
},
children: [],
closed: false,
uuid: crypto.randomUUID(),
};
},
},
{
rx: /^(\[\[)/,
closeRx: /\]\]/,
create() {
return {
type: "card",
metadata: {},
children: [],
closed: false,
uuid: crypto.randomUUID(),
};
},
},
{
rx: /^\[accordion\s?([a-z\s]*)\]/i,
closeRx: /^\[\/accordion\]/,
create(line) {
const title = line.match(this.rx)?.at(1);
return {
type: "accordion",
metadata: { title },
children: [],
closed: false,
uuid: crypto.randomUUID(),
};
},
},
];
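A rough sketch of the behavior this file provided, traced from the code above (token fields abbreviated, uuid omitted):

tokenizeBlock("[][][]");
// → { type: "grid", metadata: { columns: 3 }, children: [], closed: false, ... }
tokenizeBlock("/[]");
// → "grid" — the close branch returns only the type string
tokenizeBlock("plain text");
// → undefined: no block token matched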

View File

@@ -1,182 +0,0 @@
import { zipArrays } from "../zip";
export const tokenizeInline = (line: string) => {
line = line.trim();
const originalLine = line;
const insertMarker = "\u{03A9}";
const tokens: InlineTokenInsert[] = [];
for (const token of inlineTokens) {
const rx = new RegExp(token.rx); // fresh copy so the shared regex's lastIndex starts at 0
let match;
while ((match = rx.exec(line)) !== null) {
const tokenStart = match.index;
const tokenEnd = match.index + match[0].length;
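// skip this match if it starts inside a token that was already captured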
const wrappingToken = tokens.find((t) =>
t.start < tokenStart && t.end > tokenStart
);
if (wrappingToken) continue;
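// a wider match swallows narrower tokens that start inside it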
let wrappedToken;
while (
(wrappedToken = tokens.findIndex((t) =>
t.start > tokenStart && t.start < tokenEnd
)) !== -1
) {
tokens.splice(wrappedToken, 1);
}
token.create(match, tokenStart, tokenEnd, tokens);
}
}
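// overwrite each captured span with a run of marker chars, then split the line on
// those runs and interleave the remaining text pieces with the captured tokens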
if (tokens.length) {
for (const insert of tokens) {
line = line.slice(0, insert.start) +
"".padStart(insert.end - insert.start, insertMarker) +
line.slice(insert.end, line.length);
}
return zipArrays(
line.split(new RegExp(insertMarker + "{2,}")).map((t): InlineToken => ({
content: t,
type: "text",
uuid: crypto.randomUUID(),
})),
tokens,
).filter((t) => t.content);
}
return originalLine;
};
const joiner = "<><>";
export const inlineTokens: {
rx: RegExp;
create: (
content: RegExpExecArray,
start: number,
end: number,
tokens: InlineTokenInsert[],
) => void;
replace: (line: string) => string;
}[] = [
{
rx: /\s?`(.*?)`[^a-z0-9`]\s?/gi,
create(content, start, end, tokens) {
tokens.push({
content: this.replace(content[0]),
type: "inline-code",
end,
start,
uuid: crypto.randomUUID(),
});
},
replace(l) {
return l.replace(this.rx, (...all) => all[1]);
},
},
{
rx: /(\*\*)(.*?)(\*\*)/g,
create(content, start, end, tokens) {
tokens.push({
content: this.replace(content[0]),
type: "bold",
end,
start,
uuid: crypto.randomUUID(),
});
},
replace(l) {
return l.replace(this.rx, (_, __, val) => val);
},
},
{
rx: /(?<!\*)\*([^\*]+?)\*(?!\*)/g,
create(content, start, end, tokens) {
tokens.push({
content: this.replace(content[0]),
type: "italic",
end,
start,
uuid: crypto.randomUUID(),
});
},
replace(l) {
return l.replace(this.rx, (...all) => all[1]);
},
},
{
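// negative lookbehind: skip "[...](...)" when preceded by !, ?, | or ^ (e.g. image or popover syntax)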
rx: /(?<![\!\?|^])\[(.*?)\]\((.*?)\)/g,
create(content, start, end, tokens) {
let [_, label, href] = content;
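// hypothetical example: "[```button Buy now](/checkout)" → label "Buy now" with btn-primary styling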
const style = [
{
classes: "btn-primary inline-block",
rx: /^```button\s/,
},
{
classes: "btn-secondary inline-block uppercase",
rx: /^```cta\s/,
},
].find((s) => s.rx.test(label));
if (style) label = label.replace(style.rx, "");
tokens.push({
content: label,
type: "anchor",
data: {
href,
style,
},
start,
end,
uuid: crypto.randomUUID(),
});
},
replace(l) {
return l.replace(this.rx, (_, label, href) => [label, href].join(joiner));
},
},
{
rx: /!\[(.*?)\]\((.*?)\)/g,
create(content, start, end, tokens) {
const [_, alt, src] = content;
tokens.push({
content: alt,
end,
start,
type: "image",
data: {
src,
},
uuid: crypto.randomUUID(),
});
},
replace(l) {
return l;
},
},
{
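// custom syntax: ^[visible text]<<popover content, itself run through tokenizeInline>>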
rx: /\^\[(.*?)\]<<(.*?)>>/gm,
create(content, start, end, tokens) {
const [_, text, popover] = content;
tokens.push({
content: text,
end,
start,
type: "popover",
data: {
popover: tokenizeInline(popover),
},
uuid: crypto.randomUUID(),
});
},
replace(l) {
return l;
},
},
];
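For reference, a rough trace of the inline pass (uuid fields omitted; the interleaving assumes zipArrays alternates the two arrays):

tokenizeInline("some **bold** text");
// → [
//   { content: "some ", type: "text" },
//   { content: "bold", type: "bold", start: 5, end: 13 },
//   { content: " text", type: "text" },
// ]
tokenizeInline("no markup here");
// → "no markup here" — a line with no matches comes back as the original string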

View File

@@ -1,103 +0,0 @@
import { tokenizeInline } from "./tokenizeInline";
export const tokenizeLine = (
line: string,
previous?: SingleLineToken,
): SingleLineToken => {
for (const token of singleLineTokens) {
if (!token.rx.test(line)) continue;
const t = token.create(line);
t.line = tokenizeInline(line.replace(token.replaceRx, ""));
return t;
}
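// nothing matched: a "mending" previous token (list items set mends) absorbs this line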
if (previous?.mends) {
previous.raw += " " + line;
previous.line = tokenizeInline(previous.raw.replace(previous.cfg!.rx, ""));
return previous;
}
return {
line: tokenizeInline(line),
type: "text",
raw: line,
uuid: crypto.randomUUID(),
};
};
export const singleLineTokens: SingleLineCfg[] = [
{
rx: /^#\s/,
create(line) {
return ({
type: "h1",
line,
raw: line,
cfg: this,
uuid: crypto.randomUUID(),
});
},
replaceRx: /^#\s/,
},
{
rx: /^##\s/,
create(line) {
return ({
type: "h2",
line,
raw: line,
cfg: this,
uuid: crypto.randomUUID(),
});
},
replaceRx: /^##\s/,
},
{
rx: /^###\s/,
create(line) {
return ({
type: "h3",
line,
raw: line,
cfg: this,
uuid: crypto.randomUUID(),
});
},
replaceRx: /^###\s/,
},
{
rx: /^-\s/,
create(line) {
return ({
type: "list1",
line,
raw: line,
mends: true,
cfg: this,
uuid: crypto.randomUUID(),
});
},
replaceRx: /^-\s/,
shouldMendNextLine: true,
},
{
rx: /^\s{2}-\s/,
create(line) {
return ({
type: "list2",
line,
raw: line,
mends: true,
cfg: this,
uuid: crypto.randomUUID(),
});
},
replaceRx: /^\s{2}-\s/,
shouldMendNextLine: true,
},
];
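A short trace of the line pass, including the list "mending" path:

tokenizeLine("## Pricing");
// → { type: "h2", raw: "## Pricing", line: "Pricing", ... }
const item = tokenizeLine("- first half");
tokenizeLine("of the item", item);
// → the same token, with raw now "- first half of the item" and line re-tokenized,
//   because list tokens set mends: true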

View File

@@ -1,89 +0,0 @@
export const tokenizeParagraph = (paragraph: string) => {
for (const pgraph of paragraphTokens) {
const openTest = pgraph.rx.test(paragraph),
closeTest = pgraph.closeRx.test(paragraph);
if (openTest && closeTest) {
const p = pgraph.create(paragraph);
p.closed = true;
return p;
}
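// a bare close fence returns only the content, signalling the open paragraph to close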
if (closeTest) return pgraph.create(paragraph).content;
if (openTest) {
return pgraph.create(paragraph);
}
}
};
const paragraphTokens: {
rx: RegExp;
closeRx: RegExp;
create: (line: string) => ParagraphToken;
}[] = [
{
rx: /\n```/,
closeRx: /\n```/, // the same fence marker opens and closes; no /g so .test() stays stateless
create(line) {
return {
type: "code",
metadata: {
// language: line.split("\n").at(0)!.replace(this.rx, ""),
},
closed: false,
content: [{
line: line.match(/```.*\n([\s\S]*?)\n```/)?.at(1) || line, // capture group 1 is the fenced body
type: "text",
raw: line,
uuid: crypto.randomUUID(),
}],
allowsInline: false,
uuid: crypto.randomUUID(),
};
},
},
];
TokenIdentifiers.set("table", {
rx: /^\|\s[\s\S]*?\|(?=(\n\n)|$)/g,
parse(s) {
// assumed dialect: dash-only separator lines divide header / body / footer sections
const rowSections = s.split(/^-+$/gm).map((s) =>
s.split("\n").map((r) => r.split(/\s?\|\s?/g))
);
let headerRows: string[][] = [],
bodyRows: string[][] = [],
footerRows: string[][] = [];
switch (rowSections.length) {
case 1:
bodyRows = rowSections[0];
break;
case 2:
headerRows = rowSections[0];
bodyRows = rowSections[1];
break;
case 3:
headerRows = rowSections[0];
bodyRows = rowSections[1];
footerRows = rowSections[2];
break;
}
const maxColumns = Math.max(
...[...headerRows, ...bodyRows, ...footerRows].map((r) => r.length),
);
return {
content: s,
raw: s,
metadata: {
headerRows: headerRows.join(" | "),
bodyRows: bodyRows.join(" | "),
footerRows: footerRows.join(" | "),
columns: maxColumns.toString(),
},
type: "table",
uuid: crypto.randomUUID(),
};
},
});
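Given the section split above, the table dialect this parser appears to expect looks like this (assumed example): one dash line separates header from body, a second marks off the footer, and three sections fill headerRows, bodyRows, and footerRows in order.

| Name | Qty |
-
| Apples | 4 |
| Pears | 2 |
-
| Total | 6 |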