ttcmd: did I.... did I win? I think I won

2024-03-12 08:33:51 -06:00
parent 3ae4dfcc82
commit 42a671d49d
5 changed files with 332 additions and 439 deletions


@@ -140,6 +140,36 @@ TokenIdentifiers.set("inline-code", {
};
},
});
TokenIdentifiers.set("bold", {
rx: /\*{2}(.*?)\*{2}/g,
parse(s) {
return {
// content: inline,
content: s.match(new RegExp(this.rx, "i"))?.at(1) ||
"Unable to parse bold",
raw: s,
metadata: {},
type: "bold",
uuid: crypto.randomUUID(),
rendersContentOnly,
};
},
});
TokenIdentifiers.set("italic", {
rx: /(?<!\*)\*([^\*]+?)\*(?!\*)/g,
parse(s) {
return {
// content: inline,
content: s.match(new RegExp(this.rx, "i"))?.at(1) ||
"Unable to parse italic",
raw: s,
metadata: {},
type: "italic",
uuid: crypto.randomUUID(),
rendersContentOnly,
};
},
});
TokenIdentifiers.set("popover", {
rx: /\^\[(.*?)\]\<<(.*?)\>>/g,
parse(s) {
@@ -172,6 +202,7 @@ TokenIdentifiers.set("accordion", {
},
});
TokenIdentifiers.set("p", {
// rx: /(?<=\n)\n?([\s\S]*?)\n(?=\n)/g,
rx: /(?<=\n\n)([\s\S]*?)(?=\n\n)/g,
parse(s) {
// const [_, content] = s.match(new RegExp(this.rx, ""))!;

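For reference, a minimal standalone sketch (not part of the commit) of what the new bold and italic patterns capture, using the same regexes and the same new RegExp(rx, "i") lookup as the parse functions above; the sample string is made up:

const boldRx = /\*{2}(.*?)\*{2}/g;
const italicRx = /(?<!\*)\*([^\*]+?)\*(?!\*)/g;

const sample = "**strong** and *soft*";
sample.match(new RegExp(boldRx, "i"))?.at(1);   // "strong"
sample.match(new RegExp(italicRx, "i"))?.at(1); // "soft"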

@@ -3,7 +3,7 @@
import { zipArrays } from "../zip";
import { TokenIdentifiers } from "./TokenIdentifiers";
export const createElements = (body: string): [Token[], number] => {
export const createElements = (body: string): [TokenMarker[], number] => {
const tabOptions = [
/^\s{2}(?!\s|\t)/m,
/^\s{4}(?!\s|\t)/m,
@@ -18,13 +18,13 @@ export const createElements = (body: string): [Token[], number] => {
}
}
const tokens = tokenize(body);
return [buildAbstractSyntaxTree(tokens, body), tabSpacing];
return [buildAbstractSyntaxTree(tokens), tabSpacing];
};
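A hedged usage sketch of the new createElements signature (the markdown source is made up, and the description of tabSpacing is an assumption based on the tabOptions patterns above):

// Not part of the commit; shows only the shape of the return value.
const [markers, tabSpacing] = createElements("\n\nHello **world**\n\n");
// markers: top-level TokenMarker objects, each carrying its parsed token and,
// via token.children, any nested tokens; tabSpacing: the detected indent width.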
const tokenize = (body: string) => {
const tokenizedBody: tokenMarker[] = [];
const tokenizedBody: TokenMarker[] = [];
const addToken = (thing: tokenMarker) => {
const addToken = (thing: TokenMarker) => {
tokenizedBody.push(thing);
};
@@ -35,145 +35,93 @@ const tokenize = (body: string) => {
const start = match.index;
const end = rx.lastIndex;
if (type !== "p" || !tokenizedBody.find((i) => i.start === start)) {
addToken({
start,
end,
type,
});
}
addToken({
start,
end,
type,
token: token.parse(match[0]),
});
}
}
return tokenizedBody;
};
export const buildAbstractSyntaxTree = (
markers: tokenMarker[],
body: string,
): Token[] => {
ensureNoOrphans(markers);
function buildAbstractSyntaxTree(markers: TokenMarker[]) {
markers.sort((a, b) => a.start - b.start);
markers.sort((a, b) => {
if (a.start === b.start) {
console.log(a, b);
if (a.type === "p") return -1;
if (b.type === "p") return 1;
}
// if (a.type === "p" && a.start === b.start) return -1;
// if (b.type === "p" && a.start === b.start) return 1;
return a.start - b.start;
});
markers = filterOverlappingPBlocks(markers);
establishClosestParent(markers);
for (const marker of markers) {
marker.token = TokenIdentifiers.get(marker.type)?.parse(
body.substring(marker.start, marker.end),
);
// if (marker.type === "p" && marker.parent && marker.parent?.type !== "p") {
// marker.parent = undefined;
// continue;
// }
if (!marker.token) {
throw new Error("Failed to parse token. Token type not found?");
if (marker.parent) {
marker.parent.token.children = marker.parent.token.children || [];
marker.parent.token.children.push(marker.token);
}
if (!marker.parent) continue;
if (!marker.parent.token) {
// debugger;
throw new Error("Failed to parse token. Child tokenized before parent");
}
marker.parent.token.children = marker.parent.token.children || [];
marker.parent.token.children.push(marker.token);
// marker.token.parent = marker.parent.token;
}
const tokens = markers.filter((m) =>
markers.filter((a) => a !== m && (a.end === m.end || a.start === m.start))
.length || m.type !== "p"
).map((t) => t.token!);
for (const token of tokens) {
contentToChildren(token);
// By starting at the end, we can always be sure that we are not filtering out children that haven't been processed yet
for (const marker of [...markers].reverse()) {
contentToChildren(marker.token);
}
return tokens.filter((t) => !t.parent);
};
return markers.filter((m) => !m.parent);
// return markers;
}
const ensureNoOrphansOld = (tokens: tokenMarker[]) => {
for (const token of tokens) {
const parentPs = tokens.filter((t) => (
t.type === "p" && (
// any p that fully encapsulates the token
(t.start <= token.start && t.end >= token.end) ||
// any p that contains the start of the token
(t.start <= token.start && t.end >= token.start) ||
// any p that contains the end of the token
(t.start <= token.end && t.end >= token.end)
)
)).sort((a, b) => (a.start - b.start));
function establishClosestParent(blocks: TokenMarker[]): void {
blocks.sort((a, b) => a.start - b.start); // Sort blocks by start position
if (parentPs.length > 1) {
parentPs[0].end = parentPs.at(-1)!.end;
const remainingParents = parentPs.slice(1);
for (const token of tokens) {
if (token.parent && remainingParents.includes(token.parent)) {
token.parent = parentPs[0];
for (let i = 0; i < blocks.length; i++) {
const block = blocks[i];
if (block.parent) continue; // Skip blocks that already have a parent
let closestParent: TokenMarker | undefined = undefined;
let minDistance = Number.MAX_SAFE_INTEGER;
// Find the closest parent block for each block
for (let j = 0; j < i; j++) {
const otherBlock = blocks[j];
if (otherBlock.end >= block.start && otherBlock.start <= block.start) {
const distance = block.start - otherBlock.start;
if (distance < minDistance) {
minDistance = distance;
closestParent = otherBlock;
}
}
if (parentPs[0] && parentPs[0].end < token.end) {
parentPs[0].end = token.end;
}
tokens = tokens.filter((t) => !remainingParents.includes(t));
}
const potentialParents = tokens.filter((t) =>
(t.start < token.start && t.end > token.end) ||
(t.type === "p" && t.start <= token.start &&
t.end >= token.end && t !== token)
).sort((a, b) => {
if (token.start - a.start < token.start - b.start) return -1;
return 1;
});
token.parent = potentialParents.find((p) => p.type !== "p") ??
potentialParents[0];
if (token.type === "grid") {
debugger;
if (closestParent) {
block.parent = closestParent; // Assign the closest parent block
}
}
};
}
const ensureNoOrphans = (tokens: tokenMarker[]) => {
ensureNoOrphansOld(tokens);
};
function filterOverlappingPBlocks(blocks: TokenMarker[]): TokenMarker[] {
return blocks.filter((block) => {
if (block.type !== "p") {
return true; // Keep blocks that are not 'p' type
}
// Filter out 'p' blocks that overlap with any other block
for (const otherBlock of blocks) {
if (
otherBlock !== block && (
otherBlock.start === block.start ||
otherBlock.end === block.end
)
) {
return false; // Overlapping 'p' block found, filter it out
}
}
return true; // Keep 'p' block if it doesn't overlap with any other block
});
}
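As a rough, self-contained illustration of the overlap rule above (hypothetical marker values; only the fields the filter reads are modeled):

// Mirrors the p-overlap filter on a minimal marker shape; not part of the commit.
type MiniMarker = { type: string; start: number; end: number };

const dropOverlappingP = (blocks: MiniMarker[]): MiniMarker[] =>
  blocks.filter((block) =>
    block.type !== "p" ||
    !blocks.some((other) =>
      other !== block &&
      (other.start === block.start || other.end === block.end)
    )
  );

dropOverlappingP([
  { type: "h1", start: 0, end: 10 },
  { type: "p", start: 0, end: 10 },  // dropped: shares both boundaries with the h1
  { type: "p", start: 12, end: 30 }, // kept: no shared boundary
]);
// => [the h1 marker, the second p marker]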
const contentToChildren = (token: Token) => {
const children: Token[] = [];
let part, content = token.content;
let content = token.content;
// for (const child of token.children || []) {
// if (!content) continue;
// [part, content] = content.split(child.raw);
// part && children.push({
// content: part.trim(),
// metadata: {},
// raw: part,
// type: "text",
// uuid: crypto.randomUUID(),
// });
// children.push(child);
// }
// if (content) {
// children.push({
// content: content.trim(),
// metadata: {},
// raw: content,
// type: "text",
// uuid: crypto.randomUUID(),
// });
// }
const splitMarker = "{{^^}}";
for (const child of token.children || []) {
content = content.replace(child.raw, splitMarker);
@@ -181,7 +129,7 @@ const contentToChildren = (token: Token) => {
token.children = zipArrays(
content.split(splitMarker).map((c): Token => ({
content: c.trim(),
content: c.replaceAll("\n", ""),
metadata: {},
raw: c,
type: "text",
@@ -191,96 +139,3 @@ const contentToChildren = (token: Token) => {
token.children || [],
).filter((c) => c.children?.length || (c.rendersContentOnly && c.content));
};
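A self-contained sketch of the {{^^}} split-marker idea in contentToChildren (the simplified token shape and the inline interleave helper are assumptions standing in for the real Token type and zipArrays):

// Simplified: real tokens also carry metadata/uuid and go through an extra filter.
type MiniToken = { type: string; content: string; raw: string; children?: MiniToken[] };

const interleave = <T>(a: T[], b: T[]): T[] =>
  a.flatMap((item, i) => (i < b.length ? [item, b[i]] : [item]));

const parent: MiniToken = {
  type: "p",
  content: "plain **bold** more",
  raw: "plain **bold** more",
  children: [{ type: "bold", content: "bold", raw: "**bold**" }],
};

const splitMarker = "{{^^}}";
let content = parent.content;
for (const child of parent.children ?? []) {
  content = content.replace(child.raw, splitMarker);
}
parent.children = interleave(
  content.split(splitMarker).map((c): MiniToken => ({
    type: "text",
    content: c.trim(),
    raw: c,
  })),
  parent.children ?? [],
);
// parent.children is now: text("plain"), bold("bold"), text("more")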
// const tokenize = (body: string) => {
// body = body.replace(/\n?<!--(.*?)-->\n?/gs, "");
// const paragraphs = body.split("\n\n");
// const blockTokens: BlockToken[] = [];
// const paragraphTokens: ParagraphToken[] = [];
// for (const paragraph of paragraphs) {
// const block = tokenizeBlock(paragraph);
// let openBT = blockTokens.findLast((bt) => !bt.closed);
// if (block) {
// if (typeof block === "string") {
// if (openBT) {
// openBT.closed = true;
// }
// continue;
// }
// if (openBT) {
// openBT.children.push(block);
// block.parent = openBT.type;
// }
// blockTokens.push(block);
// continue;
// }
// if (!openBT) {
// openBT = {
// children: [],
// closed: false,
// metadata: {},
// type: "block",
// uuid: crypto.randomUUID(),
// };
// blockTokens.push(openBT);
// }
// const multiline = tokenizeParagraph(paragraph);
// let openP = paragraphTokens.findLast((p) => !p.closed);
// if (multiline) {
// if (Array.isArray(multiline)) {
// if (openP) {
// openP.closed = true;
// openP.content = openP.content.concat(multiline);
// }
// continue;
// }
// openBT.children.push(multiline);
// paragraphTokens.push(multiline);
// continue;
// } else if (openP && !openP?.allowsInline) {
// openP.content.push({
// line: paragraph,
// raw: paragraph,
// type: "text",
// uuid: crypto.randomUUID(),
// });
// }
// // I don't think the closed check is necessary, but just in case
// // if (openP && !openP.closed && !openP.allowsInline) continue;
// if (!openP) {
// openP = {
// allowsInline: true,
// closed: true,
// content: [],
// metadata: {},
// type: "p",
// uuid: crypto.randomUUID(),
// };
// openBT.children.push(openP);
// paragraphTokens.push(openP);
// }
// const lines = paragraph.split("\n");
// let previous;
// for (const line of lines) {
// const singleLine = tokenizeLine(line, previous);
// if (singleLine) {
// if (singleLine !== previous) {
// openP.content.push(singleLine);
// }
// previous = singleLine;
// }
// }
// }
// return blockTokens.filter((b) => !b.parent);
// };