initial public commit

This commit is contained in:
2026-02-22 21:26:15 -05:00
commit 9dbf7ae796
100 changed files with 18823 additions and 0 deletions

1
dist-node/constants.d.ts vendored Normal file
View File

@@ -0,0 +1 @@
/** CommonJS-style `__dirname` shim — NOTE(review): the actual value is assigned in constants.js, which is not visible here; confirm it points at the package root. */
export declare const __dirname: string;

2
dist-node/hashTwt.d.ts vendored Normal file
View File

@@ -0,0 +1,2 @@
import { Twt } from './types.ts';
/** Compute the 7-character lowercase hash identifying a twt (blake2b-256 → base32, last 7 chars). */
export default function hashTwt(twt: Twt): string;

41
dist-node/hashTwt.js Normal file
View File

@@ -0,0 +1,41 @@
import { Buffer } from "vite-plugin-node-polyfills/shims/buffer";
import { blake2b } from "@exodus/blakejs";
import { base32Encode } from "./utils.js";
// Expose the polyfilled Buffer on the global scope — presumably for
// dependencies (e.g. blakejs) that read Node's `Buffer` global when running
// in the browser. NOTE(review): confirm which dependency requires this.
globalThis.Buffer = Buffer;
const dateRegex = /^(\d{4})-(\d{2})-(\d{2})([tT ])(\d{2}):(\d{2}):(\d{2})\.?(\d{3})?(?:(?:([+-]\d{2}):?(\d{2}))|Z)?$/;
/**
 * Normalize a twtxt timestamp into a canonical RFC 3339 string.
 *
 * Zero-pads every component, drops fractional seconds (group 8), and rewrites
 * a missing or ±00:00 offset as "Z" (UTC). The date/time separator character
 * ("T", "t" or " ") is preserved as written, since the result feeds hashTwt.
 *
 * @param {string} date timestamp matching `dateRegex`
 * @returns {string} normalized RFC 3339 timestamp
 * @throws {Error} when `date` does not match `dateRegex`
 */
const formatRFC3339 = (date) => {
  const pad = (num = 0) => `${+num < 10 ? 0 : ""}${+num}`;
  const padYear = (num = 0) => `${+num < 1e3 ? 0 : ""}${+num < 100 ? 0 : ""}${+num < 10 ? 0 : ""}${+num}`;
  const m = dateRegex.exec(date);
  // Previously a non-matching input fell through and produced a string with
  // literal "undefined" fragments that was then silently hashed. Fail loudly
  // instead (message matches the style used by parseTwtxt).
  if (m === null) {
    throw new Error(`Date is invalid: ${date}`);
  }
  // An absent offset means the input ended in "Z" or had no zone: use UTC.
  if (m[9] === void 0) {
    m[9] = "+00";
  }
  if (m[10] === void 0) {
    m[10] = "00";
  }
  const offset = `${m[9]}:${m[10]}`.replace(/[+-]?00:00$/, "Z");
  return [
    padYear(m[1]),
    "-",
    pad(m[2]),
    "-",
    pad(m[3]),
    m[4],
    pad(m[5]),
    ":",
    pad(m[6]),
    ":",
    pad(m[7]),
    // milliseconds (m[8]) are intentionally ignored
    offset
  ].join("");
};
/**
 * Compute the twt hash: blake2b-256 over "url\ncreated\ncontent", base32
 * encoded, lowercased, truncated to the last 7 characters.
 *
 * @param twt object with `url`, `created` and `content`
 * @returns 7-character lowercase hash
 */
function hashTwt(twt) {
  const created = formatRFC3339(twt.created);
  const material = [twt.url, created, twt.content].join("\n");
  const digest = blake2b(material, void 0, 32);
  return base32Encode(digest).toLowerCase().slice(-7);
}
export {
  hashTwt as default
};
//# sourceMappingURL=hashTwt.js.map

1
dist-node/hashTwt.js.map Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"hashTwt.js","sources":["../src/hashTwt.ts"],"sourcesContent":["import { Buffer } from \"buffer\";\nglobalThis.Buffer = Buffer;\n\nimport type { Twt } from \"./types.ts\";\n\nimport { blake2b } from \"@exodus/blakejs\";\n\nimport { base32Encode } from \"./utils.ts\";\n\nconst dateRegex =\n\t/^(\\d{4})-(\\d{2})-(\\d{2})([tT ])(\\d{2}):(\\d{2}):(\\d{2})\\.?(\\d{3})?(?:(?:([+-]\\d{2}):?(\\d{2}))|Z)?$/;\n\nconst formatRFC3339 = (date: string) => {\n\tconst pad = (num: number | string = 0) => `${+num < 10 ? 0 : \"\"}${+num}`;\n\tconst padYear = (num: number | string = 0) =>\n\t\t`${+num < 1000 ? 0 : \"\"}${+num < 100 ? 0 : \"\"}${\n\t\t\t+num < 10 ? 0 : \"\"\n\t\t}${+num}`;\n\n\tlet m = dateRegex.exec(date);\n\n\t//if timezone is undefined, it must be Z or nothing (otherwise the group would have captured).\n\tif (m && m?.[9] === undefined) {\n\t\t//Use UTC.\n\t\tm[9] = \"+00\";\n\t}\n\tif (m && m?.[10] === undefined) {\n\t\tm[10] = \"00\";\n\t}\n\n\tconst offset = `${m?.[9]}:${m?.[10]}`.replace(/[+-]?00:00$/, \"Z\");\n\n\treturn [\n\t\tpadYear(m?.[1]),\n\t\t\"-\",\n\t\tpad(m?.[2]),\n\t\t\"-\",\n\t\tpad(m?.[3]),\n\t\tm?.[4],\n\t\tpad(m?.[5]),\n\t\t\":\",\n\t\tpad(m?.[6]),\n\t\t\":\",\n\t\tpad(m?.[7]),\n\t\t//ignore milliseconds (m[8])\n\t\toffset,\n\t].join(\"\");\n};\n\nexport default function hashTwt(twt: Twt): string {\n\tconst created = formatRFC3339(twt.created);\n\tconst payload = [twt.url, created, twt.content].join(\"\\n\");\n\n\treturn base32Encode(blake2b(payload, undefined, 
32))\n\t\t.toLowerCase()\n\t\t.slice(-7);\n}\n"],"names":[],"mappings":";;;AACA,WAAW,SAAS;AAQpB,MAAM,YACL;AAED,MAAM,gBAAgB,CAAC,SAAiB;AACvC,QAAM,MAAM,CAAC,MAAuB,MAAM,GAAG,CAAC,MAAM,KAAK,IAAI,EAAE,GAAG,CAAC,GAAG;AACtE,QAAM,UAAU,CAAC,MAAuB,MACvC,GAAG,CAAC,MAAM,MAAO,IAAI,EAAE,GAAG,CAAC,MAAM,MAAM,IAAI,EAAE,GAC5C,CAAC,MAAM,KAAK,IAAI,EACjB,GAAG,CAAC,GAAG;AAER,MAAI,IAAI,UAAU,KAAK,IAAI;AAG3B,MAAI,KAAK,IAAI,CAAC,MAAM,QAAW;AAE9B,MAAE,CAAC,IAAI;AAAA,EACR;AACA,MAAI,KAAK,IAAI,EAAE,MAAM,QAAW;AAC/B,MAAE,EAAE,IAAI;AAAA,EACT;AAEA,QAAM,SAAS,GAAG,IAAI,CAAC,CAAC,IAAI,IAAI,EAAE,CAAC,GAAG,QAAQ,eAAe,GAAG;AAEhE,SAAO;AAAA,IACN,QAAQ,IAAI,CAAC,CAAC;AAAA,IACd;AAAA,IACA,IAAI,IAAI,CAAC,CAAC;AAAA,IACV;AAAA,IACA,IAAI,IAAI,CAAC,CAAC;AAAA,IACV,IAAI,CAAC;AAAA,IACL,IAAI,IAAI,CAAC,CAAC;AAAA,IACV;AAAA,IACA,IAAI,IAAI,CAAC,CAAC;AAAA,IACV;AAAA,IACA,IAAI,IAAI,CAAC,CAAC;AAAA;AAAA,IAEV;AAAA,EAAA,EACC,KAAK,EAAE;AACV;AAEA,SAAwB,QAAQ,KAAkB;AACjD,QAAM,UAAU,cAAc,IAAI,OAAO;AACzC,QAAM,UAAU,CAAC,IAAI,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,IAAI;AAEzD,SAAO,aAAa,QAAQ,SAAS,QAAW,EAAE,CAAC,EACjD,YAAA,EACA,MAAM,EAAE;AACX;"}

5
dist-node/index.d.ts vendored Normal file
View File

@@ -0,0 +1,5 @@
// Public API surface: all types, the three main functions, and base32Encode.
export type * from './types.ts';
export { default as hashTwt } from './hashTwt.ts';
export { default as loadAndParseTwtxtFile } from './loadAndParseTwtxt.ts';
export { default as parseTwtxt } from './parseTwtxt.ts';
export { base32Encode } from './utils.ts';

13
dist-node/index.js Normal file
View File

@@ -0,0 +1,13 @@
// @license magnet:?xt=urn:btih:d3d9a9a6595521f9666a5e94cc830dab83b65699&dn=expat.txt
// Library entry point: re-export the public API from the individual modules.
export { default as hashTwt } from "./hashTwt.js";
export { default as loadAndParseTwtxtFile } from "./loadAndParseTwtxt.js";
export { default as parseTwtxt } from "./parseTwtxt.js";
export { base32Encode } from "./utils.js";
//# sourceMappingURL=index.js.map
// @license-end

1
dist-node/index.js.map Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;"}

14
dist-node/loadAndParseTwtxt.d.ts vendored Normal file
View File

@@ -0,0 +1,14 @@
/**
 * Fetch a twtxt file over HTTP and parse it.
 * @param url address of the twtxt file — required at runtime; throws if empty
 * @returns the parsed feed plus the response's Last-Modified header as an ISO timestamp
 */
export default function loadAndParseTwtxtFile(url?: string): Promise<{
    lastModified: string;
    following: import('./types.ts').Twttr[];
    metadata: import('./types.ts').Metadata;
    twts: {
        content: string;
        created: string;
        createdUTC: string;
        hash: string;
        replyHash: string | undefined;
        replyNick: string | undefined;
        replyUrl: string | undefined;
    }[];
}>;

View File

@@ -0,0 +1,23 @@
import dayjs from "dayjs";
import parseTwtxt from "./parseTwtxt.js";
/**
 * Fetch a twtxt file from `url` and parse it with parseTwtxt.
 *
 * @param url address of the twtxt file; required — an empty value throws
 * @returns parsed feed (following, metadata, twts) plus `lastModified`, the
 *   response's Last-Modified header normalized to ISO 8601
 * @throws Error when `url` is empty; rethrows fetch/parse failures after
 *   logging them
 */
async function loadAndParseTwtxtFile(url = "") {
  if (!url) throw new Error("URL is required");
  try {
    const response = await fetch(url);
    const twtxtFile = await response.text();
    // headers.get returns null when Last-Modified is absent; dayjs(null) is an
    // invalid date whose toISOString() throws an opaque RangeError. Fall back
    // to dayjs() (i.e. "now") via `?? void 0` instead.
    const lastModifiedHeader = response.headers.get("Last-Modified");
    const lastModified = dayjs(lastModifiedHeader ?? void 0).toISOString();
    return {
      ...parseTwtxt(twtxtFile),
      lastModified
    };
  } catch (err) {
    // Log for context, then propagate so callers still observe the failure.
    console.error(err);
    throw err;
  }
}
export {
  loadAndParseTwtxtFile as default
};
//# sourceMappingURL=loadAndParseTwtxt.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"loadAndParseTwtxt.js","sources":["../src/loadAndParseTwtxt.ts"],"sourcesContent":["import dayjs from \"dayjs\";\n\nimport parseTwtxt from \"./parseTwtxt.js\";\n\nexport default async function loadAndParseTwtxtFile(url = \"\") {\n\tif (!url) throw new Error(\"URL is required\");\n\n\ttry {\n\t\tconst response = await fetch(url);\n\t\tconst twtxtFile = await response.text();\n\t\tconst lastModified = dayjs(\n\t\t\tresponse.headers.get(\"Last-Modified\"),\n\t\t).toISOString();\n\n\t\treturn {\n\t\t\t...parseTwtxt(twtxtFile),\n\t\t\tlastModified,\n\t\t};\n\t} catch (err) {\n\t\tconsole.error(err);\n\t\tthrow err;\n\t}\n}\n"],"names":[],"mappings":";;AAIA,eAA8B,sBAAsB,MAAM,IAAI;AAC7D,MAAI,CAAC,IAAK,OAAM,IAAI,MAAM,iBAAiB;AAE3C,MAAI;AACH,UAAM,WAAW,MAAM,MAAM,GAAG;AAChC,UAAM,YAAY,MAAM,SAAS,KAAA;AACjC,UAAM,eAAe;AAAA,MACpB,SAAS,QAAQ,IAAI,eAAe;AAAA,IAAA,EACnC,YAAA;AAEF,WAAO;AAAA,MACN,GAAG,WAAW,SAAS;AAAA,MACvB;AAAA,IAAA;AAAA,EAEF,SAAS,KAAK;AACb,YAAQ,MAAM,GAAG;AACjB,UAAM;AAAA,EACP;AACD;"}

18
dist-node/parseTwtxt.d.ts vendored Normal file
View File

@@ -0,0 +1,18 @@
import { Metadata, Twttr } from './types.ts';
/**
 * Parse raw twtxt file text into structured data.
 * @param twtxt full text of a twtxt file
 * @returns object containing: following (parsed `# follow = nick url` entries),
 *   metadata (remaining `# key = value` comment pairs), and twts sorted
 *   ascending by creation time
 */
export default function parseTwtxt(twtxt: string): {
    following: Twttr[];
    metadata: Metadata;
    twts: {
        content: string;
        created: string;
        createdUTC: string;
        hash: string;
        replyHash: string | undefined;
        replyNick: string | undefined;
        replyUrl: string | undefined;
    }[];
};

83
dist-node/parseTwtxt.js Normal file
View File

@@ -0,0 +1,83 @@
import dayjs from "dayjs";
import utc from "dayjs/plugin/utc.js";
import hashTwt from "./hashTwt.js";
import { getValueOrFirstEntry } from "./utils.js";
// Register the UTC plugin so dayjs.utc(...) is available below.
dayjs.extend(utc);
/**
 * Parse the full text of a twtxt file.
 *
 * Comment lines ("#") containing "=" are scanned for `key = value` metadata;
 * the special key `follow` accumulates into `following`, and repeated keys
 * collect into arrays. Every other non-blank line must be
 * "<timestamp>\t<content>" and becomes a twt, hashed via hashTwt and sorted
 * ascending by creation time.
 *
 * @param twtxt raw twtxt file contents
 * @returns object containing: following, metadata, twts
 * @throws Error when a twt line has no tab-separated content or its
 *   timestamp is not parseable by dayjs
 */
function parseTwtxt(twtxt) {
  const allLines = twtxt.split("\n");
  // Partition into "#" comment lines and twt content lines; blanks dropped.
  const { commentLines = [], contentLines = [] } = allLines.reduce(
    (acc, originalLine) => {
      const line = originalLine.trim();
      if (line === "") return acc;
      if (line.startsWith("#")) acc.commentLines.push(line);
      else acc.contentLines.push(line);
      return acc;
    },
    {
      commentLines: [],
      contentLines: []
    }
  );
  // Extract metadata from comment lines of the form "# key = value".
  const { following = [], metadata = {} } = commentLines.filter((line) => line.includes("=")).reduce(
    (acc, line) => {
      // Re-join the tail on "=" so values that themselves contain "=" survive.
      const [key, ...vals] = line.substring(1).split("=").map((field) => field.trim());
      const val = vals.join("=");
      if (key === "follow") {
        // "# follow = nick url" → { nick, url }
        const [nick, url] = val.trim().split(/\s+/);
        acc.following.push({ nick, url });
      } else {
        // Repeated keys collect into an array; a single key stays scalar.
        if (acc.metadata[key]) {
          if (!Array.isArray(acc.metadata[key]))
            acc.metadata[key] = [acc.metadata[key], val];
          else acc.metadata[key].push(val);
        } else acc.metadata[key] = val;
      }
      return acc;
    },
    {
      following: [],
      metadata: {}
    }
  );
  // Reply marker "(#hash) " optionally followed by mention(s) "<@nick url>".
  // NOTE(review): the trailing `*` means only the LAST mention's nick/url are
  // captured when several are present — confirm that is intended.
  const replyRegEx = /\(#([\w]+)\) (\<\@(\S+) ([^>]+)>)*/;
  const twts = contentLines.map((line) => {
    // Each twt line is "<created>\t<content>".
    // NOTE(review): split(/\t/) splits on EVERY tab, so content containing a
    // literal tab is truncated at the second tab — confirm acceptable.
    const [created, content] = line.split(/\t/).map((val) => val.trim());
    if (typeof content === "undefined")
      throw new Error(`Content is undefined: ${line}`);
    const createdDayjs = dayjs.utc(created);
    if (!createdDayjs.isValid())
      throw new Error(`Date is invalid: ${line}`);
    const createdUTC = createdDayjs.toISOString();
    // exec returns null when no reply marker is present.
    const replyMatches = replyRegEx.exec(content);
    let replyHash, replyNick, replyUrl;
    if (replyMatches?.length) {
      replyHash = replyMatches?.[1];
      replyNick = replyMatches?.[3];
      replyUrl = replyMatches?.[4];
    }
    // Hash is derived from the feed's (first) url metadata entry, the
    // timestamp exactly as written, and the content.
    const hash = hashTwt({
      content,
      created,
      createdUTC,
      url: getValueOrFirstEntry(metadata?.url ?? "")
    });
    return {
      content,
      created,
      createdUTC,
      hash,
      replyHash,
      replyNick,
      replyUrl
    };
  }).sort((a, b) => dayjs(a.created).diff(dayjs(b.created)));
  return {
    following,
    metadata,
    twts
  };
}
export {
  parseTwtxt as default
};
//# sourceMappingURL=parseTwtxt.js.map

File diff suppressed because one or more lines are too long

31
dist-node/types.d.ts vendored Normal file
View File

@@ -0,0 +1,31 @@
/** Options for the cache-enabled twtxt loader variant. */
export interface LoadAndParseTwtxtWithCacheConfig {
    /** Prefix used when building cache keys. */
    cacheKeyPrefix: string;
    /** Callback receiving the parsed feed data. */
    onLoad?: (data: Twtxt) => void;
    user?: Twttr;
}
/** `# key = value` pairs from a feed's comment header; repeated keys become arrays. */
export interface Metadata {
    [key: string]: string | string[];
}
/** A single twtxt post. */
export interface Twt {
    avatar?: string;
    /** Raw post text (everything after the tab separator). */
    content: string;
    /** Timestamp exactly as written in the feed. */
    created: string;
    /** `created` normalized to UTC ISO 8601. */
    createdUTC: string;
    /** 7-character twt hash (see hashTwt). */
    hash?: string;
    nick?: string;
    /** NOTE(review): consumer-side rendering flag — semantics not visible in this package. */
    noDom?: boolean;
    /** Hash of the twt being replied to, from a "(#hash)" marker. */
    replyHash?: string;
    replyNick?: string;
    replyUrl?: string;
    url?: string;
}
/** A feed author: nick plus feed URL. */
export interface Twttr {
    avatar?: string;
    nick: string;
    url: string;
}
/** Result of parsing a twtxt file. */
export interface Twtxt {
    following: Twttr[];
    metadata: Metadata;
    twts: Twt[];
}

2
dist-node/utils.d.ts vendored Normal file
View File

@@ -0,0 +1,2 @@
/** RFC 4648 base32-encode a string or byte payload. NOTE(review): the emitter typed the return as `any`; callers treat it as a string — consider tightening. */
export declare const base32Encode: (payload: string | Uint8Array<ArrayBufferLike>) => any;
/** First element of a non-empty array, otherwise the value unchanged. */
export declare const getValueOrFirstEntry: (value: unknown | unknown[]) => any;

11
dist-node/utils.js Normal file
View File

@@ -0,0 +1,11 @@
import base32 from "base32.js";
/**
 * RFC 4648 base32-encode a string or byte payload.
 * @param payload string or Uint8Array to encode
 * @returns the base32-encoded string
 */
const base32Encode = (payload) => new base32.Encoder({ type: "rfc4648" }).write(payload).finalize();
/**
 * Return the first element when given a non-empty array; otherwise return the
 * value unchanged (used for metadata fields that may be single or repeated).
 */
const getValueOrFirstEntry = (value) => {
  if (Array.isArray(value) && value.length) {
    return value[0];
  }
  return value;
};
export {
  base32Encode,
  getValueOrFirstEntry
};
//# sourceMappingURL=utils.js.map

1
dist-node/utils.js.map Normal file
View File

@@ -0,0 +1 @@
{"version":3,"file":"utils.js","sources":["../src/utils.ts"],"sourcesContent":["import base32 from \"base32.js\";\n\nexport const base32Encode = (payload: string | Uint8Array<ArrayBufferLike>) => {\n\tconst encoder = new base32.Encoder({ type: \"rfc4648\" });\n\treturn encoder.write(payload).finalize();\n};\n\nexport const getValueOrFirstEntry = (value: unknown | unknown[]) =>\n\tArray.isArray(value) && value.length ? value[0] : value;\n"],"names":[],"mappings":";AAEO,MAAM,eAAe,CAAC,YAAkD;AAC9E,QAAM,UAAU,IAAI,OAAO,QAAQ,EAAE,MAAM,WAAW;AACtD,SAAO,QAAQ,MAAM,OAAO,EAAE,SAAA;AAC/B;AAEO,MAAM,uBAAuB,CAAC,UACpC,MAAM,QAAQ,KAAK,KAAK,MAAM,SAAS,MAAM,CAAC,IAAI;"}